diff --git a/README.md b/README.md index 633124ec7..d496d68ed 100644 --- a/README.md +++ b/README.md @@ -245,13 +245,17 @@ To control custom models, use `+` to add a custom model, use `-` to hide a model User `-all` to disable all default models, `+all` to enable all default models. -### `WHITE_WEBDEV_ENDPOINTS` (可选) +### `WHITE_WEBDEV_ENDPOINTS` (optional) You can use this option if you want to increase the number of webdav service addresses you are allowed to access, as required by the format: - Each address must be a complete endpoint > `https://xxxx/yyy` - Multiple addresses are connected by ', ' +### `DEFAULT_INPUT_TEMPLATE` (optional) + +Customize the default template used to initialize the User Input Preprocessing configuration item in Settings. + ## Requirements NodeJS >= 18, Docker >= 20 diff --git a/README_CN.md b/README_CN.md index 10b5fd035..6811102b6 100644 --- a/README_CN.md +++ b/README_CN.md @@ -156,6 +156,9 @@ anthropic claude Api Url. 用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名=展示名` 来自定义模型的展示名,用英文逗号隔开。 +### `DEFAULT_INPUT_TEMPLATE` (可选) +自定义默认的 template,用于初始化『设置』中的『用户输入预处理』配置项 + ## 开发 点击下方按钮,开始二次开发: diff --git a/app/components/chat.tsx b/app/components/chat.tsx index 10967b907..b6abe9273 100644 --- a/app/components/chat.tsx +++ b/app/components/chat.tsx @@ -62,9 +62,10 @@ import { getMessageTextContent, getMessageImages, isVisionModel, - compressImage, } from "../utils"; +import { compressImage } from "@/app/utils/chat"; + import dynamic from "next/dynamic"; import { ChatControllerPool } from "../client/controller"; diff --git a/app/config/build.ts b/app/config/build.ts index 35078b3b6..f51a9bd01 100644 --- a/app/config/build.ts +++ b/app/config/build.ts @@ -1,4 +1,5 @@ import tauriConfig from "../../src-tauri/tauri.conf.json"; +import { DEFAULT_INPUT_TEMPLATE } from "../constant"; export const getBuildConfig = () => { if (typeof process === "undefined") { @@ -42,6 +43,7 @@ export const getBuildConfig = () => { ...commitInfo, buildMode, isApp, + template: process.env.DEFAULT_INPUT_TEMPLATE ?? DEFAULT_INPUT_TEMPLATE, }; }; diff --git a/app/config/server.ts b/app/config/server.ts index cde921e5d..68c74f25c 100644 --- a/app/config/server.ts +++ b/app/config/server.ts @@ -36,6 +36,9 @@ declare global { // google tag manager GTM_ID?: string; + + // custom template for preprocessing user input + DEFAULT_INPUT_TEMPLATE?: string; } } } diff --git a/app/store/chat.ts b/app/store/chat.ts index 71859d59f..2c155ebde 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -736,14 +736,13 @@ export const useChatStore = createPersistStore( getMemoryPrompt() { const session = get().currentSession(); - return { - role: "system", - content: - session.memoryPrompt.length > 0 - ? Locale.Store.Prompt.History(session.memoryPrompt) - : "", - date: "", - } as ChatMessage; + if (session.memoryPrompt.length) { + return { + role: "system", + content: Locale.Store.Prompt.History(session.memoryPrompt), + date: "", + } as ChatMessage; + } }, getMessagesWithMemory() { @@ -779,16 +778,15 @@ export const useChatStore = createPersistStore( systemPrompts.at(0)?.content ?? "empty", ); } - + const memoryPrompt = get().getMemoryPrompt(); // long term memory const shouldSendLongTermMemory = modelConfig.sendMemory && session.memoryPrompt && session.memoryPrompt.length > 0 && session.lastSummarizeIndex > clearContextIndex; - const longTermMemoryPrompts = shouldSendLongTermMemory - ? [get().getMemoryPrompt()] - : []; + const longTermMemoryPrompts = + shouldSendLongTermMemory && memoryPrompt ? 
[memoryPrompt] : []; const longTermMemoryStartIndex = session.lastSummarizeIndex; // short term memory @@ -913,9 +911,11 @@ export const useChatStore = createPersistStore( Math.max(0, n - modelConfig.historyMessageCount), ); } - - // add memory prompt - toBeSummarizedMsgs.unshift(get().getMemoryPrompt()); + const memoryPrompt = get().getMemoryPrompt(); + if (memoryPrompt) { + // add memory prompt + toBeSummarizedMsgs.unshift(memoryPrompt); + } const lastSummarizeIndex = session.messages.length; diff --git a/app/store/config.ts b/app/store/config.ts index 763d12ae5..c2f1cd6e6 100644 --- a/app/store/config.ts +++ b/app/store/config.ts @@ -1,5 +1,4 @@ import { LLMModel } from "../client/api"; -import { isMacOS } from "../utils"; import { getClientConfig } from "../config/client"; import { DEFAULT_INPUT_TEMPLATE, @@ -27,6 +26,8 @@ export enum Theme { Light = "light", } +const config = getClientConfig(); + export const DEFAULT_CONFIG = { lastUpdate: Date.now(), // timestamp, to merge state @@ -35,7 +36,7 @@ export const DEFAULT_CONFIG = { avatar: "1f603", fontSize: 14, theme: Theme.Auto as Theme, - tightBorder: !!getClientConfig()?.isApp, + tightBorder: !!config?.isApp, sendPreviewBubble: false, enableAutoGenerateTitle: true, sidebarWidth: DEFAULT_SIDEBAR_WIDTH, @@ -61,7 +62,7 @@ export const DEFAULT_CONFIG = { historyMessageCount: 5, compressMessageLengthThreshold: 4000, enableInjectSystemPrompts: true, - template: DEFAULT_INPUT_TEMPLATE, + template: config?.template ?? DEFAULT_INPUT_TEMPLATE, }, }; @@ -137,7 +138,7 @@ export const useAppConfig = createPersistStore( }), { name: StoreKey.Config, - version: 3.8997, + version: 3.9, migrate(persistedState, version) { const state = persistedState as ChatConfig; @@ -168,9 +169,16 @@ export const useAppConfig = createPersistStore( if (version < 3.8) { state.lastUpdate = Date.now(); } - if (version < 3.8997) { - state.lastUpdate = Date.now(); - return { ...DEFAULT_CONFIG }; + // if (version < 3.8997) { + // state.lastUpdate = Date.now(); + // return { ...DEFAULT_CONFIG }; + // } + + if (version < 3.9) { + state.modelConfig.template = + state.modelConfig.template !== DEFAULT_INPUT_TEMPLATE + ? state.modelConfig.template + : config?.template ?? 
DEFAULT_INPUT_TEMPLATE; } return state as any; diff --git a/app/utils.ts b/app/utils.ts index 57d1c9080..d3c45e69a 100644 --- a/app/utils.ts +++ b/app/utils.ts @@ -83,57 +83,57 @@ export async function downloadAs(text: string, filename: string) { } } -export function compressImage(file: File, maxSize: number): Promise { - return new Promise((resolve, reject) => { - const reader = new FileReader(); - reader.onload = () => { - // 直接解析为 Data URL - resolve(reader.result as string); - }; - reader.onerror = reject; - reader.readAsDataURL(file); - }); +// export function compressImage(file: File, maxSize: number): Promise { +// return new Promise((resolve, reject) => { +// const reader = new FileReader(); +// reader.onload = () => { +// // 直接解析为 Data URL +// resolve(reader.result as string); +// }; +// reader.onerror = reject; +// reader.readAsDataURL(file); +// }); - // return new Promise((resolve, reject) => { - // const reader = new FileReader(); - // reader.onload = (readerEvent: any) => { - // const image = new Image(); - // image.onload = () => { - // let canvas = document.createElement("canvas"); - // let ctx = canvas.getContext("2d"); - // let width = image.width; - // let height = image.height; - // let quality = 0.9; - // let dataUrl; - // - // do { - // canvas.width = width; - // canvas.height = height; - // ctx?.clearRect(0, 0, canvas.width, canvas.height); - // ctx?.drawImage(image, 0, 0, width, height); - // dataUrl = canvas.toDataURL("image/jpeg", quality); - // - // if (dataUrl.length < maxSize) break; - // - // if (quality > 0.5) { - // // Prioritize quality reduction - // quality -= 0.1; - // } else { - // // Then reduce the size - // width *= 0.9; - // height *= 0.9; - // } - // } while (dataUrl.length > maxSize); - // - // resolve(dataUrl); - // }; - // image.onerror = reject; - // image.src = readerEvent.target.result; - // }; - // reader.onerror = reject; - // reader.readAsDataURL(file); - // }); -} +// return new Promise((resolve, reject) => { +// const reader = new FileReader(); +// reader.onload = (readerEvent: any) => { +// const image = new Image(); +// image.onload = () => { +// let canvas = document.createElement("canvas"); +// let ctx = canvas.getContext("2d"); +// let width = image.width; +// let height = image.height; +// let quality = 0.9; +// let dataUrl; +// +// do { +// canvas.width = width; +// canvas.height = height; +// ctx?.clearRect(0, 0, canvas.width, canvas.height); +// ctx?.drawImage(image, 0, 0, width, height); +// dataUrl = canvas.toDataURL("image/jpeg", quality); +// +// if (dataUrl.length < maxSize) break; +// +// if (quality > 0.5) { +// // Prioritize quality reduction +// quality -= 0.1; +// } else { +// // Then reduce the size +// width *= 0.9; +// height *= 0.9; +// } +// } while (dataUrl.length > maxSize); +// +// resolve(dataUrl); +// }; +// image.onerror = reject; +// image.src = readerEvent.target.result; +// }; +// reader.onerror = reject; +// reader.readAsDataURL(file); +// }); +// } export function readFromFile() { return new Promise((res, rej) => { @@ -307,6 +307,7 @@ export function isVisionModel(model: string) { "claude-3", "gemini-1.5-pro", "gemini-1.5-flash", + "gpt-4o", ]; const isGpt4Turbo = model.includes("gpt-4-turbo") && !model.includes("preview"); diff --git a/app/utils/chat.ts b/app/utils/chat.ts new file mode 100644 index 000000000..991d06b73 --- /dev/null +++ b/app/utils/chat.ts @@ -0,0 +1,54 @@ +import heic2any from "heic2any"; + +export function compressImage(file: File, maxSize: number): Promise { + return new 
Promise((resolve, reject) => { + const reader = new FileReader(); + reader.onload = (readerEvent: any) => { + const image = new Image(); + image.onload = () => { + let canvas = document.createElement("canvas"); + let ctx = canvas.getContext("2d"); + let width = image.width; + let height = image.height; + let quality = 0.9; + let dataUrl; + + do { + canvas.width = width; + canvas.height = height; + ctx?.clearRect(0, 0, canvas.width, canvas.height); + ctx?.drawImage(image, 0, 0, width, height); + dataUrl = canvas.toDataURL("image/jpeg", quality); + + if (dataUrl.length < maxSize) break; + + if (quality > 0.5) { + // Prioritize quality reduction + quality -= 0.1; + } else { + // Then reduce the size + width *= 0.9; + height *= 0.9; + } + } while (dataUrl.length > maxSize); + + resolve(dataUrl); + }; + image.onerror = reject; + image.src = readerEvent.target.result; + }; + reader.onerror = reject; + + if (file.type.includes("heic")) { + heic2any({ blob: file, toType: "image/jpeg" }) + .then((blob) => { + reader.readAsDataURL(blob as Blob); + }) + .catch((e) => { + reject(e); + }); + } + + reader.readAsDataURL(file); + }); +} diff --git a/app/utils/cloud/upstash.ts b/app/utils/cloud/upstash.ts index bf6147bd4..8d84adbde 100644 --- a/app/utils/cloud/upstash.ts +++ b/app/utils/cloud/upstash.ts @@ -93,14 +93,17 @@ export function createUpstashClient(store: SyncStore) { } let url; - if (proxyUrl.length > 0 || proxyUrl === "/") { - let u = new URL(proxyUrl + "/api/upstash/" + path); + const pathPrefix = "/api/upstash/"; + + try { + let u = new URL(proxyUrl + pathPrefix + path); // add query params u.searchParams.append("endpoint", config.endpoint); url = u.toString(); - } else { - url = "/api/upstash/" + path + "?endpoint=" + config.endpoint; + } catch (e) { + url = pathPrefix + path + "?endpoint=" + config.endpoint; } + return url; }, }; diff --git a/package.json b/package.json index 9e71d741f..26aa065bb 100644 --- a/package.json +++ b/package.json @@ -32,6 +32,7 @@ "echarts": "^5.4.3", "emoji-picker-react": "^4.9.2", "fuse.js": "^7.0.0", + "heic2any": "^0.0.4", "html-to-image": "^1.11.11", "mermaid": "^10.7.0", "microsoft-cognitiveservices-speech-sdk": "^1.36.0",
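
For context, the relocated `compressImage` helper now lives in `app/utils/chat.ts` and is re-imported in `chat.tsx` from `@/app/utils/chat`; it converts a `File` to a JPEG data URL, using the new `heic2any` dependency for HEIC input. A minimal usage sketch — the `fileToAttachment` wrapper and the ~256 KB cap are illustrative assumptions, not part of this diff:

```ts
import { compressImage } from "@/app/utils/chat";

// Hypothetical upload handler (not in this diff): shrink a picked image to a
// JPEG data URL of roughly maxSize characters before attaching it to a message.
// HEIC input is converted to JPEG via heic2any inside compressImage.
async function fileToAttachment(file: File): Promise<string> {
  const dataUrl = await compressImage(file, 256 * 1024);
  return dataUrl; // "data:image/jpeg;base64,..."
}
```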
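
The new `DEFAULT_INPUT_TEMPLATE` environment variable is read at build time in `app/config/build.ts` and, via `getClientConfig()`, seeds `modelConfig.template` in `app/store/config.ts`; the `version < 3.9` migration re-seeds existing installs that still use the stock template while preserving customized ones. A rough sketch of how such a template is applied to user input — the standalone helper is illustrative, and only the `{{input}}` placeholder comes from the app's template syntax:

```ts
// Illustrative only: substitute the user's text into the preprocessing template.
// The real substitution happens inside the chat store before a request is sent.
function applyInputTemplate(template: string, userInput: string): string {
  return template.replaceAll("{{input}}", userInput);
}

// With e.g. DEFAULT_INPUT_TEMPLATE="Answer concisely.\n\n{{input}}" set at build time:
applyInputTemplate("Answer concisely.\n\n{{input}}", "What does heic2any do?");
// => "Answer concisely.\n\nWhat does heic2any do?"
```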
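
The Upstash sync client now builds its request URL with `try`/`catch` around `new URL()` instead of inspecting `proxyUrl.length`: an absolute proxy URL is parsed so the `endpoint` query parameter can be appended safely, and anything that fails to parse falls back to a relative `/api/upstash/` path. A self-contained restatement of that logic, with an assumed function name and parameters:

```ts
// Mirrors the fallback in createUpstashClient: absolute proxy URLs are parsed,
// everything else degrades to a relative API path with the endpoint as a query param.
function buildUpstashUrl(proxyUrl: string, path: string, endpoint: string): string {
  const pathPrefix = "/api/upstash/";
  try {
    const u = new URL(proxyUrl + pathPrefix + path);
    u.searchParams.append("endpoint", endpoint);
    return u.toString();
  } catch {
    return pathPrefix + path + "?endpoint=" + endpoint;
  }
}
```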