Merge remote-tracking branch 'upstream/main' into dev

# Conflicts:
#	app/store/config.ts
#	app/utils.ts
#	yarn.lock
This commit is contained in:
sijinhui 2024-05-17 09:59:38 +08:00
commit aac77eb980
11 changed files with 158 additions and 78 deletions

View File

@ -245,13 +245,17 @@ To control custom models, use `+` to add a custom model, use `-` to hide a model
Use `-all` to disable all default models, `+all` to enable all default models.
### `WHITE_WEBDEV_ENDPOINTS` (可选)
### `WHITE_WEBDEV_ENDPOINTS` (optional)
You can use this option if you want to increase the number of WebDAV service addresses you are allowed to access. The value must follow this format:
- Each address must be a complete endpoint
> `https://xxxx/yyy`
- Multiple addresses are separated by commas (`,`)
### `DEFAULT_INPUT_TEMPLATE` (optional)
Customize the default template used to initialize the User Input Preprocessing configuration item in Settings.
## Requirements
NodeJS >= 18, Docker >= 20

View File

@ -156,6 +156,9 @@ anthropic claude Api Url.
用来控制模型列表,使用 `+` 增加一个模型,使用 `-` 来隐藏一个模型,使用 `模型名=展示名` 来自定义模型的展示名,用英文逗号隔开。
### `DEFAULT_INPUT_TEMPLATE` (可选)
自定义默认的 template用于初始化『设置』中的『用户输入预处理』配置项
## 开发
点击下方按钮,开始二次开发:

View File

@ -62,9 +62,10 @@ import {
getMessageTextContent,
getMessageImages,
isVisionModel,
compressImage,
} from "../utils";
import { compressImage } from "@/app/utils/chat";
import dynamic from "next/dynamic";
import { ChatControllerPool } from "../client/controller";

View File

@ -1,4 +1,5 @@
import tauriConfig from "../../src-tauri/tauri.conf.json";
import { DEFAULT_INPUT_TEMPLATE } from "../constant";
export const getBuildConfig = () => {
if (typeof process === "undefined") {
@ -42,6 +43,7 @@ export const getBuildConfig = () => {
...commitInfo,
buildMode,
isApp,
template: process.env.DEFAULT_INPUT_TEMPLATE ?? DEFAULT_INPUT_TEMPLATE,
};
};

View File

@ -36,6 +36,9 @@ declare global {
// google tag manager
GTM_ID?: string;
// custom template for preprocessing user input
DEFAULT_INPUT_TEMPLATE?: string;
}
}
}

View File

@ -736,14 +736,13 @@ export const useChatStore = createPersistStore(
getMemoryPrompt() {
const session = get().currentSession();
return {
role: "system",
content:
session.memoryPrompt.length > 0
? Locale.Store.Prompt.History(session.memoryPrompt)
: "",
date: "",
} as ChatMessage;
if (session.memoryPrompt.length) {
return {
role: "system",
content: Locale.Store.Prompt.History(session.memoryPrompt),
date: "",
} as ChatMessage;
}
},
getMessagesWithMemory() {
@ -779,16 +778,15 @@ export const useChatStore = createPersistStore(
systemPrompts.at(0)?.content ?? "empty",
);
}
const memoryPrompt = get().getMemoryPrompt();
// long term memory
const shouldSendLongTermMemory =
modelConfig.sendMemory &&
session.memoryPrompt &&
session.memoryPrompt.length > 0 &&
session.lastSummarizeIndex > clearContextIndex;
const longTermMemoryPrompts = shouldSendLongTermMemory
? [get().getMemoryPrompt()]
: [];
const longTermMemoryPrompts =
shouldSendLongTermMemory && memoryPrompt ? [memoryPrompt] : [];
const longTermMemoryStartIndex = session.lastSummarizeIndex;
// short term memory
@ -913,9 +911,11 @@ export const useChatStore = createPersistStore(
Math.max(0, n - modelConfig.historyMessageCount),
);
}
// add memory prompt
toBeSummarizedMsgs.unshift(get().getMemoryPrompt());
const memoryPrompt = get().getMemoryPrompt();
if (memoryPrompt) {
// add memory prompt
toBeSummarizedMsgs.unshift(memoryPrompt);
}
const lastSummarizeIndex = session.messages.length;

View File

@ -1,5 +1,4 @@
import { LLMModel } from "../client/api";
import { isMacOS } from "../utils";
import { getClientConfig } from "../config/client";
import {
DEFAULT_INPUT_TEMPLATE,
@ -27,6 +26,8 @@ export enum Theme {
Light = "light",
}
const config = getClientConfig();
export const DEFAULT_CONFIG = {
lastUpdate: Date.now(), // timestamp, to merge state
@ -35,7 +36,7 @@ export const DEFAULT_CONFIG = {
avatar: "1f603",
fontSize: 14,
theme: Theme.Auto as Theme,
tightBorder: !!getClientConfig()?.isApp,
tightBorder: !!config?.isApp,
sendPreviewBubble: false,
enableAutoGenerateTitle: true,
sidebarWidth: DEFAULT_SIDEBAR_WIDTH,
@ -61,7 +62,7 @@ export const DEFAULT_CONFIG = {
historyMessageCount: 5,
compressMessageLengthThreshold: 4000,
enableInjectSystemPrompts: true,
template: DEFAULT_INPUT_TEMPLATE,
template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
},
};
@ -137,7 +138,7 @@ export const useAppConfig = createPersistStore(
}),
{
name: StoreKey.Config,
version: 3.8997,
version: 3.9,
migrate(persistedState, version) {
const state = persistedState as ChatConfig;
@ -168,9 +169,16 @@ export const useAppConfig = createPersistStore(
if (version < 3.8) {
state.lastUpdate = Date.now();
}
if (version < 3.8997) {
state.lastUpdate = Date.now();
return { ...DEFAULT_CONFIG };
// if (version < 3.8997) {
// state.lastUpdate = Date.now();
// return { ...DEFAULT_CONFIG };
// }
if (version < 3.9) {
state.modelConfig.template =
state.modelConfig.template !== DEFAULT_INPUT_TEMPLATE
? state.modelConfig.template
: config?.template ?? DEFAULT_INPUT_TEMPLATE;
}
return state as any;

View File

@ -83,57 +83,57 @@ export async function downloadAs(text: string, filename: string) {
}
}
export function compressImage(file: File, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = () => {
// 直接解析为 Data URL
resolve(reader.result as string);
};
reader.onerror = reject;
reader.readAsDataURL(file);
});
// export function compressImage(file: File, maxSize: number): Promise<string> {
// return new Promise((resolve, reject) => {
// const reader = new FileReader();
// reader.onload = () => {
// // 直接解析为 Data URL
// resolve(reader.result as string);
// };
// reader.onerror = reject;
// reader.readAsDataURL(file);
// });
// return new Promise((resolve, reject) => {
// const reader = new FileReader();
// reader.onload = (readerEvent: any) => {
// const image = new Image();
// image.onload = () => {
// let canvas = document.createElement("canvas");
// let ctx = canvas.getContext("2d");
// let width = image.width;
// let height = image.height;
// let quality = 0.9;
// let dataUrl;
//
// do {
// canvas.width = width;
// canvas.height = height;
// ctx?.clearRect(0, 0, canvas.width, canvas.height);
// ctx?.drawImage(image, 0, 0, width, height);
// dataUrl = canvas.toDataURL("image/jpeg", quality);
//
// if (dataUrl.length < maxSize) break;
//
// if (quality > 0.5) {
// // Prioritize quality reduction
// quality -= 0.1;
// } else {
// // Then reduce the size
// width *= 0.9;
// height *= 0.9;
// }
// } while (dataUrl.length > maxSize);
//
// resolve(dataUrl);
// };
// image.onerror = reject;
// image.src = readerEvent.target.result;
// };
// reader.onerror = reject;
// reader.readAsDataURL(file);
// });
}
// return new Promise((resolve, reject) => {
// const reader = new FileReader();
// reader.onload = (readerEvent: any) => {
// const image = new Image();
// image.onload = () => {
// let canvas = document.createElement("canvas");
// let ctx = canvas.getContext("2d");
// let width = image.width;
// let height = image.height;
// let quality = 0.9;
// let dataUrl;
//
// do {
// canvas.width = width;
// canvas.height = height;
// ctx?.clearRect(0, 0, canvas.width, canvas.height);
// ctx?.drawImage(image, 0, 0, width, height);
// dataUrl = canvas.toDataURL("image/jpeg", quality);
//
// if (dataUrl.length < maxSize) break;
//
// if (quality > 0.5) {
// // Prioritize quality reduction
// quality -= 0.1;
// } else {
// // Then reduce the size
// width *= 0.9;
// height *= 0.9;
// }
// } while (dataUrl.length > maxSize);
//
// resolve(dataUrl);
// };
// image.onerror = reject;
// image.src = readerEvent.target.result;
// };
// reader.onerror = reject;
// reader.readAsDataURL(file);
// });
// }
export function readFromFile() {
return new Promise<string>((res, rej) => {
@ -307,6 +307,7 @@ export function isVisionModel(model: string) {
"claude-3",
"gemini-1.5-pro",
"gemini-1.5-flash",
"gpt-4o",
];
const isGpt4Turbo =
model.includes("gpt-4-turbo") && !model.includes("preview");

54
app/utils/chat.ts Normal file
View File

@ -0,0 +1,54 @@
import heic2any from "heic2any";
/**
 * Compress an image file into a JPEG data URL no longer than `maxSize` characters.
 *
 * Strategy: draw the image on a canvas and re-encode as JPEG, first lowering
 * quality (down to 0.5), then shrinking dimensions by 10% per step until the
 * resulting data-URL length fits under `maxSize`.
 *
 * HEIC/HEIF files are first converted to JPEG via `heic2any`, since most
 * browsers cannot decode HEIC in an <img> element.
 *
 * @param file    Source image file (any browser-decodable type, or HEIC).
 * @param maxSize Maximum allowed length (in characters) of the returned data URL.
 * @returns Promise resolving to a `data:image/jpeg` URL, or rejecting on
 *          read/decode/conversion failure.
 */
export function compressImage(file: File, maxSize: number): Promise<string> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (readerEvent: any) => {
      const image = new Image();
      image.onload = () => {
        let canvas = document.createElement("canvas");
        let ctx = canvas.getContext("2d");
        let width = image.width;
        let height = image.height;
        let quality = 0.9;
        let dataUrl;

        do {
          canvas.width = width;
          canvas.height = height;
          ctx?.clearRect(0, 0, canvas.width, canvas.height);
          ctx?.drawImage(image, 0, 0, width, height);
          dataUrl = canvas.toDataURL("image/jpeg", quality);

          if (dataUrl.length < maxSize) break;

          if (quality > 0.5) {
            // Prioritize quality reduction
            quality -= 0.1;
          } else {
            // Then reduce the size
            width *= 0.9;
            height *= 0.9;
          }
        } while (dataUrl.length > maxSize);

        resolve(dataUrl);
      };
      image.onerror = reject;
      image.src = readerEvent.target.result;
    };
    reader.onerror = reject;

    if (file.type.includes("heic")) {
      // Convert HEIC to JPEG before reading; the reader is fed the converted
      // blob asynchronously by the promise below.
      heic2any({ blob: file, toType: "image/jpeg" })
        .then((blob) => {
          reader.readAsDataURL(blob as Blob);
        })
        .catch((e) => {
          reject(e);
        });
    } else {
      // BUG FIX: previously readAsDataURL(file) ran unconditionally, so for
      // HEIC input the raw (undecodable) file raced with the converted blob.
      // Only read the original file directly for non-HEIC types.
      reader.readAsDataURL(file);
    }
  });
}

View File

@ -93,14 +93,17 @@ export function createUpstashClient(store: SyncStore) {
}
let url;
if (proxyUrl.length > 0 || proxyUrl === "/") {
let u = new URL(proxyUrl + "/api/upstash/" + path);
const pathPrefix = "/api/upstash/";
try {
let u = new URL(proxyUrl + pathPrefix + path);
// add query params
u.searchParams.append("endpoint", config.endpoint);
url = u.toString();
} else {
url = "/api/upstash/" + path + "?endpoint=" + config.endpoint;
} catch (e) {
url = pathPrefix + path + "?endpoint=" + config.endpoint;
}
return url;
},
};

View File

@ -32,6 +32,7 @@
"echarts": "^5.4.3",
"emoji-picker-react": "^4.9.2",
"fuse.js": "^7.0.0",
"heic2any": "^0.0.4",
"html-to-image": "^1.11.11",
"mermaid": "^10.7.0",
"microsoft-cognitiveservices-speech-sdk": "^1.36.0",