Compare commits

...

6 Commits

Author SHA1 Message Date
JiangYingjin
88f8ca822f Add WebDAV one-click fill 2025-03-02 14:04:57 +08:00
JiangYingjin
1cccaa2e80 Update sync module 2025-03-02 12:25:33 +08:00
JiangYingjin
d08af47342 Improve the one-click fill flow 2025-03-02 02:27:17 +08:00
JiangYingjin
a5289b39d0 One-click fill for code 2025-03-02 01:46:03 +08:00
JiangYingjin
1aa647688f Adjust initialization parameters 2025-03-02 01:23:27 +08:00
JiangYinjin
fb5e9e5aed fix: allow isVisionModel function to read runtime env var VISION_MODELS 2024-12-26 03:33:24 +08:00
20 changed files with 180 additions and 96 deletions

View File

@@ -13,6 +13,7 @@ const DANGER_CONFIG = {
hideBalanceQuery: serverConfig.hideBalanceQuery,
disableFastLink: serverConfig.disableFastLink,
customModels: serverConfig.customModels,
visionModels: serverConfig.visionModels,
defaultModel: serverConfig.defaultModel,
};

View File

@@ -84,10 +84,13 @@ export class ClaudeApi implements LLMApi {
return res?.content?.[0]?.text;
}
async chat(options: ChatOptions): Promise<void> {
const visionModel = isVisionModel(options.config.model);
const accessStore = useAccessStore.getState();
const visionModel = isVisionModel(
options.config.model,
accessStore.visionModels,
);
const shouldStream = !!options.config.stream;
const modelConfig = {

View File

@@ -83,7 +83,7 @@ export class GeminiProApi implements LLMApi {
}
const messages = _messages.map((v) => {
let parts: any[] = [{ text: getMessageTextContent(v) }];
if (isVisionModel(options.config.model)) {
if (isVisionModel(options.config.model, accessStore.visionModels)) {
const images = getMessageImages(v);
if (images.length > 0) {
multimodal = true;

View File

@@ -194,6 +194,8 @@ export class ChatGPTApi implements LLMApi {
let requestPayload: RequestPayload | DalleRequestPayload;
const accessStore = useAccessStore.getState();
const isDalle3 = _isDalle3(options.config.model);
const isO1 = options.config.model.startsWith("o1");
if (isDalle3) {
@@ -211,7 +213,10 @@ export class ChatGPTApi implements LLMApi {
style: options.config?.style ?? "vivid",
};
} else {
const visionModel = isVisionModel(options.config.model);
const visionModel = isVisionModel(
options.config.model,
accessStore.visionModels,
);
const messages: ChatOptions["messages"] = [];
for (const v of options.messages) {
const content = visionModel

View File

@@ -94,7 +94,11 @@ export class HunyuanApi implements LLMApi {
}
async chat(options: ChatOptions) {
const visionModel = isVisionModel(options.config.model);
const accessStore = useAccessStore.getState();
const visionModel = isVisionModel(
options.config.model,
accessStore.visionModels,
);
const messages = options.messages.map((v, index) => ({
// "Messages 中 system 角色必须位于列表的最开始"
role: index !== 0 && v.role === "system" ? "user" : v.role,

View File

@@ -107,6 +107,7 @@ import {
} from "../constant";
import { Avatar } from "./emoji";
import { ContextPrompts, MaskAvatar, MaskConfig } from "./mask";
import { useSyncStore } from "../store/sync";
import { useMaskStore } from "../store/mask";
import { ChatCommandPrefix, useChatCommand, useCommand } from "../command";
import { prettyObject } from "../utils/format";
@@ -490,6 +491,7 @@ export function ChatActions(props: {
const currentProviderName =
session.mask.modelConfig?.providerName || ServiceProvider.OpenAI;
const allModels = useAllModels();
const customVisionModels = useAccessStore().visionModels;
const models = useMemo(() => {
const filteredModels = allModels.filter((m) => m.available);
const defaultModel = filteredModels.find((m) => m.isDefault);
@@ -529,7 +531,7 @@ export function ChatActions(props: {
const isMobileScreen = useMobileScreen();
useEffect(() => {
const show = isVisionModel(currentModel);
const show = isVisionModel(currentModel, customVisionModels);
setShowUploadImage(show);
if (!show) {
props.setAttachImages([]);
@@ -947,6 +949,8 @@ function _Chat() {
const fontSize = config.fontSize;
const fontFamily = config.fontFamily;
const syncStore = useSyncStore();
const [showExport, setShowExport] = useState(false);
const inputRef = useRef<HTMLTextAreaElement>(null);
@@ -1394,42 +1398,51 @@ function _Chat() {
submit: (text) => {
doSubmit(text);
},
code: (text) => {
if (accessStore.disableFastLink) return;
console.log("[Command] got code from url: ", text);
showConfirm(Locale.URLCommand.Code + `code = ${text}`).then((res) => {
if (res) {
accessStore.update((access) => (access.accessCode = text));
}
});
},
// code: (text) => {
// if (accessStore.disableFastLink) return;
// console.log("[Command] got code from url: ", text);
// showConfirm(Locale.URLCommand.Code + `code = ${text}`).then((res) => {
// if (res) {
// accessStore.update((access) => (access.accessCode = text));
// }
// });
// },
settings: (text) => {
if (accessStore.disableFastLink) return;
try {
const payload = JSON.parse(text) as {
key?: string;
url?: string;
code?: string;
username?: string;
password?: string;
};
console.log("[Command] got settings from url: ", payload);
if (payload.key || payload.url) {
showConfirm(
Locale.URLCommand.Settings +
`\n${JSON.stringify(payload, null, 4)}`,
).then((res) => {
if (!res) return;
if (payload.key) {
accessStore.update(
(access) => (access.openaiApiKey = payload.key!),
);
}
if (payload.url) {
accessStore.update((access) => (access.openaiUrl = payload.url!));
}
accessStore.update((access) => (access.useCustomConfig = true));
});
if (payload.code) {
accessStore.update((access) => (access.accessCode = payload.code!));
if (accessStore.isAuthorized()) {
context.pop();
const copiedHello = Object.assign({}, BOT_HELLO);
context.push(copiedHello);
setUserInput(" ");
}
}
if (payload.username) {
syncStore.update(
(config) => (config.webdav.username = payload.username!),
);
}
if (payload.password) {
syncStore.update(
(config) => (config.webdav.password = payload.password!),
);
}
if (payload.username && payload.password) {
syncStore.sync();
}
} catch {
console.error("[Command] failed to get settings from url: ", text);
@@ -1457,10 +1470,12 @@ function _Chat() {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const customVisionModels = useAccessStore().visionModels;
const handlePaste = useCallback(
async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
const currentModel = chatStore.currentSession().mask.modelConfig.model;
if (!isVisionModel(currentModel)) {
if (!isVisionModel(currentModel, customVisionModels)) {
return;
}
const items = (event.clipboardData || window.clipboardData).items;
@@ -1497,7 +1512,7 @@ function _Chat() {
}
}
},
[attachImages, chatStore],
[attachImages, chatStore, customVisionModels],
);
async function uploadImage() {
@@ -1545,7 +1560,7 @@ function _Chat() {
setAttachImages(images);
}
// shortcut keys
const [showShortcutKeyModal, setShowShortcutKeyModal] = useState(false);
useEffect(() => {

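For reference, a hedged sketch (not part of this diff) of the kind of URL the extended settings command above can consume; the host and all field values are placeholders, and the exact URL shape depends on the app's hash routing:

// Hypothetical link that pre-fills the access code and WebDAV credentials;
// when both username and password are present, the handler above also
// triggers an immediate syncStore.sync().
const payload = {
  code: "my-access-code",
  username: "webdav-user",
  password: "webdav-pass",
};
const link = `https://chat.example.com/#/?settings=${encodeURIComponent(
  JSON.stringify(payload),
)}`;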
View File

@@ -528,6 +528,21 @@ function SyncItems() {
setShowSyncConfigModal(true);
}}
/>
{couldSync && (
<IconButton
icon={<UploadIcon />}
text={Locale.UI.Overwrite}
onClick={async () => {
try {
await syncStore.overwrite();
showToast(Locale.Settings.Sync.Success);
} catch (e) {
showToast(Locale.Settings.Sync.Fail);
console.error("[Sync]", e);
}
}}
/>
)}
{couldSync && (
<IconButton
icon={<ResetIcon />}

View File

@@ -21,6 +21,7 @@ declare global {
ENABLE_BALANCE_QUERY?: string; // allow user to query balance or not
DISABLE_FAST_LINK?: string; // disallow parse settings from url or not
CUSTOM_MODELS?: string; // to control custom models
VISION_MODELS?: string; // to control vision models
DEFAULT_MODEL?: string; // to control default model in every new chat window
// stability only
@@ -123,13 +124,16 @@ export const getServerSideConfig = () => {
const disableGPT4 = !!process.env.DISABLE_GPT4;
let customModels = process.env.CUSTOM_MODELS ?? "";
let visionModels = process.env.VISION_MODELS ?? "";
let defaultModel = process.env.DEFAULT_MODEL ?? "";
if (disableGPT4) {
if (customModels) customModels += ",";
customModels += DEFAULT_MODELS.filter(
(m) =>
(m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o") || m.name.startsWith("o1")) &&
(m.name.startsWith("gpt-4") ||
m.name.startsWith("chatgpt-4o") ||
m.name.startsWith("o1")) &&
!m.name.startsWith("gpt-4o-mini"),
)
.map((m) => "-" + m.name)
@@ -247,6 +251,7 @@ export const getServerSideConfig = () => {
hideBalanceQuery: !process.env.ENABLE_BALANCE_QUERY,
disableFastLink: !!process.env.DISABLE_FAST_LINK,
customModels,
visionModels,
defaultModel,
allowedWebDavEndpoints,
};

View File

@@ -757,6 +757,7 @@ const cn = {
Export: "导出",
Import: "导入",
Sync: "同步",
Overwrite: "覆盖",
Config: "配置",
},
Exporter: {

View File

@@ -762,6 +762,7 @@ const en: LocaleType = {
Edit: "Edit",
Export: "Export",
Import: "Import",
Overwrite: "Overwrite",
Sync: "Sync",
Config: "Config",
},

View File

@@ -589,6 +589,7 @@ const fr: PartialLocaleType = {
Edit: "Modifier",
Export: "Exporter",
Import: "Importer",
Overwrite: "Remplacer",
Sync: "Synchroniser",
Config: "Configurer",
},

View File

@@ -590,6 +590,7 @@ const it: PartialLocaleType = {
Edit: "Modifica",
Export: "Esporta",
Import: "Importa",
Overwrite: "Sostituisci",
Sync: "Sincronizza",
Config: "Configura",
},

View File

@@ -505,6 +505,7 @@ const pt: PartialLocaleType = {
Edit: "Editar",
Export: "Exportar",
Import: "Importar",
Overwrite: "Substituir",
Sync: "Sincronizar",
Config: "Configurar",
},

View File

@@ -123,6 +123,7 @@ const DEFAULT_ACCESS_STATE = {
disableGPT4: false,
disableFastLink: false,
customModels: "",
visionModels: "",
defaultModel: "",
// tts config

View File

@@ -4,7 +4,6 @@ import { getClientConfig } from "../config/client";
import {
DEFAULT_INPUT_TEMPLATE,
DEFAULT_MODELS,
DEFAULT_SIDEBAR_WIDTH,
DEFAULT_TTS_ENGINE,
DEFAULT_TTS_ENGINES,
DEFAULT_TTS_MODEL,
@@ -46,18 +45,20 @@ export const DEFAULT_CONFIG = {
fontSize: 14,
fontFamily: "",
theme: Theme.Auto as Theme,
tightBorder: !!config?.isApp,
sendPreviewBubble: true,
// tightBorder: !!config?.isApp,
tightBorder: true,
sendPreviewBubble: false,
enableAutoGenerateTitle: true,
sidebarWidth: DEFAULT_SIDEBAR_WIDTH,
// sidebarWidth: DEFAULT_SIDEBAR_WIDTH,
sidebarWidth: 100,
enableArtifacts: true, // show artifacts config
enableCodeFold: true, // code fold config
disablePromptHint: false,
disablePromptHint: true,
dontShowMaskSplashScreen: false, // dont show splash screen when create chat
dontShowMaskSplashScreen: true, // dont show splash screen when create chat
hideBuiltinMasks: false, // dont add builtin masks
customModels: "",
@@ -68,12 +69,12 @@ export const DEFAULT_CONFIG = {
providerName: "OpenAI" as ServiceProvider,
temperature: 0.5,
top_p: 1,
max_tokens: 4000,
max_tokens: 8000,
presence_penalty: 0,
frequency_penalty: 0,
sendMemory: true,
historyMessageCount: 4,
compressMessageLengthThreshold: 1000,
historyMessageCount: 16,
compressMessageLengthThreshold: 1000000,
compressModel: "",
compressProviderName: "",
enableInjectSystemPrompts: true,

View File

@@ -28,7 +28,7 @@ const DEFAULT_SYNC_STATE = {
proxyUrl: ApiPath.Cors as string,
webdav: {
endpoint: "",
endpoint: "https://dav.jyj.cx",
username: "",
password: "",
},
@@ -88,7 +88,7 @@ export const useSyncStore = createPersistStore(
return client;
},
async sync() {
async sync(overwrite = false) {
const localState = getLocalAppState();
const provider = get().provider;
const config = get()[provider];
@@ -103,11 +103,13 @@ export const useSyncStore = createPersistStore(
);
return;
} else {
const parsedRemoteState = JSON.parse(
await client.get(config.username),
) as AppState;
mergeAppState(localState, parsedRemoteState);
setLocalAppState(localState);
if (!overwrite) {
const parsedRemoteState = JSON.parse(
await client.get(config.username),
) as AppState;
mergeAppState(localState, parsedRemoteState);
setLocalAppState(localState);
}
}
} catch (e) {
console.log("[Sync] failed to get remote state", e);
@@ -119,6 +121,10 @@ export const useSyncStore = createPersistStore(
this.markSyncTime();
},
async overwrite() {
await this.sync(true);
},
async check() {
const client = this.getClient();
return await client.check();

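As a side note (illustrative, not part of the diff), a minimal sketch of how the new overwrite path differs from a regular sync; the method names come from the store above, while the import path and call sites are assumptions:

import { useSyncStore } from "./app/store/sync";

async function pushLocalToWebDav() {
  // Regular sync: fetch the remote state, merge it into the local state,
  // write the merged result locally, then upload it.
  await useSyncStore.getState().sync();

  // Overwrite: skip the remote fetch and merge, so the current local state is
  // uploaded as-is and replaces whatever the WebDAV server currently holds.
  await useSyncStore.getState().overwrite(); // equivalent to sync(true)
}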
View File

@@ -7,6 +7,7 @@ import { ServiceProvider } from "./constant";
import { fetch as tauriStreamFetch } from "./utils/stream";
import { VISION_MODEL_REGEXES, EXCLUDE_VISION_MODEL_REGEXES } from "./constant";
import { getClientConfig } from "./config/client";
import { getModelProvider } from "./utils/model";
export function trimTopic(topic: string) {
// Fix an issue where double quotes still show in the Indonesian language
@@ -253,12 +254,15 @@ export function getMessageImages(message: RequestMessage): string[] {
return urls;
}
export function isVisionModel(model: string) {
export function isVisionModel(model: string, customVisionModels: string) {
const clientConfig = getClientConfig();
const envVisionModels = clientConfig?.visionModels
?.split(",")
.map((m) => m.trim());
if (envVisionModels?.includes(model)) {
const allVisionModelsList = [customVisionModels, clientConfig?.visionModels]
?.join(",")
.split(",")
.map((m) => m.trim())
.filter(Boolean)
.map((m) => getModelProvider(m)[0]);
if (allVisionModelsList?.includes(model)) {
return true;
}
return (

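A brief usage sketch (illustrative, not part of the diff) of the new two-argument isVisionModel: the second argument is the comma-separated list kept in the access store, while the server-provided VISION_MODELS list is still read internally via getClientConfig; the model names below are made up:

import { isVisionModel } from "./app/utils";

// Provider suffixes such as "@openai" are stripped via getModelProvider before
// comparison, and the built-in VISION_MODEL_REGEXES still apply as a fallback.
const customVisionModels = "pixtral-large@openai,my-local-vlm";
isVisionModel("pixtral-large", customVisionModels); // true, from the custom list
isVisionModel("gpt-4-vision", customVisionModels); // true, matches the built-in regexes
isVisionModel("text-only-model", customVisionModels); // false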
View File

@@ -1,11 +1,11 @@
import {
ChatSession,
useAccessStore,
useAppConfig,
// useAccessStore,
// useAppConfig,
useChatStore,
} from "../store";
import { useMaskStore } from "../store/mask";
import { usePromptStore } from "../store/prompt";
// import { useMaskStore } from "../store/mask";
// import { usePromptStore } from "../store/prompt";
import { StoreKey } from "../constant";
import { merge } from "./merge";
@@ -32,18 +32,18 @@ export type GetStoreState<T> = T extends { getState: () => infer U }
const LocalStateSetters = {
[StoreKey.Chat]: useChatStore.setState,
[StoreKey.Access]: useAccessStore.setState,
[StoreKey.Config]: useAppConfig.setState,
[StoreKey.Mask]: useMaskStore.setState,
[StoreKey.Prompt]: usePromptStore.setState,
// [StoreKey.Access]: useAccessStore.setState,
// [StoreKey.Config]: useAppConfig.setState,
// [StoreKey.Mask]: useMaskStore.setState,
// [StoreKey.Prompt]: usePromptStore.setState,
} as const;
const LocalStateGetters = {
[StoreKey.Chat]: () => getNonFunctionFileds(useChatStore.getState()),
[StoreKey.Access]: () => getNonFunctionFileds(useAccessStore.getState()),
[StoreKey.Config]: () => getNonFunctionFileds(useAppConfig.getState()),
[StoreKey.Mask]: () => getNonFunctionFileds(useMaskStore.getState()),
[StoreKey.Prompt]: () => getNonFunctionFileds(usePromptStore.getState()),
// [StoreKey.Access]: () => getNonFunctionFileds(useAccessStore.getState()),
// [StoreKey.Config]: () => getNonFunctionFileds(useAppConfig.getState()),
// [StoreKey.Mask]: () => getNonFunctionFileds(useMaskStore.getState()),
// [StoreKey.Prompt]: () => getNonFunctionFileds(usePromptStore.getState()),
} as const;
export type AppState = {
@@ -100,22 +100,22 @@ const MergeStates: StateMerger = {
return localState;
},
[StoreKey.Prompt]: (localState, remoteState) => {
localState.prompts = {
...remoteState.prompts,
...localState.prompts,
};
return localState;
},
[StoreKey.Mask]: (localState, remoteState) => {
localState.masks = {
...remoteState.masks,
...localState.masks,
};
return localState;
},
[StoreKey.Config]: mergeWithUpdate<AppState[StoreKey.Config]>,
[StoreKey.Access]: mergeWithUpdate<AppState[StoreKey.Access]>,
// [StoreKey.Prompt]: (localState, remoteState) => {
// localState.prompts = {
// ...remoteState.prompts,
// ...localState.prompts,
// };
// return localState;
// },
// [StoreKey.Mask]: (localState, remoteState) => {
// localState.masks = {
// ...remoteState.masks,
// ...localState.masks,
// };
// return localState;
// },
// [StoreKey.Config]: mergeWithUpdate<AppState[StoreKey.Config]>,
// [StoreKey.Access]: mergeWithUpdate<AppState[StoreKey.Access]>,
};
export function getLocalAppState() {

nextchat.json (new file, 16 lines)
View File

@@ -0,0 +1,16 @@
{
"name": "nextchat",
"cwd": "/www/nextchat",
"script": "server.js",
"env": {
"PORT": 8032,
"CODE": "scut",
"BASE_URL": "https://oneapi.jyj.cx",
"OPENAI_API_KEY": "sk-jiangyj",
"HIDE_USER_API_KEY": true,
"CUSTOM_MODELS": "-all,gemini-2.0-pro-exp-02-05@openai,gemini-2.0-flash-thinking-exp-01-21@openai,gemini-2.0-flash-exp@openai,gemini-2.0-flash@openai,gemini-2.0-flash-lite@openai,gpt-4o-2024-11-20@openai,o3-mini@openai,deepseek-ai/deepseek-v3@openai,deepseek-ai/deepseek-r1@openai,deepseek-chat@openai,deepseek-reasoner@openai,ep-20250124104315-zsg4p@openai",
"DEFAULT_MODEL": "gemini-2.0-pro-exp-02-05@openai",
"WHITE_WEBDAV_ENDPOINTS": "https://dav.jyj.cx",
"VISION_MODELS": "gemini-2.0-flash-thinking-exp-01-21@openai,gemini-2.0-pro-exp-02-05@openai,gemini-2.0-flash-exp@openai,gemini-2.0-flash@openai,gemini-2.0-flash-lite@openai,gpt-4o-2024-11-20@openai,o3-mini@openai,deepseek-ai/DeepSeek-V3@openai,deepseek-ai/DeepSeek-R1@openai,deepseek-chat@openai,deepseek-reasoner@openai,ep-20250124104315-zsg4p@openai"
}
}

View File

@@ -2,6 +2,7 @@ import { isVisionModel } from "../app/utils";
describe("isVisionModel", () => {
const originalEnv = process.env;
const customVisionModels = "custom-vlm,another-vlm";
beforeEach(() => {
jest.resetModules();
@@ -27,12 +28,12 @@ describe("isVisionModel", () => {
];
visionModels.forEach((model) => {
expect(isVisionModel(model)).toBe(true);
expect(isVisionModel(model, customVisionModels)).toBe(true);
});
});
test("should exclude specific models", () => {
expect(isVisionModel("claude-3-5-haiku-20241022")).toBe(false);
expect(isVisionModel("claude-3-5-haiku-20241022", customVisionModels)).toBe(false);
});
test("should not identify non-vision models", () => {
@@ -44,24 +45,26 @@ describe("isVisionModel", () => {
];
nonVisionModels.forEach((model) => {
expect(isVisionModel(model)).toBe(false);
expect(isVisionModel(model, customVisionModels)).toBe(false);
});
});
test("should identify models from VISION_MODELS env var", () => {
process.env.VISION_MODELS = "custom-vision-model,another-vision-model";
expect(isVisionModel("custom-vision-model")).toBe(true);
expect(isVisionModel("another-vision-model")).toBe(true);
expect(isVisionModel("unrelated-model")).toBe(false);
expect(isVisionModel("custom-vision-model", customVisionModels)).toBe(true);
expect(isVisionModel("another-vision-model", customVisionModels)).toBe(true);
expect(isVisionModel("custom-vlm", customVisionModels)).toBe(true);
expect(isVisionModel("another-vlm", customVisionModels)).toBe(true);
expect(isVisionModel("unrelated-model", customVisionModels)).toBe(false);
});
test("should handle empty or missing VISION_MODELS", () => {
process.env.VISION_MODELS = "";
expect(isVisionModel("unrelated-model")).toBe(false);
expect(isVisionModel("unrelated-model", customVisionModels)).toBe(false);
delete process.env.VISION_MODELS;
expect(isVisionModel("unrelated-model")).toBe(false);
expect(isVisionModel("gpt-4-vision")).toBe(true);
expect(isVisionModel("unrelated-model", customVisionModels)).toBe(false);
expect(isVisionModel("gpt-4-vision", customVisionModels)).toBe(true);
});
});