fix multi-model confusion

DirkSchlossmacher 2024-08-07 18:09:07 +02:00
parent 2e6ab1385c
commit b898a2df25
2 changed files with 6 additions and 3 deletions


@@ -358,7 +358,9 @@ export const DEFAULT_MODELS = [
       providerType: "openai",
       sorted: 1, // fixed here, to keep the order consistent with the previous built-in version
     },
-  })),
+  }))
+  /*
+  ,
   ...openaiModels.map((name) => ({
     name,
     available: false,
@@ -458,6 +460,7 @@ export const DEFAULT_MODELS = [
       sorted: 10,
     },
   })),
+  */
 ] as const;
 export const CHAT_PAGE_SIZE = 15;
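
Net effect of the first file's change, as a minimal TypeScript sketch: the second ...openaiModels.map(...) spread (the duplicate entries with sorted: 10) is wrapped in a /* ... */ block, so each OpenAI model name is registered exactly once. The surrounding shape is assumed from the context lines; the contents of openaiModels and the available flag on the first spread are illustrative stand-ins, not taken from this diff.

// Hypothetical stand-in for the real list in the repository.
const openaiModels = ["gpt-4o", "gpt-4o-mini"];

export const DEFAULT_MODELS = [
  ...openaiModels.map((name) => ({
    name,
    available: true, // assumed; not visible in the diff context
    provider: {
      id: "openai",
      providerName: "OpenAI",
      providerType: "openai",
      sorted: 1, // fixed, to keep ordering consistent with the built-in version
    },
  })),
  // The second openaiModels spread (available: false, sorted: 10) now sits
  // inside the commented-out span, so no duplicate entries are produced.
] as const;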


@@ -50,7 +50,7 @@ export const DEFAULT_CONFIG = {
   models: DEFAULT_MODELS as any as LLMModel[],
 
   modelConfig: {
-    model: "gpt-4o" as ModelType,
+    model: "gpt-4o-mini" as ModelType,
     providerName: "OpenAI" as ServiceProvider,
     temperature: 0.5,
     top_p: 1,
@@ -59,7 +59,7 @@ export const DEFAULT_CONFIG = {
     frequency_penalty: 0,
     sendMemory: false,
     historyMessageCount: 42,
-    compressMessageLengthThreshold: 90001,
+    compressMessageLengthThreshold: 90002,
     enableInjectSystemPrompts: true,
     template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
     size: "1024x1024" as DalleSize,