Change the model URL used for message compression

sijinhui 2024-07-19 18:03:38 +08:00
parent 69c434c047
commit 60813d448f
2 changed files with 21 additions and 6 deletions

View File

@@ -119,6 +119,7 @@ export class ChatGPTApi implements LLMApi {
         providerName: options.config.providerName,
       },
     };
+    console.log('-------', modelConfig, options)
     const requestPayload: RequestPayload = {
       messages,
       stream: options.config.stream,

View File

@@ -97,7 +97,10 @@ function createEmptySession(): ChatSession {
 
 const ChatFetchTaskPool: Record<string, any> = {};
 
-function getSummarizeModel(currentModel: string) {
+function getSummarizeModel(currentModel: string): {
+  name: string,
+  providerName: string | undefined,
+} {
   // if it is using gpt-* models, force to use 3.5 to summarize
   if (currentModel.startsWith("gpt")) {
     const configStore = useAppConfig.getState();
@@ -110,12 +113,21 @@ function getSummarizeModel(currentModel: string) {
     const summarizeModel = allModel.find(
       (m) => m.name === SUMMARIZE_MODEL && m.available,
     );
-    return summarizeModel?.name ?? currentModel;
+    return {
+      name: summarizeModel?.name ?? currentModel,
+      providerName: summarizeModel?.provider?.providerName,
+    }
   }
   if (currentModel.startsWith("gemini")) {
-    return GEMINI_SUMMARIZE_MODEL;
+    return {
+      name: GEMINI_SUMMARIZE_MODEL,
+      providerName: ServiceProvider.Google,
+    }
+  }
+  return {
+    name: currentModel,
+    providerName: undefined,
   }
-  return currentModel;
 }
 
 function countMessages(msgs: ChatMessage[]) {
@@ -905,7 +917,8 @@ export const useChatStore = createPersistStore(
       api.llm.chat({
         messages: topicMessages,
         config: {
-          model: getSummarizeModel(session.mask.modelConfig.model),
+          model: getSummarizeModel(session.mask.modelConfig.model).name,
+          providerName: getSummarizeModel(session.mask.modelConfig.model).providerName,
           stream: false,
         },
         onFinish(message) {
@@ -967,7 +980,8 @@ export const useChatStore = createPersistStore(
         config: {
           ...modelcfg,
           stream: true,
-          model: getSummarizeModel(session.mask.modelConfig.model),
+          model: getSummarizeModel(session.mask.modelConfig.model).name,
+          providerName: getSummarizeModel(session.mask.modelConfig.model).providerName,
         },
         onUpdate(message) {
           session.memoryPrompt = message;
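
Taken together, getSummarizeModel now returns both the summarize model's name and the provider it belongs to, so the summarize/compress request can be routed to that provider's URL instead of always going to the default one. Both call sites above invoke the helper twice to build one config; below is a minimal, self-contained TypeScript sketch of consuming the new return shape with a single call. buildSummarizeConfig and the SummarizeModel alias are illustrative names, not part of this commit; only the field names (model, providerName, stream) come from the diff.

// Sketch only: consume the new { name, providerName } result with one call.
// The helper name and signature here are hypothetical; the field names
// mirror the config object shown in the diff.
type SummarizeModel = { name: string; providerName: string | undefined };

function buildSummarizeConfig(
  currentModel: string,
  getSummarizeModel: (model: string) => SummarizeModel,
  stream: boolean,
) {
  // Call the helper once and reuse the result for both config fields.
  const summarize = getSummarizeModel(currentModel);
  return {
    model: summarize.name,
    // May be undefined for models that are neither gpt-* nor gemini.
    providerName: summarize.providerName,
    stream,
  };
}

// Usage, mirroring the first call site in the diff:
// config: buildSummarizeConfig(session.mask.modelConfig.model, getSummarizeModel, false)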