Merge remote-tracking branch 'upstream/main'

Hk-Gosuto
2023-12-25 12:39:24 +08:00
35 changed files with 894 additions and 816 deletions

app/store/chat.ts

@@ -8,10 +8,11 @@ import {
   DEFAULT_INPUT_TEMPLATE,
   DEFAULT_SYSTEM_TEMPLATE,
   KnowledgeCutOffDate,
+  ModelProvider,
   StoreKey,
   SUMMARIZE_MODEL,
 } from "../constant";
-import { api, RequestMessage } from "../client/api";
+import { ClientApi, RequestMessage } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";
 import { estimateTokenLength } from "../utils/token";
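
For orientation: the import hunk above swaps the shared `api` singleton for the `ClientApi` class and pulls in the `ModelProvider` enum. A minimal sketch of the shift, reconstructed only from lines shown in this diff:

    // Old pattern (removed): one shared client, retargeted per call.
    //   import { api } from "../client/api";
    //   api.switch(modelConfig.model);

    // New pattern (added): construct a client for the provider you need.
    import { ClientApi } from "../client/api";
    import { ModelProvider } from "../constant";

    const api = new ClientApi(ModelProvider.GPT); // or ModelProvider.GeminiPro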
@@ -319,6 +320,8 @@ export const useChatStore = createPersistStore(
         session.messages.push(savedUserMessage);
         session.messages.push(botMessage);
       });
+      var api: ClientApi;
+      api = new ClientApi(ModelProvider.GPT);
       if (
         config.pluginConfig.enable &&
         session.mask.usePlugins &&
@@ -392,8 +395,10 @@ export const useChatStore = createPersistStore(
           },
         });
       } else {
+        if (modelConfig.model === "gemini-pro") {
+          api = new ClientApi(ModelProvider.GeminiPro);
+        }
         // make request
-        api.switch(modelConfig.model);
         api.llm.chat({
           messages: sendMessages,
           config: { ...modelConfig, stream: true },
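
Taken together, the two hunks above default `api` to an OpenAI-backed client and rebind it to Gemini in the non-plugin branch. The same selection logic as a standalone helper; `resolveClientApi` is an illustrative name, not part of this commit:

    import { ClientApi } from "../client/api";
    import { ModelProvider } from "../constant";

    // Mirrors the inline if/else introduced in this commit: "gemini-pro" is
    // the only model routed to the Gemini client; everything else falls back
    // to the OpenAI-compatible GPT client.
    function resolveClientApi(model: string): ClientApi {
      return model === "gemini-pro"
        ? new ClientApi(ModelProvider.GeminiPro)
        : new ClientApi(ModelProvider.GPT);
    }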
@@ -472,7 +477,9 @@ export const useChatStore = createPersistStore(
       // system prompts, to get close to OpenAI Web ChatGPT
       const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;
-      const systemPrompts = shouldInjectSystemPrompts
+      var systemPrompts: ChatMessage[] = [];
+      systemPrompts = shouldInjectSystemPrompts
         ? [
             createMessage({
               role: "system",
@@ -566,6 +573,14 @@ export const useChatStore = createPersistStore(
     summarizeSession() {
       const config = useAppConfig.getState();
       const session = get().currentSession();
+      const modelConfig = session.mask.modelConfig;
+
+      var api: ClientApi;
+      if (modelConfig.model === "gemini-pro") {
+        api = new ClientApi(ModelProvider.GeminiPro);
+      } else {
+        api = new ClientApi(ModelProvider.GPT);
+      }
 
       // remove error messages if any
       const messages = session.messages;
@@ -583,7 +598,6 @@ export const useChatStore = createPersistStore(
             content: Locale.Store.Prompt.Topic,
           }),
         );
-      api.switch(session.mask.modelConfig.model);
       api.llm.chat({
         messages: topicMessages,
         config: {
@@ -598,8 +612,6 @@ export const useChatStore = createPersistStore(
           },
         });
       }
-
-      const modelConfig = session.mask.modelConfig;
       const summarizeIndex = Math.max(
         session.lastSummarizeIndex,
         session.clearContextIndex ?? 0,
@@ -633,7 +645,6 @@ export const useChatStore = createPersistStore(
         historyMsgLength > modelConfig.compressMessageLengthThreshold &&
         modelConfig.sendMemory
       ) {
-        api.switch(modelConfig.model);
         api.llm.chat({
           messages: toBeSummarizedMsgs.concat(
             createMessage({
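
With every `api.switch(...)` call removed, each call site now pairs a freshly constructed client with the same `llm.chat` request shape shown earlier in this diff. A condensed sketch of the resulting pattern; `modelConfig` and `sendMessages` stand in for the store values above, and `stream: true` is taken from the send path shown in this diff:

    import { ClientApi } from "../client/api";
    import { ModelProvider } from "../constant";

    // Pick the provider once per request, then issue the chat call.
    const api = new ClientApi(
      modelConfig.model === "gemini-pro"
        ? ModelProvider.GeminiPro
        : ModelProvider.GPT,
    );
    api.llm.chat({
      messages: sendMessages,
      config: { ...modelConfig, stream: true },
    });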