merge upstream

sijinhui
2023-12-27 22:47:01 +08:00
32 changed files with 1460 additions and 296 deletions


@@ -8,13 +8,14 @@ import {
DEFAULT_INPUT_TEMPLATE,
DEFAULT_SYSTEM_TEMPLATE,
KnowledgeCutOffDate,
ModelProvider,
StoreKey,
SUMMARIZE_MODEL,
} from "../constant";
import {
api,
getHeaders,
useGetMidjourneySelfProxyUrl,
ClientApi,
RequestMessage,
} from "../client/api";
import { ChatControllerPool } from "../client/controller";
@@ -440,7 +441,6 @@ export const useChatStore = createPersistStore(
botMessage,
]);
});
if (
content.toLowerCase().startsWith("/mj") ||
content.toLowerCase().startsWith("/MJ")
@@ -604,6 +604,13 @@ export const useChatStore = createPersistStore(
set(() => ({}));
extAttr?.setAutoScroll(true);
} else {
var api: ClientApi;
if (modelConfig.model === "gemini-pro") {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
}
// make request
api.llm.chat({
messages: sendMessages,
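
This hunk is where the merge brings in per-model client selection: `gemini-pro` requests go through a Gemini `ClientApi`, everything else through the GPT one, and only then is `api.llm.chat` called. A minimal sketch of that pattern as a helper (the helper itself is hypothetical; the commit inlines the if/else at each call site):

```ts
import { ClientApi } from "../client/api";
import { ModelProvider } from "../constant";

// Hypothetical helper; the commit inlines this if/else at each call site.
// Gemini Pro gets its own client, every other model uses the GPT client.
export function clientFor(model: string): ClientApi {
  return model === "gemini-pro"
    ? new ClientApi(ModelProvider.GeminiPro)
    : new ClientApi(ModelProvider.GPT);
}
```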
@@ -627,12 +634,11 @@ export const useChatStore = createPersistStore(
},
onError(error) {
const isAborted = error.message.includes("aborted");
botMessage.content =
botMessage.content +=
"\n\n" +
prettyObject({
error: true,
message: error.message,
message2: "用上面刷新按钮试试。",
});
botMessage.streaming = false;
userMessage.isError = !isAborted;
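
This hunk changes `onError` to append (`+=`) rather than overwrite, so a partially streamed reply survives alongside the error report; judging by the line counts, the fork's extra `message2` hint ("用上面刷新按钮试试。", roughly "try the refresh button above") is dropped in the merge. A minimal sketch of the resulting behavior, assuming the repo's `prettyObject` formatter:

```ts
import { prettyObject } from "../utils/format"; // repo helper (path assumed)

// onError after the merge: keep any partially streamed reply and append the
// formatted error below it, rather than replacing the content.
function appendError(
  botMessage: { content: string; streaming: boolean },
  error: Error,
) {
  botMessage.content +=
    "\n\n" +
    prettyObject({ error: true, message: error.message });
  botMessage.streaming = false;
}
```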
@@ -684,7 +690,9 @@ export const useChatStore = createPersistStore(
// system prompts, to get close to OpenAI Web ChatGPT
const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;
const systemPrompts = shouldInjectSystemPrompts
var systemPrompts: ChatMessage[] = [];
systemPrompts = shouldInjectSystemPrompts
? [
createMessage({
role: "system",
@@ -778,6 +786,14 @@ export const useChatStore = createPersistStore(
summarizeSession() {
const config = useAppConfig.getState();
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;
var api: ClientApi;
if (modelConfig.model === "gemini-pro") {
api = new ClientApi(ModelProvider.GeminiPro);
} else {
api = new ClientApi(ModelProvider.GPT);
}
// remove error messages if any
const messages = session.messages;
@@ -809,8 +825,6 @@ export const useChatStore = createPersistStore(
},
});
}
const modelConfig = session.mask.modelConfig;
const summarizeIndex = Math.max(
session.lastSummarizeIndex,
session.clearContextIndex ?? 0,
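
The final hunks repeat the same selection inside `summarizeSession`: `modelConfig` is now read right after the session is fetched, so the provider-specific client can be chosen before any summarize request is built, and its old declaration further down is removed. An illustrative sketch of that ordering, reusing the hypothetical `clientFor` helper from the first sketch:

```ts
// Illustrative only; the commit keeps the if/else inline rather than
// extracting a helper.
function summarizeWith(session: { mask: { modelConfig: { model: string } } }) {
  const modelConfig = session.mask.modelConfig; // hoisted above client selection
  const api = clientFor(modelConfig.model); // gemini-pro -> GeminiPro, else GPT
  // ...topic and history summaries then go through api.llm.chat(),
  // mirroring the onUserInput path shown earlier.
}
```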