resolve conflict

dakai
2023-04-03 03:07:16 +08:00
28 changed files with 1147 additions and 765 deletions

@@ -57,6 +57,8 @@ export interface ChatConfig {
+export type ModelConfig = ChatConfig["modelConfig"];
+export const ROLES: Message["role"][] = ["system", "user", "assistant"];
 const ENABLE_GPT4 = true;
 
 export const ALL_MODELS = [
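The two new exports presumably back the context-prompt feature this commit introduces: ModelConfig aliases the nested modelConfig shape, and ROLES enumerates the valid message roles. A minimal sketch of how an editor UI might cycle a prompt's role through that list (nextRole is a hypothetical helper, not part of the commit):

// Hypothetical helper: cycle a message's role through the ROLES tuple,
// as a context-prompt editor might do when the role field is clicked.
type Role = "system" | "user" | "assistant";

const ROLES: Role[] = ["system", "user", "assistant"];

function nextRole(current: Role): Role {
  const i = ROLES.indexOf(current);
  // An unknown role (i === -1) restarts the cycle at "system".
  return ROLES[(i + 1) % ROLES.length];
}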
@@ -155,6 +157,7 @@ export interface ChatSession {
   id: number;
   topic: string;
   memoryPrompt: string;
+  context: Message[];
   messages: Message[];
   stat: ChatStat;
   lastUpdate: string;
@@ -162,6 +165,11 @@ export interface ChatSession {
 }
 
 const DEFAULT_TOPIC = Locale.Store.DefaultTopic;
+export const BOT_HELLO: Message = {
+  role: "assistant",
+  content: Locale.Store.BotHello,
+  date: "",
+};
 
 function createEmptySession(): ChatSession {
   const createDate = new Date().toLocaleString();
@@ -170,13 +178,8 @@ function createEmptySession(): ChatSession {
     id: Date.now(),
     topic: DEFAULT_TOPIC,
     memoryPrompt: "",
-    messages: [
-      {
-        role: "assistant",
-        content: Locale.Store.BotHello,
-        date: createDate,
-      },
-    ],
+    context: [],
+    messages: [],
     stat: {
       tokenCount: 0,
       wordCount: 0,
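Because the greeting now lives in the shared BOT_HELLO constant rather than in each session's persisted messages, createEmptySession returns a truly empty session and the greeting can be prepended at render time instead. A minimal sketch of that display-time pattern, with a simplified Message type and a hypothetical messagesForDisplay helper:

// Hypothetical display-time helper: show BOT_HELLO in front of an empty
// session without ever persisting it. `Message` mirrors the store's shape.
interface Message {
  role: "system" | "user" | "assistant";
  content: string;
  date: string;
}

const BOT_HELLO: Message = {
  role: "assistant",
  content: "Hello! How can I assist you today?", // Locale.Store.BotHello in the app
  date: "",
};

function messagesForDisplay(messages: Message[]): Message[] {
  // Prepend the canned greeting only when the session has no real messages.
  return messages.length === 0 ? [BOT_HELLO] : messages;
}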
@@ -386,16 +389,17 @@ export const useChatStore = create<ChatStore>()(
       const session = get().currentSession();
       const config = get().config;
       const n = session.messages.length;
-      const recentMessages = session.messages.slice(
-        Math.max(0, n - config.historyMessageCount),
-      );
-      const memoryPrompt = get().getMemoryPrompt();
-      if (session.memoryPrompt) {
-        recentMessages.unshift(memoryPrompt);
+      const context = session.context.slice();
+
+      if (session.memoryPrompt && session.memoryPrompt.length > 0) {
+        const memoryPrompt = get().getMemoryPrompt();
+        context.push(memoryPrompt);
       }
+
+      const recentMessages = context.concat(
+        session.messages.slice(Math.max(0, n - config.historyMessageCount)),
+      );
 
       return recentMessages;
     },
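The rewritten getMessagesWithMemory assembles the request in a fixed order: the session's context prompts first, then the memory summary when one exists, then the most recent historyMessageCount messages. A standalone sketch of that assembly (buildPrompt and the trimmed Message type are illustrative, not the store's API):

// Standalone sketch of the assembly order implemented above:
// [context prompts] + [memory summary, if any] + [last N messages].
interface Message {
  role: "system" | "user" | "assistant";
  content: string;
  date: string;
}

function buildPrompt(
  context: Message[],
  memoryPrompt: Message | undefined,
  messages: Message[],
  historyMessageCount: number,
): Message[] {
  const head = memoryPrompt ? [...context, memoryPrompt] : [...context];
  const recent = messages.slice(
    Math.max(0, messages.length - historyMessageCount),
  );
  return head.concat(recent);
}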
@@ -433,11 +437,13 @@ export const useChatStore = create<ChatStore>()(
       let toBeSummarizedMsgs = session.messages.slice(
         session.lastSummarizeIndex,
       );
 
       const historyMsgLength = countMessages(toBeSummarizedMsgs);
 
-      if (historyMsgLength > 4000) {
+      if (historyMsgLength > (get().config?.modelConfig?.max_tokens ?? 4000)) {
+        const n = toBeSummarizedMsgs.length;
         toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
-          -config.historyMessageCount,
+          Math.max(0, n - config.historyMessageCount),
         );
       }
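The summarization threshold now tracks the configured max_tokens, falling back to 4000 when the config is absent. The parentheses around the fallback matter: ?? binds more loosely than >, so without them the condition would parse as (historyMsgLength > max_tokens) ?? 4000, and since a boolean is never nullish the 4000 default would be dead code. A minimal sketch of the corrected shape (shouldCompress is an illustrative name):

// Illustrative sketch: the ?? fallback must wrap the operand, not the
// whole comparison, or it never fires.
function shouldCompress(length: number, maxTokens?: number): boolean {
  return length > (maxTokens ?? 4000);
}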
@@ -506,7 +512,16 @@ export const useChatStore = create<ChatStore>()(
    }),
    {
      name: LOCAL_KEY,
-      version: 1,
+      version: 1.1,
+      migrate(persistedState, version) {
+        const state = persistedState as ChatStore;
+
+        if (version === 1) {
+          state.sessions.forEach((s) => (s.context = []));
+        }
+
+        return state;
+      },
    },
  ),
);
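Bumping the persist version to 1.1 makes zustand call migrate for any state stored under version 1, and the migration backfills the context array that old sessions lack. A minimal standalone sketch of the same versioned-persistence pattern, assuming zustand v4's named create export and a simplified store shape:

import { create } from "zustand";
import { persist } from "zustand/middleware";

interface Session { id: number; context: string[] }
interface Store { sessions: Session[] }

// Sketch of the versioned-persistence pattern used above: when the stored
// version is older than `version`, persist calls `migrate` before rehydrating.
const useStore = create<Store>()(
  persist(
    () => ({ sessions: [] as Session[] }),
    {
      name: "chat-store", // storage key; the real app uses LOCAL_KEY
      version: 1.1,
      migrate(persistedState, version) {
        const state = persistedState as Store;
        if (version === 1) {
          // Backfill the field introduced in 1.1.
          state.sessions.forEach((s) => (s.context = s.context ?? []));
        }
        return state;
      },
    },
  ),
);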