Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git

Merge remote-tracking branch 'upstream/main'
@@ -38,7 +38,6 @@ export interface ChatStat {
 
 export interface ChatSession {
   id: number;
-
   topic: string;
 
   memoryPrompt: string;
@@ -46,6 +45,7 @@ export interface ChatSession {
   stat: ChatStat;
   lastUpdate: number;
   lastSummarizeIndex: number;
+  clearContextIndex?: number;
 
   mask: Mask;
 }
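
Note: clearContextIndex is the new optional field behind the "clear context" action; when it is set, messages at or before that index are excluded from what gets sent to the model, and when it is undefined nothing is excluded. A minimal sketch of that contract, with a simplified message type (the helper name is illustrative, not from this commit):

    interface ChatMessage {
      role: "user" | "assistant" | "system";
      content: string;
    }

    // Illustrative helper: keep only the messages after the clear marker.
    // With no marker, (-1) + 1 === 0 and every message is kept.
    function messagesAfterClear(
      messages: ChatMessage[],
      clearContextIndex?: number,
    ): ChatMessage[] {
      return messages.slice((clearContextIndex ?? -1) + 1);
    }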
@@ -69,6 +69,7 @@ function createEmptySession(): ChatSession {
     },
     lastUpdate: Date.now(),
     lastSummarizeIndex: 0,
+
     mask: createEmptyMask(),
   };
 }
@@ -277,13 +278,17 @@ export const useChatStore = create<ChatStore>()(
         config: { ...modelConfig, stream: true },
         onUpdate(message) {
           botMessage.streaming = true;
-          botMessage.content = message;
+          if (message) {
+            botMessage.content = message;
+          }
           set(() => ({}));
         },
         onFinish(message) {
           botMessage.streaming = false;
-          botMessage.content = message;
-          get().onNewMessage(botMessage);
+          if (message) {
+            botMessage.content = message;
+            get().onNewMessage(botMessage);
+          }
           ChatControllerPool.remove(
             sessionIndex,
             botMessage.id ?? messageIndex,
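
Note: the change above wraps the content assignment in an if (message) guard, so an empty streaming chunk or an empty final payload can no longer wipe out text that has already arrived. A minimal sketch of that behaviour, assuming the callbacks receive the full accumulated text rather than a delta (types simplified, names illustrative):

    interface BotMessage {
      content: string;
      streaming: boolean;
    }

    // Only overwrite content when the incoming text is non-empty.
    function handleUpdate(botMessage: BotMessage, message: string) {
      botMessage.streaming = true;
      if (message) {
        botMessage.content = message;
      }
    }

    function handleFinish(botMessage: BotMessage, message: string) {
      botMessage.streaming = false;
      if (message) {
        botMessage.content = message;
      }
    }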
@@ -292,12 +297,12 @@ export const useChatStore = create<ChatStore>()(
         },
         onError(error) {
           const isAborted = error.message.includes("aborted");
-          if (
-            botMessage.content !== Locale.Error.Unauthorized &&
-            !isAborted
-          ) {
-            botMessage.content += "\n\n" + prettyObject(error);
-          }
+          botMessage.content =
+            "\n\n" +
+            prettyObject({
+              error: true,
+              message: error.message,
+            });
           botMessage.streaming = false;
           userMessage.isError = !isAborted;
           botMessage.isError = !isAborted;
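
Note: onError now always renders a structured error payload through prettyObject instead of special-casing the Unauthorized message. prettyObject is the project's own formatting helper; a plausible sketch of such a helper (the body below is an assumption, not the verified implementation):

    // Sketch: render any payload as a fenced JSON block so it reads well in a
    // markdown chat bubble.
    function prettyObject(msg: unknown): string {
      const text = typeof msg === "string" ? msg : JSON.stringify(msg, null, "  ");
      if (text === "{}") {
        return String(msg);
      }
      return ["```json", text, "```"].join("\n");
    }

    // prettyObject({ error: true, message: "Unauthorized" }) -> a ```json block
    // containing both fields.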
@@ -308,7 +313,7 @@ export const useChatStore = create<ChatStore>()(
             botMessage.id ?? messageIndex,
           );
 
-          console.error("[Chat] error ", error);
+          console.error("[Chat] failed ", error);
         },
         onController(controller) {
           // collect controller for stop/retry
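
Note: ChatControllerPool.remove(sessionIndex, botMessage.id ?? messageIndex) drops the AbortController that was registered for this request once it finishes or fails. A minimal sketch of such a pool, assuming it keys controllers by session and message (the shape is an assumption, not the project's exact implementation):

    // Sketch: a registry of AbortControllers so an in-flight streaming request
    // can be stopped or cleaned up later.
    const controllers: Record<string, AbortController> = {};

    const key = (sessionIndex: number, messageId: number) => `${sessionIndex},${messageId}`;

    function addController(sessionIndex: number, messageId: number, controller: AbortController) {
      controllers[key(sessionIndex, messageId)] = controller;
    }

    function stop(sessionIndex: number, messageId: number) {
      controllers[key(sessionIndex, messageId)]?.abort();
    }

    function remove(sessionIndex: number, messageId: number) {
      delete controllers[key(sessionIndex, messageId)];
    }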
@@ -337,7 +342,12 @@ export const useChatStore = create<ChatStore>()(
     getMessagesWithMemory() {
       const session = get().currentSession();
       const modelConfig = session.mask.modelConfig;
-      const messages = session.messages.filter((msg) => !msg.isError);
+
+      // wont send cleared context messages
+      const clearedContextMessages = session.messages.slice(
+        (session.clearContextIndex ?? -1) + 1,
+      );
+      const messages = clearedContextMessages.filter((msg) => !msg.isError);
       const n = messages.length;
 
       const context = session.mask.context.slice();
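
Note: (session.clearContextIndex ?? -1) + 1 turns the optional marker into a slice start: with no marker the slice starts at 0 and keeps everything; with a marker at index k it keeps the messages from k + 1 onward. A small worked example:

    const messages = ["m0", "m1", "m2", "m3"];
    let clearContextIndex: number | undefined;

    // No marker yet: (-1) + 1 === 0, so all four messages are kept.
    const all = messages.slice((clearContextIndex ?? -1) + 1); // ["m0", "m1", "m2", "m3"]

    // Context cleared at index 1: only the newer messages survive.
    clearContextIndex = 1;
    const afterClear = messages.slice((clearContextIndex ?? -1) + 1); // ["m2", "m3"]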
@@ -358,17 +368,17 @@ export const useChatStore = create<ChatStore>()(
         n - modelConfig.historyMessageCount,
       );
       const longTermMemoryMessageIndex = session.lastSummarizeIndex;
-      const oldestIndex = Math.max(
+      const mostRecentIndex = Math.max(
         shortTermMemoryMessageIndex,
         longTermMemoryMessageIndex,
       );
-      const threshold = modelConfig.compressMessageLengthThreshold;
+      const threshold = modelConfig.compressMessageLengthThreshold * 2;
 
       // get recent messages as many as possible
       const reversedRecentMessages = [];
       for (
         let i = n - 1, count = 0;
-        i >= oldestIndex && count < threshold;
+        i >= mostRecentIndex && count < threshold;
         i -= 1
       ) {
         const msg = messages[i];
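
Note: the loop walks backwards from the newest message and stops either at mostRecentIndex (the later of the short-term and long-term memory boundaries) or once the length budget, now doubled to compressMessageLengthThreshold * 2, is spent. A condensed sketch of the same pattern with simplified types (the assumption here is that the budget is counted in characters of content):

    interface Msg {
      content: string;
    }

    // Collect the most recent messages, newest first, until the memory boundary
    // or the rough length budget is reached, then restore chronological order.
    function recentMessages(messages: Msg[], mostRecentIndex: number, threshold: number): Msg[] {
      const reversed: Msg[] = [];
      for (let i = messages.length - 1, count = 0; i >= mostRecentIndex && count < threshold; i -= 1) {
        const msg = messages[i];
        count += msg.content.length;
        reversed.push(msg);
      }
      return reversed.reverse();
    }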
@@ -406,15 +416,15 @@ export const useChatStore = create<ChatStore>()(
       const session = get().currentSession();
 
-      // remove error messages if any
-      const cleanMessages = session.messages.filter((msg) => !msg.isError);
+      const messages = session.messages;
 
       // should summarize topic after chating more than 50 words
       const SUMMARIZE_MIN_LEN = 50;
       if (
         session.topic === DEFAULT_TOPIC &&
-        countMessages(cleanMessages) >= SUMMARIZE_MIN_LEN
+        countMessages(messages) >= SUMMARIZE_MIN_LEN
       ) {
-        const topicMessages = cleanMessages.concat(
+        const topicMessages = messages.concat(
           createMessage({
             role: "user",
             content: Locale.Store.Prompt.Topic,
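
Note: the topic is only auto-summarized once countMessages(messages) reaches SUMMARIZE_MIN_LEN (50). countMessages is the project's own counter; a plausible sketch is simply summing content lengths (an assumption, not the verified implementation):

    interface Msg {
      content: string;
    }

    // Approximate the "size" of a conversation by total content length.
    function countMessages(msgs: Msg[]): number {
      return msgs.reduce((total, msg) => total + msg.content.length, 0);
    }

    // countMessages([{ content: "hello" }, { content: "world" }]) === 10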
@@ -436,9 +446,13 @@ export const useChatStore = create<ChatStore>()(
       }
 
       const modelConfig = session.mask.modelConfig;
-      let toBeSummarizedMsgs = cleanMessages.slice(
+      const summarizeIndex = Math.max(
         session.lastSummarizeIndex,
+        session.clearContextIndex ?? 0,
       );
+      let toBeSummarizedMsgs = messages
+        .filter((msg) => !msg.isError)
+        .slice(summarizeIndex);
 
       const historyMsgLength = countMessages(toBeSummarizedMsgs);
 
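
Note: taking Math.max(lastSummarizeIndex, clearContextIndex ?? 0) means the compression prompt skips both the messages that were already folded into long-term memory and the messages the user cleared, whichever boundary is later. A small worked example:

    function summarizeStart(lastSummarizeIndex: number, clearContextIndex?: number): number {
      // Whichever boundary is later wins.
      return Math.max(lastSummarizeIndex, clearContextIndex ?? 0);
    }

    summarizeStart(4, 7); // 7 -> summarize from index 7 onward
    summarizeStart(4, undefined); // 4 -> fall back to the last summarized position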
@@ -463,7 +477,7 @@ export const useChatStore = create<ChatStore>()(
 
       if (
         historyMsgLength > modelConfig.compressMessageLengthThreshold &&
-        session.mask.modelConfig.sendMemory
+        modelConfig.sendMemory
       ) {
         api.llm.chat({
           messages: toBeSummarizedMsgs.concat({
@@ -68,6 +68,14 @@ export const ALL_MODELS = [
     name: "gpt-4-32k-0314",
     available: ENABLE_GPT4,
   },
+  {
+    name: "gpt-4-mobile",
+    available: ENABLE_GPT4,
+  },
+  {
+    name: "text-davinci-002-render-sha-mobile",
+    available: true,
+  },
   {
     name: "gpt-3.5-turbo",
     available: true,
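
Note: the two added entries ("gpt-4-mobile" and "text-davinci-002-render-sha-mobile") follow the existing { name, available } shape, with the GPT-4 variant gated behind ENABLE_GPT4. A sketch of how such a list is typically consumed (the filter below is an assumed usage, not code from this commit):

    interface ModelEntry {
      name: string;
      available: boolean;
    }

    const ENABLE_GPT4 = true; // in the app this flag comes from configuration

    const ALL_MODELS: ModelEntry[] = [
      { name: "gpt-4-mobile", available: ENABLE_GPT4 },
      { name: "text-davinci-002-render-sha-mobile", available: true },
      { name: "gpt-3.5-turbo", available: true },
    ];

    // Only models marked available would be offered in a model picker.
    const selectable = ALL_MODELS.filter((m) => m.available).map((m) => m.name);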
@@ -10,7 +10,9 @@ export type Mask = {
   id: number;
   avatar: string;
   name: string;
+  hideContext?: boolean;
   context: ChatMessage[];
+  syncGlobalConfig?: boolean;
   modelConfig: ModelConfig;
   lang: Lang;
   builtin: boolean;
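
Note: both new Mask fields are optional, so masks persisted before this change still deserialize. hideContext presumably controls whether the mask's preset context messages are shown/sent, and syncGlobalConfig whether the mask keeps following the global model config. A sketch of how an optional hideContext flag could be applied (an assumption about intent, not this commit's UI code):

    interface ChatMessage {
      role: "user" | "assistant" | "system";
      content: string;
    }

    interface MaskLike {
      hideContext?: boolean;
      context: ChatMessage[];
    }

    // Prepend the mask's preset context only when it is not hidden.
    function renderedMessages(mask: MaskLike, sessionMessages: ChatMessage[]): ChatMessage[] {
      const contextMessages = mask.hideContext ? [] : mask.context;
      return [...contextMessages, ...sessionMessages];
    }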
@@ -39,6 +41,7 @@ export const createEmptyMask = () =>
     avatar: DEFAULT_MASK_AVATAR,
     name: DEFAULT_TOPIC,
     context: [],
+    syncGlobalConfig: true, // use global config as default
     modelConfig: { ...useAppConfig.getState().modelConfig },
     lang: getLang(),
     builtin: false,