This commit is contained in:
GH Action - Upstream Sync
2023-05-21 01:05:17 +00:00
20 changed files with 452 additions and 276 deletions

View File

@@ -5,7 +5,7 @@ import { trimTopic } from "../utils";
import Locale from "../locales";
import { showToast } from "../components/ui-lib";
import { ModelType, useAppConfig } from "./config";
import { ModelType } from "./config";
import { createEmptyMask, Mask } from "./mask";
import { StoreKey } from "../constant";
import { api, RequestMessage } from "../client/api";
@@ -45,6 +45,7 @@ export interface ChatSession {
stat: ChatStat;
lastUpdate: number;
lastSummarizeIndex: number;
clearContextIndex?: number;
mask: Mask;
}
@@ -277,13 +278,17 @@ export const useChatStore = create<ChatStore>()(
config: { ...modelConfig, stream: true },
onUpdate(message) {
botMessage.streaming = true;
botMessage.content = message;
if (message) {
botMessage.content = message;
}
set(() => ({}));
},
onFinish(message) {
botMessage.streaming = false;
botMessage.content = message;
get().onNewMessage(botMessage);
if (message) {
botMessage.content = message;
get().onNewMessage(botMessage);
}
ChatControllerPool.remove(
sessionIndex,
botMessage.id ?? messageIndex,
@@ -292,12 +297,12 @@ export const useChatStore = create<ChatStore>()(
},
onError(error) {
const isAborted = error.message.includes("aborted");
if (
botMessage.content !== Locale.Error.Unauthorized &&
!isAborted
) {
botMessage.content += "\n\n" + prettyObject(error);
}
botMessage.content =
"\n\n" +
prettyObject({
error: true,
message: error.message,
});
botMessage.streaming = false;
userMessage.isError = !isAborted;
botMessage.isError = !isAborted;
@@ -308,7 +313,7 @@ export const useChatStore = create<ChatStore>()(
botMessage.id ?? messageIndex,
);
console.error("[Chat] error ", error);
console.error("[Chat] failed ", error);
},
onController(controller) {
// collect controller for stop/retry
@@ -337,7 +342,12 @@ export const useChatStore = create<ChatStore>()(
getMessagesWithMemory() {
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;
const messages = session.messages.filter((msg) => !msg.isError);
// won't send cleared context messages
const clearedContextMessages = session.messages.slice(
(session.clearContextIndex ?? -1) + 1,
);
const messages = clearedContextMessages.filter((msg) => !msg.isError);
const n = messages.length;
const context = session.mask.context.slice();
@@ -358,17 +368,17 @@ export const useChatStore = create<ChatStore>()(
n - modelConfig.historyMessageCount,
);
const longTermMemoryMessageIndex = session.lastSummarizeIndex;
const oldestIndex = Math.max(
const mostRecentIndex = Math.max(
shortTermMemoryMessageIndex,
longTermMemoryMessageIndex,
);
const threshold = modelConfig.compressMessageLengthThreshold;
const threshold = modelConfig.compressMessageLengthThreshold * 2;
// get recent messages as many as possible
const reversedRecentMessages = [];
for (
let i = n - 1, count = 0;
i >= oldestIndex && count < threshold;
i >= mostRecentIndex && count < threshold;
i -= 1
) {
const msg = messages[i];
@@ -406,15 +416,15 @@ export const useChatStore = create<ChatStore>()(
const session = get().currentSession();
// remove error messages if any
const cleanMessages = session.messages.filter((msg) => !msg.isError);
const messages = session.messages;
// should summarize topic after chatting more than 50 words
const SUMMARIZE_MIN_LEN = 50;
if (
session.topic === DEFAULT_TOPIC &&
countMessages(cleanMessages) >= SUMMARIZE_MIN_LEN
countMessages(messages) >= SUMMARIZE_MIN_LEN
) {
const topicMessages = cleanMessages.concat(
const topicMessages = messages.concat(
createMessage({
role: "user",
content: Locale.Store.Prompt.Topic,
@@ -436,9 +446,13 @@ export const useChatStore = create<ChatStore>()(
}
const modelConfig = session.mask.modelConfig;
let toBeSummarizedMsgs = cleanMessages.slice(
const summarizeIndex = Math.max(
session.lastSummarizeIndex,
session.clearContextIndex ?? 0,
);
let toBeSummarizedMsgs = messages
.filter((msg) => !msg.isError)
.slice(summarizeIndex);
const historyMsgLength = countMessages(toBeSummarizedMsgs);

View File

@@ -73,7 +73,7 @@ export const ALL_MODELS = [
available: ENABLE_GPT4,
},
{
name: "ext-davinci-002-render-sha-mobile",
name: "text-davinci-002-render-sha-mobile",
available: true,
},
{
@@ -106,13 +106,13 @@ export const ALL_MODELS = [
},
] as const;
export type ModelType = typeof ALL_MODELS[number]["name"];
export type ModelType = (typeof ALL_MODELS)[number]["name"];
export function limitNumber(
x: number,
min: number,
max: number,
defaultValue: number
defaultValue: number,
) {
if (typeof x !== "number" || isNaN(x)) {
return defaultValue;
@@ -171,6 +171,6 @@ export const useAppConfig = create<ChatConfigStore>()(
return state;
},
}
)
},
),
);

View File

@@ -10,6 +10,7 @@ export type Mask = {
id: number;
avatar: string;
name: string;
hideContext?: boolean;
context: ChatMessage[];
syncGlobalConfig?: boolean;
modelConfig: ModelConfig;