feat: allow the user to continue a message cut off by the max_tokens limit or a manual abort

skymkmk 2024-09-15 20:25:55 +08:00
parent 9e5d92dc58
commit b47e5cea01
9 changed files with 163 additions and 14 deletions
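
For orientation, a minimal sketch (not part of the diff) of the value space assumed for the new finishedReason field: "aborted" is written client-side when the user stops a response, while the other values mirror the provider's finish_reason.

// Sketch only; the field is a plain string in the diff. "aborted" is written
// locally on a user stop; providers report values such as "stop", "length"
// (max_tokens reached), "tool_calls" or "content_filter".
type FinishedReason = string;

// Stored on the chat message so the UI can decide whether to offer "Continue".
interface ContinuableMessage {
  id: string;
  content: string;
  streaming?: boolean;
  finishedReason?: FinishedReason;
}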


@ -59,7 +59,7 @@ export interface ChatOptions {
config: LLMConfig;
onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string) => void;
onFinish: (message: string, finishedReason?: string) => void;
onError?: (err: Error) => void;
onController?: (controller: AbortController) => void;
onBeforeTool?: (tool: ChatMessageTool) => void;
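
A hedged sketch of how a provider adapter can use the widened callback; the helper name below is illustrative, not part of the diff. Because the second parameter is optional, existing one-argument callers keep compiling.

// Illustrative only: forward the upstream finish reason when it is known.
function reportFinish(
  options: ChatOptions,
  fullText: string,
  finishReason?: string,
) {
  options.onFinish(fullText, finishReason); // omit the reason if unknown
}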


@ -26,6 +26,10 @@ export const ChatControllerPool = {
return Object.values(this.controllers).length > 0;
},
getPendingMessageId() {
return Object.keys(this.controllers).map((v) => v.split(",").at(-1));
},
remove(sessionId: string, messageId: string) {
const key = this.key(sessionId, messageId);
delete this.controllers[key];
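
Usage sketch for the new helper; the `${sessionId},${messageId}` key format is an assumption implied by the split(",") above.

// With controllers keyed as "sessionId,messageId", e.g.
//   { "session-1,msg-42": someAbortController }
// getPendingMessageId() returns the message id of every in-flight request:
const pendingIds = ChatControllerPool.getPendingMessageId(); // ["msg-42"]
// stopAll() callers (see the chat component changes below) use this list to
// tag those messages as "aborted" before cancelling their controllers.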


@ -262,7 +262,7 @@ export class ClaudeApi implements LLMApi {
runTools[index]["function"]["arguments"] +=
chunkJson?.delta?.partial_json;
}
return chunkJson?.delta?.text;
return { delta: chunkJson?.delta?.text };
},
// processToolMessage, include tool_calls message and tool call results
(


@ -163,7 +163,7 @@ export class MoonshotApi implements LLMApi {
runTools[index]["function"]["arguments"] += args;
}
}
return choices[0]?.delta?.content;
return { delta: choices[0]?.delta?.content };
},
// processToolMessage, include tool_calls message and tool call results
(
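
Both the Anthropic and Moonshot parsers now return an object instead of a bare string, but they only fill in delta; finishReason stays undefined for these providers, so as of this commit their replies only become continuable after a manual abort. A small sketch of the new shape with an illustrative chunk:

// Shape produced by parseSSE after this change:
type ParsedChunk = { delta?: string; finishReason?: string };

// Illustrative Anthropic-style content_block_delta event:
const chunkJson = { delta: { text: "more tokens" } };
const parsed: ParsedChunk = { delta: chunkJson.delta.text }; // no finishReason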


@ -266,6 +266,7 @@ export class ChatGPTApi implements LLMApi {
content: string;
tool_calls: ChatMessageTool[];
};
finish_reason?: string;
}>;
const tool_calls = choices[0]?.delta?.tool_calls;
if (tool_calls?.length > 0) {
@ -286,7 +287,10 @@ export class ChatGPTApi implements LLMApi {
runTools[index]["function"]["arguments"] += args;
}
}
return choices[0]?.delta?.content;
return {
delta: choices[0]?.delta?.content,
finishReason: choices[0]?.finish_reason,
};
},
// processToolMessage, include tool_calls message and tool call results
(
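
The OpenAI parser additionally forwards finish_reason, which the chat completions stream sets on its final chunk; "length" marks the max_tokens cut-off. An illustrative final chunk and what the parser would hand back:

// Illustrative final streamed chunk (shape per the OpenAI chat API):
const finalChunk = {
  choices: [{ delta: {}, finish_reason: "length" }],
};
// The code above would return { delta: undefined, finishReason: "length" };
// stream() remembers the reason and passes it to onFinish once the stream ends.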


@ -9,6 +9,7 @@ import React, {
RefObject,
} from "react";
import ContinueIcon from "../icons/continue.svg";
import SendWhiteIcon from "../icons/send-white.svg";
import BrainIcon from "../icons/brain.svg";
import RenameIcon from "../icons/rename.svg";
@ -461,7 +462,16 @@ export function ChatActions(props: {
// stop all responses
const couldStop = ChatControllerPool.hasPending();
const stopAll = () => ChatControllerPool.stopAll();
const stopAll = () => {
const stopList = ChatControllerPool.getPendingMessageId();
ChatControllerPool.stopAll();
chatStore.updateCurrentSession(
(session) =>
(session.messages = session.messages.map((v) =>
stopList.includes(v.id) ? { ...v, finishedReason: "aborted" } : v,
)),
);
};
// switch model
const currentModel = chatStore.currentSession().mask.modelConfig.model;
@ -1045,6 +1055,12 @@ function _Chat() {
// stop response
const onUserStop = (messageId: string) => {
ChatControllerPool.stop(session.id, messageId);
chatStore.updateCurrentSession(
(session) =>
(session.messages = session.messages.map((v) =>
v.id === messageId ? { ...v, finishedReason: "aborted" } : v,
)),
);
};
useEffect(() => {
@ -1171,6 +1187,18 @@ function _Chat() {
inputRef.current?.focus();
};
const onContinue = (messageID: string) => {
chatStore.updateCurrentSession(
(session) =>
(session.messages = session.messages.map((v) =>
v.id === messageID ? { ...v, streaming: true } : v,
)),
);
chatStore
.onContinueBotMessage(messageID)
.finally(() => setIsLoading(false));
};
const onPinMessage = (message: ChatMessage) => {
chatStore.updateCurrentSession((session) =>
session.mask.context.push(message),
@ -1724,6 +1752,15 @@ function _Chat() {
)
}
/>
{["length", "aborted"].includes(
message.finishedReason ?? "",
) ? (
<ChatAction
text={Locale.Chat.Actions.Continue}
icon={<ContinueIcon />}
onClick={() => onContinue(message.id)}
/>
) : null}
</>
)}
</div>
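
Taken together, the chat component changes wire the feature end to end: stopping a response (stopAll or onUserStop) tags the affected messages as "aborted", a message that finished with "length" or "aborted" renders the Continue action, and clicking it flips the message back to streaming and asks the store to resume from that message id. The condition, extracted as a sketch (helper name is illustrative):

const canContinue = (m: { finishedReason?: string }) =>
  ["length", "aborted"].includes(m.finishedReason ?? "");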

app/icons/continue.svg Normal file

@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1726395286651" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="10075" xmlns:xlink="http://www.w3.org/1999/xlink" width="16" height="16"><path d="M427.84 911.648a79.616 79.616 0 0 1-79.68-79.712V191.328a79.68 79.68 0 0 1 122.24-67.36l506.784 320.448a79.296 79.296 0 0 1 37.056 67.328c0 27.488-13.888 52.672-37.12 67.328L470.368 899.328a79.424 79.424 0 0 1-42.528 12.32z m16.32-690.688v581.376l459.808-290.624L444.16 220.96zM65.728 911.648a48 48 0 0 1-48-48v-704a48 48 0 1 1 96 0v704a48 48 0 0 1-48 48z" fill="#040000" p-id="10076"></path></svg>


@ -46,6 +46,7 @@ export type ChatMessage = RequestMessage & {
id: string;
model?: ModelType;
tools?: ChatMessageTool[];
finishedReason?: string;
};
export function createMessage(override: Partial<ChatMessage>): ChatMessage {
@ -373,8 +374,10 @@ export const useChatStore = createPersistStore(
session.messages = session.messages.concat();
});
},
onFinish(message) {
onFinish(message, finishedReason) {
botMessage.streaming = false;
if (finishedReason !== null && finishedReason !== undefined)
botMessage.finishedReason = finishedReason;
if (message) {
botMessage.content = message;
get().onNewMessage(botMessage);
@ -429,6 +432,94 @@ export const useChatStore = createPersistStore(
});
},
async onContinueBotMessage(messageID: string) {
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;
// get recent messages
const recentMessages = get().getMessagesWithMemory(messageID);
const messageIndex = get().currentSession().messages.length + 1;
const botMessage = session.messages.find((v) => v.id === messageID);
if (!botMessage) {
console.error("[Chat] failed to find bot message");
return;
}
const baseContent = botMessage.content;
const api: ClientApi = getClientApi(modelConfig.providerName);
// make request
api.llm.chat({
messages: recentMessages,
config: { ...modelConfig, stream: true },
onUpdate(message) {
botMessage.streaming = true;
if (message) {
botMessage.content = baseContent + message;
}
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
},
onFinish(message, finishedReason) {
botMessage.streaming = false;
if (finishedReason !== null && finishedReason !== undefined)
botMessage.finishedReason = finishedReason;
if (message) {
botMessage.content = baseContent + message;
get().onNewMessage(botMessage);
}
ChatControllerPool.remove(session.id, botMessage.id);
},
onBeforeTool(tool: ChatMessageTool) {
(botMessage.tools = botMessage?.tools || []).push(tool);
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
},
onAfterTool(tool: ChatMessageTool) {
botMessage?.tools?.forEach((t, i, tools) => {
if (tool.id == t.id) {
tools[i] = { ...tool };
}
});
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
},
onError(error) {
const isAborted = error.message?.includes?.("aborted");
botMessage.content +=
"\n\n" +
prettyObject({
error: true,
message: error.message,
});
botMessage.streaming = false;
botMessage.isError = !isAborted;
get().updateCurrentSession((session) => {
session.messages = session.messages.concat();
});
ChatControllerPool.remove(
session.id,
botMessage.id ?? messageIndex,
);
console.error("[Chat] failed ", error);
},
onController(controller) {
// collect controller for stop/retry
ChatControllerPool.addController(
session.id,
botMessage.id ?? messageIndex,
controller,
);
},
});
},
getMemoryPrompt() {
const session = get().currentSession();
@ -441,12 +532,17 @@ export const useChatStore = createPersistStore(
}
},
getMessagesWithMemory() {
getMessagesWithMemory(messageID?: string) {
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;
const clearContextIndex = session.clearContextIndex ?? 0;
const messages = session.messages.slice();
const totalMessageCount = session.messages.length;
let messageIdx = session.messages.findIndex((v) => v.id === messageID);
if (messageIdx === -1) messageIdx = session.messages.length;
const totalMessageCount = Math.min(
messageIdx + 1,
session.messages.length,
);
// in-context prompts
const contextPrompts = session.mask.context.slice();
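
A worked sketch of the new cut-off in getMessagesWithMemory (illustrative data, only the indices matter): when continuing a partial reply, the request context stops at that reply, so the provider's continuation can be appended to the existing content.

// Session history while continuing the truncated assistant reply "a2":
const history = [
  { id: "u1", role: "user" },
  { id: "a1", role: "assistant" },
  { id: "u2", role: "user" },
  { id: "a2", role: "assistant" }, // cut off, finishedReason === "length"
];
let messageIdx = history.findIndex((v) => v.id === "a2"); // 3
if (messageIdx === -1) messageIdx = history.length; // no id given: whole session
const totalMessageCount = Math.min(messageIdx + 1, history.length); // 4
// Messages after index 3 (none here) are excluded from the request context.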


@ -3,7 +3,7 @@ import {
UPLOAD_URL,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import { RequestMessage } from "@/app/client/api";
import { ChatOptions, RequestMessage } from "@/app/client/api";
import Locale from "@/app/locales";
import {
EventStreamContentType,
@ -160,17 +160,21 @@ export function stream(
tools: any[],
funcs: Record<string, Function>,
controller: AbortController,
parseSSE: (text: string, runTools: any[]) => string | undefined,
parseSSE: (
text: string,
runTools: any[],
) => { delta?: string; finishReason?: string },
processToolMessage: (
requestPayload: any,
toolCallMessage: any,
toolCallResult: any[],
) => void,
options: any,
options: ChatOptions,
) {
let responseText = "";
let remainText = "";
let finished = false;
let finishedReason: string | undefined;
let running = false;
let runTools: any[] = [];
@ -254,14 +258,13 @@ export function stream(
chatApi(chatPath, headers, requestPayload, tools); // call fetchEventSource
}, 60);
});
return;
}
if (running) {
return;
}
console.debug("[ChatAPI] end");
finished = true;
options.onFinish(responseText + remainText);
options.onFinish(responseText + remainText, finishedReason);
}
};
@ -333,7 +336,11 @@ export function stream(
try {
const chunk = parseSSE(msg.data, runTools);
if (chunk) {
remainText += chunk;
if (typeof chunk === "string") remainText += chunk;
else {
if (chunk.delta) remainText += chunk.delta;
finishedReason = chunk.finishReason;
}
}
} catch (e) {
console.error("[Request] parse error", text, msg, e);
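
For completeness, a sketch of the two shapes the chunk handler above still accepts: the runtime typeof check keeps older string-returning parsers working even though the new annotation expects the object form.

// Legacy parsers may still return a bare string:
const legacyChunk: string | { delta?: string; finishReason?: string } = "more text";
// Migrated parsers return the object form:
const newChunk = { delta: "more text", finishReason: "stop" };
// Either way the delta is appended to remainText; a finishReason, when present,
// is remembered and later passed along via
// options.onFinish(responseText + remainText, finishedReason).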