Enable setting historyMessageCount for each chat

This commit is contained in:
Reekin 2024-03-17 17:34:09 +08:00
parent cc0eae7153
commit c66c50908c
2 changed files with 43 additions and 4 deletions

View File

@ -97,6 +97,9 @@ import { ExportMessageModal } from "./exporter";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { useAllModels } from "../utils/hooks"; import { useAllModels } from "../utils/hooks";
import { MultimodalContent } from "../client/api"; import { MultimodalContent } from "../client/api";
import { listen } from "@tauri-apps/api/event";
import { InputRange } from "./input-range";
import { config } from "process";
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, { const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
loading: () => <LoadingIcon />, loading: () => <LoadingIcon />,
@ -625,6 +628,31 @@ export function EditMessageModal(props: { onClose: () => void }) {
} }
></input> ></input>
</ListItem> </ListItem>
<ListItem
title={Locale.Settings.HistoryCount.Title}
subTitle={Locale.Settings.HistoryCount.SubTitle}
>
<InputRange
title={(
session.overrideModelConfig?.historyMessageCount ??
session.mask.modelConfig.historyMessageCount
).toString()}
value={
session.overrideModelConfig?.historyMessageCount ??
session.mask.modelConfig.historyMessageCount
}
min="0"
max="64"
step="1"
onChange={(e) =>
chatStore.updateCurrentSession(
(session) =>
((session.overrideModelConfig ??= {}).historyMessageCount =
e.currentTarget.valueAsNumber),
)
}
></InputRange>
</ListItem>
</List> </List>
<ContextPrompts <ContextPrompts
context={messages} context={messages}
@ -1100,11 +1128,13 @@ function _Chat() {
}; };
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, []); }, []);
const handlePaste = useCallback( const handlePaste = useCallback(
async (event: React.ClipboardEvent<HTMLTextAreaElement>) => { async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
const currentModel = chatStore.currentSession().mask.modelConfig.model; const currentModel = chatStore.currentSession().mask.modelConfig.model;
if(!isVisionModel(currentModel)){return;} if (!isVisionModel(currentModel)) {
return;
}
const items = (event.clipboardData || window.clipboardData).items; const items = (event.clipboardData || window.clipboardData).items;
for (const item of items) { for (const item of items) {
if (item.kind === "file" && item.type.startsWith("image/")) { if (item.kind === "file" && item.type.startsWith("image/")) {

View File

@ -57,6 +57,8 @@ export interface ChatSession {
clearContextIndex?: number; clearContextIndex?: number;
mask: Mask; mask: Mask;
overrideModelConfig?: Partial<ModelConfig>;
} }
export const DEFAULT_TOPIC = Locale.Store.DefaultTopic; export const DEFAULT_TOPIC = Locale.Store.DefaultTopic;
@ -466,7 +468,9 @@ export const useChatStore = createPersistStore(
// short term memory // short term memory
const shortTermMemoryStartIndex = Math.max( const shortTermMemoryStartIndex = Math.max(
0, 0,
totalMessageCount - modelConfig.historyMessageCount, totalMessageCount -
(session.overrideModelConfig?.historyMessageCount ??
modelConfig.historyMessageCount),
); );
// lets concat send messages, including 4 parts: // lets concat send messages, including 4 parts:
@ -580,7 +584,12 @@ export const useChatStore = createPersistStore(
if (historyMsgLength > modelConfig?.max_tokens ?? 4000) { if (historyMsgLength > modelConfig?.max_tokens ?? 4000) {
const n = toBeSummarizedMsgs.length; const n = toBeSummarizedMsgs.length;
toBeSummarizedMsgs = toBeSummarizedMsgs.slice( toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
Math.max(0, n - modelConfig.historyMessageCount), Math.max(
0,
n -
(session.overrideModelConfig?.historyMessageCount ??
modelConfig.historyMessageCount),
),
); );
} }