This commit is contained in:
Authored by Reekin on 2025-01-03 14:05:15 +08:00; committed by GitHub
commit 8e11769d70
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 38 additions and 2 deletions

View File

@ -116,6 +116,8 @@ import { ExportMessageModal } from "./exporter";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { useAllModels } from "../utils/hooks"; import { useAllModels } from "../utils/hooks";
import { MultimodalContent } from "../client/api"; import { MultimodalContent } from "../client/api";
import { InputRange } from "./input-range";
import { config } from "process";
import { ClientApi } from "../client/api"; import { ClientApi } from "../client/api";
import { createTTSPlayer } from "../utils/audio"; import { createTTSPlayer } from "../utils/audio";
@ -858,6 +860,31 @@ export function EditMessageModal(props: { onClose: () => void }) {
} }
></input> ></input>
</ListItem> </ListItem>
<ListItem
title={Locale.Settings.HistoryCount.Title}
subTitle={Locale.Settings.HistoryCount.SubTitle}
>
<InputRange
title={(
session.overrideModelConfig?.historyMessageCount ??
session.mask.modelConfig.historyMessageCount
).toString()}
value={
session.overrideModelConfig?.historyMessageCount ??
session.mask.modelConfig.historyMessageCount
}
min="0"
max="64"
step="1"
onChange={(e) =>
chatStore.updateCurrentSession(
(session) =>
((session.overrideModelConfig ??= {}).historyMessageCount =
e.currentTarget.valueAsNumber),
)
}
></InputRange>
</ListItem>
</List> </List>
<ContextPrompts <ContextPrompts
context={messages} context={messages}

View File

@ -83,6 +83,8 @@ export interface ChatSession {
clearContextIndex?: number; clearContextIndex?: number;
mask: Mask; mask: Mask;
overrideModelConfig?: Partial<ModelConfig>;
} }
export const DEFAULT_TOPIC = Locale.Store.DefaultTopic; export const DEFAULT_TOPIC = Locale.Store.DefaultTopic;
@ -542,7 +544,9 @@ export const useChatStore = createPersistStore(
// short term memory // short term memory
const shortTermMemoryStartIndex = Math.max( const shortTermMemoryStartIndex = Math.max(
0, 0,
totalMessageCount - modelConfig.historyMessageCount, totalMessageCount -
(session.overrideModelConfig?.historyMessageCount ??
modelConfig.historyMessageCount),
); );
// lets concat send messages, including 4 parts: // lets concat send messages, including 4 parts:
@ -679,7 +683,12 @@ export const useChatStore = createPersistStore(
if (historyMsgLength > (modelConfig?.max_tokens || 4000)) { if (historyMsgLength > (modelConfig?.max_tokens || 4000)) {
const n = toBeSummarizedMsgs.length; const n = toBeSummarizedMsgs.length;
toBeSummarizedMsgs = toBeSummarizedMsgs.slice( toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
Math.max(0, n - modelConfig.historyMessageCount), Math.max(
0,
n -
(session.overrideModelConfig?.historyMessageCount ??
modelConfig.historyMessageCount),
),
); );
} }
const memoryPrompt = get().getMemoryPrompt(); const memoryPrompt = get().getMemoryPrompt();