Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-10-01 23:56:39 +08:00)

Commit 8e11769d70: Merge aa23458c6a into 0af04e0f2f
@@ -116,6 +116,8 @@ import { ExportMessageModal } from "./exporter";
 import { getClientConfig } from "../config/client";
 import { useAllModels } from "../utils/hooks";
 import { MultimodalContent } from "../client/api";
+import { InputRange } from "./input-range";
+import { config } from "process";

 import { ClientApi } from "../client/api";
 import { createTTSPlayer } from "../utils/audio";
@@ -858,6 +860,31 @@ export function EditMessageModal(props: { onClose: () => void }) {
             }
           ></input>
         </ListItem>
+        <ListItem
+          title={Locale.Settings.HistoryCount.Title}
+          subTitle={Locale.Settings.HistoryCount.SubTitle}
+        >
+          <InputRange
+            title={(
+              session.overrideModelConfig?.historyMessageCount ??
+              session.mask.modelConfig.historyMessageCount
+            ).toString()}
+            value={
+              session.overrideModelConfig?.historyMessageCount ??
+              session.mask.modelConfig.historyMessageCount
+            }
+            min="0"
+            max="64"
+            step="1"
+            onChange={(e) =>
+              chatStore.updateCurrentSession(
+                (session) =>
+                  ((session.overrideModelConfig ??= {}).historyMessageCount =
+                    e.currentTarget.valueAsNumber),
+              )
+            }
+          ></InputRange>
+        </ListItem>
       </List>
       <ContextPrompts
         context={messages}
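In the slider's onChange handler, `??=` creates the per-session override object the first time it is needed, and the chained assignment then writes the slider value onto it. A minimal sketch of that write path, assuming a simplified `SessionLike` shape (the type and function names here are illustrative, not part of the codebase):

    // Sketch of the onChange logic; SessionLike is a simplified stand-in
    // for the real ChatSession, used only for illustration.
    interface SessionLike {
      overrideModelConfig?: { historyMessageCount?: number };
    }

    function setHistoryCountOverride(session: SessionLike, count: number): void {
      // `??=` lazily creates the override object, then the chained
      // assignment stores the new slider value on it.
      (session.overrideModelConfig ??= {}).historyMessageCount = count;
    }

    const session: SessionLike = {};
    setHistoryCountOverride(session, 10);
    console.log(session.overrideModelConfig); // { historyMessageCount: 10 }

Because `overrideModelConfig` stays undefined until the user moves the slider, sessions that never touch it keep their previous behavior.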
@@ -83,6 +83,8 @@ export interface ChatSession {
   clearContextIndex?: number;

   mask: Mask;
+
+  overrideModelConfig?: Partial<ModelConfig>;
 }

 export const DEFAULT_TOPIC = Locale.Store.DefaultTopic;
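Since the new field is an optional `Partial<ModelConfig>`, only the values a user actually overrides are stored on the session; every other setting keeps resolving through the mask. A hedged read-side sketch of that fallback, with `ModelConfigSketch` and `SessionSketch` as simplified, illustrative types:

    // Illustrative read side: the optional Partial override holds only the
    // fields the user changed; everything else falls back to the mask.
    type ModelConfigSketch = { historyMessageCount: number; temperature: number };

    interface SessionSketch {
      mask: { modelConfig: ModelConfigSketch };
      overrideModelConfig?: Partial<ModelConfigSketch>;
    }

    function effectiveHistoryCount(session: SessionSketch): number {
      return (
        session.overrideModelConfig?.historyMessageCount ??
        session.mask.modelConfig.historyMessageCount
      );
    }

    const session: SessionSketch = {
      mask: { modelConfig: { historyMessageCount: 4, temperature: 0.5 } },
    };
    console.log(effectiveHistoryCount(session)); // 4 (no override yet)
    session.overrideModelConfig = { historyMessageCount: 32 };
    console.log(effectiveHistoryCount(session)); // 32 (override wins)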
@@ -542,7 +544,9 @@ export const useChatStore = createPersistStore(
       // short term memory
       const shortTermMemoryStartIndex = Math.max(
         0,
-        totalMessageCount - modelConfig.historyMessageCount,
+        totalMessageCount -
+          (session.overrideModelConfig?.historyMessageCount ??
+            modelConfig.historyMessageCount),
       );

       // lets concat send messages, including 4 parts:
@@ -679,7 +683,12 @@ export const useChatStore = createPersistStore(
       if (historyMsgLength > (modelConfig?.max_tokens || 4000)) {
         const n = toBeSummarizedMsgs.length;
         toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
-          Math.max(0, n - modelConfig.historyMessageCount),
+          Math.max(
+            0,
+            n -
+              (session.overrideModelConfig?.historyMessageCount ??
+                modelConfig.historyMessageCount),
+          ),
         );
       }
       const memoryPrompt = get().getMemoryPrompt();
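Both store hunks above compute the same window: keep only the most recent N messages, where N is the session's `overrideModelConfig?.historyMessageCount` when present and `modelConfig.historyMessageCount` otherwise. A small self-contained sketch of that clamped slice (the `lastN` helper is an illustration, not a function in the store):

    // Keep only the last `count` messages, clamping the start index at 0 so
    // a count larger than the history returns everything.
    function lastN<T>(messages: T[], count: number): T[] {
      const start = Math.max(0, messages.length - count);
      return messages.slice(start);
    }

    console.log(lastN(["a", "b", "c", "d"], 2)); // ["c", "d"]
    console.log(lastN(["a", "b"], 64)); // ["a", "b"] (count exceeds history)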