mirror of
https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
synced 2025-10-01 15:46:39 +08:00
Enable setting historyMessageCount for each chat
This commit is contained in:
parent
cc0eae7153
commit
c66c50908c
@ -97,6 +97,9 @@ import { ExportMessageModal } from "./exporter";
|
||||
import { getClientConfig } from "../config/client";
|
||||
import { useAllModels } from "../utils/hooks";
|
||||
import { MultimodalContent } from "../client/api";
|
||||
import { listen } from "@tauri-apps/api/event";
|
||||
import { InputRange } from "./input-range";
|
||||
import { config } from "process";
|
||||
|
||||
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
|
||||
loading: () => <LoadingIcon />,
|
||||
@ -625,6 +628,31 @@ export function EditMessageModal(props: { onClose: () => void }) {
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.HistoryCount.Title}
|
||||
subTitle={Locale.Settings.HistoryCount.SubTitle}
|
||||
>
|
||||
<InputRange
|
||||
title={(
|
||||
session.overrideModelConfig?.historyMessageCount ??
|
||||
session.mask.modelConfig.historyMessageCount
|
||||
).toString()}
|
||||
value={
|
||||
session.overrideModelConfig?.historyMessageCount ??
|
||||
session.mask.modelConfig.historyMessageCount
|
||||
}
|
||||
min="0"
|
||||
max="64"
|
||||
step="1"
|
||||
onChange={(e) =>
|
||||
chatStore.updateCurrentSession(
|
||||
(session) =>
|
||||
((session.overrideModelConfig ??= {}).historyMessageCount =
|
||||
e.currentTarget.valueAsNumber),
|
||||
)
|
||||
}
|
||||
></InputRange>
|
||||
</ListItem>
|
||||
</List>
|
||||
<ContextPrompts
|
||||
context={messages}
|
||||
@ -1100,11 +1128,13 @@ function _Chat() {
|
||||
};
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
|
||||
const handlePaste = useCallback(
|
||||
async (event: React.ClipboardEvent<HTMLTextAreaElement>) => {
|
||||
const currentModel = chatStore.currentSession().mask.modelConfig.model;
|
||||
if(!isVisionModel(currentModel)){return;}
|
||||
if (!isVisionModel(currentModel)) {
|
||||
return;
|
||||
}
|
||||
const items = (event.clipboardData || window.clipboardData).items;
|
||||
for (const item of items) {
|
||||
if (item.kind === "file" && item.type.startsWith("image/")) {
|
||||
|
@ -57,6 +57,8 @@ export interface ChatSession {
|
||||
clearContextIndex?: number;
|
||||
|
||||
mask: Mask;
|
||||
|
||||
overrideModelConfig?: Partial<ModelConfig>;
|
||||
}
|
||||
|
||||
export const DEFAULT_TOPIC = Locale.Store.DefaultTopic;
|
||||
@ -466,7 +468,9 @@ export const useChatStore = createPersistStore(
|
||||
// short term memory
|
||||
const shortTermMemoryStartIndex = Math.max(
|
||||
0,
|
||||
totalMessageCount - modelConfig.historyMessageCount,
|
||||
totalMessageCount -
|
||||
(session.overrideModelConfig?.historyMessageCount ??
|
||||
modelConfig.historyMessageCount),
|
||||
);
|
||||
|
||||
// lets concat send messages, including 4 parts:
|
||||
@ -580,7 +584,12 @@ export const useChatStore = createPersistStore(
|
||||
if (historyMsgLength > modelConfig?.max_tokens ?? 4000) {
|
||||
const n = toBeSummarizedMsgs.length;
|
||||
toBeSummarizedMsgs = toBeSummarizedMsgs.slice(
|
||||
Math.max(0, n - modelConfig.historyMessageCount),
|
||||
Math.max(
|
||||
0,
|
||||
n -
|
||||
(session.overrideModelConfig?.historyMessageCount ??
|
||||
modelConfig.historyMessageCount),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user