fix: remove the condition that uses max_token to reduce the context
@@ -494,13 +494,13 @@ export const useChatStore = createPersistStore(
           : shortTermMemoryStartIndex;
         // and if user has cleared history messages, we should exclude the memory too.
         const contextStartIndex = Math.max(clearContextIndex, memoryStartIndex);
-        const maxTokenThreshold = modelConfig.max_tokens;
+        // const maxTokenThreshold = modelConfig.max_tokens;
 
         // get recent messages as much as possible
         const reversedRecentMessages = [];
         for (
           let i = totalMessageCount - 1, tokenCount = 0;
-          i >= contextStartIndex && tokenCount < maxTokenThreshold;
+          i >= contextStartIndex ;//&& tokenCount < maxTokenThreshold;
           i -= 1
         ) {
           const msg = messages[i];
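For illustration, here is a minimal standalone sketch of the loop this hunk changes. The ChatMessage shape, the collectRecentMessages name, and the estimateTokenLength helper are simplified stand-ins for this sketch, not the repository's actual types or helpers. Before the commit, the backwards walk over the history stopped once the estimated token count reached modelConfig.max_tokens; after the commit, it stops only at contextStartIndex (the greater of clearContextIndex and memoryStartIndex), as shown by the optional threshold below.

// Minimal sketch, assuming simplified types; not the project's implementation.
interface ChatMessage {
  role: string;
  content: string;
}

// Rough token estimator used only for this sketch.
function estimateTokenLength(text: string): number {
  return Math.ceil(text.length / 4);
}

// Walks the history from newest to oldest, stopping at contextStartIndex.
// Passing maxTokenThreshold reproduces the pre-commit behavior; omitting it
// reproduces the post-commit behavior, where only contextStartIndex limits
// how far back the context reaches.
function collectRecentMessages(
  messages: ChatMessage[],
  contextStartIndex: number,
  maxTokenThreshold?: number,
): ChatMessage[] {
  const reversedRecentMessages: ChatMessage[] = [];
  let tokenCount = 0;
  for (let i = messages.length - 1; i >= contextStartIndex; i -= 1) {
    if (maxTokenThreshold !== undefined && tokenCount >= maxTokenThreshold) {
      break; // old behavior: stop once the token budget is used up
    }
    const msg = messages[i];
    tokenCount += estimateTokenLength(msg.content);
    reversedRecentMessages.push(msg);
  }
  // The loop collects newest-first; reverse to restore chronological order.
  return reversedRecentMessages.reverse();
}

With the commit applied, every message from contextStartIndex to the end of the history is included, so the amount of context sent is bounded by the clear-context marker and the memory start index rather than by the max_tokens setting.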