Compare commits

...

5 Commits

Author SHA1 Message Date
Q.A.zh
b39cb64440
Merge 3939ff47ef into d958441d7f 2025-07-17 15:09:19 +08:00
RiverRay
d958441d7f
Merge pull request #6570 from zhang-zhonggui/main
Some checks failed
Run Tests / test (push) Has been cancelled
Add gemini-2.5-pro tag; gemini-2.5-pro can now be used for free
2025-07-17 11:30:34 +08:00
zzg
1600b96454
Update constant.ts
添加gemini-2.5-pro,可以免费使用Google Gemini
2025-07-17 08:07:15 +08:00
zzg
47047a60b2
Update constant.ts
添加免费的Gemini 2.5 Pro进行使用
2025-07-17 07:48:12 +08:00
Q.A.zh
3939ff47ef fix: remove the condition that uses max_token to reduce the context 2024-08-14 02:33:39 +00:00
2 changed files with 3 additions and 2 deletions

View File

@@ -555,6 +555,7 @@ const googleModels = [
"gemini-2.0-pro-exp",
"gemini-2.0-pro-exp-02-05",
"gemini-2.5-pro-preview-06-05",
"gemini-2.5-pro"
];
const anthropicModels = [

View File

@@ -614,13 +614,13 @@ export const useChatStore = createPersistStore(
: shortTermMemoryStartIndex;
// and if user has cleared history messages, we should exclude the memory too.
const contextStartIndex = Math.max(clearContextIndex, memoryStartIndex);
const maxTokenThreshold = modelConfig.max_tokens;
// const maxTokenThreshold = modelConfig.max_tokens;
// get recent messages as much as possible
const reversedRecentMessages = [];
for (
let i = totalMessageCount - 1, tokenCount = 0;
i >= contextStartIndex && tokenCount < maxTokenThreshold;
i >= contextStartIndex ;//&& tokenCount < maxTokenThreshold;
i -= 1
) {
const msg = messages[i];