change max tokens

This commit is contained in:
AC
2025-06-11 15:25:38 +08:00
parent f682b1f4de
commit cd0366392a
5 changed files with 5 additions and 5 deletions

View File

@@ -31,7 +31,7 @@ export class BedrockApi implements LLMApi {
messages,
temperature: modelConfig.temperature,
stream: !!modelConfig.stream,
-        max_tokens: (modelConfig as any).max_tokens || 4096, // Cast to access max_tokens from ModelConfig
+        max_tokens: (modelConfig as any).max_tokens || 8000, // Cast to access max_tokens from ModelConfig
}),
signal: controller.signal,
headers: getHeaders(), // getHeaders should handle Bedrock (no auth needed)

View File

@@ -244,7 +244,7 @@ export class ChatGPTApi implements LLMApi {
// add max_tokens to vision model
if (visionModel) {
-      requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
+      requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 8000);
}
}