fix: prevent include_reasoning from propagating to non-OpenRouter endpoints

commit 0534b21acd
parent fd9f074e30
Author: richardclim
Date: 2025-02-19 03:45:18 -05:00

2 changed files with 2 additions and 1 deletion

.gitignore

@@ -49,3 +49,4 @@ masks.json
 # mcp config
 app/mcp/mcp_config.json
+.aider*

app/client/platforms/openai.ts

@@ -234,7 +234,7 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: !isO1OrO3 ? modelConfig.presence_penalty : 0,
       frequency_penalty: !isO1OrO3 ? modelConfig.frequency_penalty : 0,
       top_p: !isO1OrO3 ? modelConfig.top_p : 1,
-      include_reasoning: true,
+      ...(modelConfig.providerName !== ServiceProvider.Azure && {include_reasoning: true}),
       // max_tokens: Math.max(modelConfig.max_tokens, 1024),
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
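
For reference, the change relies on the conditional object-spread idiom: spreading a falsy value adds no keys, so `include_reasoning` is omitted from the payload entirely rather than being sent as `undefined`. Below is a minimal, self-contained TypeScript sketch of that pattern; the `ProviderName`, `ModelConfig`, and `buildPayload` names and shapes here are illustrative stand-ins, not the project's real types.

```ts
// Illustrative types only; the real project config differs.
type ProviderName = "OpenAI" | "Azure" | "OpenRouter";

interface ModelConfig {
  providerName: ProviderName;
  top_p: number;
}

function buildPayload(modelConfig: ModelConfig) {
  return {
    top_p: modelConfig.top_p,
    // When the condition is false, `false` is spread, which contributes
    // no properties, so the key never reaches non-OpenRouter endpoints.
    ...(modelConfig.providerName !== "Azure" && { include_reasoning: true }),
  };
}

console.log(buildPayload({ providerName: "Azure", top_p: 1 }));
// => { top_p: 1 }  (no include_reasoning key at all)
console.log(buildPayload({ providerName: "OpenRouter", top_p: 1 }));
// => { top_p: 1, include_reasoning: true }
```

Omitting the key, rather than setting it to `undefined` or `false`, avoids sending an unrecognized parameter that stricter endpoints may reject.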