From bb5b93e5620a07e55a57145bf34717f349261318 Mon Sep 17 00:00:00 2001 From: richardclim <61879224+richardclim@users.noreply.github.com> Date: Sun, 16 Feb 2025 03:57:50 -0500 Subject: [PATCH] Update openai.ts: allow thinking output from OpenRouter. --- app/client/platforms/openai.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts index c6f3fc425..10bb5f48f 100644 --- a/app/client/platforms/openai.ts +++ b/app/client/platforms/openai.ts @@ -67,6 +67,7 @@ export interface RequestPayload { top_p: number; max_tokens?: number; max_completion_tokens?: number; + include_reasoning?: boolean; } export interface DalleRequestPayload { @@ -233,6 +234,7 @@ export class ChatGPTApi implements LLMApi { presence_penalty: !isO1OrO3 ? modelConfig.presence_penalty : 0, frequency_penalty: !isO1OrO3 ? modelConfig.frequency_penalty : 0, top_p: !isO1OrO3 ? modelConfig.top_p : 1, + include_reasoning: true, // max_tokens: Math.max(modelConfig.max_tokens, 1024), // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore. }; @@ -335,7 +337,7 @@ export class ChatGPTApi implements LLMApi { } } - const reasoning = choices[0]?.delta?.reasoning_content; + const reasoning = choices[0]?.delta?.reasoning || choices[0]?.delta?.reasoning_content; const content = choices[0]?.delta?.content; // Skip if both content and reasoning_content are empty or null