Update openai.ts

Allow thinking (reasoning) output from OpenRouter.
richardclim 2025-02-16 03:57:50 -05:00
parent 57be29375a
commit bb5b93e562


@@ -67,6 +67,7 @@ export interface RequestPayload {
   top_p: number;
   max_tokens?: number;
   max_completion_tokens?: number;
+  include_reasoning?: boolean;
 }
 
 export interface DalleRequestPayload {
@@ -233,6 +234,7 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: !isO1OrO3 ? modelConfig.presence_penalty : 0,
       frequency_penalty: !isO1OrO3 ? modelConfig.frequency_penalty : 0,
       top_p: !isO1OrO3 ? modelConfig.top_p : 1,
+      include_reasoning: true,
       // max_tokens: Math.max(modelConfig.max_tokens, 1024),
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
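For context, here is a minimal, self-contained sketch of what the resulting request looks like when the client talks to OpenRouter. The endpoint URL is OpenRouter's public chat completions route; the model name, prompt, and auth handling are illustrative placeholders, and only `include_reasoning` comes from this commit:

```ts
// Illustrative only: a standalone request mirroring the payload built above.
// Model, prompt, and API-key handling are placeholders, not NextChat code.
const payload = {
  messages: [{ role: "user", content: "Why is the sky blue?" }],
  stream: true,
  model: "deepseek/deepseek-r1", // any reasoning-capable model on OpenRouter
  temperature: 0.7,
  presence_penalty: 0,
  frequency_penalty: 0,
  top_p: 1,
  include_reasoning: true, // ask OpenRouter to emit reasoning deltas
};

const res = await fetch("https://openrouter.ai/api/v1/chat/completions", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${process.env.OPENROUTER_API_KEY}`,
  },
  body: JSON.stringify(payload),
});
```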
@@ -335,7 +337,7 @@
           }
         }
-        const reasoning = choices[0]?.delta?.reasoning_content;
+        const reasoning = choices[0]?.delta?.reasoning || choices[0]?.delta?.reasoning_content;
         const content = choices[0]?.delta?.content;
         // Skip if both content and reasoning_content are empty or null
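The hunk above is the core of the change: OpenRouter reports reasoning text under `delta.reasoning`, while DeepSeek-style backends use `delta.reasoning_content`, so the fallback chain covers both. A minimal sketch of consuming a streamed delta with that fallback follows; the `StreamChunk` type and handler names are hypothetical, not part of the actual file:

```ts
// Hypothetical types and handler illustrating the fallback in the hunk above.
interface StreamDelta {
  content?: string | null;
  reasoning?: string | null; // OpenRouter field
  reasoning_content?: string | null; // DeepSeek-style field
}

interface StreamChunk {
  choices: { delta?: StreamDelta }[];
}

function handleChunk(
  chunk: StreamChunk,
  onReasoning: (text: string) => void,
  onContent: (text: string) => void,
): void {
  const reasoning =
    chunk.choices[0]?.delta?.reasoning ||
    chunk.choices[0]?.delta?.reasoning_content;
  const content = chunk.choices[0]?.delta?.content;

  // Skip if both content and reasoning are empty or null
  if (!reasoning && !content) return;

  if (reasoning) onReasoning(reasoning);
  if (content) onContent(content);
}
```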