From 9117a10b472aa89b5ea11a154f851df761112f6d Mon Sep 17 00:00:00 2001 From: AndrewS Date: Mon, 3 Feb 2025 21:53:55 +0100 Subject: [PATCH] fix: disable the stream request parameter for the exact "o1" model --- app/client/platforms/openai.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts index c35ad1040..254e0d6f6 100644 --- a/app/client/platforms/openai.ts +++ b/app/client/platforms/openai.ts @@ -196,6 +196,7 @@ export class ChatGPTApi implements LLMApi { const isDalle3 = _isDalle3(options.config.model); const isO1 = options.config.model.startsWith("o1"); + const isO1Exact = options.config.model === "o1"; const isO3 = options.config.model.startsWith("o3"); if (isDalle3) { const prompt = getMessageTextContent( @@ -225,7 +226,7 @@ export class ChatGPTApi implements LLMApi { // O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet. requestPayload = { messages, - stream: options.config.stream, + stream: isO1Exact ? false : options.config.stream, model: modelConfig.model, temperature: !(isO1 || isO3)? modelConfig.temperature : 1, presence_penalty: !(isO1 || isO3) ? modelConfig.presence_penalty : 0,