From 4762a08e4e9fa5b845f17934ad352eb3adf284b2 Mon Sep 17 00:00:00 2001
From: DirkSchlossmacher <62424946+DirkSchlossmacher@users.noreply.github.com>
Date: Tue, 6 Aug 2024 22:14:56 +0200
Subject: [PATCH] Update openai.ts to use finalModel

---
 app/client/platforms/openai.ts | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 993d75fec..4f4a4e125 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -201,7 +201,7 @@ export class ChatGPTApi implements LLMApi {
         size: options.config?.size ?? "1024x1024",
       };
     } else {
-      const visionModel = isVisionModel(options.config.model);
+      const visionModel = isVisionModel(finalModel);
       const messages: ChatOptions["messages"] = [];
       for (const v of options.messages) {
         const content = visionModel
@@ -213,7 +213,7 @@
       requestPayload = {
         messages,
         stream: options.config.stream,
-        model: modelConfig.model,
+        model: finalModel,
         temperature: modelConfig.temperature,
         presence_penalty: modelConfig.presence_penalty,
         frequency_penalty: modelConfig.frequency_penalty,
@@ -223,14 +223,14 @@
       };
 
       // add max_tokens to vision model
-      if (visionModel && modelConfig.model.includes("preview")) {
-        requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
+      if (visionModel && finalModel.includes("preview")) {
+        requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
       }
     }
 
     console.log("[Request] openai payload: ", requestPayload);
 
-    const modelIdentifier = modelConfig.model;
+    const modelIdentifier = finalModel;
     console.log("API Call: session or email is not available - model: ", modelIdentifier);
 
     /*
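
Note: the sketch below is not part of the patch. It is a minimal TypeScript illustration of the idea the diff applies, resolving the model name once as finalModel and reusing it for the vision check, the request payload, and logging. The ModelConfig shape, the resolveFinalModel helper, and the isVisionModel stand-in are assumptions for illustration, not the project's actual code.

// Minimal illustration (assumption: simplified, not the project's actual code).
// The idea from the patch: compute the model name once as finalModel and use it
// everywhere a model identifier is needed.

interface ModelConfig {
  model: string;
  temperature: number;
  presence_penalty: number;
  frequency_penalty: number;
  max_tokens: number;
}

// Hypothetical helper: stands in for however finalModel is resolved upstream.
function resolveFinalModel(modelConfig: ModelConfig): string {
  return modelConfig.model;
}

// Simplified stand-in for the project's isVisionModel() check.
function isVisionModel(model: string): boolean {
  return model.includes("vision") || model.includes("gpt-4o");
}

function buildChatPayload(
  modelConfig: ModelConfig,
  messages: unknown[],
  stream: boolean,
): Record<string, unknown> {
  const finalModel = resolveFinalModel(modelConfig);
  const visionModel = isVisionModel(finalModel);

  const requestPayload: Record<string, unknown> = {
    messages,
    stream,
    model: finalModel, // the resolved name, not modelConfig.model
    temperature: modelConfig.temperature,
    presence_penalty: modelConfig.presence_penalty,
    frequency_penalty: modelConfig.frequency_penalty,
  };

  // Vision preview models get an explicit max_tokens. The numeric limit comes
  // from modelConfig; finalModel is only a string, so it has no max_tokens.
  if (visionModel && finalModel.includes("preview")) {
    requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
  }

  console.log("[Request] openai payload: ", requestPayload);
  return requestPayload;
}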