diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 6054c7a47..b6cf6f9d5 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -1,4 +1,4 @@
-import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
+import { Google, REQUEST_TIMEOUT_MS, ApiPath } from "@/app/constant";
 import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { getClientConfig } from "@/app/config/client";
@@ -110,7 +110,7 @@ export class GeminiProApi implements LLMApi {
 
     const accessStore = useAccessStore.getState();
 
-    let baseUrl = "";
+    let baseUrl: string = ApiPath.Google;
 
     if (accessStore.useCustomConfig) {
       baseUrl = accessStore.googleUrl;