fix: gemini

Hk-Gosuto
2024-02-09 12:58:54 +08:00
parent 0ed7402494
commit fee45f7b23
4 changed files with 16 additions and 31 deletions


@@ -13,8 +13,7 @@ import {
LLMUsage,
} from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
export class GeminiProApi implements LLMApi {
toolAgentChat(options: AgentChatOptions): Promise<void> {
throw new Error("Method not implemented.");
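
For orientation, a rough sketch of the client-side contract GeminiProApi implements, assuming the shapes exported from ../api resemble the ones used below (field names here are illustrative, not copied from the real module):

// Hypothetical, simplified shapes; the real ChatOptions/AgentChatOptions live in ../api.
interface ChatOptions {
  messages: { role: string; content: string }[];
  config: { model: string; stream?: boolean };
  onUpdate?: (message: string, chunk: string) => void;
  onFinish: (message: string) => void;
  onError?: (err: Error) => void;
  onController?: (controller: AbortController) => void;
}
interface AgentChatOptions extends ChatOptions {} // agent/tool extras omitted
abstract class LLMApi {
  abstract chat(options: ChatOptions): Promise<void>;
  abstract toolAgentChat(options: AgentChatOptions): Promise<void>;
}

GeminiProApi only fills in chat(); toolAgentChat() stays a stub that throws, as shown above.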
@@ -29,7 +28,7 @@ export class GeminiProApi implements LLMApi {
);
}
async chat(options: ChatOptions): Promise<void> {
- // const apiClient = this;
+ const apiClient = this;
const messages = options.messages.map((v) => ({
role: v.role.replace("assistant", "model").replace("system", "user"),
parts: [{ text: v.content }],
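
The mapping above flattens the app's chat roles onto the two roles Gemini's generateContent API accepts ("user" and "model"). A standalone sketch of the same transformation, with the message shape assumed for illustration:

// Minimal sketch of the role/parts mapping; RequestMessage is an assumed shape,
// not the store's real type.
type RequestMessage = { role: "system" | "user" | "assistant"; content: string };

function toGeminiContents(messages: RequestMessage[]) {
  return messages.map((v) => ({
    // Gemini only knows "user" and "model": assistant -> model, system -> user.
    role: v.role.replace("assistant", "model").replace("system", "user"),
    parts: [{ text: v.content }],
  }));
}

// [{ role: "assistant", content: "hi" }] becomes
// [{ role: "model", parts: [{ text: "hi" }] }].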
@@ -87,27 +86,13 @@ export class GeminiProApi implements LLMApi {
],
};
const accessStore = useAccessStore.getState();
- let baseUrl = accessStore.googleBaseUrl;
- const isApp = !!getClientConfig()?.isApp;
console.log("[Request] google payload: ", requestPayload);
- let shouldStream = !!options.config.stream;
+ const shouldStream = !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
try {
- let chatPath = this.path(Google.ChatPath);
- // let baseUrl = accessStore.googleUrl;
- if (!baseUrl) {
- baseUrl = isApp
- ? DEFAULT_API_HOST + "/api/proxy/google/" + Google.ChatPath
- : chatPath;
- }
- if (isApp) {
- baseUrl += `?key=${accessStore.googleApiKey}`;
- }
+ const chatPath = this.path(Google.ChatPath);
const chatPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
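
The deleted block above resolved a base URL by hand (app build vs. web proxy) and appended the API key as a query parameter; the fix routes every request through this.path(Google.ChatPath) instead. A hedged sketch of the resulting path construction; the body of path() and the value of Google.ChatPath are not shown in this diff, so both are assumptions:

// Assumed values, for illustration only.
const Google = { ChatPath: "v1beta/models/gemini-pro:generateContent" };

function path(p: string): string {
  // Assumption: requests go through the app's own /api/google proxy,
  // which injects the API key server-side.
  return "/api/google/" + p;
}

const chatPath = path(Google.ChatPath);
// -> "/api/google/v1beta/models/gemini-pro:generateContent"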
@@ -123,6 +108,10 @@ export class GeminiProApi implements LLMApi {
if (shouldStream) {
let responseText = "";
let remainText = "";
+ let streamChatPath = chatPath.replace(
+ "generateContent",
+ "streamGenerateContent",
+ );
let finished = false;
let existingTexts: string[] = [];
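
The added streamChatPath just renames the endpoint, since Gemini exposes streaming as a sibling method of generateContent on the same model path. A self-contained sketch (the example path value is illustrative):

const chatPath = "/api/google/v1beta/models/gemini-pro:generateContent";
const streamChatPath = chatPath.replace(
  "generateContent",
  "streamGenerateContent",
);
// -> "/api/google/v1beta/models/gemini-pro:streamGenerateContent"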
@@ -152,11 +141,7 @@ export class GeminiProApi implements LLMApi {
// start animaion
animateResponseText();
- fetch(
- baseUrl.replace("generateContent", "streamGenerateContent"),
- chatPayload,
- )
+ fetch(streamChatPath, chatPayload)
.then((response) => {
const reader = response?.body?.getReader();
const decoder = new TextDecoder();
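
fetch(streamChatPath, chatPayload) then drains the response body with a reader and a TextDecoder. A minimal standalone version of that read loop (the fork's responseText/remainText animation and partial-JSON handling are omitted; onChunk is a hypothetical callback):

// Sketch: drain a streamed fetch response into text chunks.
async function readStream(
  response: Response,
  onChunk: (text: string) => void,
): Promise<void> {
  const reader = response.body?.getReader();
  if (!reader) return;
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // stream: true keeps multi-byte characters split across chunks intact.
    onChunk(decoder.decode(value, { stream: true }));
  }
}

// Usage, under the same assumptions:
// fetch(streamChatPath, chatPayload).then((res) => readStream(res, (t) => console.log(t)));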
@@ -207,9 +192,11 @@ export class GeminiProApi implements LLMApi {
console.error("Error:", error);
});
} else {
- const res = await fetch(baseUrl, chatPayload);
+ const res = await fetch(chatPath, chatPayload);
clearTimeout(requestTimeoutId);
const resJson = await res.json();
if (resJson?.promptFeedback?.blockReason) {
// being blocked
options.onError?.(