Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-10-01 15:46:39 +08:00)
using tauriFetch for fetchEventSource
This commit is contained in:
parent 7f857284bb
commit cc33b7b6bf
@@ -22,7 +22,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 export interface OpenAIListModelResponse {
   object: string;
@@ -178,6 +178,7 @@ export class QwenApi implements LLMApi {
     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
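The same two-line change appears in each of the provider clients that follow (ErnieApi, DoubaoApi, GeminiProApi, SparkApi, HunyuanApi): the `fetch` wrapper defined in `@/app/utils` is imported and handed to `fetchEventSource` through its `fetch` option, so the SSE request goes through the Tauri HTTP client when one is available. A minimal sketch of the pattern; the helper name and the `chatPath` / `chatPayload` parameters are stand-ins for illustration, not code from the commit:

import { fetchEventSource } from "@fortaine/fetch-event-source";
import { fetch } from "@/app/utils"; // the wrapper added in this commit

// Hypothetical helper showing how the custom fetch is wired into the SSE stream.
function streamChat(chatPath: string, chatPayload: RequestInit) {
  const controller = new AbortController();
  fetchEventSource(chatPath, {
    fetch: fetch as any, // tauriFetch inside the Tauri webview, window.fetch otherwise
    ...chatPayload,
    signal: controller.signal,
    onmessage(msg) {
      console.log("[SSE]", msg.data); // each streamed chunk lands here
    },
    onerror(err) {
      controller.abort();
      throw err; // throwing stops fetch-event-source from retrying
    },
  });
  return controller;
}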
@@ -23,7 +23,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 export interface OpenAIListModelResponse {
   object: string;
@@ -197,6 +197,7 @@ export class ErnieApi implements LLMApi {
     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
@@ -22,7 +22,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 export interface OpenAIListModelResponse {
   object: string;
@@ -165,6 +165,7 @@ export class DoubaoApi implements LLMApi {
     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
@@ -20,6 +20,7 @@ import {
   getMessageTextContent,
   getMessageImages,
   isVisionModel,
+  fetch,
 } from "@/app/utils";
 import { preProcessImageContent } from "@/app/utils/chat";

@@ -217,6 +218,7 @@ export class GeminiProApi implements LLMApi {
     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
@@ -21,7 +21,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 import { RequestPayload } from "./openai";

@@ -149,6 +149,7 @@ export class SparkApi implements LLMApi {
     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
@@ -17,7 +17,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent, isVisionModel } from "@/app/utils";
+import { getMessageTextContent, isVisionModel, fetch } from "@/app/utils";
 import mapKeys from "lodash-es/mapKeys";
 import mapValues from "lodash-es/mapValues";
 import isArray from "lodash-es/isArray";
@@ -179,6 +179,7 @@ export class HunyuanApi implements LLMApi {
     controller.signal.onabort = finish;

     fetchEventSource(chatPath, {
+      fetch: fetch as any,
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
app/utils.ts (10 lines changed)

@@ -287,6 +287,16 @@ export function showPlugins(provider: ServiceProvider, model: string) {
   return false;
 }

+export function fetch(
+  url: string,
+  options?: Record<string, unknown>,
+): Promise<any> {
+  if (window.__TAURI__) {
+    return tauriFetch(url, options);
+  }
+  return window.fetch(url, options);
+}
+
 export function adapter(config: Record<string, unknown>) {
   const { baseURL, url, params, ...rest } = config;
   const path = baseURL ? `${baseURL}${url}` : url;
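The hunk above does not show where `tauriFetch` comes from. A minimal sketch of the complete wrapper follows, assuming the import is the JS binding of the Tauri HTTP plugin that the Cargo.lock change below pulls in (`@tauri-apps/plugin-http`); that import line and the `(window as any)` cast are assumptions made so the sketch is self-contained, not code from the commit:

import { fetch as tauriFetch } from "@tauri-apps/plugin-http"; // assumed source of tauriFetch

export function fetch(
  url: string,
  options?: Record<string, unknown>,
): Promise<any> {
  // window.__TAURI__ is only injected inside the Tauri webview: route requests
  // through the Rust-side HTTP client there (bypassing webview CORS limits),
  // and fall back to the normal Fetch API in plain browser builds.
  if ((window as any).__TAURI__) {
    return tauriFetch(url, options as RequestInit);
  }
  return window.fetch(url, options as RequestInit);
}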
@@ -10,6 +10,7 @@ import {
   fetchEventSource,
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "./format";
+import { fetch } from "@/app/utils";

 export function compressImage(file: Blob, maxSize: number): Promise<string> {
   return new Promise((resolve, reject) => {
@@ -287,6 +288,7 @@ export function stream(
       REQUEST_TIMEOUT_MS,
     );
     fetchEventSource(chatPath, {
+      fetch: fetch as any, // using tauriFetch or window.fetch
       ...chatPayload,
       async onopen(res) {
         clearTimeout(requestTimeoutId);
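The `fetch as any` cast is needed because the wrapper's signature, `(url: string, options?: Record<string, unknown>) => Promise<any>`, is looser than the DOM-style `typeof fetch` that fetch-event-source expects for its `fetch` override (as typed upstream in `@microsoft/fetch-event-source`, which the `@fortaine` package forks). A hypothetical strictly typed shim that would avoid the cast, shown only as an illustration and not part of this commit:

import { fetch as appFetch } from "@/app/utils";

// Adapts the app wrapper to the (input, init) => Promise<Response> shape of the
// DOM fetch, so it could be handed to fetchEventSource without a cast.
const compatibleFetch: typeof window.fetch = (input, init) => {
  const url =
    typeof input === "string"
      ? input
      : input instanceof URL
        ? input.href
        : input.url; // Request object
  return appFetch(url, init as Record<string, unknown>);
};

// Usage: fetchEventSource(chatPath, { fetch: compatibleFetch, ...chatPayload, ... });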
src-tauri/Cargo.lock (generated, 1 line changed)

@@ -2033,6 +2033,7 @@ dependencies = [
  "tauri-build",
+ "tauri-plugin-http",
  "tauri-plugin-log",
  "wry 0.43.1 (git+https://github.com/lloydzhou/wry?branch=webkitgtk-data_manager-directory)",
 ]

 [[package]]