Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git, synced 2026-02-08 15:34:26 +08:00.

Compare commits: v2.15.3 ... edfa6d14ee (41 commits)

edfa6d14ee, b6d9ba93fa, 6293b95a3b, ef4665cd8b, 8030e71a5a, f42488d4cb, af49ed4fdc, b174a40634, 3c01738c29, 9be58f3eb4, a50c282d01, 5141145e4d, b5f6e5a598, 7df308d655, f9d4105170, 9e6ee50fa6, dd77ad5d74, 3898c507c4, fcba50f041, 452fc86ad1, 5bdf411399, 2d920f7ccc, d84d51b475, f9d6f4f9da, a13bd624e8, 8fb019b2e2, 2f3457e73d, c6ebd6e73c, 2333a47c55, b35895b551, 19c4ed4463, 22aa1698b4, 07d089a2bd, 870ad913cc, 3fb389551b, d12a4adfb5, 702e17c96b, 93ff7d26cc, 13777786c4, 6655c64e55, b7892b58f5
@@ -12,15 +12,18 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4

一键免费部署你的跨平台私人 ChatGPT 应用, 支持 GPT3, GPT4 & Gemini Pro 模型。

[![Saas][Saas-image]][saas-url]
[![Web][Web-image]][web-url]
[![Windows][Windows-image]][download-url]
[![MacOS][MacOS-image]][download-url]
[![Linux][Linux-image]][download-url]

[Web App](https://app.nextchat.dev/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)
[NextChatAI](https://nextchat.dev/chat) / [Web App](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)

[网页版](https://app.nextchat.dev/) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [企业版](#%E4%BC%81%E4%B8%9A%E7%89%88) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues)
[NextChatAI](https://nextchat.dev/chat) / [网页版](https://app.nextchat.dev) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [企业版](#%E4%BC%81%E4%B8%9A%E7%89%88) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues)

[saas-url]: https://nextchat.dev/chat
[saas-image]: https://img.shields.io/badge/NextChat-Saas-green?logo=microsoftedge
[web-url]: https://app.nextchat.dev/
[download-url]: https://github.com/Yidadaa/ChatGPT-Next-Web/releases
[Web-image]: https://img.shields.io/badge/Web-PWA-orange?logo=microsoftedge

@@ -172,7 +175,7 @@ We recommend that you follow the steps below to re-deploy:

### Enable Automatic Updates

> If you encounter a failure of Upstream Sync execution, please manually sync fork once.
> If you encounter a failure of Upstream Sync execution, please [manually update code](./README.md#manually-updating-code).

After forking the project, due to the limitations imposed by GitHub, you need to manually enable Workflows and Upstream Sync Action on the Actions page of the forked project. Once enabled, automatic updates will be scheduled every hour:
@@ -8,7 +8,7 @@

一键免费部署你的私人 ChatGPT 网页应用，支持 GPT3, GPT4 & Gemini Pro 模型。

[企业版](#%E4%BC%81%E4%B8%9A%E7%89%88) /[演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N)
[NextChatAI](https://nextchat.dev/chat) / [企业版](#%E4%BC%81%E4%B8%9A%E7%89%88) / [演示 Demo](https://chat-gpt-next-web.vercel.app/) / [反馈 Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [加入 Discord](https://discord.gg/zrhvHCr79N)

[<img src="https://vercel.com/button" alt="Deploy on Zeabur" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)

@@ -54,7 +54,7 @@

### 打开自动更新

> 如果你遇到了 Upstream Sync 执行错误，请手动 Sync Fork 一次！
> 如果你遇到了 Upstream Sync 执行错误，请[手动 Sync Fork 一次](./README_CN.md#手动更新代码)！

当你 fork 项目之后，由于 Github 的限制，需要手动去你 fork 后的项目的 Actions 页面启用 Workflows，并启用 Upstream Sync Action，启用之后即可开启每小时定时自动更新：
@@ -5,7 +5,7 @@

ワンクリックで無料であなた専用の ChatGPT ウェブアプリをデプロイ。GPT3、GPT4 & Gemini Pro モデルをサポート。

[企業版](#企業版) / [デモ](https://chat-gpt-next-web.vercel.app/) / [フィードバック](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Discordに参加](https://discord.gg/zrhvHCr79N)
[NextChatAI](https://nextchat.dev/chat) / [企業版](#企業版) / [デモ](https://chat-gpt-next-web.vercel.app/) / [フィードバック](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Discordに参加](https://discord.gg/zrhvHCr79N)

[<img src="https://vercel.com/button" alt="Zeaburでデプロイ" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat) [<img src="https://zeabur.com/button.svg" alt="Zeaburでデプロイ" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Gitpodで開く" height="30">](https://gitpod.io/#https://github.com/Yidadaa/ChatGPT-Next-Web)

@@ -54,7 +54,7 @@

### 自動更新を開く

> Upstream Sync の実行エラーが発生した場合は、手動で Sync Fork してください！
> Upstream Sync の実行エラーが発生した場合は、[手動で Sync Fork](./README_JA.md#手動でコードを更新する) してください！

プロジェクトを fork した後、GitHub の制限により、fork 後のプロジェクトの Actions ページで Workflows を手動で有効にし、Upstream Sync Action を有効にする必要があります。有効化後、毎時の定期自動更新が可能になります：
@@ -23,7 +23,8 @@ export async function handle(
});
}

const bearToken = req.headers.get("Authorization") ?? "";
const bearToken =
req.headers.get("x-goog-api-key") || req.headers.get("Authorization") || "";
const token = bearToken.trim().replaceAll("Bearer ", "").trim();

const apiKey = token ? token : serverConfig.googleApiKey;

@@ -91,8 +92,8 @@ async function request(req: NextRequest, apiKey: string) {
},
10 * 60 * 1000,
);
const fetchUrl = `${baseUrl}${path}?key=${apiKey}${
req?.nextUrl?.searchParams?.get("alt") === "sse" ? "&alt=sse" : ""
const fetchUrl = `${baseUrl}${path}${
req?.nextUrl?.searchParams?.get("alt") === "sse" ? "?alt=sse" : ""
}`;

console.log("[Fetch Url] ", fetchUrl);

@@ -100,6 +101,9 @@ async function request(req: NextRequest, apiKey: string) {
headers: {
"Content-Type": "application/json",
"Cache-Control": "no-store",
"x-goog-api-key":
req.headers.get("x-goog-api-key") ||
(req.headers.get("Authorization") ?? "").replace("Bearer ", ""),
},
method: req.method,
body: req.body,
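The hunks above move the Gemini key out of the `?key=` query string: the proxy route now reads it from an `x-goog-api-key` header (falling back to a stripped Bearer `Authorization` value) and forwards it upstream in the same header. A minimal sketch of a client request against the updated route; the route path, model name, and key value are illustrative assumptions rather than lines from this diff:

```ts
// Hypothetical call against the updated Google proxy route (path and key are
// placeholders); the key now rides in x-goog-api-key instead of ?key=.
await fetch("/api/google/v1beta/models/gemini-pro:streamGenerateContent?alt=sse", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "x-goog-api-key": "YOUR_GEMINI_KEY", // or: Authorization: "Bearer YOUR_GEMINI_KEY"
  },
  body: JSON.stringify({ contents: [{ role: "user", parts: [{ text: "Hello" }] }] }),
});
```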
@@ -272,7 +272,13 @@ export function getHeaders(ignoreHeaders: boolean = false) {
}

function getAuthHeader(): string {
return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
return isAzure
? "api-key"
: isAnthropic
? "x-api-key"
: isGoogle
? "x-goog-api-key"
: "Authorization";
}

const {

@@ -283,14 +289,15 @@ export function getHeaders(ignoreHeaders: boolean = false) {
apiKey,
isEnabledAccessControl,
} = getConfig();
// when using google api in app, not set auth header
if (isGoogle && clientConfig?.isApp) return headers;
// when using baidu api in app, not set auth header
if (isBaidu && clientConfig?.isApp) return headers;

const authHeader = getAuthHeader();

const bearerToken = getBearerToken(apiKey, isAzure || isAnthropic);
const bearerToken = getBearerToken(
apiKey,
isAzure || isAnthropic || isGoogle,
);

if (bearerToken) {
headers[authHeader] = bearerToken;
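With `isGoogle` added to both `getAuthHeader()` and the `getBearerToken()` call, Google requests in the app now carry the key in `x-goog-api-key` without a `Bearer` prefix instead of skipping the auth header entirely. A standalone sketch of that selection logic; the flag values and the prefix behaviour of `getBearerToken` are assumptions taken from how the surrounding code uses them:

```ts
// Sketch only: mirrors the ternary chain introduced above.
function pickAuthHeaderName(isAzure: boolean, isAnthropic: boolean, isGoogle: boolean): string {
  return isAzure
    ? "api-key"
    : isAnthropic
      ? "x-api-key"
      : isGoogle
        ? "x-goog-api-key"
        : "Authorization";
}

// Assumption: the second argument of getBearerToken suppresses the "Bearer " prefix,
// which is why Azure, Anthropic and now Google pass true.
function makeTokenValue(apiKey: string, noBearerPrefix: boolean): string {
  return apiKey ? (noBearerPrefix ? apiKey : `Bearer ${apiKey}`) : "";
}

const header = pickAuthHeaderName(false, false, true); // "x-goog-api-key"
const value = makeTokenValue("test-gemini-key", true); // bare key, no "Bearer " prefix
```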
@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;

@@ -178,6 +179,7 @@ export class QwenApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
@@ -8,7 +8,7 @@ import {
ChatMessageTool,
} from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { ANTHROPIC_BASE_URL } from "@/app/constant";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { preProcessImageContent, stream } from "@/app/utils/chat";
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";

@@ -388,9 +388,7 @@ export class ClaudeApi implements LLMApi {
if (baseUrl.trim().length === 0) {
const isApp = !!getClientConfig()?.isApp;

baseUrl = isApp
? DEFAULT_API_HOST + "/api/proxy/anthropic"
: ApiPath.Anthropic;
baseUrl = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;
}

if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
@@ -24,6 +24,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;

@@ -197,6 +198,7 @@ export class ErnieApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
@@ -23,6 +23,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;

@@ -165,6 +166,7 @@ export class DoubaoApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
@@ -9,7 +9,7 @@ import {
} from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import { getClientConfig } from "@/app/config/client";
import { DEFAULT_API_HOST } from "@/app/constant";
import { GEMINI_BASE_URL } from "@/app/constant";
import Locale from "../../locales";
import {
EventStreamContentType,

@@ -22,6 +22,7 @@ import {
isVisionModel,
} from "@/app/utils";
import { preProcessImageContent } from "@/app/utils/chat";
import { fetch } from "@/app/utils/stream";

export class GeminiProApi implements LLMApi {
path(path: string): string {

@@ -34,7 +35,7 @@ export class GeminiProApi implements LLMApi {

const isApp = !!getClientConfig()?.isApp;
if (baseUrl.length === 0) {
baseUrl = isApp ? DEFAULT_API_HOST + `/api/proxy/google` : ApiPath.Google;
baseUrl = isApp ? GEMINI_BASE_URL : ApiPath.Google;
}
if (baseUrl.endsWith("/")) {
baseUrl = baseUrl.slice(0, baseUrl.length - 1);

@@ -48,10 +49,6 @@ export class GeminiProApi implements LLMApi {
let chatPath = [baseUrl, path].join("/");

chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
// if chatPath.startsWith('http') then add key in query string
if (chatPath.startsWith("http") && accessStore.googleApiKey) {
chatPath += `&key=${accessStore.googleApiKey}`;
}
return chatPath;
}
extractMessage(res: any) {

@@ -217,6 +214,7 @@ export class GeminiProApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
@@ -1,7 +1,7 @@
"use client";
import {
ApiPath,
DEFAULT_API_HOST,
IFLYTEK_BASE_URL,
Iflytek,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";

@@ -22,6 +22,7 @@ import {
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import { getMessageTextContent } from "@/app/utils";
import { fetch } from "@/app/utils/stream";

import { RequestPayload } from "./openai";

@@ -40,7 +41,7 @@ export class SparkApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Iflytek;
baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
baseUrl = isApp ? IFLYTEK_BASE_URL : apiPath;
}

if (baseUrl.endsWith("/")) {

@@ -149,6 +150,7 @@ export class SparkApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
@@ -2,7 +2,7 @@
// azure and openai, using same models. so using same LLMApi.
import {
ApiPath,
DEFAULT_API_HOST,
MOONSHOT_BASE_URL,
Moonshot,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";

@@ -40,7 +40,7 @@ export class MoonshotApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Moonshot;
baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
baseUrl = isApp ? MOONSHOT_BASE_URL : apiPath;
}

if (baseUrl.endsWith("/")) {
@@ -2,7 +2,7 @@
// azure and openai, using same models. so using same LLMApi.
import {
ApiPath,
DEFAULT_API_HOST,
OPENAI_BASE_URL,
DEFAULT_MODELS,
OpenaiPath,
Azure,

@@ -98,7 +98,7 @@ export class ChatGPTApi implements LLMApi {
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
baseUrl = isApp ? OPENAI_BASE_URL : apiPath;
}

if (baseUrl.endsWith("/")) {
@@ -1,5 +1,5 @@
"use client";
import { ApiPath, DEFAULT_API_HOST, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ApiPath, TENCENT_BASE_URL, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

import {

@@ -22,6 +22,7 @@ import mapKeys from "lodash-es/mapKeys";
import mapValues from "lodash-es/mapValues";
import isArray from "lodash-es/isArray";
import isObject from "lodash-es/isObject";
import { fetch } from "@/app/utils/stream";

export interface OpenAIListModelResponse {
object: string;

@@ -70,9 +71,7 @@ export class HunyuanApi implements LLMApi {

if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
baseUrl = isApp
? DEFAULT_API_HOST + "/api/proxy/tencent"
: ApiPath.Tencent;
baseUrl = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;
}

if (baseUrl.endsWith("/")) {

@@ -179,6 +178,7 @@ export class HunyuanApi implements LLMApi {
controller.signal.onabort = finish;

fetchEventSource(chatPath, {
fetch: fetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
@@ -11,7 +11,6 @@ export const RUNTIME_CONFIG_DOM = "danger-runtime-config";

export const STABILITY_BASE_URL = "https://api.stability.ai";

export const DEFAULT_API_HOST = "https://api.nextchat.dev";
export const OPENAI_BASE_URL = "https://api.openai.com";
export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";
@@ -1,9 +1,18 @@
import {
ApiPath,
DEFAULT_API_HOST,
GoogleSafetySettingsThreshold,
ServiceProvider,
StoreKey,
ApiPath,
OPENAI_BASE_URL,
ANTHROPIC_BASE_URL,
GEMINI_BASE_URL,
BAIDU_BASE_URL,
BYTEDANCE_BASE_URL,
ALIBABA_BASE_URL,
TENCENT_BASE_URL,
MOONSHOT_BASE_URL,
STABILITY_BASE_URL,
IFLYTEK_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";

@@ -15,45 +24,25 @@ let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

const isApp = getClientConfig()?.buildMode === "export";

const DEFAULT_OPENAI_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/openai"
: ApiPath.OpenAI;
const DEFAULT_OPENAI_URL = isApp ? OPENAI_BASE_URL : ApiPath.OpenAI;

const DEFAULT_GOOGLE_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/google"
: ApiPath.Google;
const DEFAULT_GOOGLE_URL = isApp ? GEMINI_BASE_URL : ApiPath.Google;

const DEFAULT_ANTHROPIC_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/anthropic"
: ApiPath.Anthropic;
const DEFAULT_ANTHROPIC_URL = isApp ? ANTHROPIC_BASE_URL : ApiPath.Anthropic;

const DEFAULT_BAIDU_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/baidu"
: ApiPath.Baidu;
const DEFAULT_BAIDU_URL = isApp ? BAIDU_BASE_URL : ApiPath.Baidu;

const DEFAULT_BYTEDANCE_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/bytedance"
: ApiPath.ByteDance;
const DEFAULT_BYTEDANCE_URL = isApp ? BYTEDANCE_BASE_URL : ApiPath.ByteDance;

const DEFAULT_ALIBABA_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/alibaba"
: ApiPath.Alibaba;
const DEFAULT_ALIBABA_URL = isApp ? ALIBABA_BASE_URL : ApiPath.Alibaba;

const DEFAULT_TENCENT_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/tencent"
: ApiPath.Tencent;
const DEFAULT_TENCENT_URL = isApp ? TENCENT_BASE_URL : ApiPath.Tencent;

const DEFAULT_MOONSHOT_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/moonshot"
: ApiPath.Moonshot;
const DEFAULT_MOONSHOT_URL = isApp ? MOONSHOT_BASE_URL : ApiPath.Moonshot;

const DEFAULT_STABILITY_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/stability"
: ApiPath.Stability;
const DEFAULT_STABILITY_URL = isApp ? STABILITY_BASE_URL : ApiPath.Stability;

const DEFAULT_IFLYTEK_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/iflytek"
: ApiPath.Iflytek;
const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;

const DEFAULT_ACCESS_STATE = {
accessCode: "",

@@ -211,10 +200,13 @@ export const useAccessStore = createPersistStore(
})
.then((res) => res.json())
.then((res) => {
// Set default model from env request
let defaultModel = res.defaultModel ?? "";
if (defaultModel !== "")
DEFAULT_CONFIG.modelConfig.model = defaultModel;
const defaultModel = res.defaultModel ?? "";
if (defaultModel !== "") {
const [model, providerName] = defaultModel.split("@");
DEFAULT_CONFIG.modelConfig.model = model;
DEFAULT_CONFIG.modelConfig.providerName = providerName;
}

return res;
})
.then((res: DangerConfig) => {
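The `useAccessStore` hunk also changes how the server-provided default model is applied: the value may now encode a provider after an `@`. A tiny sketch of that parsing; the example value is illustrative:

```ts
// "model@provider" splitting as introduced in the hunk above; the value here is made up.
const defaultModel = "gemini-pro@Google";
if (defaultModel !== "") {
  const [model, providerName] = defaultModel.split("@");
  console.log(model, providerName); // "gemini-pro" "Google"
}
```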
@@ -4,10 +4,10 @@ import { nanoid } from "nanoid";
import { createPersistStore } from "../utils/store";
import { getClientConfig } from "../config/client";
import yaml from "js-yaml";
import { adapter } from "../utils";
import { adapter, getOperationId } from "../utils";
import { useAccessStore } from "./access";

const isApp = getClientConfig()?.isApp;
const isApp = getClientConfig()?.isApp !== false;

export type Plugin = {
id: string;

@@ -116,7 +116,7 @@ export const FunctionToolService = {
return {
type: "function",
function: {
name: o.operationId,
name: getOperationId(o),
description: o.description || o.summary,
parameters: parameters,
},

@@ -124,7 +124,7 @@ export const FunctionToolService = {
}),
funcs: operations.reduce((s, o) => {
// @ts-ignore
s[o.operationId] = function (args) {
s[getOperationId(o)] = function (args) {
const parameters: Record<string, any> = {};
if (o.parameters instanceof Array) {
o.parameters.forEach((p) => {

@@ -139,8 +139,8 @@ export const FunctionToolService = {
} else if (authLocation == "body") {
args[headerName] = tokenValue;
}
// @ts-ignore
return api.client[o.operationId](
// @ts-ignore if o.operationId is null, then using o.path and o.method
return api.client.paths[o.path][o.method](
parameters,
args,
api.axiosConfigDefaults,
@@ -12,7 +12,6 @@ import { downloadAs, readFromFile } from "../utils";
import { showToast } from "../components/ui-lib";
import Locale from "../locales";
import { createSyncClient, ProviderType } from "../utils/cloud";
import { corsPath } from "../utils/cors";

export interface WebDavConfig {
server: string;

@@ -26,7 +25,7 @@ export type SyncStore = GetStoreState<typeof useSyncStore>;
const DEFAULT_SYNC_STATE = {
provider: ProviderType.WebDAV,
useProxy: true,
proxyUrl: corsPath(ApiPath.Cors),
proxyUrl: ApiPath.Cors as string,

webdav: {
endpoint: "",
app/utils.ts (48 changed lines)

@@ -2,8 +2,9 @@ import { useEffect, useState } from "react";
import { showToast } from "./components/ui-lib";
import Locale from "./locales";
import { RequestMessage } from "./client/api";
import { ServiceProvider, REQUEST_TIMEOUT_MS } from "./constant";
import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
import { ServiceProvider } from "./constant";
// import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
import { fetch as tauriStreamFetch } from "./utils/stream";

export function trimTopic(topic: string) {
// Fix an issue where double quotes still show in the Indonesian language

@@ -292,19 +293,20 @@ export function fetch(
options?: Record<string, unknown>,
): Promise<any> {
if (window.__TAURI__) {
const payload = options?.body || options?.data;
return tauriFetch(url, {
...options,
body:
payload &&
({
type: "Text",
payload,
} as any),
timeout: ((options?.timeout as number) || REQUEST_TIMEOUT_MS) / 1000,
responseType:
options?.responseType == "text" ? ResponseType.Text : ResponseType.JSON,
} as any);
return tauriStreamFetch(url, options);
// const payload = options?.body || options?.data;
// return tauriFetch(url, {
// ...options,
// body:
// payload &&
// ({
// type: "Text",
// payload,
// } as any),
// timeout: ((options?.timeout as number) || REQUEST_TIMEOUT_MS) / 1000,
// responseType:
// options?.responseType == "text" ? ResponseType.Text : ResponseType.JSON,
// } as any);
}
return window.fetch(url, options);
}

@@ -315,7 +317,9 @@ export function adapter(config: Record<string, unknown>) {
const fetchUrl = params
? `${path}?${new URLSearchParams(params as any).toString()}`
: path;
return fetch(fetchUrl as string, { ...rest, responseType: "text" });
return fetch(fetchUrl as string, { ...rest, responseType: "text" })
.then((res) => res.text())
.then((data) => ({ data }));
}

export function safeLocalStorage(): {

@@ -377,3 +381,15 @@ export function safeLocalStorage(): {
},
};
}

export function getOperationId(operation: {
operationId?: string;
method: string;
path: string;
}) {
// pattern '^[a-zA-Z0-9_-]+$'
return (
operation?.operationId ||
`${operation.method.toUpperCase()}${operation.path.replaceAll("/", "_")}`
);
}
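The new `getOperationId()` helper gives plugin tools a stable name even when an OpenAPI operation has no `operationId`, falling back to the HTTP method plus the path with `/` replaced by `_`, so the result still matches the `'^[a-zA-Z0-9_-]+$'` pattern. A short usage sketch; the operations shown are made-up examples:

```ts
import { getOperationId } from "@/app/utils";

getOperationId({ operationId: "getWeather", method: "get", path: "/weather" });
// => "getWeather"
getOperationId({ method: "post", path: "/v1/chat/completions" });
// => "POST_v1_chat_completions"
```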
@@ -10,6 +10,7 @@ import {
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "./format";
import { fetch as tauriFetch } from "./stream";

export function compressImage(file: Blob, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {

@@ -287,6 +288,7 @@ export function stream(
REQUEST_TIMEOUT_MS,
);
fetchEventSource(chatPath, {
fetch: tauriFetch as any,
...chatPayload,
async onopen(res) {
clearTimeout(requestTimeoutId);
@@ -1,19 +0,0 @@
import { getClientConfig } from "../config/client";
import { DEFAULT_API_HOST } from "../constant";

export function corsPath(path: string) {
  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";

  if (baseUrl === "" && path === "") {
    return "";
  }
  if (!path.startsWith("/")) {
    path = "/" + path;
  }

  if (!path.endsWith("/")) {
    path += "/";
  }

  return `${baseUrl}${path}`;
}
app/utils/stream.ts (new file, 104 lines)

@@ -0,0 +1,104 @@
// using tauri command to send request
// see src-tauri/src/stream.rs, and src-tauri/src/main.rs
// 1. invoke('stream_fetch', {url, method, headers, body}), get response with headers.
// 2. listen event: `stream-response` multi times to get body

type ResponseEvent = {
  id: number;
  payload: {
    request_id: number;
    status?: number;
    chunk?: number[];
  };
};

type StreamResponse = {
  request_id: number;
  status: number;
  status_text: string;
  headers: Record<string, string>;
};

export function fetch(url: string, options?: RequestInit): Promise<any> {
  if (window.__TAURI__) {
    const {
      signal,
      method = "GET",
      headers: _headers = {},
      body = [],
    } = options || {};
    let unlisten: Function | undefined;
    let request_id = 0;
    const ts = new TransformStream();
    const writer = ts.writable.getWriter();

    let closed = false;
    const close = () => {
      if (closed) return;
      closed = true;
      unlisten && unlisten();
      writer.ready.then(() => {
        writer.close().catch((e) => console.error(e));
      });
    };

    if (signal) {
      signal.addEventListener("abort", () => close());
    }
    // @ts-ignore 2. listen response multi times, and write to Response.body
    window.__TAURI__.event
      .listen("stream-response", (e: ResponseEvent) => {
        const { request_id: rid, chunk, status } = e?.payload || {};
        if (request_id != rid) {
          return;
        }
        if (chunk) {
          writer.ready.then(() => {
            writer.write(new Uint8Array(chunk));
          });
        } else if (status === 0) {
          // end of body
          close();
        }
      })
      .then((u: Function) => (unlisten = u));

    const headers: Record<string, string> = {
      Accept: "application/json, text/plain, */*",
      "Accept-Language": "en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7",
      "User-Agent": navigator.userAgent,
    };
    for (const item of new Headers(_headers || {})) {
      headers[item[0]] = item[1];
    }
    return window.__TAURI__
      .invoke("stream_fetch", {
        method: method.toUpperCase(),
        url,
        headers,
        // TODO FormData
        body:
          typeof body === "string"
            ? Array.from(new TextEncoder().encode(body))
            : [],
      })
      .then((res: StreamResponse) => {
        request_id = res.request_id;
        const { status, status_text: statusText, headers } = res;
        const response = new Response(ts.readable, {
          status,
          statusText,
          headers,
        });
        if (status >= 300) {
          setTimeout(close, 50);
        }
        return response;
      })
      .catch((e) => {
        console.error("stream error", e);
        throw e;
      });
  }
  return window.fetch(url, options);
}
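Inside the Tauri app, this `fetch` resolves to a standard `Response` whose body is fed chunk by chunk from the `stream-response` events via the `TransformStream`, so callers can consume it like any streaming `fetch`. A usage sketch; the URL, payload, and header values are illustrative assumptions:

```ts
import { fetch as tauriStreamFetch } from "@/app/utils/stream";

async function readStream() {
  const res = await tauriStreamFetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: "Bearer test-key" },
    body: JSON.stringify({ model: "gpt-4o-mini", stream: true, messages: [{ role: "user", content: "hi" }] }),
  });
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(decoder.decode(value, { stream: true })); // chunks arrive incrementally
  }
}
```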
src-tauri/Cargo.lock (generated, 52 changed lines)

Dependency version bumps recorded in the lockfile (the corresponding source and checksum entries are updated in the same hunks):

| Package | Old version | New version |
|---|---|---|
| bytes | 1.4.0 | 1.7.2 |
| form_urlencoded | 1.1.0 | 1.2.1 |
| futures-core | 0.3.28 | 0.3.30 |
| futures-io | 0.3.28 | 0.3.30 |
| futures-macro | 0.3.28 | 0.3.30 |
| futures-sink | 0.3.29 | 0.3.30 |
| futures-task | 0.3.28 | 0.3.30 |
| futures-util | 0.3.28 | 0.3.30 |
| idna | 0.3.0 | 0.5.0 |
| percent-encoding | 2.2.0 | 2.3.1 |
| proc-macro2 | 1.0.58 | 1.0.86 |
| url | 2.3.1 | 2.5.2 |

The `nextchat` package (version 0.1.0) gains the new direct dependencies "bytes", "futures-util", "percent-encoding", and "reqwest", alongside the existing "serde", "serde_json", and "tauri".
@@ -35,8 +35,13 @@ tauri = { version = "1.5.4", features = [ "http-all",
"window-start-dragging",
"window-unmaximize",
"window-unminimize",
"linux-protocol-headers",
] }
tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
percent-encoding = "2.3.1"
reqwest = "0.11.18"
futures-util = "0.3.30"
bytes = "1.7.2"

[features]
# this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
@@ -1,8 +1,11 @@
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]

mod stream;

fn main() {
tauri::Builder::default()
.invoke_handler(tauri::generate_handler![stream::stream_fetch])
.plugin(tauri_plugin_window_state::Builder::default().build())
.run(tauri::generate_context!())
.expect("error while running tauri application");
src-tauri/src/stream.rs (new file, 132 lines)

@@ -0,0 +1,132 @@
//
//

use std::error::Error;
use std::sync::atomic::{AtomicU32, Ordering};
use std::collections::HashMap;
use futures_util::{StreamExt};
use reqwest::Client;
use reqwest::header::{HeaderName, HeaderMap};

static REQUEST_COUNTER: AtomicU32 = AtomicU32::new(0);

#[derive(Debug, Clone, serde::Serialize)]
pub struct StreamResponse {
  request_id: u32,
  status: u16,
  status_text: String,
  headers: HashMap<String, String>
}

#[derive(Clone, serde::Serialize)]
pub struct EndPayload {
  request_id: u32,
  status: u16,
}

#[derive(Clone, serde::Serialize)]
pub struct ChunkPayload {
  request_id: u32,
  chunk: bytes::Bytes,
}

#[tauri::command]
pub async fn stream_fetch(
  window: tauri::Window,
  method: String,
  url: String,
  headers: HashMap<String, String>,
  body: Vec<u8>,
) -> Result<StreamResponse, String> {

  let event_name = "stream-response";
  let request_id = REQUEST_COUNTER.fetch_add(1, Ordering::SeqCst);

  let mut _headers = HeaderMap::new();
  for (key, value) in &headers {
    _headers.insert(key.parse::<HeaderName>().unwrap(), value.parse().unwrap());
  }

  // println!("method: {:?}", method);
  // println!("url: {:?}", url);
  // println!("headers: {:?}", headers);
  // println!("headers: {:?}", _headers);

  let method = method.parse::<reqwest::Method>().map_err(|err| format!("failed to parse method: {}", err))?;
  let client = Client::builder()
    .default_headers(_headers)
    .redirect(reqwest::redirect::Policy::limited(3))
    .build()
    .map_err(|err| format!("failed to generate client: {}", err))?;

  let mut request = client.request(
    method.clone(),
    url.parse::<reqwest::Url>().map_err(|err| format!("failed to parse url: {}", err))?
  );

  if method == reqwest::Method::POST || method == reqwest::Method::PUT || method == reqwest::Method::PATCH {
    let body = bytes::Bytes::from(body);
    // println!("body: {:?}", body);
    request = request.body(body);
  }

  // println!("client: {:?}", client);
  // println!("request: {:?}", request);

  let response_future = request.send();

  let res = response_future.await;
  let response = match res {
    Ok(res) => {
      // get response and emit to client
      let mut headers = HashMap::new();
      for (name, value) in res.headers() {
        headers.insert(
          name.as_str().to_string(),
          std::str::from_utf8(value.as_bytes()).unwrap().to_string()
        );
      }
      let status = res.status().as_u16();

      tauri::async_runtime::spawn(async move {
        let mut stream = res.bytes_stream();

        while let Some(chunk) = stream.next().await {
          match chunk {
            Ok(bytes) => {
              // println!("chunk: {:?}", bytes);
              if let Err(e) = window.emit(event_name, ChunkPayload{ request_id, chunk: bytes }) {
                println!("Failed to emit chunk payload: {:?}", e);
              }
            }
            Err(err) => {
              println!("Error chunk: {:?}", err);
            }
          }
        }
        if let Err(e) = window.emit(event_name, EndPayload{ request_id, status: 0 }) {
          println!("Failed to emit end payload: {:?}", e);
        }
      });

      StreamResponse {
        request_id,
        status,
        status_text: "OK".to_string(),
        headers,
      }
    }
    Err(err) => {
      println!("Error response: {:?}", err.source().expect("REASON").to_string());
      StreamResponse {
        request_id,
        status: 599,
        status_text: err.source().expect("REASON").to_string(),
        headers: HashMap::new(),
      }
    }
  };
  // println!("Response: {:?}", response);
  Ok(response)
}