Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2026-02-09 07:54:27 +08:00)

Compare commits: feature/gl ... aef119bb2b (24 commits)
Commits in this comparison (SHA1):

aef119bb2b
b2de4c0461
540f93ad25
cc33b7b6bf
7f857284bb
9bd1635064
4c9fe56f33
29296d42d7
d306491e88
a7d8abbc4d
4c6a593ae3
9ba1b39cf5
454c247de6
cb6576ec04
2758ea5ddd
a2e9e78457
b720ba4c30
7151c0134a
97099849b7
1fa58c60d9
5aaa190a7d
b9e6dce193
7b1667f204
87c5b72ea2
.github/workflows/app.yml (vendored, 4 changed lines)

@@ -20,7 +20,7 @@ jobs:
         with:
           node-version: 18
       - name: get version
-        run: echo "PACKAGE_VERSION=$(node -p "require('./src-tauri/tauri.conf.json').package.version")" >> $GITHUB_ENV
+        run: echo "PACKAGE_VERSION=$(node -p "require('./src-tauri/tauri.conf.json').version")" >> $GITHUB_ENV
       - name: create release
         id: create-release
         uses: actions/github-script@v6
@@ -69,7 +69,7 @@ jobs:
         if: matrix.config.os == 'ubuntu-latest'
         run: |
           sudo apt-get update
-          sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
+          sudo apt-get install -y libgtk-3-dev libsoup-3.0-dev libjavascriptcoregtk-4.1 libwebkit2gtk-4.0-dev libappindicator3-dev librsvg2-dev patchelf
       - name: install frontend dependencies
         run: yarn install # change this to npm or pnpm depending on which one you use
       - uses: tauri-apps/tauri-action@v0
.gitignore (vendored, 1 changed line)

@@ -46,3 +46,4 @@ dev
 *.key.pub

 masks.json
+package-lock.json
@@ -22,7 +22,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 export interface OpenAIListModelResponse {
   object: string;
@@ -178,6 +178,7 @@ export class QwenApi implements LLMApi {
       controller.signal.onabort = finish;

       fetchEventSource(chatPath, {
+        fetch: fetch as any,
         ...chatPayload,
         async onopen(res) {
           clearTimeout(requestTimeoutId);
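The hunk above (per its QwenApi header, the Alibaba/Qwen client) adds two things: the shared fetch wrapper imported from @/app/utils, and a `fetch: fetch as any` option handed to fetchEventSource, so streaming requests go through the Tauri HTTP plugin in the desktop app and through window.fetch on the web. The same pair of lines recurs in the ErnieApi, DoubaoApi, GeminiProApi, SparkApi and HunyuanApi hunks below, and in the shared stream() helper in app/utils/chat.ts. A minimal sketch of the pattern, with the surrounding helper invented for illustration:

import { fetchEventSource } from "@fortaine/fetch-event-source";
import { fetch } from "@/app/utils"; // wrapper from the diffs: tauriFetch in the app, window.fetch on the web

// Illustrative helper, not code from the repository.
function streamChat(chatPath: string, chatPayload: RequestInit, onDelta: (text: string) => void) {
  const controller = new AbortController();
  fetchEventSource(chatPath, {
    ...chatPayload,
    signal: controller.signal,
    fetch: fetch as any, // the option each client hunk adds
    onmessage(msg) {
      if (msg.data !== "[DONE]") onDelta(msg.data);
    },
  });
  return controller; // callers can stop the stream via controller.abort()
}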
@@ -7,8 +7,6 @@ import {
   usePluginStore,
   ChatMessageTool,
 } from "@/app/store";
-import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
 import { getMessageTextContent, isVisionModel } from "@/app/utils";
 import { preProcessImageContent, stream } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
@@ -386,11 +384,7 @@ export class ClaudeApi implements LLMApi {

     // if endpoint is empty, use default endpoint
     if (baseUrl.trim().length === 0) {
-      const isApp = !!getClientConfig()?.isApp;
-
-      baseUrl = isApp
-        ? DEFAULT_API_HOST + "/api/proxy/anthropic"
-        : ApiPath.Anthropic;
+      baseUrl = ApiPath.Anthropic;
     }

     if (!baseUrl.startsWith("http") && !baseUrl.startsWith("/api")) {
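Alongside the fetch change, each provider client drops its desktop-app special case for the base URL: an empty URL used to be rewritten to DEFAULT_API_HOST plus a provider-specific proxy path when running as the packaged app, and after this branch it always falls back to the relative ApiPath, the same as the web build. A minimal sketch of the resulting resolution, with the ApiPath value assumed for illustration:

// Value assumed for illustration; the real enum lives in @/app/constant.
const ApiPath = { Anthropic: "/api/anthropic" } as const;

function resolveAnthropicBaseUrl(configured: string): string {
  let baseUrl = configured.trim();
  if (baseUrl.length === 0) {
    // No isApp / DEFAULT_API_HOST branch any more: web and app resolve the same path.
    baseUrl = ApiPath.Anthropic;
  }
  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, baseUrl.length - 1);
  }
  return baseUrl;
}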
@@ -23,7 +23,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 export interface OpenAIListModelResponse {
   object: string;
@@ -197,6 +197,7 @@ export class ErnieApi implements LLMApi {
       controller.signal.onabort = finish;

       fetchEventSource(chatPath, {
+        fetch: fetch as any,
         ...chatPayload,
         async onopen(res) {
           clearTimeout(requestTimeoutId);
@@ -22,7 +22,7 @@ import {
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 export interface OpenAIListModelResponse {
   object: string;
@@ -165,6 +165,7 @@ export class DoubaoApi implements LLMApi {
       controller.signal.onabort = finish;

       fetchEventSource(chatPath, {
+        fetch: fetch as any,
         ...chatPayload,
         async onopen(res) {
           clearTimeout(requestTimeoutId);
@@ -8,8 +8,6 @@ import {
   SpeechOptions,
 } from "../api";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import { getClientConfig } from "@/app/config/client";
-import { DEFAULT_API_HOST } from "@/app/constant";
 import Locale from "../../locales";
 import {
   EventStreamContentType,
@@ -20,6 +18,7 @@ import {
   getMessageTextContent,
   getMessageImages,
   isVisionModel,
+  fetch,
 } from "@/app/utils";
 import { preProcessImageContent } from "@/app/utils/chat";

@@ -32,9 +31,8 @@ export class GeminiProApi implements LLMApi {
       baseUrl = accessStore.googleUrl;
     }

-    const isApp = !!getClientConfig()?.isApp;
     if (baseUrl.length === 0) {
-      baseUrl = isApp ? DEFAULT_API_HOST + `/api/proxy/google` : ApiPath.Google;
+      baseUrl = ApiPath.Google;
     }
     if (baseUrl.endsWith("/")) {
       baseUrl = baseUrl.slice(0, baseUrl.length - 1);
@@ -217,6 +215,7 @@ export class GeminiProApi implements LLMApi {
       controller.signal.onabort = finish;

       fetchEventSource(chatPath, {
+        fetch: fetch as any,
         ...chatPayload,
         async onopen(res) {
           clearTimeout(requestTimeoutId);
@@ -1,10 +1,5 @@
 "use client";
-import {
-  ApiPath,
-  DEFAULT_API_HOST,
-  Iflytek,
-  REQUEST_TIMEOUT_MS,
-} from "@/app/constant";
+import { ApiPath, Iflytek, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

 import {
@@ -20,8 +15,7 @@ import {
   fetchEventSource,
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
-import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import { getMessageTextContent, fetch } from "@/app/utils";

 import { RequestPayload } from "./openai";

@@ -38,9 +32,7 @@ export class SparkApi implements LLMApi {
     }

     if (baseUrl.length === 0) {
-      const isApp = !!getClientConfig()?.isApp;
-      const apiPath = ApiPath.Iflytek;
-      baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+      baseUrl = ApiPath.Iflytek;
     }

     if (baseUrl.endsWith("/")) {
@@ -149,6 +141,7 @@ export class SparkApi implements LLMApi {
       controller.signal.onabort = finish;

       fetchEventSource(chatPath, {
+        fetch: fetch as any,
         ...chatPayload,
         async onopen(res) {
           clearTimeout(requestTimeoutId);
@@ -1,11 +1,6 @@
 "use client";
 // azure and openai, using same models. so using same LLMApi.
-import {
-  ApiPath,
-  DEFAULT_API_HOST,
-  Moonshot,
-  REQUEST_TIMEOUT_MS,
-} from "@/app/constant";
+import { ApiPath, Moonshot, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import {
   useAccessStore,
   useAppConfig,
@@ -21,7 +16,6 @@ import {
   LLMModel,
   SpeechOptions,
 } from "../api";
-import { getClientConfig } from "@/app/config/client";
 import { getMessageTextContent } from "@/app/utils";
 import { RequestPayload } from "./openai";

@@ -38,9 +32,8 @@ export class MoonshotApi implements LLMApi {
     }

     if (baseUrl.length === 0) {
-      const isApp = !!getClientConfig()?.isApp;
       const apiPath = ApiPath.Moonshot;
-      baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+      baseUrl = apiPath;
     }

     if (baseUrl.endsWith("/")) {
@@ -2,7 +2,6 @@
 // azure and openai, using same models. so using same LLMApi.
 import {
   ApiPath,
-  DEFAULT_API_HOST,
   DEFAULT_MODELS,
   OpenaiPath,
   Azure,
@@ -36,7 +35,6 @@ import {
   SpeechOptions,
 } from "../api";
 import Locale from "../../locales";
-import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
   isVisionModel,
@@ -96,9 +94,7 @@ export class ChatGPTApi implements LLMApi {
     }

     if (baseUrl.length === 0) {
-      const isApp = !!getClientConfig()?.isApp;
-      const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
-      baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
+      baseUrl = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
     }

     if (baseUrl.endsWith("/")) {
@@ -1,5 +1,5 @@
 "use client";
-import { ApiPath, DEFAULT_API_HOST, REQUEST_TIMEOUT_MS } from "@/app/constant";
+import { ApiPath, REQUEST_TIMEOUT_MS } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";

 import {
@@ -16,8 +16,7 @@ import {
   fetchEventSource,
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "@/app/utils/format";
-import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent, isVisionModel } from "@/app/utils";
+import { getMessageTextContent, isVisionModel, fetch } from "@/app/utils";
 import mapKeys from "lodash-es/mapKeys";
 import mapValues from "lodash-es/mapValues";
 import isArray from "lodash-es/isArray";
@@ -69,10 +68,7 @@ export class HunyuanApi implements LLMApi {
     }

     if (baseUrl.length === 0) {
-      const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp
-        ? DEFAULT_API_HOST + "/api/proxy/tencent"
-        : ApiPath.Tencent;
+      baseUrl = ApiPath.Tencent;
     }

     if (baseUrl.endsWith("/")) {
@@ -179,6 +175,7 @@ export class HunyuanApi implements LLMApi {
       controller.signal.onabort = finish;

       fetchEventSource(chatPath, {
+        fetch: fetch as any,
         ...chatPayload,
         async onopen(res) {
           clearTimeout(requestTimeoutId);
@@ -10,7 +10,7 @@ export const getBuildConfig = () => {

   const buildMode = process.env.BUILD_MODE ?? "standalone";
   const isApp = !!process.env.BUILD_APP;
-  const version = "v" + tauriConfig.package.version;
+  const version = "v" + tauriConfig.version;

   const commitInfo = (() => {
     try {
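This change and the workflow change at the top follow from the same Tauri 2 schema move: the app version now sits at the top level of tauri.conf.json instead of under `package`. A minimal sketch of reading it, with the import path assumed for illustration:

// Tauri 1 shape: { "package": { "productName", "version" }, ... }
// Tauri 2 shape: { "productName", "version", "identifier", ... }
import tauriConfig from "../../src-tauri/tauri.conf.json"; // path assumed for illustration

export const version = "v" + tauriConfig.version; // e.g. "v2.15.4" per the tauri.conf.json diff below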
@@ -11,7 +11,6 @@ export const RUNTIME_CONFIG_DOM = "danger-runtime-config";

 export const STABILITY_BASE_URL = "https://api.stability.ai";

-export const DEFAULT_API_HOST = "https://api.nextchat.dev";
 export const OPENAI_BASE_URL = "https://api.openai.com";
 export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";
@@ -1,6 +1,5 @@
 import {
   ApiPath,
-  DEFAULT_API_HOST,
   GoogleSafetySettingsThreshold,
   ServiceProvider,
   StoreKey,
@@ -15,46 +14,6 @@ let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

-const isApp = getClientConfig()?.buildMode === "export";
-
-const DEFAULT_OPENAI_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/openai"
-  : ApiPath.OpenAI;
-
-const DEFAULT_GOOGLE_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/google"
-  : ApiPath.Google;
-
-const DEFAULT_ANTHROPIC_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/anthropic"
-  : ApiPath.Anthropic;
-
-const DEFAULT_BAIDU_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/baidu"
-  : ApiPath.Baidu;
-
-const DEFAULT_BYTEDANCE_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/bytedance"
-  : ApiPath.ByteDance;
-
-const DEFAULT_ALIBABA_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/alibaba"
-  : ApiPath.Alibaba;
-
-const DEFAULT_TENCENT_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/tencent"
-  : ApiPath.Tencent;
-
-const DEFAULT_MOONSHOT_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/moonshot"
-  : ApiPath.Moonshot;
-
-const DEFAULT_STABILITY_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/stability"
-  : ApiPath.Stability;
-
-const DEFAULT_IFLYTEK_URL = isApp
-  ? DEFAULT_API_HOST + "/api/proxy/iflytek"
-  : ApiPath.Iflytek;
-
 const DEFAULT_ACCESS_STATE = {
   accessCode: "",
   useCustomConfig: false,
@@ -62,7 +21,7 @@ const DEFAULT_ACCESS_STATE = {
   provider: ServiceProvider.OpenAI,

   // openai
-  openaiUrl: DEFAULT_OPENAI_URL,
+  openaiUrl: ApiPath.OpenAI as string,
   openaiApiKey: "",

   // azure
@@ -71,44 +30,44 @@ const DEFAULT_ACCESS_STATE = {
   azureApiVersion: "2023-08-01-preview",

   // google ai studio
-  googleUrl: DEFAULT_GOOGLE_URL,
+  googleUrl: ApiPath.Google as string,
   googleApiKey: "",
   googleApiVersion: "v1",
   googleSafetySettings: GoogleSafetySettingsThreshold.BLOCK_ONLY_HIGH,

   // anthropic
-  anthropicUrl: DEFAULT_ANTHROPIC_URL,
+  anthropicUrl: ApiPath.Anthropic as string,
   anthropicApiKey: "",
   anthropicApiVersion: "2023-06-01",

   // baidu
-  baiduUrl: DEFAULT_BAIDU_URL,
+  baiduUrl: ApiPath.Baidu as string,
   baiduApiKey: "",
   baiduSecretKey: "",

   // bytedance
-  bytedanceUrl: DEFAULT_BYTEDANCE_URL,
+  bytedanceUrl: ApiPath.ByteDance as string,
   bytedanceApiKey: "",

   // alibaba
-  alibabaUrl: DEFAULT_ALIBABA_URL,
+  alibabaUrl: ApiPath.Alibaba as string,
   alibabaApiKey: "",

   // moonshot
-  moonshotUrl: DEFAULT_MOONSHOT_URL,
+  moonshotUrl: ApiPath.Moonshot as string,
   moonshotApiKey: "",

   //stability
-  stabilityUrl: DEFAULT_STABILITY_URL,
+  stabilityUrl: ApiPath.Stability as string,
   stabilityApiKey: "",

   // tencent
-  tencentUrl: DEFAULT_TENCENT_URL,
+  tencentUrl: ApiPath.Tencent as string,
   tencentSecretKey: "",
   tencentSecretId: "",

   // iflytek
-  iflytekUrl: DEFAULT_IFLYTEK_URL,
+  iflytekUrl: ApiPath.Iflytek as string,
   iflytekApiKey: "",
   iflytekApiSecret: "",
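With the DEFAULT_*_URL constants deleted, every provider URL in the access store now defaults to its relative ApiPath rather than an isApp-dependent absolute URL on DEFAULT_API_HOST. A minimal sketch of the new default shape, with the enum values assumed for illustration:

// Values assumed for illustration; the real ApiPath enum lives in @/app/constant.
enum ApiPath {
  OpenAI = "/api/openai",
  Google = "/api/google",
  Anthropic = "/api/anthropic",
}

// Defaults are now plain relative paths, identical for the web build and the Tauri app.
const DEFAULT_ACCESS_STATE = {
  openaiUrl: ApiPath.OpenAI as string,
  googleUrl: ApiPath.Google as string,
  anthropicUrl: ApiPath.Anthropic as string,
  // ...the remaining providers follow the same pattern shown in the diff above
};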
@@ -12,7 +12,6 @@ import { downloadAs, readFromFile } from "../utils";
 import { showToast } from "../components/ui-lib";
 import Locale from "../locales";
 import { createSyncClient, ProviderType } from "../utils/cloud";
-import { corsPath } from "../utils/cors";

 export interface WebDavConfig {
   server: string;
@@ -26,7 +25,7 @@ export type SyncStore = GetStoreState<typeof useSyncStore>;
 const DEFAULT_SYNC_STATE = {
   provider: ProviderType.WebDAV,
   useProxy: true,
-  proxyUrl: corsPath(ApiPath.Cors),
+  proxyUrl: ApiPath.Cors as string,

   webdav: {
     endpoint: "",
app/utils.ts (25 changed lines)

@@ -2,8 +2,8 @@ import { useEffect, useState } from "react";
 import { showToast } from "./components/ui-lib";
 import Locale from "./locales";
 import { RequestMessage } from "./client/api";
-import { ServiceProvider, REQUEST_TIMEOUT_MS } from "./constant";
-import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
+import { ServiceProvider } from "./constant";
+import { fetch as tauriFetch } from "@tauri-apps/plugin-http";

 export function trimTopic(topic: string) {
   // Fix an issue where double quotes still show in the Indonesian language
@@ -292,19 +292,7 @@ export function fetch(
   options?: Record<string, unknown>,
 ): Promise<any> {
   if (window.__TAURI__) {
-    const payload = options?.body || options?.data;
-    return tauriFetch(url, {
-      ...options,
-      body:
-        payload &&
-        ({
-          type: "Text",
-          payload,
-        } as any),
-      timeout: ((options?.timeout as number) || REQUEST_TIMEOUT_MS) / 1000,
-      responseType:
-        options?.responseType == "text" ? ResponseType.Text : ResponseType.JSON,
-    } as any);
+    return tauriFetch(url, options);
   }
   return window.fetch(url, options);
 }
@@ -315,7 +303,12 @@ export function adapter(config: Record<string, unknown>) {
   const fetchUrl = params
     ? `${path}?${new URLSearchParams(params as any).toString()}`
     : path;
-  return fetch(fetchUrl as string, { ...rest, responseType: "text" });
+  if (window.__TAURI__) {
+    return tauriFetch(fetchUrl as string, rest)
+      .then((res) => res.text())
+      .then((data: any) => ({ data }));
+  }
+  return window.fetch(fetchUrl as string, rest);
 }

 export function safeLocalStorage(): {
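The wrapper shrinks because fetch from @tauri-apps/plugin-http follows the WHATWG fetch API, so the body/timeout/responseType translation that the old @tauri-apps/api/http client required is no longer needed. A minimal usage sketch of the wrapper as it stands after this hunk (the request shown is illustrative):

import { fetch } from "@/app/utils"; // tauriFetch inside the desktop app, window.fetch on the web

// Illustrative call: plain fetch options now work in both environments.
async function postJson(url: string, body: unknown) {
  const res = await fetch(url, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  return res.json();
}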
@@ -10,6 +10,7 @@ import {
   fetchEventSource,
 } from "@fortaine/fetch-event-source";
 import { prettyObject } from "./format";
+import { fetch } from "@/app/utils";

 export function compressImage(file: Blob, maxSize: number): Promise<string> {
   return new Promise((resolve, reject) => {
@@ -287,6 +288,7 @@ export function stream(
     REQUEST_TIMEOUT_MS,
   );
   fetchEventSource(chatPath, {
+    fetch: fetch as any, // using tauriFetch or window.fetch
     ...chatPayload,
     async onopen(res) {
       clearTimeout(requestTimeoutId);
@@ -1,19 +0,0 @@
-import { getClientConfig } from "../config/client";
-import { DEFAULT_API_HOST } from "../constant";
-
-export function corsPath(path: string) {
-  const baseUrl = getClientConfig()?.isApp ? `${DEFAULT_API_HOST}` : "";
-
-  if (baseUrl === "" && path === "") {
-    return "";
-  }
-  if (!path.startsWith("/")) {
-    path = "/" + path;
-  }
-
-  if (!path.endsWith("/")) {
-    path += "/";
-  }
-
-  return `${baseUrl}${path}`;
-}
package.json (11 changed lines)

@@ -13,6 +13,9 @@
     "export:dev": "concurrently -r \"yarn mask:watch\" \"cross-env BUILD_MODE=export BUILD_APP=1 next dev\"",
     "app:dev": "concurrently -r \"yarn mask:watch\" \"yarn tauri dev\"",
     "app:build": "yarn mask && yarn tauri build",
+    "ios:init": "yarn tauri ios init",
+    "ios:dev": "concurrently -r \"yarn mask:watch\" \"yarn tauri ios dev\"",
+    "ios:build": "yarn mask && yarn tauri ios build",
     "prompts": "node ./scripts/fetch-prompts.mjs",
     "prepare": "husky install",
     "proxy-dev": "sh ./scripts/init-proxy.sh && proxychains -f ./scripts/proxychains.conf yarn dev"
@@ -22,6 +25,7 @@
     "@hello-pangea/dnd": "^16.5.0",
     "@next/third-parties": "^14.1.0",
     "@svgr/webpack": "^6.5.1",
+    "@tauri-apps/plugin-http": "^2.0.0-rc.2",
     "@vercel/analytics": "^0.1.11",
     "@vercel/speed-insights": "^1.0.2",
     "axios": "^1.7.5",
@@ -31,8 +35,8 @@
     "html-to-image": "^1.11.11",
     "idb-keyval": "^6.2.1",
     "lodash-es": "^4.17.21",
-    "mermaid": "^10.6.1",
     "markdown-to-txt": "^2.0.1",
+    "mermaid": "^10.6.1",
     "nanoid": "^5.0.3",
     "next": "^14.1.1",
     "node-fetch": "^3.3.1",
@@ -52,8 +56,7 @@
     "zustand": "^4.3.8"
   },
   "devDependencies": {
-    "@tauri-apps/api": "^1.6.0",
-    "@tauri-apps/cli": "1.5.11",
+    "@tauri-apps/cli": "^2.0.0-rc.0",
     "@types/js-yaml": "4.0.9",
     "@types/lodash-es": "^4.17.12",
     "@types/node": "^20.11.30",
@@ -80,4 +83,4 @@
     "lint-staged/yaml": "^2.2.2"
   },
   "packageManager": "yarn@1.22.19"
-}
+}
src-tauri/.gitignore (vendored, 1 changed line)

@@ -1,3 +1,4 @@
 # Generated by Cargo
 # will have compiled files and executables
 /target/
+/gen/
src-tauri/Cargo.lock (generated, 4064 changed lines): diff suppressed because it is too large.
@@ -2,44 +2,55 @@
 name = "nextchat"
 version = "0.1.0"
 description = "A cross platform app for LLM ChatBot."
-authors = ["Yidadaa"]
+authors = ["GPTsMotion Tech LLC"]
 license = "mit"
-repository = ""
+repository = "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web"
 default-run = "nextchat"
 edition = "2021"
-rust-version = "1.60"
+rust-version = "1.71"

+[lib]
+name = "nextchat"
+crate-type = ["staticlib", "cdylib", "rlib"]
+
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [build-dependencies]
-tauri-build = { version = "1.5.1", features = [] }
+tauri-build = { version = "2.0.0-rc", features = [] }

 [dependencies]
 serde_json = "1.0"
 serde = { version = "1.0", features = ["derive"] }
-tauri = { version = "1.5.4", features = [ "http-all",
-  "notification-all",
-  "fs-all",
-  "clipboard-all",
-  "dialog-all",
-  "shell-open",
-  "updater",
-  "window-close",
-  "window-hide",
-  "window-maximize",
-  "window-minimize",
-  "window-set-icon",
-  "window-set-ignore-cursor-events",
-  "window-set-resizable",
-  "window-show",
-  "window-start-dragging",
-  "window-unmaximize",
-  "window-unminimize",
-] }
-tauri-plugin-window-state = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
+log = "0.4"
+tauri = { version = "2.0.0-rc.15", features = [] }
+tauri-plugin-log = "2.0.0-rc"
+tauri-plugin-clipboard-manager = "2.0.0-rc.4"
+tauri-plugin-dialog = "2.0.0-rc.7"
+tauri-plugin-fs = "2.0.0-rc.5"
+tauri-plugin-http = "2.0.0-rc.5"
+tauri-plugin-updater = "2.0.0-rc.3"
+tauri-plugin-notification = "2.0.0-rc.5"
+tauri-plugin-shell = "2.0.0-rc.3"
+tauri-plugin-window-state = "2.0.0-rc.5"
+
+[replace]
+# using this version from github fixed data_directory for webkitgtk
+"wry:0.43.1" = { git = "https://github.com/lloydzhou/wry", branch="webkitgtk-data_manager-directory" }

 # Optimize for smaller binary size
 [profile.release]
 panic = "abort" # Strip expensive panic clean-up logic
 codegen-units = 1 # Compile crates one after another so the compiler can optimize better
 lto = true # Enables link to optimizations
 opt-level = "s" # Optimize for binary size
 strip = true # Remove debug symbols

 [features]
 # this feature is used for production builds or when `devPath` points to the filesystem and the built-in dev server is disabled.
 # If you use cargo directly instead of tauri's cli you can use this feature flag to switch between tauri's `dev` and `build` modes.
 # DO NOT REMOVE!!
 custom-protocol = ["tauri/custom-protocol"]
src-tauri/capabilities/main.json (new file, 29 lines)

@@ -0,0 +1,29 @@
+{
+  "identifier": "main",
+  "description": "permissions for desktop app",
+  "local": true,
+  "windows": ["main"],
+  "permissions": [
+    "core:default",
+    "core:window:allow-start-dragging",
+    "core:window:allow-maximize",
+    "clipboard-manager:allow-write-text",
+    "fs:default",
+    "fs:allow-exists",
+    {
+      "identifier": "http:default",
+      "allow": [
+        {
+          "url": "https://*"
+        },
+        {
+          "url": "http://*"
+        }
+      ]
+    },
+    "notification:default",
+    "shell:allow-open",
+    "window-state:allow-restore-state",
+    "window-state:allow-save-window-state"
+  ]
+}
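This capability file is what authorises the frontend to call the Tauri 2 plugins added in Cargo.toml; without entries such as "http:default" (with its http/https URL scope) or "clipboard-manager:allow-write-text", the matching JavaScript calls would be denied at runtime. A small illustrative sketch of calls covered by these permissions (not code from the repository):

import { fetch } from "@tauri-apps/plugin-http"; // needs "http:default" plus the URL scope
import { writeText } from "@tauri-apps/plugin-clipboard-manager"; // needs "clipboard-manager:allow-write-text"

async function demo() {
  const res = await fetch("https://api.openai.com/v1/models"); // matched by the "https://*" entry
  console.log(res.status);
  await writeText("copied from NextChat"); // allowed by the clipboard permission
}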
src-tauri/src/lib.rs (new file, 24 lines)

@@ -0,0 +1,24 @@
+#[cfg_attr(mobile, tauri::mobile_entry_point)]
+pub fn run() {
+    tauri::Builder::default()
+        .plugin(tauri_plugin_clipboard_manager::init())
+        .plugin(tauri_plugin_dialog::init())
+        .plugin(tauri_plugin_fs::init())
+        .plugin(tauri_plugin_http::init())
+        .plugin(tauri_plugin_notification::init())
+        .plugin(tauri_plugin_shell::init())
+        .plugin(tauri_plugin_window_state::Builder::default().build())
+        .setup(|app| {
+            if cfg!(debug_assertions) {
+                app.handle().plugin(
+                    tauri_plugin_log::Builder::default()
+                        .level(log::LevelFilter::Info)
+                        .build(),
+                )?;
+                app.handle().plugin(tauri_plugin_updater::Builder::new().build())?;
+            }
+            Ok(())
+        })
+        .run(tauri::generate_context!())
+        .expect("error while running tauri application");
+}
@@ -1,9 +1,4 @@
 // Prevents additional console window on Windows in release, DO NOT REMOVE!!
 #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]

 fn main() {
-    tauri::Builder::default()
-        .plugin(tauri_plugin_window_state::Builder::default().build())
-        .run(tauri::generate_context!())
-        .expect("error while running tauri application");
+    nextchat::run()
 }
@@ -1,110 +1,30 @@
 {
   "$schema": "../node_modules/@tauri-apps/cli/schema.json",
+  "productName": "NextChat",
+  "mainBinaryName": "next-chat",
+  "identifier": "com.yida.chatgpt.next.web",
+  "version": "2.15.4",
   "build": {
     "beforeBuildCommand": "yarn export",
     "beforeDevCommand": "yarn export:dev",
-    "devPath": "http://localhost:3000",
-    "distDir": "../out",
-    "withGlobalTauri": true
+    "devUrl": "http://localhost:3000",
+    "frontendDist": "../out"
   },
-  "package": {
-    "productName": "NextChat",
-    "version": "2.15.2"
-  },
-  "tauri": {
-    "allowlist": {
-      "all": false,
-      "shell": {
-        "all": false,
-        "open": true
-      },
-      "dialog": {
-        "all": true,
-        "ask": true,
-        "confirm": true,
-        "message": true,
-        "open": true,
-        "save": true
-      },
-      "clipboard": {
-        "all": true,
-        "writeText": true,
-        "readText": true
-      },
-      "window": {
-        "all": false,
-        "close": true,
-        "hide": true,
-        "maximize": true,
-        "minimize": true,
-        "setIcon": true,
-        "setIgnoreCursorEvents": true,
-        "setResizable": true,
-        "show": true,
-        "startDragging": true,
-        "unmaximize": true,
-        "unminimize": true
-      },
-      "fs": {
-        "all": true
-      },
-      "notification": {
-        "all": true
-      },
-      "http": {
-        "all": true,
-        "request": true,
-        "scope": ["https://*", "http://*"]
-      }
-    },
-    "bundle": {
-      "active": true,
-      "category": "DeveloperTool",
-      "copyright": "2023, Zhang Yifei All Rights Reserved.",
-      "deb": {
-        "depends": []
-      },
-      "externalBin": [],
-      "icon": [
-        "icons/32x32.png",
-        "icons/128x128.png",
-        "icons/128x128@2x.png",
-        "icons/icon.icns",
-        "icons/icon.ico"
-      ],
-      "identifier": "com.yida.chatgpt.next.web",
-      "longDescription": "NextChat is a cross-platform ChatGPT client, including Web/Win/Linux/OSX/PWA.",
-      "macOS": {
-        "entitlements": null,
-        "exceptionDomain": "",
-        "frameworks": [],
-        "providerShortName": null,
-        "signingIdentity": null
-      },
-      "resources": [],
-      "shortDescription": "NextChat App",
-      "targets": "all",
-      "windows": {
-        "certificateThumbprint": null,
-        "digestAlgorithm": "sha256",
-        "timestampUrl": ""
-      }
-    },
-    "security": {
-      "csp": null,
-      "dangerousUseHttpScheme": true
-    },
+  "plugins": {
     "updater": {
-      "active": true,
-      "endpoints": [
-        "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
-      ],
-      "dialog": false,
-      "windows": {
-        "installMode": "passive"
-      },
-      "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IERFNDE4MENFM0Y1RTZBOTQKUldTVWFsNC96b0JCM3RqM2NmMnlFTmxIaStRaEJrTHNOU2VqRVlIV1hwVURoWUdVdEc1eDcxVEYK"
-    },
+      "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IERFNDE4MENFM0Y1RTZBOTQKUldTVWFsNC96b0JCM3RqM2NmMnlFTmxIaStRaEJrTHNOU2VqRVlIV1hwVURoWUdVdEc1eDcxVEYK",
+      "endpoints": [
+        "https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web/releases/latest/download/latest.json"
+      ]
+    }
+  },
+  "app": {
+    "withGlobalTauri": true,
     "windows": [
       {
         "fullscreen": false,
@@ -115,6 +35,28 @@
         "hiddenTitle": true,
         "titleBarStyle": "Overlay"
       }
-    ]
+    ],
+    "security": {
+      "csp": null
+    }
+  },
+  "bundle": {
+    "active": true,
+    "targets": "all",
+    "category": "Utility",
+    "copyright": "Copyright © 2024 GPTsMotion Tech LLC All Rights Reserved.",
+    "shortDescription": "NextChat App",
+    "longDescription": "Experience NextChat: Local-first, seamless, and designed for the ultimate chat experience",
+    "macOS": {
+      "signingIdentity": "-"
+    },
+    "icon": [
+      "icons/32x32.png",
+      "icons/128x128.png",
+      "icons/128x128@2x.png",
+      "icons/icon.icns",
+      "icons/icon.ico"
+    ],
+    "createUpdaterArtifacts": "v1Compatible"
+  }
 }