merge upstream

sijinhui 2023-12-27 22:47:01 +08:00
commit 19f4ef9194
32 changed files with 1460 additions and 296 deletions

View File

@@ -8,6 +8,16 @@ CODE=your-password
# You can start service behind a proxy
PROXY_URL=http://localhost:7890

# (optional)
# Default: Empty
# Google Gemini Pro API key, set if you want to use Google Gemini Pro API.
GOOGLE_API_KEY=

# (optional)
# Default: https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent
# Google Gemini Pro API url, set if you want to customize Google Gemini Pro API url.
GOOGLE_URL=

# Override openai api request base url. (optional)
# Default: https://api.openai.com
# Examples: http://your-openai-proxy.com
@@ -36,3 +46,4 @@ ENABLE_BALANCE_QUERY=
# Default: Empty
# If you want to disable parse settings from url, set this value to 1.
DISABLE_FAST_LINK=

View File

@@ -24,6 +24,7 @@ RUN yarn install
FROM base AS builder

ENV OPENAI_API_KEY=""
ENV GOOGLE_API_KEY=""
ENV CODE=""

WORKDIR /app
@@ -39,6 +40,7 @@ RUN apk add proxychains-ng

ENV PROXY_URL=""
ENV OPENAI_API_KEY=""
ENV GOOGLE_API_KEY=""
ENV CODE=""

COPY --from=builder /app/public ./public
@@ -52,22 +54,22 @@ EXPOSE 3000
ENV KEEP_ALIVE_TIMEOUT=30

CMD if [ -n "$PROXY_URL" ]; then \
    export HOSTNAME="127.0.0.1"; \
    protocol=$(echo $PROXY_URL | cut -d: -f1); \
    host=$(echo $PROXY_URL | cut -d/ -f3 | cut -d: -f1); \
    port=$(echo $PROXY_URL | cut -d: -f3); \
    conf=/etc/proxychains.conf; \
    echo "strict_chain" > $conf; \
    echo "proxy_dns" >> $conf; \
    echo "remote_dns_subnet 224" >> $conf; \
    echo "tcp_read_time_out 15000" >> $conf; \
    echo "tcp_connect_time_out 8000" >> $conf; \
    echo "localnet 127.0.0.0/255.0.0.0" >> $conf; \
    echo "localnet ::1/128" >> $conf; \
    echo "[ProxyList]" >> $conf; \
    echo "$protocol $host $port" >> $conf; \
    cat /etc/proxychains.conf; \
    proxychains -f $conf node server.js; \
    else \
    node server.js; \
    fi
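
For a concrete illustration: with PROXY_URL=http://localhost:7890, the cut pipeline above yields protocol http, host localhost, and port 7890, so the generated [ProxyList] entry is `http localhost 7890`. A minimal TypeScript sketch of the same split (illustrative only; not part of the image):

```ts
// Mirrors the shell `cut` parsing in the Dockerfile CMD above.
const PROXY_URL = "http://localhost:7890";
const protocol = PROXY_URL.split(":")[0];           // cut -d: -f1            -> "http"
const host = PROXY_URL.split("/")[2].split(":")[0]; // cut -d/ -f3 | -d: -f1  -> "localhost"
const port = PROXY_URL.split(":")[2];               // cut -d: -f3            -> "7890"
console.log(`${protocol} ${host} ${port}`);         // -> "http localhost 7890"
```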

View File

@@ -1,22 +1,22 @@
<div align="center">
<img src="./docs/images/icon.svg" alt="icon"/>

<h1 align="center">NextChat (ChatGPT Next Web)</h1>

English / [简体中文](./README_CN.md)

One-Click to get a well-designed cross-platform ChatGPT web UI, with GPT3, GPT4 & Gemini Pro support.

One-click free deployment of your cross-platform private ChatGPT application, with GPT3, GPT4 & Gemini Pro support.

[![Web][Web-image]][web-url]
[![Windows][Windows-image]][download-url]
[![MacOS][MacOS-image]][download-url]
[![Linux][Linux-image]][download-url]

[Web App](https://app.nextchat.dev/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Twitter](https://twitter.com/mortiest_ricky) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa)

[Web](https://app.nextchat.dev/) / [Client](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Feedback](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ Group](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724) / [Sponsor the developer](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg)

[web-url]: https://chatgpt.nextweb.fun
[download-url]: https://github.com/Yidadaa/ChatGPT-Next-Web/releases
@@ -25,7 +25,7 @@ One-Click to get well-designed cross-platform ChatGPT web UI.
[MacOS-image]: https://img.shields.io/badge/-MacOS-black?logo=apple
[Linux-image]: https://img.shields.io/badge/-Linux-333?logo=ubuntu

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&env=GOOGLE_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web)

[![Deploy on Zeabur](https://zeabur.com/button.svg)](https://zeabur.com/templates/ZBUEFA)
@@ -191,6 +191,14 @@ Azure Api Key.
Azure API version, find it at [Azure Documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions).

### `GOOGLE_API_KEY` (optional)

Google Gemini Pro API key.

### `GOOGLE_URL` (optional)

Google Gemini Pro API URL.

### `HIDE_USER_API_KEY` (optional)

> Default: Empty

View File

@@ -1,9 +1,9 @@
<div align="center">
<img src="./docs/images/icon.svg" alt="preview"/>

<h1 align="center">NextChat</h1>

One-click free deployment of your private ChatGPT web application, with GPT3, GPT4 & Gemini Pro support.

[Demo](https://chat-gpt-next-web.vercel.app/) / [Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Join Discord](https://discord.gg/zrhvHCr79N) / [QQ Group](https://user-images.githubusercontent.com/16968934/228190818-7dd00845-e9b9-4363-97e5-44c507ac76da.jpeg) / [Sponsor the developer](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) / [Donate](#捐赠-donate-usdt)
@@ -21,7 +21,7 @@
1. Get your [OpenAI API Key](https://platform.openai.com/account/api-keys) ready;
2. Click the button on the right to start deploying:
   [![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FYidadaa%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&env=GOOGLE_API_KEY&project-name=chatgpt-next-web&repository-name=ChatGPT-Next-Web); just sign in with your GitHub account, and remember to fill in the API Key and the [page access password](#配置页面访问密码) CODE on the environment variables page;
3. After deployment completes, you can start using it;
4. (Optional) [Bind a custom domain](https://vercel.com/docs/concepts/projects/domains/add-a-domain): the DNS of the domain assigned by Vercel is polluted in some regions, and binding a custom domain enables direct access.
@@ -106,6 +106,14 @@ Azure API key.
Azure API version, which you can find here: [Azure Documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions).

### `GOOGLE_API_KEY` (optional)

Google Gemini Pro API key.

### `GOOGLE_URL` (optional)

Google Gemini Pro API URL.

### `HIDE_USER_API_KEY` (optional)

Set this environment variable to 1 if you do not want users to enter their own API Key.

View File

@@ -1,7 +1,7 @@
import { NextRequest } from "next/server";
import { getServerSideConfig } from "../config/server";
import md5 from "spark-md5";
import { ACCESS_CODE_PREFIX, ModelProvider } from "../constant";

export function getIP(req: NextRequest) {
  let ip = req.headers.get("x-real-ip") ?? req.ip;
@@ -17,15 +17,19 @@ export function getIP(req: NextRequest) {
function parseApiKey(bearToken: string) {
  const token = bearToken.trim().replaceAll("Bearer ", "").trim();
  const isApiKey = !token.startsWith(ACCESS_CODE_PREFIX);

  return {
    accessCode: isApiKey ? "" : token.slice(ACCESS_CODE_PREFIX.length),
    apiKey: isApiKey ? token : "",
  };
}

export function auth(
  req: NextRequest,
  modelProvider: ModelProvider,
  isAzure?: boolean,
) {
  const authToken = req.headers.get("Authorization") ?? "";

  // check if it is openai api key or user token
@@ -50,22 +54,23 @@ export function auth(req: NextRequest, isAzure?: boolean) {
  if (serverConfig.hideUserApiKey && !!apiKey) {
    return {
      error: true,
      msg: "you are not allowed to access with your own api key",
    };
  }

  // if user does not provide an api key, inject system api key
  if (!apiKey) {
    const serverConfig = getServerSideConfig();

    const systemApiKey =
      modelProvider === ModelProvider.GeminiPro
        ? serverConfig.googleApiKey
        : isAzure
        ? serverConfig.azureApiKey
        : serverConfig.apiKey;

    if (systemApiKey) {
      console.log("[Auth] use system api key");
      req.headers.set("Authorization", `Bearer ${systemApiKey}`);
    } else {
      console.log("[Auth] admin did not provide an api key");
    }
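
For orientation, a short sketch of how the reworked signature is used by the API routes in this commit (both calls appear verbatim in the route diffs below):

```ts
// Gemini route: the provider argument selects serverConfig.googleApiKey.
const googleAuth = auth(req, ModelProvider.GeminiPro);

// OpenAI/Azure route: the provider stays GPT and the optional flag picks the Azure key.
const openaiAuth = auth(req, ModelProvider.GPT, isAzure);

if (googleAuth.error) {
  return NextResponse.json(googleAuth, { status: 401 });
}
```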

View File

@@ -1,6 +1,6 @@
import { NextRequest, NextResponse } from "next/server";
import { getServerSideConfig } from "../config/server";
import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant";
import { collectModelTable } from "../utils/model";
import { makeAzurePath } from "../azure";
@@ -14,8 +14,23 @@ export async function requestOpenai(
) {
  const controller = new AbortController();

  var authValue,
    authHeaderName = "";
  if (isAzure) {
    authValue =
      req.headers
        .get("Authorization")
        ?.trim()
        .replaceAll("Bearer ", "")
        .trim() ?? "";

    authHeaderName = "api-key";
  } else {
    authValue = req.headers.get("Authorization") ?? "";
    authHeaderName = "Authorization";
  }
  // const authValue = req.headers.get("Authorization") ?? "";
  // const authHeaderName = isAzure ? "api-key" : "Authorization";

  let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
    "/api/openai/",
@@ -100,6 +115,12 @@ export async function requestOpenai(
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    // The latest version of the OpenAI API forced the content-encoding to be "br" in json response
    // So if the streaming is disabled, we need to remove the content-encoding header
    // Because Vercel uses gzip to compress the response, if we don't remove the content-encoding header
    // The browser will try to decode the response with brotli and fail
    newHeaders.delete("content-encoding");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,

View File

@@ -0,0 +1,121 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { getServerSideConfig } from "@/app/config/server";
import { GEMINI_BASE_URL, Google, ModelProvider } from "@/app/constant";

async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[Google Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const controller = new AbortController();

  const serverConfig = getServerSideConfig();

  let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  let path = `${req.nextUrl.pathname}`.replaceAll("/api/google/", "");

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const authResult = auth(req, ModelProvider.GeminiPro);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  const bearToken = req.headers.get("Authorization") ?? "";
  const token = bearToken.trim().replaceAll("Bearer ", "").trim();

  const key = token ? token : serverConfig.googleApiKey;
  if (!key) {
    return NextResponse.json(
      {
        error: true,
        message: `missing GOOGLE_API_KEY in server env vars`,
      },
      {
        status: 401,
      },
    );
  }

  const fetchUrl = `${baseUrl}/${path}?key=${key}`;

  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      "Cache-Control": "no-store",
    },
    method: req.method,
    body: req.body,
    // to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}

export const GET = handle;
export const POST = handle;

export const runtime = "edge";
export const preferredRegion = [
  "arn1",
  "bom1",
  "cdg1",
  "cle1",
  "cpt1",
  "dub1",
  "fra1",
  "gru1",
  "hnd1",
  "iad1",
  "icn1",
  "kix1",
  "lhr1",
  "pdx1",
  "sfo1",
  "sin1",
  "syd1",
];
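
A hedged usage sketch for this route: everything after /api/google/ is forwarded to the Google endpoint with ?key= appended, so a client request could look like the following (the payload shape mirrors the generateContent request built in app/client/platforms/google.ts below):

```ts
// Hypothetical browser-side call; the route injects the server-side key when the
// Authorization header does not carry a user-supplied API key.
const res = await fetch("/api/google/v1beta/models/gemini-pro:generateContent", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    contents: [{ role: "user", parts: [{ text: "Hello" }] }],
  }),
});
const json = await res.json();
```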

View File

@@ -1,6 +1,11 @@
import { type OpenAIListModelResponse } from "@/app/client/platforms/openai";
import { getServerSideConfig } from "@/app/config/server";
import {
  ModelProvider,
  OpenaiPath,
  AZURE_PATH,
  AZURE_MODELS,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth, getIP } from "../../auth";
@@ -54,6 +59,13 @@ async function handle(
      },
    );
  }

  // const authResult = auth(req, ModelProvider.GPT);
  // if (authResult.error) {
  //   return NextResponse.json(authResult, {
  //     status: 401,
  //   });
  // }

  let cloneBody, jsonBody;
  try {
@@ -96,7 +108,7 @@ async function handle(
  const isAzure = AZURE_MODELS.includes(jsonBody?.model as string);
  // console.log("[Models]", jsonBody?.model);

  const authResult = auth(req, ModelProvider.GPT, isAzure);
  // if (authResult.error) {
  //   return NextResponse.json(authResult, {
  //     status: 401,

View File

@@ -3,11 +3,12 @@ import {
  ACCESS_CODE_PREFIX,
  Azure,
  AZURE_MODELS,
  ModelProvider,
  ServiceProvider,
} from "../constant";
import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
import { ChatGPTApi } from "./platforms/openai";
import { GeminiProApi } from "./platforms/google";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
@@ -16,6 +17,7 @@ export const Models = [
  "gpt-4-0613",
  "gpt-4-32k",
  "midjourney",
  "gemini-pro",
] as const;

export type ChatModel = ModelType;
@@ -52,6 +54,13 @@ export interface LLMModel {
  name: string;
  describe: string;
  available: boolean;
  provider: LLMModelProvider;
}

export interface LLMModelProvider {
  id: string;
  providerName: string;
  providerType: string;
}

export abstract class LLMApi {
@@ -84,7 +93,11 @@ interface ChatProvider {
export class ClientApi {
  public llm: LLMApi;

  constructor(provider: ModelProvider = ModelProvider.GPT) {
    if (provider === ModelProvider.GeminiPro) {
      this.llm = new GeminiProApi();
      return;
    }
    this.llm = new ChatGPTApi();
  }
@@ -104,7 +117,7 @@ export class ClientApi {
      {
        from: "human",
        value:
          "Share from [NextChat]: https://github.com/Yidadaa/ChatGPT-Next-Web",
      },
    ]);
    // Note to downstream developers: for the sake of open-source LLM development, please do not modify the message above; it is used for later data cleaning.
@@ -134,19 +147,22 @@ export class ClientApi {
  }
}

export function getHeaders(isAzure?: boolean) {
  const accessStore = useAccessStore.getState();
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    "x-requested-with": "XMLHttpRequest",
    Accept: "application/json",
  };

  const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
  const isGoogle = modelConfig.model === "gemini-pro";
  // const isAzure = accessStore.provider === ServiceProvider.Azure;
  const authHeader = isAzure ? "api-key" : "Authorization";
  const apiKey = isGoogle
    ? accessStore.googleApiKey
    : isAzure
    ? accessStore.azureApiKey
    : accessStore.openaiApiKey;

  const makeBearer = (s: string) => `${isAzure ? "" : "Bearer "}${s.trim()}`;
  const validString = (x: string) => x && x.length > 0;

View File

@@ -0,0 +1,222 @@
import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
import {
  EventStreamContentType,
  fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import { getClientConfig } from "@/app/config/client";
import Locale from "../../locales";
import { getServerSideConfig } from "@/app/config/server";

export class GeminiProApi implements LLMApi {
  extractMessage(res: any) {
    console.log("[Response] gemini-pro response: ", res);

    return (
      res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
      res?.error?.message ||
      ""
    );
  }
  async chat(options: ChatOptions): Promise<void> {
    const messages = options.messages.map((v) => ({
      role: v.role.replace("assistant", "model").replace("system", "user"),
      parts: [{ text: v.content }],
    }));

    // google requires that role in neighboring messages must not be the same
    for (let i = 0; i < messages.length - 1; ) {
      // Check if the current and next item share the same role
      if (messages[i].role === messages[i + 1].role) {
        // Concatenate the 'parts' of the current and next item
        messages[i].parts = messages[i].parts.concat(messages[i + 1].parts);
        // Remove the next item
        messages.splice(i + 1, 1);
      } else {
        // Move to the next item
        i++;
      }
    }
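    // Illustrative example of the mapping + merge above (values are not from the
    // commit): roles ["system", "user", "assistant", "assistant"] map to
    // ["user", "user", "model", "model"], which then merge into two messages,
    // one "user" and one "model", each carrying the concatenated parts.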

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
      },
    };

    const requestPayload = {
      contents: messages,
      generationConfig: {
        // stopSequences: [
        //   "Title"
        // ],
        temperature: modelConfig.temperature,
        maxOutputTokens: modelConfig.max_tokens,
        topP: modelConfig.top_p,
        // "topK": modelConfig.top_k,
      },
    };

    console.log("[Request] google payload: ", requestPayload);

    // todo: support stream later
    const shouldStream = false;
    const controller = new AbortController();
    options.onController?.(controller);
    try {
      const chatPath = this.path(Google.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );
      if (shouldStream) {
        let responseText = "";
        let remainText = "";
        let finished = false;

        // animate the response to make it look smooth
        function animateResponseText() {
          if (finished || controller.signal.aborted) {
            responseText += remainText;
            console.log("[Response Animation] finished");
            return;
          }

          if (remainText.length > 0) {
            const fetchCount = Math.max(1, Math.round(remainText.length / 60));
            const fetchText = remainText.slice(0, fetchCount);
            responseText += fetchText;
            remainText = remainText.slice(fetchCount);
            options.onUpdate?.(responseText, fetchText);
          }

          requestAnimationFrame(animateResponseText);
        }

        // start animation
        animateResponseText();

        const finish = () => {
          if (!finished) {
            finished = true;
            options.onFinish(responseText + remainText);
          }
        };

        controller.signal.onabort = finish;

        fetchEventSource(chatPath, {
          ...chatPayload,
          async onopen(res) {
            clearTimeout(requestTimeoutId);
            const contentType = res.headers.get("content-type");
            console.log(
              "[Google] request response content type: ",
              contentType,
            );

            if (contentType?.startsWith("text/plain")) {
              responseText = await res.clone().text();
              return finish();
            }

            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [responseText];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }

              if (extraInfo) {
                responseTexts.push(extraInfo);
              }

              responseText = responseTexts.join("\n\n");

              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || finished) {
              return finish();
            }
            const text = msg.data;
            try {
              const json = JSON.parse(text) as {
                choices: Array<{
                  delta: {
                    content: string;
                  };
                }>;
              };
              const delta = json.choices[0]?.delta?.content;
              if (delta) {
                remainText += delta;
              }
            } catch (e) {
              console.error("[Request] parse error", text);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
            throw e;
          },
          openWhenHidden: true,
        });
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();

        if (resJson?.promptFeedback?.blockReason) {
          // being blocked
          options.onError?.(
            new Error(
              "Message is being blocked for reason: " +
                resJson.promptFeedback.blockReason,
            ),
          );
        }
        const message = this.extractMessage(resJson);
        options.onFinish(message);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  usage(): Promise<LLMUsage> {
    throw new Error("Method not implemented.");
  }
  async models(): Promise<LLMModel[]> {
    return [];
  }
  path(path: string): string {
    return "/api/google/" + path;
  }
}
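
For reference, a minimal sketch of driving the class above directly; the option names (messages, config, onFinish, onError) follow their usage in chat() above, and other LLMConfig fields are elided:

```ts
const gemini = new GeminiProApi();
await gemini.chat({
  messages: [{ role: "user", content: "Hello" }],
  config: { model: "gemini-pro" },
  onFinish: (message) => console.log("[Gemini] done:", message),
  onError: (e) => console.error("[Gemini] failed:", e),
});
```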

View File

@@ -333,6 +333,11 @@ export class ChatGPTApi implements LLMApi {
    return chatModels.map((m) => ({
      name: m.id,
      available: true,
      provider: {
        id: "openai",
        providerName: "OpenAI",
        providerType: "openai",
      },
      describe: "",
    }));
  }

View File

@@ -64,6 +64,17 @@ export function AuthPage() {
              );
            }}
          />
          <input
            className={styles["auth-input"]}
            type="password"
            placeholder={Locale.Settings.Access.Google.ApiKey.Placeholder}
            value={accessStore.googleApiKey}
            onChange={(e) => {
              accessStore.update(
                (access) => (access.googleApiKey = e.currentTarget.value),
              );
            }}
          />
        </>
      ) : null}

View File

@@ -10,7 +10,10 @@ import BotIcon from "../icons/bot.svg";
import BlackBotIcon from "../icons/black-bot.svg";

export function getEmojiUrl(unified: string, style: EmojiStyle) {
  // Whoever owns this Content Delivery Network (CDN), I am using your CDN to serve emojis
  // Old CDN broken, so I had to switch to this one
  // Author: https://github.com/H0llyW00dzZ
  return `https://cdn.jsdelivr.net/npm/emoji-datasource-apple/img/${style}/64/${unified}.png`;
}

export function AvatarPicker(props: {

View File

@@ -29,10 +29,11 @@ import NextImage from "next/image";
import { toBlob, toPng } from "html-to-image";

import { DEFAULT_MASK_AVATAR } from "../store/mask";
import { prettyObject } from "../utils/format";
import { EXPORT_MESSAGE_CLASS_NAME, ModelProvider } from "../constant";
import { getClientConfig } from "../config/client";
import { ClientApi } from "../client/api";

const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
  loading: () => <LoadingIcon />,
@@ -301,10 +302,17 @@ export function PreviewActions(props: {
}) {
  const [loading, setLoading] = useState(false);
  const [shouldExport, setShouldExport] = useState(false);
  const config = useAppConfig();
  const onRenderMsgs = (msgs: ChatMessage[]) => {
    setShouldExport(false);

    var api: ClientApi;
    if (config.modelConfig.model === "gemini-pro") {
      api = new ClientApi(ModelProvider.GeminiPro);
    } else {
      api = new ClientApi(ModelProvider.GPT);
    }

    api
      .share(msgs)
      .then((res) => {
@@ -530,7 +538,7 @@ export function ImagePreviewer(props: {
          </div>
          <div>
            <div className={styles["main-title"]}>NextChat</div>
            <div className={styles["sub-title"]}>
              github.com/Yidadaa/ChatGPT-Next-Web
            </div>

View File

@@ -12,7 +12,7 @@ import LoadingIcon from "../icons/three-dots.svg";
import { getCSSVar, useMobileScreen } from "../utils";

import dynamic from "next/dynamic";
import { ModelProvider, Path, SlotID } from "../constant";
import { ErrorBoundary } from "./error";

import { getISOLang, getLang } from "../locales";
@@ -27,7 +27,7 @@ import { SideBar } from "./sidebar";
import { useAppConfig } from "@/app/store";
import { AuthPage } from "./auth";
import { getClientConfig } from "../config/client";
import { ClientApi } from "../client/api";
import { useAccessStore } from "../store";

export function Loading(props: { noLogo?: boolean }) {
@@ -173,6 +173,12 @@ function Screen() {
export function useLoadData() {
  const config = useAppConfig();

  var api: ClientApi;
  if (config.modelConfig.model === "gemini-pro") {
    api = new ClientApi(ModelProvider.GeminiPro);
  } else {
    api = new ClientApi(ModelProvider.GPT);
  }

  useEffect(() => {
    (async () => {
      const models = await api.llm.models();

View File

@@ -29,7 +29,7 @@ export function ModelConfigList(props: {
          .filter((v) => v.available)
          .map((v, i) => (
            <option value={v.name} key={i}>
              {v.displayName}({v.provider?.providerName})
            </option>
          ))}
      </Select>
@@ -91,79 +91,84 @@ export function ModelConfigList(props: {
          }
        ></input>
      </ListItem>

      {props.modelConfig.model === "gemini-pro" ? null : (
        <>
          <ListItem
            title={Locale.Settings.PresencePenalty.Title}
            subTitle={Locale.Settings.PresencePenalty.SubTitle}
          >
            <InputRange
              value={props.modelConfig.presence_penalty?.toFixed(1)}
              min="-2"
              max="2"
              step="0.1"
              onChange={(e) => {
                props.updateConfig(
                  (config) =>
                    (config.presence_penalty =
                      ModalConfigValidator.presence_penalty(
                        e.currentTarget.valueAsNumber,
                      )),
                );
              }}
            ></InputRange>
          </ListItem>

          <ListItem
            title={Locale.Settings.FrequencyPenalty.Title}
            subTitle={Locale.Settings.FrequencyPenalty.SubTitle}
          >
            <InputRange
              value={props.modelConfig.frequency_penalty?.toFixed(1)}
              min="-2"
              max="2"
              step="0.1"
              onChange={(e) => {
                props.updateConfig(
                  (config) =>
                    (config.frequency_penalty =
                      ModalConfigValidator.frequency_penalty(
                        e.currentTarget.valueAsNumber,
                      )),
                );
              }}
            ></InputRange>
          </ListItem>

          <ListItem
            title={Locale.Settings.InjectSystemPrompts.Title}
            subTitle={Locale.Settings.InjectSystemPrompts.SubTitle}
          >
            <input
              type="checkbox"
              checked={props.modelConfig.enableInjectSystemPrompts}
              onChange={(e) =>
                props.updateConfig(
                  (config) =>
                    (config.enableInjectSystemPrompts =
                      e.currentTarget.checked),
                )
              }
            ></input>
          </ListItem>

          <ListItem
            title={Locale.Settings.InputTemplate.Title}
            subTitle={Locale.Settings.InputTemplate.SubTitle}
          >
            <input
              type="text"
              value={props.modelConfig.template}
              onChange={(e) =>
                props.updateConfig(
                  (config) => (config.template = e.currentTarget.value),
                )
              }
            ></input>
          </ListItem>
        </>
      )}

      <ListItem
        title={Locale.Settings.HistoryCount.Title}
        subTitle={Locale.Settings.HistoryCount.SubTitle}

View File

@@ -52,6 +52,7 @@ import { copyToClipboard } from "../utils";
import Link from "next/link";
import {
  Azure,
  Google,
  OPENAI_BASE_URL,
  Path,
  RELEASE_URL,
@@ -583,6 +584,7 @@ export function Settings() {
  const accessStore = useAccessStore();

  const shouldHideBalanceQuery = useMemo(() => {
    const isOpenAiUrl = accessStore.openaiUrl.includes(OPENAI_BASE_URL);

    return (
      accessStore.hideBalanceQuery ||
      isOpenAiUrl ||
@@ -959,109 +961,168 @@ export function Settings() {
              {/* </Select>*/}
              {/* </ListItem>*/}
              {/* {accessStore.provider === "OpenAI" ? (*/}
              {/*   <>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.OpenAI.Endpoint.Title}*/}
              {/*       subTitle={*/}
              {/*         Locale.Settings.Access.OpenAI.Endpoint.SubTitle*/}
              {/*       }*/}
              {/*     >*/}
              {/*       <input*/}
              {/*         type="text"*/}
              {/*         value={accessStore.openaiUrl}*/}
              {/*         placeholder={OPENAI_BASE_URL}*/}
              {/*         onChange={(e) =>*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.openaiUrl = e.currentTarget.value),*/}
              {/*           )*/}
              {/*         }*/}
              {/*       ></input>*/}
              {/*     </ListItem>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.OpenAI.ApiKey.Title}*/}
              {/*       subTitle={Locale.Settings.Access.OpenAI.ApiKey.SubTitle}*/}
              {/*     >*/}
              {/*       <PasswordInput*/}
              {/*         value={accessStore.openaiApiKey}*/}
              {/*         type="text"*/}
              {/*         placeholder={*/}
              {/*           Locale.Settings.Access.OpenAI.ApiKey.Placeholder*/}
              {/*         }*/}
              {/*         onChange={(e) => {*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.openaiApiKey = e.currentTarget.value),*/}
              {/*           );*/}
              {/*         }}*/}
              {/*       />*/}
              {/*     </ListItem>*/}
              {/*   </>*/}
              {/* ) : accessStore.provider === "Azure" ? (*/}
              {/*   <>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.Azure.Endpoint.Title}*/}
              {/*       subTitle={*/}
              {/*         Locale.Settings.Access.Azure.Endpoint.SubTitle +*/}
              {/*         Azure.ExampleEndpoint*/}
              {/*       }*/}
              {/*     >*/}
              {/*       <input*/}
              {/*         type="text"*/}
              {/*         value={accessStore.azureUrl}*/}
              {/*         placeholder={Azure.ExampleEndpoint}*/}
              {/*         onChange={(e) =>*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.azureUrl = e.currentTarget.value),*/}
              {/*           )*/}
              {/*         }*/}
              {/*       ></input>*/}
              {/*     </ListItem>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.Azure.ApiKey.Title}*/}
              {/*       subTitle={Locale.Settings.Access.Azure.ApiKey.SubTitle}*/}
              {/*     >*/}
              {/*       <PasswordInput*/}
              {/*         value={accessStore.azureApiKey}*/}
              {/*         type="text"*/}
              {/*         placeholder={*/}
              {/*           Locale.Settings.Access.Azure.ApiKey.Placeholder*/}
              {/*         }*/}
              {/*         onChange={(e) => {*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.azureApiKey = e.currentTarget.value),*/}
              {/*           );*/}
              {/*         }}*/}
              {/*       />*/}
              {/*     </ListItem>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.Azure.ApiVerion.Title}*/}
              {/*       subTitle={*/}
              {/*         Locale.Settings.Access.Azure.ApiVerion.SubTitle*/}
              {/*       }*/}
              {/*     >*/}
              {/*       <input*/}
              {/*         type="text"*/}
              {/*         value={accessStore.azureApiVersion}*/}
              {/*         placeholder="2023-08-01-preview"*/}
              {/*         onChange={(e) =>*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.azureApiVersion =*/}
              {/*                 e.currentTarget.value),*/}
              {/*           )*/}
              {/*         }*/}
              {/*       ></input>*/}
              {/*     </ListItem>*/}
              {/*   </>*/}
              {/* ) : accessStore.provider === "Google" ? (*/}
              {/*   <>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.Google.Endpoint.Title}*/}
              {/*       subTitle={*/}
              {/*         Locale.Settings.Access.Google.Endpoint.SubTitle +*/}
              {/*         Google.ExampleEndpoint*/}
              {/*       }*/}
              {/*     >*/}
              {/*       <input*/}
              {/*         type="text"*/}
              {/*         value={accessStore.googleUrl}*/}
              {/*         placeholder={Google.ExampleEndpoint}*/}
              {/*         onChange={(e) =>*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.googleUrl = e.currentTarget.value),*/}
              {/*           )*/}
              {/*         }*/}
              {/*       ></input>*/}
              {/*     </ListItem>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.Azure.ApiKey.Title}*/}
              {/*       subTitle={Locale.Settings.Access.Azure.ApiKey.SubTitle}*/}
              {/*     >*/}
              {/*       <PasswordInput*/}
              {/*         value={accessStore.googleApiKey}*/}
              {/*         type="text"*/}
              {/*         placeholder={*/}
              {/*           Locale.Settings.Access.Google.ApiKey.Placeholder*/}
              {/*         }*/}
              {/*         onChange={(e) => {*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.googleApiKey = e.currentTarget.value),*/}
              {/*           );*/}
              {/*         }}*/}
              {/*       />*/}
              {/*     </ListItem>*/}
              {/*     <ListItem*/}
              {/*       title={Locale.Settings.Access.Google.ApiVerion.Title}*/}
              {/*       subTitle={*/}
              {/*         Locale.Settings.Access.Google.ApiVerion.SubTitle*/}
              {/*       }*/}
              {/*     >*/}
              {/*       <input*/}
              {/*         type="text"*/}
              {/*         value={accessStore.googleApiVersion}*/}
              {/*         placeholder="2023-08-01-preview"*/}
              {/*         onChange={(e) =>*/}
              {/*           accessStore.update(*/}
              {/*             (access) =>*/}
              {/*               (access.googleApiVersion =*/}
              {/*                 e.currentTarget.value),*/}
              {/*           )*/}
              {/*         }*/}
              {/*       ></input>*/}
              {/*     </ListItem>*/}
              {/*   </>*/}
              {/* ) : null}*/}
              {/* </>*/}
              {/* )}*/}
              {/* </>*/}
              {/*)}*/}
              {/*{!shouldHideBalanceQuery && !clientConfig?.isApp ? (*/}
              {/*  <ListItem*/}

View File

@@ -27,6 +27,10 @@ declare global {
      AZURE_URL?: string; // https://{azure-url}/openai/deployments/{deploy-name}
      AZURE_API_KEY?: string;
      AZURE_API_VERSION?: string;

      // google only
      GOOGLE_API_KEY?: string;
      GOOGLE_URL?: string;
    }
  }
}
@@ -61,6 +65,8 @@ export const getServerSideConfig = () => {
      .join(",");
  }

  // const isAzure = !!process.env.AZURE_URL;
  const isGoogle = !!process.env.GOOGLE_API_KEY;

  // A helper is needed to decide whether the model in a request belongs to Microsoft.
  // Current logic: the gpt-4-32k model is Microsoft's (Azure); the others are not.
  // const isAzure = !!process.env.AZURE_URL;
@@ -85,6 +91,10 @@ export const getServerSideConfig = () => {
    azureApiKey: process.env.AZURE_API_KEY ?? "",
    azureApiVersion: process.env.AZURE_API_VERSION ?? "",

    isGoogle,
    googleApiKey: process.env.GOOGLE_API_KEY,
    googleUrl: process.env.GOOGLE_URL,

    needCode: ACCESS_CODES.size > 0,
    code: process.env.CODE,
    codes: ACCESS_CODES,

View File

@@ -12,6 +12,8 @@ export const DEFAULT_CORS_HOST = "https://a.nextweb.fun";
export const DEFAULT_API_HOST = `${DEFAULT_CORS_HOST}/api/proxy`;
export const OPENAI_BASE_URL = "https://api.openai.com";

export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";

export enum Path {
  Home = "/",
  Chat = "/chat",
@@ -66,6 +68,12 @@ export const EXPORT_MESSAGE_CLASS_NAME = "export-markdown";
export enum ServiceProvider {
  OpenAI = "OpenAI",
  Azure = "Azure",
  Google = "Google",
}

export enum ModelProvider {
  GPT = "GPT",
  GeminiPro = "GeminiPro",
}

export const OpenaiPath = {
@@ -82,6 +90,14 @@ export const Azure = {
  ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}",
};

export const Google = {
  ExampleEndpoint:
    "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent",
  ChatPath: "v1beta/models/gemini-pro:generateContent",

  // /api/openai/v1/chat/completions
};

export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
export const DEFAULT_SYSTEM_TEMPLATE = `
You are ChatGPT, a large language model trained by OpenAI.
@@ -114,6 +130,11 @@ export const DEFAULT_MODELS = [
    name: "gpt-3.5-turbo-1106",
    describe: "GPT-3,最快,笨,最便宜",
    available: true,
    provider: {
      id: "openai",
      providerName: "OpenAI",
      providerType: "openai",
    },
  },
  {
    name: "gpt-4",
@@ -124,6 +145,20 @@ export const DEFAULT_MODELS = [
    name: "gpt-4-1106-preview",
    describe: "GPT-4,又强又快,推荐",
    available: true,
    provider: {
      id: "openai",
      providerName: "OpenAI",
      providerType: "openai",
    },
  },
  {
    name: "gemini-pro",
    available: true,
    provider: {
      id: "google",
      providerName: "Google",
      providerType: "google",
    },
  },
  {
    name: "gpt-4-32k",

View File

@@ -15,7 +15,7 @@ const cn = {
  Auth: {
    Title: "需要密码",
    Tips: "管理员开启了密码验证,请在下方填入访问码",
    SubTips: "或者输入你的 OpenAI 或 Google API 密钥",
    Input: "在此处填写访问码",
    Confirm: "确认",
    Later: "稍后再说",
@@ -353,6 +353,23 @@ const cn = {
        SubTitle: "选择指定的部分版本",
      },
    },
    Google: {
      ApiKey: {
        Title: "接口密钥",
        SubTitle: "使用自定义 Google AI Studio API Key 绕过密码访问限制",
        Placeholder: "Google AI Studio API Key",
      },

      Endpoint: {
        Title: "接口地址",
        SubTitle: "样例:",
      },

      ApiVerion: {
        Title: "接口版本 (gemini-pro api version)",
        SubTitle: "选择指定的部分版本",
      },
    },
    CustomModel: {
      Title: "自定义模型名",
      SubTitle: "增加自定义模型可选项,使用英文逗号隔开",
@@ -389,7 +406,7 @@ const cn = {
  Prompt: {
    History: (content: string) => "这是历史聊天总结作为前情提要:" + content,
    Topic:
      "使用四到五个字直接返回这句话的简要主题,不要解释、不要标点、不要语气词、不要多余文本,不要加粗,如果没有主题,请直接返回“闲聊”",
    Summarize:
      "简要总结一下对话内容,用作后续的上下文提示 prompt,控制在 200 字以内",
  },

View File

@@ -17,7 +17,7 @@ const en: LocaleType = {
  Auth: {
    Title: "Need Access Code",
    Tips: "Please enter access code below",
    SubTips: "Or enter your OpenAI or Google API Key",
    Input: "access code",
    Confirm: "Confirm",
    Later: "Later",
@@ -360,6 +360,24 @@ const en: LocaleType = {
      Title: "Custom Models",
      SubTitle: "Custom model options, separated by comma",
    },
    Google: {
      ApiKey: {
        Title: "API Key",
        SubTitle:
          "Bypass password access restrictions using a custom Google AI Studio API Key",
        Placeholder: "Google AI Studio API Key",
      },

      Endpoint: {
        Title: "Endpoint Address",
        SubTitle: "Example:",
      },

      ApiVerion: {
        Title: "API Version (gemini-pro api version)",
        SubTitle: "Select a specific part version",
      },
    },
  },

  Model: "Model",
@@ -395,7 +413,7 @@ const en: LocaleType = {
    History: (content: string) =>
      "This is a summary of the chat history as a recap: " + content,
    Topic:
      "Please generate a four to five word title summarizing our conversation without any lead-in, punctuation, quotation marks, periods, symbols, bold text, or additional text. Remove enclosing quotation marks.",
    Summarize:
      "Summarize the discussion briefly in 200 words or less to use as a prompt for future context.",
  },

View File

@@ -29,6 +29,11 @@ const DEFAULT_ACCESS_STATE = {
  azureApiKey: "",
  azureApiVersion: "2023-05-15",

  // google ai studio
  googleUrl: "",
  googleApiKey: "",
  googleApiVersion: "v1",

  // server config
  needCode: true,
  hideUserApiKey: false,
@@ -59,6 +64,10 @@ export const useAccessStore = createPersistStore(
      // return ensure(get(), ["azureUrl", "azureApiKey", "azureApiVersion"]);
    },

    isValidGoogle() {
      return ensure(get(), ["googleApiKey"]);
    },

    isAuthorized() {
      this.fetch();

      return (
        this.isValidOpenAI() ||
        this.isValidAzure() ||
        this.isValidGoogle() ||
        !this.enabledAccessControl() ||
        (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
      );
@@ -93,6 +103,7 @@ export const useAccessStore = createPersistStore(
        token: string;
        openaiApiKey: string;
        azureApiVersion: string;
        googleApiKey: string;
      };
      state.openaiApiKey = state.token;
      state.azureApiVersion = "2023-05-15";

View File

@@ -8,13 +8,14 @@ import {
  DEFAULT_INPUT_TEMPLATE,
  DEFAULT_SYSTEM_TEMPLATE,
  KnowledgeCutOffDate,
  ModelProvider,
  StoreKey,
  SUMMARIZE_MODEL,
} from "../constant";
import {
  getHeaders,
  useGetMidjourneySelfProxyUrl,
  ClientApi,
  RequestMessage,
} from "../client/api";
import { ChatControllerPool } from "../client/controller";
@@ -440,7 +441,6 @@ export const useChatStore = createPersistStore(
            botMessage,
          ]);
        });
        if (
          content.toLowerCase().startsWith("/mj") ||
          content.toLowerCase().startsWith("/MJ")
@@ -604,6 +604,13 @@ export const useChatStore = createPersistStore(
          set(() => ({}));
          extAttr?.setAutoScroll(true);
        } else {
          var api: ClientApi;
          if (modelConfig.model === "gemini-pro") {
            api = new ClientApi(ModelProvider.GeminiPro);
          } else {
            api = new ClientApi(ModelProvider.GPT);
          }
          // make request
          api.llm.chat({
            messages: sendMessages,
@@ -627,12 +634,11 @@ export const useChatStore = createPersistStore(
            },
            onError(error) {
              const isAborted = error.message.includes("aborted");
              botMessage.content +=
                "\n\n" +
                prettyObject({
                  error: true,
                  message: error.message,
                });
              botMessage.streaming = false;
              userMessage.isError = !isAborted;
@@ -684,7 +690,9 @@ export const useChatStore = createPersistStore(
      // system prompts, to get close to OpenAI Web ChatGPT
      const shouldInjectSystemPrompts = modelConfig.enableInjectSystemPrompts;

      var systemPrompts: ChatMessage[] = [];
      systemPrompts = shouldInjectSystemPrompts
        ? [
            createMessage({
              role: "system",
@@ -778,6 +786,14 @@ export const useChatStore = createPersistStore(
    summarizeSession() {
      const config = useAppConfig.getState();
      const session = get().currentSession();
      const modelConfig = session.mask.modelConfig;

      var api: ClientApi;
      if (modelConfig.model === "gemini-pro") {
        api = new ClientApi(ModelProvider.GeminiPro);
      } else {
        api = new ClientApi(ModelProvider.GPT);
      }

      // remove error messages if any
      const messages = session.messages;
@@ -809,8 +825,6 @@ export const useChatStore = createPersistStore(
          },
        });
      }

      const summarizeIndex = Math.max(
        session.lastSummarizeIndex,
        session.clearContextIndex ?? 0,

View File

@ -1,9 +1,16 @@
import { FETCH_COMMIT_URL, FETCH_TAG_URL, StoreKey } from "../constant"; import {
import { api } from "../client/api"; FETCH_COMMIT_URL,
FETCH_TAG_URL,
ModelProvider,
StoreKey,
} from "../constant";
import { getClientConfig } from "../config/client"; import { getClientConfig } from "../config/client";
import { createPersistStore } from "../utils/store"; import { createPersistStore } from "../utils/store";
import ChatGptIcon from "../icons/chatgpt.png"; import ChatGptIcon from "../icons/chatgpt.png";
import Locale from "../locales"; import Locale from "../locales";
import { use } from "react";
import { useAppConfig } from ".";
import { ClientApi } from "../client/api";
const ONE_MINUTE = 60 * 1000; const ONE_MINUTE = 60 * 1000;
const isApp = !!getClientConfig()?.isApp; const isApp = !!getClientConfig()?.isApp;
@ -99,7 +106,7 @@ export const useUpdateStore = createPersistStore(
if (version === remoteId) { if (version === remoteId) {
// Show a notification using Tauri // Show a notification using Tauri
window.__TAURI__?.notification.sendNotification({ window.__TAURI__?.notification.sendNotification({
title: "ChatGPT Next Web", title: "NextChat",
body: `${Locale.Settings.Update.IsLatest}`, body: `${Locale.Settings.Update.IsLatest}`,
icon: `${ChatGptIcon.src}`, icon: `${ChatGptIcon.src}`,
sound: "Default", sound: "Default",
@ -109,7 +116,7 @@ export const useUpdateStore = createPersistStore(
Locale.Settings.Update.FoundUpdate(`${remoteId}`); Locale.Settings.Update.FoundUpdate(`${remoteId}`);
// Show a notification for the new version using Tauri // Show a notification for the new version using Tauri
window.__TAURI__?.notification.sendNotification({ window.__TAURI__?.notification.sendNotification({
title: "ChatGPT Next Web", title: "NextChat",
body: updateMessage, body: updateMessage,
icon: `${ChatGptIcon.src}`, icon: `${ChatGptIcon.src}`,
sound: "Default", sound: "Default",
@@ -127,6 +134,7 @@ export const useUpdateStore = createPersistStore(
    },
    async updateUsage(force = false) {
+     // only support openai for now
      const overOneMinute = Date.now() - get().lastUpdateUsage >= ONE_MINUTE;
      if (!overOneMinute && !force) return;
@@ -135,6 +143,7 @@ export const useUpdateStore = createPersistStore(
      }));
      try {
+       const api = new ClientApi(ModelProvider.GPT);
        const usage = await api.llm.usage();
        if (usage) {
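
A minimal sketch of the resulting usage flow (assuming api.llm.usage() resolves to a { used, total } shape, as the OpenAI client in this repo does; the wrapper function is illustrative):

import { ClientApi } from "../client/api";
import { ModelProvider } from "../constant";

// Billing/usage queries stay OpenAI-only in this commit, so the store
// always constructs a GPT-backed client here regardless of chat model.
async function fetchUsage(): Promise<{ used?: number; total?: number }> {
  const api = new ClientApi(ModelProvider.GPT);
  const usage = await api.llm.usage(); // assumed { used, total } shape
  return usage ?? {};
}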

View File

@@ -6,23 +6,28 @@ export function collectModelTable(
) {
  const modelTable: Record<
    string,
-   { available: boolean; name: string; displayName: string; describe: string }
+   {
+     available: boolean;
+     name: string;
+     displayName: string;
+     describe: string;
+     provider?: LLMModel["provider"]; // Marked as optional
+   }
  > = {};

  // default models
- models.forEach(
-   (m) =>
-     (modelTable[m.name] = {
-       ...m,
-       displayName: m.name,
-     }),
- );
+ models.forEach((m) => {
+   modelTable[m.name] = {
+     ...m,
+     displayName: m.name, // 'provider' is copied over if it exists
+   };
+ });
  // server custom models
  customModels
    .split(",")
    .filter((v) => !!v && v.length > 0)
-   .map((m) => {
+   .forEach((m) => {
      const available = !m.startsWith("-");
      const nameConfig =
        m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
@@ -30,15 +35,18 @@ export function collectModelTable(
      // enable or disable all models
      if (name === "all") {
-       Object.values(modelTable).forEach((m) => (m.available = available));
+       Object.values(modelTable).forEach(
+         (model) => (model.available = available),
+       );
+     } else {
+       modelTable[name] = {
+         name,
+         displayName: displayName || name,
+         available,
+         describe: "",
+         provider: modelTable[name]?.provider, // Use optional chaining
+       };
      }
-     modelTable[name] = {
-       name,
-       displayName: displayName || name,
-       available,
-       describe: "",
-     };
    });

  return modelTable;
}
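
To make the parsing rules concrete, a hypothetical usage sketch (inputs are made up; it assumes the name=displayName split that the surrounding function performs on each entry):

import { collectModelTable } from "./model"; // app/utils/model.ts in this diff

const models = [
  { name: "gpt-3.5-turbo", available: true, describe: "" },
  { name: "gemini-pro", available: true, describe: "" },
];

// "-all" disables every default model, "+gpt-4" then adds/enables gpt-4,
// and "mymodel=MyModel" registers a custom entry with a display name.
const table = collectModelTable(models as any, "-all,+gpt-4,mymodel=MyModel");
// table["gpt-3.5-turbo"].available === false
// table["gpt-4"].available === true
// table["mymodel"].displayName === "MyModel"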

View File

@@ -16,6 +16,7 @@ services:
    environment:
      - TZ=Asia/Shanghai
      - OPENAI_API_KEY=$OPENAI_API_KEY
+     - GOOGLE_API_KEY=$GOOGLE_API_KEY
      - CODE=$CODE
      - BASE_URL=$BASE_URL
      - OPENAI_ORG_ID=$OPENAI_ORG_ID
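
On the server side the new variable is read from process.env; a sketch of that pattern (the helper below is illustrative, not the repo's actual config module; the default URL matches the one documented in the env template):

// Illustrative only: pick up the Gemini credentials the compose file passes in.
export function getGoogleConfig() {
  return {
    apiKey: process.env.GOOGLE_API_KEY ?? "",
    baseUrl:
      process.env.GOOGLE_URL ??
      "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent",
  };
}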

View File

@@ -23,7 +23,7 @@ The Docker build is effectively the stable channel; the latest Docker image always matches the latest release version

## How to change Vercel environment variables

- Open the Vercel dashboard page;
-- Select your chatgpt next web project;
+- Select your NextChat project;
- Click the Settings option at the top of the page;
- Find the Environment Variables option in the sidebar;
- Edit the corresponding value.

View File

@@ -23,7 +23,7 @@ The Docker version is effectively the stable release; the latest Docker image

## How do I edit Vercel environment variables?

- Go to the Vercel dashboard page.
-- Select your chatgpt next web project.
+- Select your NextChat project.
- Click the Settings option at the top of the page.
- Find the Environment Variables option in the sidebar.
- Edit the corresponding value.

View File

@@ -2,7 +2,7 @@

> No english version yet, please read this doc with ChatGPT or other translation tools.

-This document explains some of the features and design principles of ChatGPT Next Web.
+This document explains some of the features and design principles of NextChat.

## Mask (面具)

@@ -22,7 +22,7 @@

The editing steps are as follows:

-1. Configure a mask in ChatGPT Next Web;
+1. Configure a mask in NextChat;
2. Use the download button on the mask editing page to save the mask as JSON;
3. Ask ChatGPT to format the JSON file into the corresponding .ts code;
4. Put it into the corresponding .ts file.

View File

@@ -1,21 +1,20 @@
{
- "name": "ChatGPT Next Web",
- "short_name": "ChatGPT",
+ "name": "NextChat",
+ "short_name": "NextChat",
  "icons": [
    {
      "src": "https://cos.xiaosi.cc/next/public/android-chrome-192x192.png",
      "sizes": "192x192",
      "type": "image/png"
    },
    {
      "src": "https://cos.xiaosi.cc/next/public/android-chrome-512x512.png",
      "sizes": "512x512",
      "type": "image/png"
    }
  ],
  "start_url": "/",
  "theme_color": "#ffffff",
  "background_color": "#ffffff",
  "display": "standalone"
}

src-tauri/Cargo.lock generated
View File

@@ -56,6 +56,128 @@ version = "1.0.71"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8"
[[package]]
name = "async-broadcast"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c48ccdbf6ca6b121e0f586cbc0e73ae440e56c67c30fa0873b4e110d9c26d2b"
dependencies = [
"event-listener",
"futures-core",
]
[[package]]
name = "async-channel"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
dependencies = [
"concurrent-queue",
"event-listener",
"futures-core",
]
[[package]]
name = "async-executor"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b0c4a4f319e45986f347ee47fef8bf5e81c9abc3f6f58dc2391439f30df65f0"
dependencies = [
"async-lock",
"async-task",
"concurrent-queue",
"fastrand 2.0.1",
"futures-lite",
"slab",
]
[[package]]
name = "async-fs"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06"
dependencies = [
"async-lock",
"autocfg",
"blocking",
"futures-lite",
]
[[package]]
name = "async-io"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af"
dependencies = [
"async-lock",
"autocfg",
"cfg-if",
"concurrent-queue",
"futures-lite",
"log",
"parking",
"polling",
"rustix",
"slab",
"socket2",
"waker-fn",
]
[[package]]
name = "async-lock"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b"
dependencies = [
"event-listener",
]
[[package]]
name = "async-process"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a9d28b1d97e08915212e2e45310d47854eafa69600756fc735fb788f75199c9"
dependencies = [
"async-io",
"async-lock",
"autocfg",
"blocking",
"cfg-if",
"event-listener",
"futures-lite",
"rustix",
"signal-hook",
"windows-sys 0.48.0",
]
[[package]]
name = "async-recursion"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.16",
]
[[package]]
name = "async-task"
version = "4.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1d90cd0b264dfdd8eb5bad0a2c217c1f88fa96a8573f40e7b12de23fb468f46"
[[package]]
name = "async-trait"
version = "0.1.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2d0f03b3640e3a630367e40c468cb7f309529c708ed1d88597047b0e7c6ef7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.16",
]
[[package]]
name = "atk"
version = "0.15.1"

@@ -80,6 +202,12 @@ dependencies = [
"system-deps 6.1.0",
]
[[package]]
name = "atomic-waker"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
[[package]]
name = "attohttpc"
version = "0.22.0"

@@ -150,6 +278,22 @@ dependencies = [
"generic-array",
]
[[package]]
name = "blocking"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c36a4d0d48574b3dd360b4b7d95cc651d2b6557b6402848a27d4b228a473e2a"
dependencies = [
"async-channel",
"async-lock",
"async-task",
"fastrand 2.0.1",
"futures-io",
"futures-lite",
"piper",
"tracing",
]
[[package]]
name = "brotli"
version = "3.3.4"

@@ -358,6 +502,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "concurrent-queue"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d16048cd947b08fa32c24458a22f5dc5e835264f689f4f5653210c69fd107363"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "convert_case"
version = "0.4.0"

@@ -530,6 +683,17 @@ dependencies = [
"syn 2.0.16",
]
[[package]]
name = "derivative"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "derive_more"
version = "0.99.17"

@@ -629,6 +793,27 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "enumflags2"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5998b4f30320c9d93aed72f63af821bfdac50465b75428fce77b48ec482c3939"
dependencies = [
"enumflags2_derive",
"serde",
]
[[package]]
name = "enumflags2_derive"
version = "0.7.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f95e2801cd355d4a1a3e3953ce6ee5ae9603a5c833455343a8bfe3f44d418246"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.16",
]
[[package]]
name = "errno"
version = "0.3.1"

@@ -650,6 +835,12 @@ dependencies = [
"libc",
]
[[package]]
name = "event-listener"
version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
[[package]]
name = "fastrand"
version = "1.9.0"

@@ -659,6 +850,12 @@ dependencies = [
"instant",
]
[[package]]
name = "fastrand"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fdeflate"
version = "0.3.0"

@@ -674,7 +871,7 @@ version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3cf3a800ff6e860c863ca6d4b16fd999db8b752819c1606884047b73e468535"
dependencies = [
-"memoffset",
+"memoffset 0.8.0",
"rustc_version",
]
@@ -772,6 +969,21 @@ version = "0.3.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
[[package]]
name = "futures-lite"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce"
dependencies = [
"fastrand 1.9.0",
"futures-core",
"futures-io",
"memchr",
"parking",
"pin-project-lite",
"waker-fn",
]
[[package]]
name = "futures-macro"
version = "0.3.28"

@@ -783,6 +995,12 @@ dependencies = [
"syn 2.0.16",
]
[[package]]
name = "futures-sink"
version = "0.3.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817"
[[package]]
name = "futures-task"
version = "0.3.28"

@@ -796,8 +1014,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
dependencies = [
"futures-core",
+"futures-io",
"futures-macro",
+"futures-sink",
"futures-task",
+"memchr",
"pin-project-lite",
"pin-utils",
"slab",
@@ -1451,6 +1672,19 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "mac-notification-sys"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51fca4d74ff9dbaac16a01b924bc3693fa2bba0862c2c633abc73f9a8ea21f64"
dependencies = [
"cc",
"dirs-next",
"objc-foundation",
"objc_id",
"time",
]
[[package]]
name = "malloc_buf"
version = "0.0.6"

@@ -1495,6 +1729,15 @@ version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memoffset"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.8.0"

@@ -1504,6 +1747,15 @@ dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
dependencies = [
"autocfg",
]
[[package]]
name = "minisign-verify"
version = "0.2.1"

@@ -1572,12 +1824,37 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
"memoffset 0.7.1",
]
[[package]]
name = "nodrop"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
[[package]]
name = "notify-rust"
version = "4.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "827c5edfa80235ded4ab3fe8e9dc619b4f866ef16fe9b1c6b8a7f8692c0f2226"
dependencies = [
"log",
"mac-notification-sys",
"serde",
"tauri-winrt-notification",
"zbus",
]
[[package]]
name = "nu-ansi-term"
version = "0.46.0"

@@ -1757,6 +2034,16 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "ordered-stream"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50"
dependencies = [
"futures-core",
"pin-project-lite",
]
[[package]]
name = "overload"
version = "0.1.1"

@@ -1788,6 +2075,12 @@ dependencies = [
"system-deps 6.1.0",
]
[[package]]
name = "parking"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae"
[[package]]
name = "parking_lot"
version = "0.12.1"

@@ -1933,6 +2226,17 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "piper"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4"
dependencies = [
"atomic-waker",
"fastrand 2.0.1",
"futures-io",
]
[[package]]
name = "pkg-config"
version = "0.3.27"

@@ -1948,7 +2252,7 @@ dependencies = [
"base64 0.21.0",
"indexmap",
"line-wrap",
-"quick-xml",
+"quick-xml 0.28.2",
"serde",
"time",
]
@@ -1966,6 +2270,22 @@ dependencies = [
"miniz_oxide",
]
[[package]]
name = "polling"
version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce"
dependencies = [
"autocfg",
"bitflags 1.3.2",
"cfg-if",
"concurrent-queue",
"libc",
"log",
"pin-project-lite",
"windows-sys 0.48.0",
]
[[package]]
name = "ppv-lite86"
version = "0.2.17"

@@ -2027,6 +2347,15 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "quick-xml"
version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11bafc859c6815fbaffbbbf4229ecb767ac913fecb27f9ad4343662e9ef099ea"
dependencies = [
"memchr",
]
[[package]]
name = "quick-xml"
version = "0.28.2"

@@ -2466,6 +2795,17 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "sha1"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
dependencies = [
"cfg-if",
"cpufeatures",
"digest",
]
[[package]]
name = "sha2"
version = "0.10.6"

@@ -2486,6 +2826,25 @@ dependencies = [
"lazy_static",
]
[[package]]
name = "signal-hook"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801"
dependencies = [
"libc",
"signal-hook-registry",
]
[[package]]
name = "signal-hook-registry"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
dependencies = [
"libc",
]
[[package]]
name = "simd-adler32"
version = "0.3.5"

@@ -2513,6 +2872,16 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "socket2"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "soup2"
version = "0.2.1"

@@ -2556,6 +2925,12 @@ dependencies = [
"loom",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "string_cache"
version = "0.8.7"

@@ -2733,6 +3108,7 @@ dependencies = [
"http",
"ignore",
"minisign-verify",
+"notify-rust",
"objc",
"once_cell",
"open",
@@ -2915,6 +3291,16 @@ dependencies = [
"toml 0.7.3",
]
[[package]]
name = "tauri-winrt-notification"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f5bff1d532fead7c43324a0fa33643b8621a47ce2944a633be4cb6c0240898f"
dependencies = [
"quick-xml 0.23.1",
"windows 0.39.0",
]
[[package]]
name = "tempfile"
version = "3.5.0"

@@ -2922,7 +3308,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998"
dependencies = [
"cfg-if",
-"fastrand",
+"fastrand 1.9.0",
"redox_syscall 0.3.5",
"rustix",
"windows-sys 0.45.0",
@@ -3135,6 +3521,17 @@ version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "uds_windows"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9"
dependencies = [
"memoffset 0.9.0",
"tempfile",
"winapi",
]
[[package]]
name = "unicode-bidi"
version = "0.3.13"

@@ -3239,6 +3636,12 @@ dependencies = [
"libc",
]
[[package]]
name = "waker-fn"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690"
[[package]]
name = "walkdir"
version = "2.3.3"

@@ -3815,6 +4218,82 @@ dependencies = [
"libc",
]
[[package]]
name = "xdg-home"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2769203cd13a0c6015d515be729c526d041e9cf2c0cc478d57faee85f40c6dcd"
dependencies = [
"nix",
"winapi",
]
[[package]]
name = "zbus"
version = "3.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31de390a2d872e4cd04edd71b425e29853f786dc99317ed72d73d6fcf5ebb948"
dependencies = [
"async-broadcast",
"async-executor",
"async-fs",
"async-io",
"async-lock",
"async-process",
"async-recursion",
"async-task",
"async-trait",
"blocking",
"byteorder",
"derivative",
"enumflags2",
"event-listener",
"futures-core",
"futures-sink",
"futures-util",
"hex",
"nix",
"once_cell",
"ordered-stream",
"rand 0.8.5",
"serde",
"serde_repr",
"sha1",
"static_assertions",
"tracing",
"uds_windows",
"winapi",
"xdg-home",
"zbus_macros",
"zbus_names",
"zvariant",
]
[[package]]
name = "zbus_macros"
version = "3.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41d1794a946878c0e807f55a397187c11fc7a038ba5d868e7db4f3bd7760bc9d"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"regex",
"syn 1.0.109",
"zvariant_utils",
]
[[package]]
name = "zbus_names"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb80bb776dbda6e23d705cf0123c3b95df99c4ebeaec6c2599d4a5419902b4a9"
dependencies = [
"serde",
"static_assertions",
"zvariant",
]
[[package]]
name = "zip"
version = "0.6.6"

@@ -3825,3 +4304,41 @@ dependencies = [
"crc32fast",
"crossbeam-utils",
]
[[package]]
name = "zvariant"
version = "3.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44b291bee0d960c53170780af148dca5fa260a63cdd24f1962fa82e03e53338c"
dependencies = [
"byteorder",
"enumflags2",
"libc",
"serde",
"static_assertions",
"zvariant_derive",
]
[[package]]
name = "zvariant_derive"
version = "3.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "934d7a7dfc310d6ee06c87ffe88ef4eca7d3e37bb251dece2ef93da8f17d8ecd"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 1.0.109",
"zvariant_utils",
]
[[package]]
name = "zvariant_utils"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7234f0d811589db492d16893e3f21e8e2fd282e6d01b0cddee310322062cc200"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]

View File

@@ -8,8 +8,8 @@
    "withGlobalTauri": true
  },
  "package": {
-   "productName": "ChatGPT Next Web",
-   "version": "2.9.12"
+   "productName": "NextChat",
+   "version": "2.9.13"
  },
  "tauri": {
    "allowlist": {
@@ -68,7 +68,7 @@
      "icons/icon.ico"
    ],
    "identifier": "com.yida.chatgpt.next.web",
-   "longDescription": "ChatGPT Next Web is a cross-platform ChatGPT client, including Web/Win/Linux/OSX/PWA.",
+   "longDescription": "NextChat is a cross-platform ChatGPT client, including Web/Win/Linux/OSX/PWA.",
    "macOS": {
      "entitlements": null,
      "exceptionDomain": "",
@@ -77,7 +77,7 @@
      "signingIdentity": null
    },
    "resources": [],
-   "shortDescription": "ChatGPT Next Web App",
+   "shortDescription": "NextChat App",
    "targets": "all",
    "windows": {
      "certificateThumbprint": null,
@@ -104,7 +104,7 @@
        "fullscreen": false,
        "height": 600,
        "resizable": true,
-       "title": "ChatGPT Next Web",
+       "title": "NextChat",
        "width": 960,
        "hiddenTitle": true,
        "titleBarStyle": "Overlay"