diff --git a/.babelrc b/.babelrc new file mode 100644 index 000000000..53e4d9b24 --- /dev/null +++ b/.babelrc @@ -0,0 +1,14 @@ +{ + "presets": [ + [ + "next/babel", + { + "preset-env": { + "targets": { + "browsers": ["> 0.25%, not dead"] + } + } + } + ] + ] +} diff --git a/.env.template b/.env.template index 5f3fc02da..0f4bf0e7c 100644 --- a/.env.template +++ b/.env.template @@ -27,3 +27,8 @@ HIDE_USER_API_KEY= # Default: Empty # If you do not want users to use GPT-4, set this value to 1. DISABLE_GPT4= + +# (optional) +# Default: Empty +# If you do not want users to query balance, set this value to 1. +HIDE_BALANCE_QUERY= \ No newline at end of file diff --git a/.github/workflows/app.yml b/.github/workflows/app.yml index 234338dd4..b928ad6c1 100644 --- a/.github/workflows/app.yml +++ b/.github/workflows/app.yml @@ -9,7 +9,7 @@ jobs: create-release: permissions: contents: write - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest outputs: release_id: ${{ steps.create-release.outputs.result }} @@ -39,9 +39,21 @@ jobs: strategy: fail-fast: false matrix: - platform: [macos-latest, ubuntu-20.04, windows-latest] + config: + - os: ubuntu-latest + arch: x86_64 + rust_target: x86_64-unknown-linux-gnu + - os: macos-latest + arch: x86_64 + rust_target: x86_64-apple-darwin + - os: macos-latest + arch: aarch64 + rust_target: aarch64-apple-darwin + - os: windows-latest + arch: x86_64 + rust_target: x86_64-pc-windows-msvc - runs-on: ${{ matrix.platform }} + runs-on: ${{ matrix.config.os }} steps: - uses: actions/checkout@v3 - name: setup node @@ -50,8 +62,13 @@ jobs: node-version: 16 - name: install Rust stable uses: dtolnay/rust-toolchain@stable + with: + targets: ${{ matrix.config.rust_target }} + - uses: Swatinem/rust-cache@v2 + with: + key: ${{ matrix.config.rust_target }} - name: install dependencies (ubuntu only) - if: matrix.platform == 'ubuntu-20.04' + if: matrix.config.os == 'ubuntu-latest' run: | sudo apt-get update sudo apt-get install -y libgtk-3-dev libwebkit2gtk-4.0-dev 
libappindicator3-dev librsvg2-dev patchelf @@ -68,7 +85,7 @@ jobs: publish-release: permissions: contents: write - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest needs: [create-release, build-tauri] steps: diff --git a/README.md b/README.md index 148c137f8..c4f83c117 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ One-Click to get well-designed cross-platform ChatGPT web UI. [![MacOS][MacOS-image]][download-url] [![Linux][Linux-image]][download-url] -[Web App](https://chatgpt.nextweb.fun/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa) +[Web App](https://chatgpt.nextweb.fun/) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Issues](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [Discord](https://discord.gg/YCkeafCafC) / [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa) [网页版](https://chatgpt.nextweb.fun/) / [客户端](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [反馈](https://github.com/Yidadaa/ChatGPT-Next-Web/issues) / [QQ 群](https://github.com/Yidadaa/ChatGPT-Next-Web/discussions/1724) / [打赏开发者](https://user-images.githubusercontent.com/16968934/227772541-5bcd52d8-61b7-488c-a203-0330d8006e2b.jpg) @@ -84,7 +84,7 @@ One-Click to get well-designed cross-platform ChatGPT web UI. 
- [x] 预制角色:使用预制角色快速定制新对话 [#993](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/993) - [x] 分享为图片,分享到 ShareGPT 链接 [#1741](https://github.com/Yidadaa/ChatGPT-Next-Web/pull/1741) - [x] 使用 tauri 打包桌面应用 -- [x] 支持自部署的大语言模型:开箱即用 [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) ,服务端部署 [LocalAI 项目](https://github.com/go-skynet/LocalAI) llama / gpt4all / rwkv / vicuna / koala / gpt4all-j / cerebras / falcon / dolly 等等 +- [x] 支持自部署的大语言模型:开箱即用 [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) ,服务端部署 [LocalAI 项目](https://github.com/go-skynet/LocalAI) llama / gpt4all / rwkv / vicuna / koala / gpt4all-j / cerebras / falcon / dolly 等等,或者使用 [api-for-open-llm](https://github.com/xusenlinzy/api-for-open-llm) - [ ] 插件机制,支持联网搜索、计算器、调用其他平台 api [#165](https://github.com/Yidadaa/ChatGPT-Next-Web/issues/165) ## 最新动态 @@ -185,6 +185,12 @@ If you do not want users to input their own API key, set this value to 1. If you do not want users to use GPT-4, set this value to 1. +### `HIDE_BALANCE_QUERY` (optional) + +> Default: Empty + +If you do not want users to query balance, set this value to 1. + ## Requirements NodeJS >= 18, Docker >= 20 @@ -257,6 +263,10 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/s ![More](./docs/images/more.png) +## Translation + +If you want to add a new translation, read this [document](./docs/translation.md). 
+ ## Donation [Buy Me a Coffee](https://www.buymeacoffee.com/yidadaa) @@ -288,6 +298,7 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/s [@Sha1rholder](https://github.com/Sha1rholder) [@AnsonHyq](https://github.com/AnsonHyq) [@synwith](https://github.com/synwith) +[@piksonGit](https://github.com/piksonGit) ### Contributor diff --git a/README_CN.md b/README_CN.md index 5fda7fc2e..12da24699 100644 --- a/README_CN.md +++ b/README_CN.md @@ -98,6 +98,10 @@ OpenAI 接口代理 URL,如果你手动配置了 openai 接口代理,请填 如果你不想让用户使用 GPT-4,将此环境变量设置为 1 即可。 +### `HIDE_BALANCE_QUERY` (可选) + +如果你不想让用户查询余额,将此环境变量设置为 1 即可。 + ## 开发 点击下方按钮,开始二次开发: @@ -117,7 +121,7 @@ BASE_URL=https://chatgpt1.nextweb.fun/api/proxy 1. 安装 nodejs 18 和 yarn,具体细节请询问 ChatGPT; 2. 执行 `yarn install && yarn dev` 即可。⚠️ 注意:此命令仅用于本地开发,不要用于部署! -3. 如果你想本地部署,请使用 `yarn install && yarn start` 命令,你可以配合 pm2 来守护进程,防止被杀死,详情询问 ChatGPT。 +3. 如果你想本地部署,请使用 `yarn install && yarn build && yarn start` 命令,你可以配合 pm2 来守护进程,防止被杀死,详情询问 ChatGPT。 ## 部署 diff --git a/README_ES.md b/README_ES.md index cdd835908..e9705e402 100644 --- a/README_ES.md +++ b/README_ES.md @@ -96,6 +96,10 @@ Si no desea que los usuarios rellenen la clave de API ellos mismos, establezca e Si no desea que los usuarios utilicen GPT-4, establezca esta variable de entorno en 1. +### `HIDE_BALANCE_QUERY` (Opcional) + +Si no desea que los usuarios consulten el saldo, establezca esta variable de entorno en 1. + ## explotación > No se recomienda encarecidamente desarrollar o implementar localmente, debido a algunas razones técnicas, es difícil configurar el agente API de OpenAI localmente, a menos que pueda asegurarse de que puede conectarse directamente al servidor OpenAI.
diff --git a/app/api/common.ts b/app/api/common.ts index 22bd5d4a4..3146b6bd9 100644 --- a/app/api/common.ts +++ b/app/api/common.ts @@ -2,8 +2,8 @@ import { NextRequest, NextResponse } from "next/server"; export const OPENAI_URL = "api.openai.com"; const DEFAULT_PROTOCOL = "https"; -const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL; -const BASE_URL = process.env.BASE_URL ?? OPENAI_URL; +const PROTOCOL = process.env.PROTOCOL || DEFAULT_PROTOCOL; +const BASE_URL = process.env.BASE_URL || OPENAI_URL; const DISABLE_GPT4 = !!process.env.DISABLE_GPT4; export async function requestOpenai(req: NextRequest) { @@ -35,12 +35,12 @@ export async function requestOpenai(req: NextRequest) { const fetchOptions: RequestInit = { headers: { "Content-Type": "application/json", + "Cache-Control": "no-store", Authorization: authValue, ...(process.env.OPENAI_ORG_ID && { "OpenAI-Organization": process.env.OPENAI_ORG_ID, }), }, - cache: "no-store", method: req.method, body: req.body, // @ts-ignore @@ -78,8 +78,7 @@ export async function requestOpenai(req: NextRequest) { // to prevent browser prompt for credentials const newHeaders = new Headers(res.headers); newHeaders.delete("www-authenticate"); - - // to disbale ngnix buffering + // to disable nginx buffering newHeaders.set("X-Accel-Buffering", "no"); return new Response(res.body, { diff --git a/app/api/config/route.ts b/app/api/config/route.ts index 2b3bcbf20..7749e6e9e 100644 --- a/app/api/config/route.ts +++ b/app/api/config/route.ts @@ -9,7 +9,8 @@ const serverConfig = getServerSideConfig(); const DANGER_CONFIG = { needCode: serverConfig.needCode, hideUserApiKey: serverConfig.hideUserApiKey, - enableGPT4: serverConfig.enableGPT4, + disableGPT4: serverConfig.disableGPT4, + hideBalanceQuery: serverConfig.hideBalanceQuery, }; declare global { diff --git a/app/api/openai/[...path]/route.ts b/app/api/openai/[...path]/route.ts index 36f92d0ff..9df005a31 100644 --- a/app/api/openai/[...path]/route.ts +++ 
b/app/api/openai/[...path]/route.ts @@ -1,3 +1,5 @@ +import { type OpenAIListModelResponse } from "@/app/client/platforms/openai"; +import { getServerSideConfig } from "@/app/config/server"; import { OpenaiPath } from "@/app/constant"; import { prettyObject } from "@/app/utils/format"; import { NextRequest, NextResponse } from "next/server"; @@ -6,6 +8,18 @@ import { requestOpenai } from "../../common"; const ALLOWD_PATH = new Set(Object.values(OpenaiPath)); +function getModels(remoteModelRes: OpenAIListModelResponse) { + const config = getServerSideConfig(); + + if (config.disableGPT4) { + remoteModelRes.data = remoteModelRes.data.filter( + (m) => !m.id.startsWith("gpt-4"), + ); + } + + return remoteModelRes; +} + async function handle( req: NextRequest, { params }: { params: { path: string[] } }, @@ -39,7 +53,18 @@ async function handle( } try { - return await requestOpenai(req); + const response = await requestOpenai(req); + + // list models + if (subpath === OpenaiPath.ListModelPath && response.status === 200) { + const resJson = (await response.json()) as OpenAIListModelResponse; + const availableModels = getModels(resJson); + return NextResponse.json(availableModels, { + status: response.status, + }); + } + + return response; } catch (e) { console.error("[OpenAI] ", e); return NextResponse.json(prettyObject(e)); diff --git a/app/client/api.ts b/app/client/api.ts index a8960ff51..b04dd88b8 100644 --- a/app/client/api.ts +++ b/app/client/api.ts @@ -38,9 +38,15 @@ export interface LLMUsage { total: number; } +export interface LLMModel { + name: string; + available: boolean; +} + export abstract class LLMApi { abstract chat(options: ChatOptions): Promise; abstract usage(): Promise; + abstract models(): Promise; } type ProviderName = "openai" | "azure" | "claude" | "palm"; @@ -93,7 +99,7 @@ export class ClientApi { // 敬告二开开发者们,为了开源大模型的发展,请不要修改上述消息,此消息用于后续数据清洗使用 // Please do not modify this message - console.log("[Share]", msgs); + console.log("[Share]", messages, 
msgs); const clientConfig = getClientConfig(); const proxyUrl = "/sharegpt"; const rawUrl = "https://sharegpt.com/api/conversations"; diff --git a/app/client/controller.ts b/app/client/controller.ts index 86cb99e7f..a2e00173d 100644 --- a/app/client/controller.ts +++ b/app/client/controller.ts @@ -3,17 +3,17 @@ export const ChatControllerPool = { controllers: {} as Record, addController( - sessionIndex: number, - messageId: number, + sessionId: string, + messageId: string, controller: AbortController, ) { - const key = this.key(sessionIndex, messageId); + const key = this.key(sessionId, messageId); this.controllers[key] = controller; return key; }, - stop(sessionIndex: number, messageId: number) { - const key = this.key(sessionIndex, messageId); + stop(sessionId: string, messageId: string) { + const key = this.key(sessionId, messageId); const controller = this.controllers[key]; controller?.abort(); }, @@ -26,12 +26,12 @@ export const ChatControllerPool = { return Object.values(this.controllers).length > 0; }, - remove(sessionIndex: number, messageId: number) { - const key = this.key(sessionIndex, messageId); + remove(sessionId: string, messageId: string) { + const key = this.key(sessionId, messageId); delete this.controllers[key]; }, - key(sessionIndex: number, messageIndex: number) { - return `${sessionIndex},${messageIndex}`; + key(sessionId: string, messageIndex: string) { + return `${sessionId},${messageIndex}`; }, }; diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts index fce7eee4e..e140a1ef5 100644 --- a/app/client/platforms/openai.ts +++ b/app/client/platforms/openai.ts @@ -1,7 +1,12 @@ -import { OpenaiPath, REQUEST_TIMEOUT_MS } from "@/app/constant"; +import { + DEFAULT_API_HOST, + DEFAULT_MODELS, + OpenaiPath, + REQUEST_TIMEOUT_MS, +} from "@/app/constant"; import { useAccessStore, useAppConfig, useChatStore } from "@/app/store"; -import { ChatOptions, getHeaders, LLMApi, LLMUsage } from "../api"; +import { ChatOptions, 
getHeaders, LLMApi, LLMModel, LLMUsage } from "../api"; import Locale from "../../locales"; import { EventStreamContentType, @@ -9,12 +14,29 @@ import { } from "@fortaine/fetch-event-source"; import { prettyObject } from "@/app/utils/format"; +export interface OpenAIListModelResponse { + object: string; + data: Array<{ + id: string; + object: string; + root: string; + }>; +} + export class ChatGPTApi implements LLMApi { + private disableListModels = true; + path(path: string): string { let openaiUrl = useAccessStore.getState().openaiUrl; + if (openaiUrl.length === 0) { + openaiUrl = DEFAULT_API_HOST; + } if (openaiUrl.endsWith("/")) { openaiUrl = openaiUrl.slice(0, openaiUrl.length - 1); } + if (!openaiUrl.startsWith("http") && !openaiUrl.startsWith("/api/openai")) { + openaiUrl = "https://" + openaiUrl; + } return [openaiUrl, path].join("/"); } @@ -43,6 +65,7 @@ export class ChatGPTApi implements LLMApi { temperature: modelConfig.temperature, presence_penalty: modelConfig.presence_penalty, frequency_penalty: modelConfig.frequency_penalty, + top_p: modelConfig.top_p, }; console.log("[Request] openai payload: ", requestPayload); @@ -224,5 +247,31 @@ export class ChatGPTApi implements LLMApi { total: total.hard_limit_usd, } as LLMUsage; } + + async models(): Promise { + if (this.disableListModels) { + return DEFAULT_MODELS.slice(); + } + + const res = await fetch(this.path(OpenaiPath.ListModelPath), { + method: "GET", + headers: { + ...getHeaders(), + }, + }); + + const resJson = (await res.json()) as OpenAIListModelResponse; + const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-")); + console.log("[Models]", chatModels); + + if (!chatModels) { + return []; + } + + return chatModels.map((m) => ({ + name: m.id, + available: true, + })); + } } export { OpenaiPath }; diff --git a/app/command.ts b/app/command.ts index 40bad92b3..9330d4ff5 100644 --- a/app/command.ts +++ b/app/command.ts @@ -1,4 +1,6 @@ +import { useEffect } from "react"; import { 
useSearchParams } from "react-router-dom"; +import Locale from "./locales"; type Command = (param: string) => void; interface Commands { @@ -10,19 +12,62 @@ interface Commands { export function useCommand(commands: Commands = {}) { const [searchParams, setSearchParams] = useSearchParams(); - if (commands === undefined) return; + useEffect(() => { + let shouldUpdate = false; + searchParams.forEach((param, name) => { + const commandName = name as keyof Commands; + if (typeof commands[commandName] === "function") { + commands[commandName]!(param); + searchParams.delete(name); + shouldUpdate = true; + } + }); - let shouldUpdate = false; - searchParams.forEach((param, name) => { - const commandName = name as keyof Commands; - if (typeof commands[commandName] === "function") { - commands[commandName]!(param); - searchParams.delete(name); - shouldUpdate = true; + if (shouldUpdate) { + setSearchParams(searchParams); } - }); - - if (shouldUpdate) { - setSearchParams(searchParams); - } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [searchParams, commands]); +} + +interface ChatCommands { + new?: Command; + newm?: Command; + next?: Command; + prev?: Command; + clear?: Command; + del?: Command; +} + +export const ChatCommandPrefix = ":"; + +export function useChatCommand(commands: ChatCommands = {}) { + function extract(userInput: string) { + return ( + userInput.startsWith(ChatCommandPrefix) ? 
userInput.slice(1) : userInput + ) as keyof ChatCommands; + } + + function search(userInput: string) { + const input = extract(userInput); + const desc = Locale.Chat.Commands; + return Object.keys(commands) + .filter((c) => c.startsWith(input)) + .map((c) => ({ + title: desc[c as keyof ChatCommands], + content: ChatCommandPrefix + c, + })); + } + + function match(userInput: string) { + const command = extract(userInput); + const matched = typeof commands[command] === "function"; + + return { + matched, + invoke: () => matched && commands[command]!(userInput), + }; + } + + return { match, search }; } diff --git a/app/components/button.module.scss b/app/components/button.module.scss index 5aa53dcf9..e332df2d2 100644 --- a/app/components/button.module.scss +++ b/app/components/button.module.scss @@ -27,6 +27,26 @@ fill: white !important; } } + + &.danger { + color: rgba($color: red, $alpha: 0.8); + border-color: rgba($color: red, $alpha: 0.5); + background-color: rgba($color: red, $alpha: 0.05); + + &:hover { + border-color: red; + background-color: rgba($color: red, $alpha: 0.1); + } + + path { + fill: red !important; + } + } + + &:hover, + &:focus { + border-color: var(--primary); + } } .shadow { @@ -37,10 +57,6 @@ border: var(--border-in-light); } -.icon-button:hover { - border-color: var(--primary); -} - .icon-button-icon { width: 16px; height: 16px; @@ -56,9 +72,12 @@ } .icon-button-text { - margin-left: 5px; font-size: 12px; overflow: hidden; text-overflow: ellipsis; white-space: nowrap; + + &:not(:first-child) { + margin-left: 5px; + } } diff --git a/app/components/button.tsx b/app/components/button.tsx index f93741b39..7a5633924 100644 --- a/app/components/button.tsx +++ b/app/components/button.tsx @@ -2,16 +2,20 @@ import * as React from "react"; import styles from "./button.module.scss"; +export type ButtonType = "primary" | "danger" | null; + export function IconButton(props: { onClick?: () => void; icon?: JSX.Element; - type?: "primary" | "danger"; + 
type?: ButtonType; text?: string; bordered?: boolean; shadow?: boolean; className?: string; title?: string; disabled?: boolean; + tabIndex?: number; + autoFocus?: boolean; }) { return (