diff --git a/Dockerfile b/Dockerfile
index 6f7547b21..7974b6c9a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -16,7 +16,7 @@ RUN apk update && apk add --no-cache git
 
 ENV OPENAI_API_KEY=""
 ENV CODE=""
-ARG DOCKER=true
+ARG STANDALONE=true
 
 WORKDIR /app
 COPY --from=deps /app/node_modules ./node_modules
diff --git a/README.md b/README.md
index 40386a005..c9f25662a 100644
--- a/README.md
+++ b/README.md
@@ -177,7 +177,13 @@ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh)
 
 ```shell
 docker pull yidadaa/chatgpt-next-web
-docker run -d -p 3000:3000 -e OPENAI_API_KEY="" -e CODE="" yidadaa/chatgpt-next-web
+# https_proxy 指向代理,需要带协议前缀 例如: https_proxy=https://192.168.1.1:7890
+docker run -it --rm \
+   -p 3000:3000 \
+   -e https_proxy="" \
+   -e OPENAI_API_KEY="" \
+   -e CODE="" \
+   --name chatgpt-next-web yidadaa/chatgpt-next-web
 ```
 
 ## 截图 Screenshots
diff --git a/app/api/access.ts b/app/api/access.ts
index d3e4c9cf9..2fae3eafd 100644
--- a/app/api/access.ts
+++ b/app/api/access.ts
@@ -14,4 +14,4 @@ export function getAccessCodes(): Set<string> {
 }
 
 export const ACCESS_CODES = getAccessCodes();
-export const IS_IN_DOCKER = process.env.DOCKER;
+export const IS_STANDALONE = process.env.STANDALONE;
diff --git a/app/api/chat-stream/route.ts b/app/api/chat-stream/route.ts
index f33175543..c14216fa6 100644
--- a/app/api/chat-stream/route.ts
+++ b/app/api/chat-stream/route.ts
@@ -1,6 +1,7 @@
 import { createParser } from "eventsource-parser";
 import { NextRequest } from "next/server";
 import { requestOpenai } from "../common";
+import { PageConfig } from "next/types";
 
 async function createStream(req: NextRequest) {
   const encoder = new TextEncoder();
@@ -56,6 +57,6 @@ export async function POST(req: NextRequest) {
   }
 }
 
-export const config = {
-  runtime: "edge",
+export const config: PageConfig = {
+  runtime: process.env.STANDALONE ? "nodejs" : "edge",
 };
diff --git a/app/api/common.ts b/app/api/common.ts
index 842eeacaf..97f8a02db 100644
--- a/app/api/common.ts
+++ b/app/api/common.ts
@@ -4,6 +4,9 @@ const OPENAI_URL = "api.openai.com";
 const DEFAULT_PROTOCOL = "https";
 const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
 const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;
+const STANDALONE = Boolean(process.env.STANDALONE);
+
+let fetch: FetchLike = globalThis.fetch;
 
 export async function requestOpenai(req: NextRequest) {
   const apiKey = req.headers.get("token");
@@ -20,3 +23,64 @@ export async function requestOpenai(req: NextRequest) {
     body: req.body,
   });
 }
+
+export type FetchLike = (
+  url: string | Request,
+  init?: RequestInit,
+) => Promise<Response>;
+
+if (STANDALONE) {
+  const proxy =
+    process.env.HTTPS_PROXY ||
+    process.env.https_proxy ||
+    process.env.ALL_PROXY ||
+    process.env.all_proxy;
+  if (proxy) {
+    console.log(`[HTTP Proxy] ${new URL(proxy).hostname}`);
+  }
+
+  fetch = createFetchWithProxyByNextUndici({ proxy, fetch });
+}
+
+export function createFetchWithProxyByNextUndici({
+  proxy,
+  fetch,
+}: {
+  proxy?: string;
+  fetch?: FetchLike;
+} = {}): FetchLike {
+  if (!proxy) {
+    return fetch || globalThis.fetch;
+  }
+  let agent: any;
+  return async (...args) => {
+    const init = (args[1] ||= {});
+    if (init.body instanceof ReadableStream) {
+      // https://github.com/nodejs/node/issues/46221
+      (init as any).duplex ||= "half";
+    }
+    if (!agent) {
+      let ProxyAgent;
+      if ("ProxyAgent" in globalThis) {
+        ProxyAgent = (globalThis as any).ProxyAgent;
+        fetch ||= globalThis.fetch;
+      } else {
+        // @ts-ignore
+        const undici = await import("next/dist/compiled/undici");
+        ProxyAgent = undici.ProxyAgent;
+        fetch ||= undici.fetch;
+      }
+      agent = new ProxyAgent(proxy);
+      // https://github.com/nodejs/node/issues/43187#issuecomment-1134634174
+      (global as any)[Symbol.for("undici.globalDispatcher.1")] = agent;
+    }
+    return fetch!(...args);
+  };
+}
+
+// @ts-ignore
+declare module "next/dist/compiled/undici" {
+  const fetch: FetchLike;
+  const ProxyAgent: any;
+  export { fetch, ProxyAgent };
+}
diff --git a/app/api/openai/route.ts b/app/api/openai/route.ts
index cc51dbfc9..d6ad95723 100644
--- a/app/api/openai/route.ts
+++ b/app/api/openai/route.ts
@@ -1,5 +1,10 @@
 import { NextRequest, NextResponse } from "next/server";
 import { requestOpenai } from "../common";
+import { PageConfig } from "next/types";
+
+export const config: PageConfig = {
+  runtime: process.env.STANDALONE ? "nodejs" : "edge",
+};
 
 async function makeRequest(req: NextRequest) {
   try {
diff --git a/app/layout.tsx b/app/layout.tsx
index 49a6d644d..5345f4088 100644
--- a/app/layout.tsx
+++ b/app/layout.tsx
@@ -3,7 +3,7 @@ import "./styles/globals.scss";
 import "./styles/markdown.scss";
 import "./styles/highlight.scss";
 import process from "child_process";
-import { ACCESS_CODES, IS_IN_DOCKER } from "./api/access";
+import { ACCESS_CODES, IS_STANDALONE } from "./api/access";
 
 let COMMIT_ID: string | undefined;
 try {
@@ -29,7 +29,7 @@ export const metadata = {
 function Meta() {
   const metas = {
     version: COMMIT_ID ?? "unknown",
-    access: ACCESS_CODES.size > 0 || IS_IN_DOCKER ? "enabled" : "disabled",
+    access: ACCESS_CODES.size > 0 || IS_STANDALONE ? "enabled" : "disabled",
   };
 
   return (
diff --git a/next.config.js b/next.config.mjs
similarity index 62%
rename from next.config.js
rename to next.config.mjs
index fc164db9c..b27f4bda8 100644
--- a/next.config.js
+++ b/next.config.mjs
@@ -1,5 +1,4 @@
-/** @type {import('next').NextConfig} */
-
+/** @type {import("next").NextConfig} */
 const nextConfig = {
   experimental: {
     appDir: true,
@@ -11,11 +10,11 @@ const nextConfig = {
     });
     // 针对 SVG 的处理规则
     return config;
-  }
+  },
 };
 
-if (process.env.DOCKER) {
-  nextConfig.output = 'standalone'
+if (process.env.STANDALONE) {
+  nextConfig.output = "standalone";
 }
 
-module.exports = nextConfig;
+export default nextConfig;