mirror of
https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
synced 2025-11-27 11:09:22 +08:00
feat: support https_proxy for standalone mode
This commit is contained in:
@@ -14,4 +14,4 @@ export function getAccessCodes(): Set<string> {
|
||||
}
|
||||
|
||||
// Valid access codes, computed once at module load from the environment
// (see getAccessCodes above).
export const ACCESS_CODES = getAccessCodes();

// NOTE: holds the raw env var string (or undefined), not a boolean —
// callers rely on truthiness checks.
export const IS_IN_DOCKER = process.env.DOCKER;

// Set when running as a standalone Node server rather than on the edge
// runtime; presumably any non-empty value counts as "on" — callers use
// truthiness checks.
export const IS_STANDALONE = process.env.STANDALONE;
@@ -1,6 +1,7 @@
|
||||
import { createParser } from "eventsource-parser";
|
||||
import { NextRequest } from "next/server";
|
||||
import { requestOpenai } from "../common";
|
||||
import { PageConfig } from "next/types";
|
||||
|
||||
async function createStream(req: NextRequest) {
|
||||
const encoder = new TextEncoder();
|
||||
@@ -56,6 +57,6 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
export const config = {
|
||||
runtime: "edge",
|
||||
export const config: PageConfig = {
|
||||
runtime: process.env.STANDALONE ? "nodejs" : "edge",
|
||||
};
|
||||
|
||||
@@ -4,6 +4,9 @@ const OPENAI_URL = "api.openai.com";
|
||||
// Scheme used to reach the upstream API; override with the PROTOCOL env var.
const DEFAULT_PROTOCOL = "https";
const PROTOCOL = process.env.PROTOCOL ?? DEFAULT_PROTOCOL;
// Upstream host (no scheme); BASE_URL overrides the default api.openai.com.
const BASE_URL = process.env.BASE_URL ?? OPENAI_URL;
// True when running as a standalone Node server, where proxying is possible.
const STANDALONE = Boolean(process.env.STANDALONE);

// Module-local fetch binding. Deliberately shadows the global so the
// standalone bootstrap below can swap in a proxy-aware implementation
// without touching globalThis.fetch.
let fetch: FetchLike = globalThis.fetch;
|
||||
|
||||
export async function requestOpenai(req: NextRequest) {
|
||||
const apiKey = req.headers.get("token");
|
||||
@@ -20,3 +23,64 @@ export async function requestOpenai(req: NextRequest) {
|
||||
body: req.body,
|
||||
});
|
||||
}
|
||||
|
||||
export type FetchLike = (
|
||||
url: string | Request,
|
||||
init?: RequestInit,
|
||||
) => Promise<Response>;
|
||||
|
||||
if (STANDALONE) {
|
||||
const proxy =
|
||||
process.env.HTTPS_PROXY ||
|
||||
process.env.https_proxy ||
|
||||
process.env.ALL_PROXY ||
|
||||
process.env.all_proxy;
|
||||
if (proxy) {
|
||||
console.log(`[HTTP Proxy] ${new URL(proxy).hostname}`);
|
||||
}
|
||||
|
||||
fetch = createFetchWithProxyByNextUndici({ proxy, fetch });
|
||||
}
|
||||
|
||||
/**
 * Wraps a fetch implementation so that requests are sent through an
 * HTTP(S) proxy via an undici ProxyAgent.
 *
 * @param proxy - proxy URL; when falsy, the base fetch is returned unchanged.
 * @param fetch - base fetch to delegate to. If omitted, it is resolved
 *   lazily on first call: `globalThis.fetch` when a global ProxyAgent
 *   exists, otherwise the fetch bundled with Next.js's compiled undici.
 * @returns a FetchLike whose ProxyAgent is created lazily on first call.
 */
export function createFetchWithProxyByNextUndici({
  proxy,
  fetch,
}: {
  proxy?: string;
  fetch?: FetchLike;
} = {}): FetchLike {
  if (!proxy) {
    return fetch || globalThis.fetch;
  }
  // Created on the first invocation and cached for all later calls.
  let agent: any;
  return async (...args) => {
    // Ensure an init object exists so we can set `duplex` below.
    const init = (args[1] ||= {});
    if (init.body instanceof ReadableStream) {
      // Node requires duplex: "half" when the request body is a stream.
      // https://github.com/nodejs/node/issues/46221
      (init as any).duplex ||= "half";
    }
    if (!agent) {
      // Lazy one-time setup: pick a ProxyAgent (and a default fetch if
      // none was supplied), preferring globals over Next's bundled undici.
      let ProxyAgent;
      if ("ProxyAgent" in globalThis) {
        ProxyAgent = (globalThis as any).ProxyAgent;
        fetch ||= globalThis.fetch;
      } else {
        // @ts-ignore
        const undici = await import("next/dist/compiled/undici");
        ProxyAgent = undici.ProxyAgent;
        fetch ||= undici.fetch;
      }
      agent = new ProxyAgent(proxy);
      // NOTE(review): installs the agent as undici's global dispatcher via
      // a well-known symbol — this affects fetches process-wide, not just
      // the one returned here. Kept as in the linked workaround.
      // https://github.com/nodejs/node/issues/43187#issuecomment-1134634174
      (global as any)[Symbol.for("undici.globalDispatcher.1")] = agent;
    }
    // fetch is guaranteed assigned by the init branch above on first call.
    return fetch!(...args);
  };
}
|
||||
|
||||
// Ambient declarations for Next.js's bundled copy of undici, which ships
// without type declarations; used by the dynamic-import fallback above.
// @ts-ignore
declare module "next/dist/compiled/undici" {
  const fetch: FetchLike;
  // NOTE(review): `any` because the compiled bundle exposes no real
  // ProxyAgent types — confirm against the undici version Next bundles.
  const ProxyAgent: any;
  export { fetch, ProxyAgent };
}
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { requestOpenai } from "../common";
|
||||
import { PageConfig } from "next/types";
|
||||
|
||||
// Next.js route segment config: use the Node.js runtime when running
// standalone (needed for the undici-based proxy support); otherwise edge.
export const config: PageConfig = {
  // STANDALONE is read as a truthy flag — any non-empty value selects Node.
  runtime: process.env.STANDALONE ? "nodejs" : "edge",
};
|
||||
|
||||
async function makeRequest(req: NextRequest) {
|
||||
try {
|
||||
|
||||
Reference in New Issue
Block a user