This commit is contained in:
sijinhui
2023-12-16 23:05:14 +08:00
parent efdd61595e
commit b43c0b0109
91 changed files with 3399 additions and 12096 deletions

View File

@@ -3,12 +3,13 @@ import { getServerSideConfig } from "../config/server";
import md5 from "spark-md5";
import { ACCESS_CODE_PREFIX } from "../constant";
function getIP(req: NextRequest) {
let ip = req.ip ?? req.headers.get("x-real-ip");
export function getIP(req: NextRequest) {
let ip = req.headers.get("x-real-ip") ?? req.ip;
const forwardedFor = req.headers.get("x-forwarded-for");
if (!ip && forwardedFor) {
ip = forwardedFor.split(",").at(0) ?? "";
if (forwardedFor) {
ip = forwardedFor.split(",").at(0) ?? ip;
}
return ip;
@@ -24,7 +25,7 @@ function parseApiKey(bearToken: string) {
};
}
export function auth(req: NextRequest) {
export function auth(req: NextRequest, isAzure?: boolean) {
const authToken = req.headers.get("Authorization") ?? "";
// check if it is openai api key or user token
@@ -33,11 +34,11 @@ export function auth(req: NextRequest) {
const hashedCode = md5.hash(accessCode ?? "").trim();
const serverConfig = getServerSideConfig();
console.log("[Auth] allowed hashed codes: ", [...serverConfig.codes]);
console.log("[Auth] got access code:", accessCode);
console.log("[Auth] hashed access code:", hashedCode);
console.log("[User IP] ", getIP(req));
console.log("[Time] ", new Date().toLocaleString());
// console.log("[Auth] allowed hashed codes: ", [...serverConfig.codes]);
// console.log("[Auth] got access code:", accessCode);
// console.log("[Auth] hashed access code:", hashedCode);
// console.log("[User IP] ", getIP(req));
// console.log("[Time]", new Date().toLocaleString());
if (serverConfig.needCode && !serverConfig.codes.has(hashedCode) && !apiKey) {
return {
@@ -55,7 +56,7 @@ export function auth(req: NextRequest) {
// if user does not provide an api key, inject system api key
if (!apiKey) {
const serverApiKey = serverConfig.isAzure
const serverApiKey = isAzure
? serverConfig.azureApiKey
: serverConfig.apiKey;
@@ -63,7 +64,7 @@ export function auth(req: NextRequest) {
console.log("[Auth] use system api key");
req.headers.set(
"Authorization",
`${serverConfig.isAzure ? "" : "Bearer "}${serverApiKey}`,
`${isAzure ? "" : "Bearer "}${serverApiKey}`,
);
} else {
console.log("[Auth] admin did not provide an api key");

View File

@@ -0,0 +1,6 @@
import { authOptions } from "@/lib/auth";
import NextAuth from "next-auth";
// Next.js App Router catch-all auth route: NextAuth builds a single request
// handler covering every auth endpoint, exported for both GET and POST.
const handler = NextAuth(authOptions);
export { handler as GET, handler as POST };

View File

@@ -6,19 +6,23 @@ import { makeAzurePath } from "../azure";
const serverConfig = getServerSideConfig();
export async function requestOpenai(req: NextRequest) {
export async function requestOpenai(
req: NextRequest,
cloneBody: any,
isAzure: boolean,
) {
const controller = new AbortController();
const authValue = req.headers.get("Authorization") ?? "";
const authHeaderName = serverConfig.isAzure ? "api-key" : "Authorization";
const authHeaderName = isAzure ? "api-key" : "Authorization";
let path = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
"/api/openai/",
"",
);
let baseUrl =
serverConfig.azureUrl || serverConfig.baseUrl || OPENAI_BASE_URL;
let baseUrl = isAzure
? serverConfig.azureUrl
: serverConfig.baseUrl || OPENAI_BASE_URL;
if (!baseUrl.startsWith("http")) {
baseUrl = `https://${baseUrl}`;
@@ -28,12 +32,12 @@ export async function requestOpenai(req: NextRequest) {
baseUrl = baseUrl.slice(0, -1);
}
console.log("[Proxy] ", path);
console.log("[Base Url]", baseUrl);
// this fix [Org ID] undefined in server side if not using custom point
if (serverConfig.openaiOrgId !== undefined) {
console.log("[Org ID]", serverConfig.openaiOrgId);
}
// console.log("[Proxy] ", path);
// console.log("[Base Url]", baseUrl);
// // this fix [Org ID] undefined in server side if not using custom point
// if (serverConfig.openaiOrgId !== undefined) {
// console.log("[Org ID]", serverConfig.openaiOrgId);
// }
const timeoutId = setTimeout(
() => {
@@ -42,16 +46,6 @@ export async function requestOpenai(req: NextRequest) {
10 * 60 * 1000,
);
if (serverConfig.isAzure) {
if (!serverConfig.azureApiVersion) {
return NextResponse.json({
error: true,
message: `missing AZURE_API_VERSION in server env vars`,
});
}
path = makeAzurePath(path, serverConfig.azureApiVersion);
}
const fetchUrl = `${baseUrl}/${path}`;
const fetchOptions: RequestInit = {
headers: {
@@ -63,7 +57,7 @@ export async function requestOpenai(req: NextRequest) {
}),
},
method: req.method,
body: req.body,
body: cloneBody,
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
redirect: "manual",
// @ts-ignore
@@ -72,19 +66,21 @@ export async function requestOpenai(req: NextRequest) {
};
// #1815 try to refuse gpt4 request
if (serverConfig.customModels && req.body) {
if (serverConfig.customModels && cloneBody) {
try {
const modelTable = collectModelTable(
DEFAULT_MODELS,
serverConfig.customModels,
);
const clonedBody = await req.text();
fetchOptions.body = clonedBody;
// const clonedBody = await req.text();
fetchOptions.body = cloneBody;
const jsonBody = JSON.parse(clonedBody) as { model?: string };
const jsonBody = JSON.parse(cloneBody) as {
model?: string;
};
// not undefined and is false
if (modelTable[jsonBody?.model ?? ""].available === false) {
if (!modelTable[jsonBody?.model ?? ""].available) {
return NextResponse.json(
{
error: true,

View File

@@ -0,0 +1,28 @@
import { NextRequest, NextResponse } from "next/server";
import prisma from "@/lib/prisma";
import { insertUser } from "@/lib/auth";
// Persists a client-submitted log entry via Prisma. If the payload names a
// user, that user is registered first. The response is always a status flag
// ({ status: 1 } on success, { status: 0 } on any failure) — errors are never
// surfaced to the caller.
async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  try {
    const entry = await req.json();
    const userName = entry?.userName;
    if (userName) {
      await insertUser({ name: userName });
    }
    await prisma.logEntry.create({ data: entry });
  } catch (e) {
    // Logging is best-effort: swallow the error and just signal failure.
    return NextResponse.json({ status: 0 });
  }
  return NextResponse.json({ status: 1 });
}
export const GET = handle;
export const POST = handle;
// export const runtime = "edge";

View File

@@ -0,0 +1,93 @@
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
const BASE_URL = process.env.MIDJOURNEY_PROXY_URL ?? null;
const MIDJOURNEY_PROXY_KEY = process.env.MIDJOURNEY_PROXY_KEY ?? null;
// Proxy handler for Midjourney requests: forwards the incoming request to a
// midjourney-proxy server (the server-configured MIDJOURNEY_PROXY_URL, or a
// per-request override header) and relays the upstream response back.
async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[Midjourney Route] params ", params);
  // A client may override the proxy endpoint via header, but only with an
  // absolute http(s) URL — anything else keeps the server default.
  const customMjProxyUrl = req.headers.get("midjourney-proxy-url");
  let mjProxyUrl = BASE_URL;
  if (
    customMjProxyUrl &&
    (customMjProxyUrl.startsWith("http://") ||
      customMjProxyUrl.startsWith("https://"))
  ) {
    mjProxyUrl = customMjProxyUrl;
  }
  if (!mjProxyUrl) {
    return NextResponse.json(
      {
        error: true,
        msg: "please set MIDJOURNEY_PROXY_URL in .env or set midjourney-proxy-url in config",
      },
      {
        status: 500,
      },
    );
  }
  // Read the body once as text; the JSON parse is best-effort (e.g. GET
  // polling requests carry no body, so jsonBody falls back to {}).
  let cloneBody, jsonBody;
  try {
    cloneBody = (await req.text()) as any;
    jsonBody = JSON.parse(cloneBody) as { model?: string };
  } catch (e) {
    jsonBody = {};
  }
  // NOTE(review): auth is computed but not enforced — the 401 branch is
  // commented out, so unauthenticated requests pass through. Confirm intended.
  const authResult = auth(req);
  // if (authResult.error) {
  //   return NextResponse.json(authResult, {
  //     status: 401,
  //   });
  // }
  // Strip the local route prefix; the remaining path+query is forwarded as-is.
  const reqPath = `${req.nextUrl.pathname}${req.nextUrl.search}`.replaceAll(
    "/api/midjourney/",
    "",
  );
  let fetchUrl = `${mjProxyUrl}/${reqPath}`;
  console.log("[MJ Proxy] ", fetchUrl);
  // Abort the upstream call if it takes longer than 15 seconds.
  const controller = new AbortController();
  const timeoutId = setTimeout(() => {
    controller.abort();
  }, 15 * 1000);
  const fetchOptions: RequestInit = {
    //@ts-ignore
    headers: {
      "Content-Type": "application/json",
      // NOTE(review): MIDJOURNEY_PROXY_KEY may be null when the env var is
      // unset, which would send a literal null header value — verify.
      Authorization: MIDJOURNEY_PROXY_KEY,
      // "mj-api-secret": API_SECRET,
    },
    cache: "no-store",
    method: req.method,
    body: cloneBody,
    signal: controller.signal,
    //@ts-ignore
    // duplex: "half",
  };
  try {
    const res = await fetch(fetchUrl, fetchOptions);
    // Re-wrap non-200 upstream responses so status/statusText are preserved
    // for the client.
    if (res.status !== 200) {
      return new Response(res.body, {
        status: res.status,
        statusText: res.statusText,
      });
    }
    return res;
  } finally {
    // Always clear the timer so the abort cannot fire after completion.
    clearTimeout(timeoutId);
  }
}
export const GET = handle;
export const POST = handle;

View File

@@ -1,12 +1,14 @@
import { type OpenAIListModelResponse } from "@/app/client/platforms/openai";
import { getServerSideConfig } from "@/app/config/server";
import { OpenaiPath } from "@/app/constant";
import { OpenaiPath, AZURE_PATH, AZURE_MODELS } from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "../../auth";
import { auth, getIP } from "../../auth";
import { getToken } from "next-auth/jwt";
import { requestOpenai } from "../../common";
import { headers } from "next/headers";
const ALLOWD_PATH = new Set(Object.values(OpenaiPath));
const ALLOWD_PATH = new Set(Object.values({ ...OpenaiPath, ...AZURE_PATH }));
function getModels(remoteModelRes: OpenAIListModelResponse) {
const config = getServerSideConfig();
@@ -17,6 +19,15 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
);
}
console.log(remoteModelRes.data);
// 过滤不需要的模型
remoteModelRes.data = remoteModelRes.data.filter(
(m) =>
m.id === "gpt-4-0613" ||
m.id === "gpt-3.5-turbo-16k" ||
m.id === "gpt-4-32k",
);
return remoteModelRes;
}
@@ -24,7 +35,7 @@ async function handle(
req: NextRequest,
{ params }: { params: { path: string[] } },
) {
console.log("[OpenAI Route] params ", params);
// console.log("[OpenAI Route] params ", params);
if (req.method === "OPTIONS") {
return NextResponse.json({ body: "OK" }, { status: 200 });
@@ -44,16 +55,57 @@ async function handle(
},
);
}
let cloneBody, jsonBody;
const authResult = auth(req);
if (authResult.error) {
return NextResponse.json(authResult, {
status: 401,
});
try {
cloneBody = (await req.text()) as any;
jsonBody = JSON.parse(cloneBody) as { model?: string };
} catch (e) {
jsonBody = {};
}
try {
const response = await requestOpenai(req);
const protocol = req.headers.get("x-forwarded-proto") || "http";
const baseUrl = process.env.NEXTAUTH_URL ?? "http://localhost:3000";
const ip = getIP(req);
// 对其进行 Base64 解码
let h_userName = req.headers.get("x-request-name");
if (h_userName) {
const buffer = Buffer.from(h_userName, "base64");
h_userName = decodeURIComponent(buffer.toString("utf-8"));
}
console.log("[中文]", h_userName, baseUrl);
const logData = {
ip: ip,
path: subpath,
logEntry: JSON.stringify(jsonBody),
model: jsonBody?.model,
userName: h_userName,
};
await fetch(`${baseUrl}/api/logs/openai`, {
method: "POST",
headers: {
"Content-Type": "application/json",
// ...req.headers,
},
body: JSON.stringify(logData),
});
} catch (e) {
console.log("[LOG]", e, "==========");
}
const isAzure = AZURE_MODELS.includes(jsonBody?.model as string);
// console.log("[Models]", jsonBody?.model);
const authResult = auth(req, isAzure);
// if (authResult.error) {
// return NextResponse.json(authResult, {
// status: 401,
// });
// }
try {
const response = await requestOpenai(req, cloneBody, isAzure);
// list models
if (subpath === OpenaiPath.ListModelPath && response.status === 200) {
@@ -75,4 +127,22 @@ export const GET = handle;
export const POST = handle;
export const runtime = "edge";
export const preferredRegion = ['arn1', 'bom1', 'cdg1', 'cle1', 'cpt1', 'dub1', 'fra1', 'gru1', 'hnd1', 'iad1', 'icn1', 'kix1', 'lhr1', 'pdx1', 'sfo1', 'sin1', 'syd1'];
export const preferredRegion = [
"arn1",
"bom1",
"cdg1",
"cle1",
"cpt1",
"dub1",
"fra1",
"gru1",
"hnd1",
"iad1",
"icn1",
"kix1",
"lhr1",
"pdx1",
"sfo1",
"sin1",
"syd1",
];