mirror of
https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
synced 2025-11-17 06:23:44 +08:00
重构绘图相关 (refactor drawing-related code)
This commit is contained in:
@@ -3,6 +3,7 @@ import { getServerSideConfig } from "../config/server";
|
||||
import { DEFAULT_MODELS, OPENAI_BASE_URL, GEMINI_BASE_URL } from "../constant";
|
||||
import { collectModelTable } from "../utils/model";
|
||||
import { makeAzurePath } from "../azure";
|
||||
import { getIP } from "@/app/api/auth";
|
||||
|
||||
const serverConfig = getServerSideConfig();
|
||||
|
||||
@@ -130,3 +131,45 @@ export async function requestOpenai(
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
}
|
||||
|
||||
export async function requestLog(
|
||||
req: NextRequest,
|
||||
jsonBody: any,
|
||||
url_path: string,
|
||||
) {
|
||||
// LOG
|
||||
try {
|
||||
if (url_path.startsWith("mj/") && !url_path.startsWith("mj/submit/")) {
|
||||
return;
|
||||
}
|
||||
// const protocol = req.headers.get("x-forwarded-proto") || "http";
|
||||
//const baseUrl = process.env.NEXTAUTH_URL ?? "http://localhost:3000";
|
||||
const baseUrl = "http://localhost:3000";
|
||||
const ip = getIP(req);
|
||||
// 对其进行 Base64 解码
|
||||
let h_userName = req.headers.get("x-request-name");
|
||||
if (h_userName) {
|
||||
const buffer = Buffer.from(h_userName, "base64");
|
||||
h_userName = decodeURIComponent(buffer.toString("utf-8"));
|
||||
}
|
||||
console.log("[中文]", h_userName, baseUrl);
|
||||
const logData = {
|
||||
ip: ip,
|
||||
path: url_path,
|
||||
// logEntry: JSON.stringify(jsonBody),
|
||||
model: url_path.startsWith("mj/") ? "midjourney" : jsonBody?.model, // 后面尝试请求是添加到参数
|
||||
userName: h_userName,
|
||||
};
|
||||
|
||||
await fetch(`${baseUrl}/api/logs/openai`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
// ...req.headers,
|
||||
},
|
||||
body: JSON.stringify(logData),
|
||||
});
|
||||
} catch (e) {
|
||||
console.log("[LOG]", e, "==========");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -31,13 +31,18 @@ async function handle(
|
||||
const regex_message = /(?<="content":")(.*?)(?="}[,\]])/g;
|
||||
const matchAllMessage = request_data.logEntry.match(regex_message);
|
||||
// console.log(matchAllMessage, "=====");
|
||||
if (matchAllMessage.length > 0) {
|
||||
if (matchAllMessage && matchAllMessage.length > 0) {
|
||||
request_data.logToken =
|
||||
getTokenLength(matchAllMessage.join(" ")) +
|
||||
matchAllMessage.length * 3;
|
||||
} else {
|
||||
request_data.logToken = 0;
|
||||
}
|
||||
delete request_data["logEntry"];
|
||||
}
|
||||
if (request_data?.model == "midjourney") {
|
||||
request_data.logToken = 1000;
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[LOG]", "logToken", e);
|
||||
request_data.logToken = 0;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth } from "@/app/api/auth";
|
||||
import { auth, getIP } from "@/app/api/auth";
|
||||
import { ModelProvider } from "@/app/constant";
|
||||
import { requestLog } from "@/app/api/common";
|
||||
|
||||
const BASE_URL = process.env.MIDJOURNEY_PROXY_URL ?? null;
|
||||
const MIDJOURNEY_PROXY_KEY = process.env.MIDJOURNEY_PROXY_KEY ?? null;
|
||||
@@ -53,6 +54,10 @@ async function handle(
|
||||
"",
|
||||
);
|
||||
|
||||
if (reqPath.startsWith("mj/submit/")) {
|
||||
await requestLog(req, jsonBody, reqPath);
|
||||
}
|
||||
|
||||
let fetchUrl = `${mjProxyUrl}/${reqPath}`;
|
||||
|
||||
console.log("[MJ Proxy] ", fetchUrl);
|
||||
|
||||
@@ -10,7 +10,7 @@ import { prettyObject } from "@/app/utils/format";
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth, getIP } from "../../auth";
|
||||
import { getToken } from "next-auth/jwt";
|
||||
import { requestOpenai } from "../../common";
|
||||
import { requestLog, requestOpenai } from "../../common";
|
||||
import { headers } from "next/headers";
|
||||
|
||||
const ALLOWD_PATH = new Set(Object.values({ ...OpenaiPath, ...AZURE_PATH }));
|
||||
@@ -75,37 +75,7 @@ async function handle(
|
||||
jsonBody = {};
|
||||
}
|
||||
|
||||
try {
|
||||
const protocol = req.headers.get("x-forwarded-proto") || "http";
|
||||
//const baseUrl = process.env.NEXTAUTH_URL ?? "http://localhost:3000";
|
||||
const baseUrl = "http://localhost:3000";
|
||||
const ip = getIP(req);
|
||||
// 对其进行 Base64 解码
|
||||
let h_userName = req.headers.get("x-request-name");
|
||||
if (h_userName) {
|
||||
const buffer = Buffer.from(h_userName, "base64");
|
||||
h_userName = decodeURIComponent(buffer.toString("utf-8"));
|
||||
}
|
||||
console.log("[中文]", h_userName, baseUrl);
|
||||
const logData = {
|
||||
ip: ip,
|
||||
path: subpath,
|
||||
logEntry: JSON.stringify(jsonBody),
|
||||
model: jsonBody?.model,
|
||||
userName: h_userName,
|
||||
};
|
||||
|
||||
await fetch(`${baseUrl}/api/logs/openai`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
// ...req.headers,
|
||||
},
|
||||
body: JSON.stringify(logData),
|
||||
});
|
||||
} catch (e) {
|
||||
console.log("[LOG]", e, "==========");
|
||||
}
|
||||
await requestLog(req, jsonBody, subpath);
|
||||
|
||||
const isAzure = AZURE_MODELS.includes(jsonBody?.model as string);
|
||||
// console.log("[Models]", jsonBody?.model);
|
||||
|
||||
Reference in New Issue
Block a user