Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
Synced 2025-11-13 20:53:45 +08:00

Merge remote-tracking branch 'upstream/main'

app/api/alibaba/[...path]/route.ts  (new file, 155 lines)
@@ -0,0 +1,155 @@
|
||||
import { getServerSideConfig } from "@/app/config/server";
|
||||
import {
|
||||
Alibaba,
|
||||
ALIBABA_BASE_URL,
|
||||
ApiPath,
|
||||
ModelProvider,
|
||||
ServiceProvider,
|
||||
} from "@/app/constant";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth } from "@/app/api/auth";
|
||||
import { isModelAvailableInServer } from "@/app/utils/model";
|
||||
import type { RequestPayload } from "@/app/client/platforms/openai";
|
||||
|
||||
const serverConfig = getServerSideConfig();
|
||||
|
||||
async function handle(
|
||||
req: NextRequest,
|
||||
{ params }: { params: { path: string[] } },
|
||||
) {
|
||||
console.log("[Alibaba Route] params ", params);
|
||||
|
||||
if (req.method === "OPTIONS") {
|
||||
return NextResponse.json({ body: "OK" }, { status: 200 });
|
||||
}
|
||||
|
||||
const authResult = auth(req, ModelProvider.Qwen);
|
||||
if (authResult.error) {
|
||||
return NextResponse.json(authResult, {
|
||||
status: 401,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await request(req);
|
||||
return response;
|
||||
} catch (e) {
|
||||
console.error("[Alibaba] ", e);
|
||||
return NextResponse.json(prettyObject(e));
|
||||
}
|
||||
}
|
||||
|
||||
export const GET = handle;
|
||||
export const POST = handle;
|
||||
|
||||
export const runtime = "edge";
|
||||
export const preferredRegion = [
|
||||
"arn1",
|
||||
"bom1",
|
||||
"cdg1",
|
||||
"cle1",
|
||||
"cpt1",
|
||||
"dub1",
|
||||
"fra1",
|
||||
"gru1",
|
||||
"hnd1",
|
||||
"iad1",
|
||||
"icn1",
|
||||
"kix1",
|
||||
"lhr1",
|
||||
"pdx1",
|
||||
"sfo1",
|
||||
"sin1",
|
||||
"syd1",
|
||||
];
|
||||
|
||||
async function request(req: NextRequest) {
|
||||
const controller = new AbortController();
|
||||
|
||||
// alibaba use base url or just remove the path
|
||||
let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Alibaba, "");
|
||||
|
||||
let baseUrl = serverConfig.alibabaUrl || ALIBABA_BASE_URL;
|
||||
|
||||
if (!baseUrl.startsWith("http")) {
|
||||
baseUrl = `https://${baseUrl}`;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, -1);
|
||||
}
|
||||
|
||||
console.log("[Proxy] ", path);
|
||||
console.log("[Base Url]", baseUrl);
|
||||
|
||||
const timeoutId = setTimeout(
|
||||
() => {
|
||||
controller.abort();
|
||||
},
|
||||
10 * 60 * 1000,
|
||||
);
|
||||
|
||||
const fetchUrl = `${baseUrl}${path}`;
|
||||
const fetchOptions: RequestInit = {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: req.headers.get("Authorization") ?? "",
|
||||
"X-DashScope-SSE": req.headers.get("X-DashScope-SSE") ?? "disable",
|
||||
},
|
||||
method: req.method,
|
||||
body: req.body,
|
||||
redirect: "manual",
|
||||
// @ts-ignore
|
||||
duplex: "half",
|
||||
signal: controller.signal,
|
||||
};
|
||||
|
||||
// #1815 try to refuse some request to some models
|
||||
if (serverConfig.customModels && req.body) {
|
||||
try {
|
||||
const clonedBody = await req.text();
|
||||
fetchOptions.body = clonedBody;
|
||||
|
||||
const jsonBody = JSON.parse(clonedBody) as { model?: string };
|
||||
|
||||
// not undefined and is false
|
||||
if (
|
||||
isModelAvailableInServer(
|
||||
serverConfig.customModels,
|
||||
jsonBody?.model as string,
|
||||
ServiceProvider.Alibaba as string,
|
||||
)
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: true,
|
||||
message: `you are not allowed to use ${jsonBody?.model} model`,
|
||||
},
|
||||
{
|
||||
status: 403,
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Alibaba] filter`, e);
|
||||
}
|
||||
}
|
||||
try {
|
||||
const res = await fetch(fetchUrl, fetchOptions);
|
||||
|
||||
// to prevent browser prompt for credentials
|
||||
const newHeaders = new Headers(res.headers);
|
||||
newHeaders.delete("www-authenticate");
|
||||
// to disable nginx buffering
|
||||
newHeaders.set("X-Accel-Buffering", "no");
|
||||
|
||||
return new Response(res.body, {
|
||||
status: res.status,
|
||||
statusText: res.statusText,
|
||||
headers: newHeaders,
|
||||
});
|
||||
} finally {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
}
|
||||
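The new Alibaba route forwards the client's Authorization header untouched, toggles streaming through the X-DashScope-SSE header (defaulting to "disable" when absent), and answers 403 when CUSTOM_MODELS filters out the requested model. Below is a minimal sketch of calling this proxy from the web app, assuming same-origin hosting; the concrete chat path is defined by Alibaba.ChatPath in app/constant and is only approximated here, and the model name is a placeholder.

// Illustrative sketch, not part of the commit: streaming a Qwen chat through /api/alibaba.
async function callQwenViaProxy(apiKey: string, prompt: string): Promise<Response> {
  const payload = {
    model: "qwen-turbo", // placeholder model name
    input: { messages: [{ role: "user", content: prompt }] },
    parameters: { result_format: "message", incremental_output: true, temperature: 0.7, top_p: 0.9 },
  };
  // Path shown for illustration; the client code further below derives it from Alibaba.ChatPath.
  return fetch("/api/alibaba/v1/services/aigc/text-generation/generation", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,   // forwarded as-is by the route
      "X-DashScope-SSE": "enable",         // the route defaults this to "disable"
    },
    body: JSON.stringify(payload),
  });
}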
@@ -4,12 +4,14 @@ import {
|
||||
Anthropic,
|
||||
ApiPath,
|
||||
DEFAULT_MODELS,
|
||||
ServiceProvider,
|
||||
ModelProvider,
|
||||
} from "@/app/constant";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth } from "../../auth";
|
||||
import { collectModelTable } from "@/app/utils/model";
|
||||
import { isModelAvailableInServer } from "@/app/utils/model";
|
||||
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
|
||||
|
||||
const ALLOWD_PATH = new Set([Anthropic.ChatPath, Anthropic.ChatPath1]);
|
||||
|
||||
@@ -113,7 +115,8 @@ async function request(req: NextRequest) {
|
||||
10 * 60 * 1000,
|
||||
);
|
||||
|
||||
const fetchUrl = `${baseUrl}${path}`;
|
||||
// try rebuild url, when using cloudflare ai gateway in server
|
||||
const fetchUrl = cloudflareAIGatewayUrl(`${baseUrl}${path}`);
|
||||
|
||||
const fetchOptions: RequestInit = {
|
||||
headers: {
|
||||
@@ -136,17 +139,19 @@ async function request(req: NextRequest) {
|
||||
// #1815 try to refuse some request to some models
|
||||
if (serverConfig.customModels && req.body) {
|
||||
try {
|
||||
const modelTable = collectModelTable(
|
||||
DEFAULT_MODELS,
|
||||
serverConfig.customModels,
|
||||
);
|
||||
const clonedBody = await req.text();
|
||||
fetchOptions.body = clonedBody;
|
||||
|
||||
const jsonBody = JSON.parse(clonedBody) as { model?: string };
|
||||
|
||||
// not undefined and is false
|
||||
if (modelTable[jsonBody?.model ?? ""].available === false) {
|
||||
if (
|
||||
isModelAvailableInServer(
|
||||
serverConfig.customModels,
|
||||
jsonBody?.model as string,
|
||||
ServiceProvider.Anthropic as string,
|
||||
)
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: true,
|
||||
@@ -161,17 +166,17 @@ async function request(req: NextRequest) {
|
||||
console.error(`[Anthropic] filter`, e);
|
||||
}
|
||||
}
|
||||
console.log("[Anthropic request]", fetchOptions.headers, req.method);
|
||||
// console.log("[Anthropic request]", fetchOptions.headers, req.method);
|
||||
try {
|
||||
const res = await fetch(fetchUrl, fetchOptions);
|
||||
|
||||
console.log(
|
||||
"[Anthropic response]",
|
||||
res.status,
|
||||
" ",
|
||||
res.headers,
|
||||
res.url,
|
||||
);
|
||||
// console.log(
|
||||
// "[Anthropic response]",
|
||||
// res.status,
|
||||
// " ",
|
||||
// res.headers,
|
||||
// res.url,
|
||||
// );
|
||||
// to prevent browser prompt for credentials
|
||||
const newHeaders = new Headers(res.headers);
|
||||
newHeaders.delete("www-authenticate");
|
||||
|
||||
@@ -78,9 +78,18 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
|
||||
case ModelProvider.Claude:
|
||||
systemApiKey = serverConfig.anthropicApiKey;
|
||||
break;
|
||||
case ModelProvider.Doubao:
|
||||
systemApiKey = serverConfig.bytedanceApiKey;
|
||||
break;
|
||||
case ModelProvider.Ernie:
|
||||
systemApiKey = serverConfig.baiduApiKey;
|
||||
break;
|
||||
case ModelProvider.Qwen:
|
||||
systemApiKey = serverConfig.alibabaApiKey;
|
||||
break;
|
||||
case ModelProvider.GPT:
|
||||
default:
|
||||
if (serverConfig.isAzure) {
|
||||
if (req.nextUrl.pathname.includes("azure/deployments")) {
|
||||
systemApiKey = serverConfig.azureApiKey;
|
||||
} else {
|
||||
systemApiKey = serverConfig.apiKey;
|
||||
|
||||
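The auth() change maps each ModelProvider to its own server-side key (Doubao to BYTEDANCE_API_KEY, Ernie to BAIDU_API_KEY, Qwen to ALIBABA_API_KEY), and the GPT/default branch now chooses the Azure key from the request path rather than a global isAzure flag. A condensed sketch of that mapping follows; the type and function names here are invented for illustration.

// Illustrative restatement of the provider-to-key selection added in auth.ts.
type ServerKeys = {
  anthropicApiKey?: string;
  bytedanceApiKey?: string;
  baiduApiKey?: string;
  alibabaApiKey?: string;
  azureApiKey?: string;
  apiKey?: string;
};

function pickSystemApiKey(provider: string, pathname: string, cfg: ServerKeys): string | undefined {
  switch (provider) {
    case "Claude": return cfg.anthropicApiKey;
    case "Doubao": return cfg.bytedanceApiKey;
    case "Ernie": return cfg.baiduApiKey;
    case "Qwen": return cfg.alibabaApiKey;
    case "GPT":
    default:
      // Azure is detected from the URL so OpenAI and Azure keys can coexist.
      return pathname.includes("azure/deployments") ? cfg.azureApiKey : cfg.apiKey;
  }
}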
app/api/azure/[...path]/route.ts  (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
import { getServerSideConfig } from "@/app/config/server";
|
||||
import { ModelProvider } from "@/app/constant";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth } from "../../auth";
|
||||
import { requestOpenai } from "../../common";
|
||||
|
||||
async function handle(
|
||||
req: NextRequest,
|
||||
{ params }: { params: { path: string[] } },
|
||||
) {
|
||||
console.log("[Azure Route] params ", params);
|
||||
|
||||
if (req.method === "OPTIONS") {
|
||||
return NextResponse.json({ body: "OK" }, { status: 200 });
|
||||
}
|
||||
|
||||
const subpath = params.path.join("/");
|
||||
|
||||
const authResult = auth(req, ModelProvider.GPT);
|
||||
if (authResult.error) {
|
||||
return NextResponse.json(authResult, {
|
||||
status: 401,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
return await requestOpenai(req);
|
||||
} catch (e) {
|
||||
console.error("[Azure] ", e);
|
||||
return NextResponse.json(prettyObject(e));
|
||||
}
|
||||
}
|
||||
|
||||
export const GET = handle;
|
||||
export const POST = handle;
|
||||
|
||||
export const runtime = "edge";
|
||||
export const preferredRegion = [
|
||||
"arn1",
|
||||
"bom1",
|
||||
"cdg1",
|
||||
"cle1",
|
||||
"cpt1",
|
||||
"dub1",
|
||||
"fra1",
|
||||
"gru1",
|
||||
"hnd1",
|
||||
"iad1",
|
||||
"icn1",
|
||||
"kix1",
|
||||
"lhr1",
|
||||
"pdx1",
|
||||
"sfo1",
|
||||
"sin1",
|
||||
"syd1",
|
||||
];
|
||||
app/api/baidu/[...path]/route.ts  (new file, 169 lines)
@@ -0,0 +1,169 @@
|
||||
import { getServerSideConfig } from "@/app/config/server";
|
||||
import {
|
||||
BAIDU_BASE_URL,
|
||||
ApiPath,
|
||||
ModelProvider,
|
||||
BAIDU_OATUH_URL,
|
||||
ServiceProvider,
|
||||
} from "@/app/constant";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth } from "@/app/api/auth";
|
||||
import { isModelAvailableInServer } from "@/app/utils/model";
|
||||
import { getAccessToken } from "@/app/utils/baidu";
|
||||
|
||||
const serverConfig = getServerSideConfig();
|
||||
|
||||
async function handle(
|
||||
req: NextRequest,
|
||||
{ params }: { params: { path: string[] } },
|
||||
) {
|
||||
console.log("[Baidu Route] params ", params);
|
||||
|
||||
if (req.method === "OPTIONS") {
|
||||
return NextResponse.json({ body: "OK" }, { status: 200 });
|
||||
}
|
||||
|
||||
const authResult = auth(req, ModelProvider.Ernie);
|
||||
if (authResult.error) {
|
||||
return NextResponse.json(authResult, {
|
||||
status: 401,
|
||||
});
|
||||
}
|
||||
|
||||
if (!serverConfig.baiduApiKey || !serverConfig.baiduSecretKey) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: true,
|
||||
message: `missing BAIDU_API_KEY or BAIDU_SECRET_KEY in server env vars`,
|
||||
},
|
||||
{
|
||||
status: 401,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await request(req);
|
||||
return response;
|
||||
} catch (e) {
|
||||
console.error("[Baidu] ", e);
|
||||
return NextResponse.json(prettyObject(e));
|
||||
}
|
||||
}
|
||||
|
||||
export const GET = handle;
|
||||
export const POST = handle;
|
||||
|
||||
export const runtime = "edge";
|
||||
export const preferredRegion = [
|
||||
"arn1",
|
||||
"bom1",
|
||||
"cdg1",
|
||||
"cle1",
|
||||
"cpt1",
|
||||
"dub1",
|
||||
"fra1",
|
||||
"gru1",
|
||||
"hnd1",
|
||||
"iad1",
|
||||
"icn1",
|
||||
"kix1",
|
||||
"lhr1",
|
||||
"pdx1",
|
||||
"sfo1",
|
||||
"sin1",
|
||||
"syd1",
|
||||
];
|
||||
|
||||
async function request(req: NextRequest) {
|
||||
const controller = new AbortController();
|
||||
|
||||
let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Baidu, "");
|
||||
|
||||
let baseUrl = serverConfig.baiduUrl || BAIDU_BASE_URL;
|
||||
|
||||
if (!baseUrl.startsWith("http")) {
|
||||
baseUrl = `https://${baseUrl}`;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, -1);
|
||||
}
|
||||
|
||||
console.log("[Proxy] ", path);
|
||||
console.log("[Base Url]", baseUrl);
|
||||
|
||||
const timeoutId = setTimeout(
|
||||
() => {
|
||||
controller.abort();
|
||||
},
|
||||
10 * 60 * 1000,
|
||||
);
|
||||
|
||||
const { access_token } = await getAccessToken(
|
||||
serverConfig.baiduApiKey as string,
|
||||
serverConfig.baiduSecretKey as string,
|
||||
);
|
||||
const fetchUrl = `${baseUrl}${path}?access_token=${access_token}`;
|
||||
|
||||
const fetchOptions: RequestInit = {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
method: req.method,
|
||||
body: req.body,
|
||||
redirect: "manual",
|
||||
// @ts-ignore
|
||||
duplex: "half",
|
||||
signal: controller.signal,
|
||||
};
|
||||
|
||||
// #1815 try to refuse some request to some models
|
||||
if (serverConfig.customModels && req.body) {
|
||||
try {
|
||||
const clonedBody = await req.text();
|
||||
fetchOptions.body = clonedBody;
|
||||
|
||||
const jsonBody = JSON.parse(clonedBody) as { model?: string };
|
||||
|
||||
// not undefined and is false
|
||||
if (
|
||||
isModelAvailableInServer(
|
||||
serverConfig.customModels,
|
||||
jsonBody?.model as string,
|
||||
ServiceProvider.Baidu as string,
|
||||
)
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: true,
|
||||
message: `you are not allowed to use ${jsonBody?.model} model`,
|
||||
},
|
||||
{
|
||||
status: 403,
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[Baidu] filter`, e);
|
||||
}
|
||||
}
|
||||
try {
|
||||
const res = await fetch(fetchUrl, fetchOptions);
|
||||
|
||||
// to prevent browser prompt for credentials
|
||||
const newHeaders = new Headers(res.headers);
|
||||
newHeaders.delete("www-authenticate");
|
||||
// to disable nginx buffering
|
||||
newHeaders.set("X-Accel-Buffering", "no");
|
||||
|
||||
return new Response(res.body, {
|
||||
status: res.status,
|
||||
statusText: res.statusText,
|
||||
headers: newHeaders,
|
||||
});
|
||||
} finally {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
}
|
||||
app/api/bytedance/[...path]/route.ts  (new file, 153 lines)
@@ -0,0 +1,153 @@
|
||||
import { getServerSideConfig } from "@/app/config/server";
|
||||
import {
|
||||
BYTEDANCE_BASE_URL,
|
||||
ApiPath,
|
||||
ModelProvider,
|
||||
ServiceProvider,
|
||||
} from "@/app/constant";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth } from "@/app/api/auth";
|
||||
import { isModelAvailableInServer } from "@/app/utils/model";
|
||||
|
||||
const serverConfig = getServerSideConfig();
|
||||
|
||||
async function handle(
|
||||
req: NextRequest,
|
||||
{ params }: { params: { path: string[] } },
|
||||
) {
|
||||
console.log("[ByteDance Route] params ", params);
|
||||
|
||||
if (req.method === "OPTIONS") {
|
||||
return NextResponse.json({ body: "OK" }, { status: 200 });
|
||||
}
|
||||
|
||||
const authResult = auth(req, ModelProvider.Doubao);
|
||||
if (authResult.error) {
|
||||
return NextResponse.json(authResult, {
|
||||
status: 401,
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await request(req);
|
||||
return response;
|
||||
} catch (e) {
|
||||
console.error("[ByteDance] ", e);
|
||||
return NextResponse.json(prettyObject(e));
|
||||
}
|
||||
}
|
||||
|
||||
export const GET = handle;
|
||||
export const POST = handle;
|
||||
|
||||
export const runtime = "edge";
|
||||
export const preferredRegion = [
|
||||
"arn1",
|
||||
"bom1",
|
||||
"cdg1",
|
||||
"cle1",
|
||||
"cpt1",
|
||||
"dub1",
|
||||
"fra1",
|
||||
"gru1",
|
||||
"hnd1",
|
||||
"iad1",
|
||||
"icn1",
|
||||
"kix1",
|
||||
"lhr1",
|
||||
"pdx1",
|
||||
"sfo1",
|
||||
"sin1",
|
||||
"syd1",
|
||||
];
|
||||
|
||||
async function request(req: NextRequest) {
|
||||
const controller = new AbortController();
|
||||
|
||||
let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ByteDance, "");
|
||||
|
||||
let baseUrl = serverConfig.bytedanceUrl || BYTEDANCE_BASE_URL;
|
||||
|
||||
if (!baseUrl.startsWith("http")) {
|
||||
baseUrl = `https://${baseUrl}`;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, -1);
|
||||
}
|
||||
|
||||
console.log("[Proxy] ", path);
|
||||
console.log("[Base Url]", baseUrl);
|
||||
|
||||
const timeoutId = setTimeout(
|
||||
() => {
|
||||
controller.abort();
|
||||
},
|
||||
10 * 60 * 1000,
|
||||
);
|
||||
|
||||
const fetchUrl = `${baseUrl}${path}`;
|
||||
|
||||
const fetchOptions: RequestInit = {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Authorization: req.headers.get("Authorization") ?? "",
|
||||
},
|
||||
method: req.method,
|
||||
body: req.body,
|
||||
redirect: "manual",
|
||||
// @ts-ignore
|
||||
duplex: "half",
|
||||
signal: controller.signal,
|
||||
};
|
||||
|
||||
// #1815 try to refuse some request to some models
|
||||
if (serverConfig.customModels && req.body) {
|
||||
try {
|
||||
const clonedBody = await req.text();
|
||||
fetchOptions.body = clonedBody;
|
||||
|
||||
const jsonBody = JSON.parse(clonedBody) as { model?: string };
|
||||
|
||||
// not undefined and is false
|
||||
if (
|
||||
isModelAvailableInServer(
|
||||
serverConfig.customModels,
|
||||
jsonBody?.model as string,
|
||||
ServiceProvider.ByteDance as string,
|
||||
)
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: true,
|
||||
message: `you are not allowed to use ${jsonBody?.model} model`,
|
||||
},
|
||||
{
|
||||
status: 403,
|
||||
},
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(`[ByteDance] filter`, e);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await fetch(fetchUrl, fetchOptions);
|
||||
|
||||
// to prevent browser prompt for credentials
|
||||
const newHeaders = new Headers(res.headers);
|
||||
newHeaders.delete("www-authenticate");
|
||||
// to disable nginx buffering
|
||||
newHeaders.set("X-Accel-Buffering", "no");
|
||||
|
||||
return new Response(res.body, {
|
||||
status: res.status,
|
||||
statusText: res.statusText,
|
||||
headers: newHeaders,
|
||||
});
|
||||
} finally {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
}
|
||||
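All of the new provider routes (Alibaba, Azure, Baidu, ByteDance) repeat the same skeleton: a ten-minute AbortController timeout, duplex: "half" so a streaming request body can be forwarded, and a response whose headers drop www-authenticate and set X-Accel-Buffering: no to stop nginx from buffering SSE. A small sketch of that shared shape; the helper name is invented for illustration.

// Illustrative helper capturing the proxy pattern repeated in each route above.
async function proxyWithTimeout(
  fetchUrl: string,
  init: RequestInit,
  timeoutMs: number = 10 * 60 * 1000,
): Promise<Response> {
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), timeoutMs);
  try {
    const res = await fetch(fetchUrl, { ...init, signal: controller.signal });
    const headers = new Headers(res.headers);
    headers.delete("www-authenticate");     // prevent the browser credentials prompt
    headers.set("X-Accel-Buffering", "no"); // disable nginx buffering for streamed responses
    return new Response(res.body, { status: res.status, statusText: res.statusText, headers });
  } finally {
    clearTimeout(timeoutId);
  }
}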
@@ -1,17 +1,24 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { getServerSideConfig } from "../config/server";
|
||||
import { DEFAULT_MODELS, OPENAI_BASE_URL } from "../constant";
|
||||
import { collectModelTable } from "../utils/model";
|
||||
import { makeAzurePath } from "../azure";
|
||||
import {
|
||||
DEFAULT_MODELS,
|
||||
OPENAI_BASE_URL,
|
||||
GEMINI_BASE_URL,
|
||||
ServiceProvider,
|
||||
} from "../constant";
|
||||
import { isModelAvailableInServer } from "../utils/model";
|
||||
import { cloudflareAIGatewayUrl } from "../utils/cloudflare";
|
||||
|
||||
const serverConfig = getServerSideConfig();
|
||||
|
||||
export async function requestOpenai(req: NextRequest) {
|
||||
const controller = new AbortController();
|
||||
|
||||
const isAzure = req.nextUrl.pathname.includes("azure/deployments");
|
||||
|
||||
var authValue,
|
||||
authHeaderName = "";
|
||||
if (serverConfig.isAzure) {
|
||||
if (isAzure) {
|
||||
authValue =
|
||||
req.headers
|
||||
.get("Authorization")
|
||||
@@ -31,7 +38,7 @@ export async function requestOpenai(req: NextRequest) {
|
||||
);
|
||||
|
||||
let baseUrl =
|
||||
serverConfig.azureUrl || serverConfig.baseUrl || OPENAI_BASE_URL;
|
||||
(isAzure ? serverConfig.azureUrl : serverConfig.baseUrl) || OPENAI_BASE_URL;
|
||||
|
||||
if (!baseUrl.startsWith("http")) {
|
||||
baseUrl = `https://${baseUrl}`;
|
||||
@@ -51,36 +58,49 @@ export async function requestOpenai(req: NextRequest) {
|
||||
10 * 60 * 1000,
|
||||
);
|
||||
|
||||
if (serverConfig.isAzure) {
|
||||
if (!serverConfig.azureApiVersion) {
|
||||
return NextResponse.json({
|
||||
error: true,
|
||||
message: `missing AZURE_API_VERSION in server env vars`,
|
||||
});
|
||||
if (isAzure) {
|
||||
const azureApiVersion =
|
||||
req?.nextUrl?.searchParams?.get("api-version") ||
|
||||
serverConfig.azureApiVersion;
|
||||
baseUrl = baseUrl.split("/deployments").shift() as string;
|
||||
path = `${req.nextUrl.pathname.replaceAll(
|
||||
"/api/azure/",
|
||||
"",
|
||||
)}?api-version=${azureApiVersion}`;
|
||||
|
||||
// Forward compatibility:
|
||||
// if display_name(deployment_name) not set, and '{deploy-id}' in AZURE_URL
|
||||
// then using default '{deploy-id}'
|
||||
if (serverConfig.customModels && serverConfig.azureUrl) {
|
||||
const modelName = path.split("/")[1];
|
||||
let realDeployName = "";
|
||||
serverConfig.customModels
|
||||
.split(",")
|
||||
.filter((v) => !!v && !v.startsWith("-") && v.includes(modelName))
|
||||
.forEach((m) => {
|
||||
const [fullName, displayName] = m.split("=");
|
||||
const [_, providerName] = fullName.split("@");
|
||||
if (providerName === "azure" && !displayName) {
|
||||
const [_, deployId] = (serverConfig?.azureUrl ?? "").split(
|
||||
"deployments/",
|
||||
);
|
||||
if (deployId) {
|
||||
realDeployName = deployId;
|
||||
}
|
||||
}
|
||||
});
|
||||
if (realDeployName) {
|
||||
console.log("[Replace with DeployId", realDeployName);
|
||||
path = path.replaceAll(modelName, realDeployName);
|
||||
}
|
||||
}
|
||||
path = makeAzurePath(path, serverConfig.azureApiVersion);
|
||||
}
|
||||
|
||||
let jsonBody;
|
||||
let clonedBody;
|
||||
const contentType = req.headers.get("Content-Type");
|
||||
if (
|
||||
req.method !== "GET" &&
|
||||
req.method !== "HEAD" &&
|
||||
contentType?.includes("json")
|
||||
) {
|
||||
clonedBody = await req.text();
|
||||
jsonBody = JSON.parse(clonedBody) as { model?: string };
|
||||
} else {
|
||||
clonedBody = req.body;
|
||||
}
|
||||
if (serverConfig.isAzure) {
|
||||
baseUrl = `${baseUrl}/${jsonBody?.model}`;
|
||||
}
|
||||
const fetchUrl = `${baseUrl}/${path}`;
|
||||
const fetchUrl = cloudflareAIGatewayUrl(`${baseUrl}/${path}`);
|
||||
console.log("fetchUrl", fetchUrl);
|
||||
const fetchOptions: RequestInit = {
|
||||
headers: {
|
||||
"Content-Type": contentType ?? "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"Cache-Control": "no-store",
|
||||
[authHeaderName]: authValue,
|
||||
...(serverConfig.openaiOrgId && {
|
||||
@@ -88,7 +108,7 @@ export async function requestOpenai(req: NextRequest) {
|
||||
}),
|
||||
},
|
||||
method: req.method,
|
||||
body: clonedBody,
|
||||
body: req.body,
|
||||
// to fix #2485: https://stackoverflow.com/questions/55920957/cloudflare-worker-typeerror-one-time-use-body
|
||||
redirect: "manual",
|
||||
// @ts-ignore
|
||||
@@ -97,18 +117,26 @@ export async function requestOpenai(req: NextRequest) {
|
||||
};
|
||||
|
||||
// #1815 try to refuse gpt4 request
|
||||
if (serverConfig.customModels && clonedBody) {
|
||||
if (serverConfig.customModels && req.body) {
|
||||
try {
|
||||
const modelTable = collectModelTable(
|
||||
DEFAULT_MODELS,
|
||||
serverConfig.customModels,
|
||||
);
|
||||
// const clonedBody = await req.text();
|
||||
// const jsonBody = JSON.parse(clonedBody) as { model?: string };
|
||||
const clonedBody = await req.text();
|
||||
fetchOptions.body = clonedBody;
|
||||
|
||||
const jsonBody = JSON.parse(clonedBody) as { model?: string };
|
||||
|
||||
// not undefined and is false
|
||||
if (modelTable[jsonBody?.model ?? ""]?.available === false) {
|
||||
if (
|
||||
isModelAvailableInServer(
|
||||
serverConfig.customModels,
|
||||
jsonBody?.model as string,
|
||||
ServiceProvider.OpenAI as string,
|
||||
) ||
|
||||
isModelAvailableInServer(
|
||||
serverConfig.customModels,
|
||||
jsonBody?.model as string,
|
||||
ServiceProvider.Azure as string,
|
||||
)
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: true,
|
||||
|
||||
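In common.ts the makeAzurePath helper is dropped in favor of inline handling: when the path contains "azure/deployments", the api-version comes from the query string (falling back to AZURE_API_VERSION), the base URL is cut at "/deployments", and an unnamed deployment can be backfilled from the {deploy-id} segment of AZURE_URL via CUSTOM_MODELS. A reduced sketch of the URL rewrite, leaving out the deployment-name substitution; the function and parameter names are illustrative.

// Illustrative, simplified version of the Azure path handling now inlined in requestOpenai.
function buildAzurePath(
  pathname: string,              // incoming path such as "/api/azure/<deployment>/chat/completions"
  searchParams: URLSearchParams, // may carry "api-version"
  azureBaseUrl: string,          // AZURE_URL, possibly ending in "/deployments/<deploy-id>"
  fallbackApiVersion: string,    // AZURE_API_VERSION
): { baseUrl: string; path: string } {
  const apiVersion = searchParams.get("api-version") || fallbackApiVersion;
  const baseUrl = azureBaseUrl.split("/deployments").shift() as string;
  const path = `${pathname.replaceAll("/api/azure/", "")}?api-version=${apiVersion}`;
  return { baseUrl, path };
}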
@@ -1,7 +1,15 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { auth } from "../../auth";
|
||||
import { getServerSideConfig } from "@/app/config/server";
|
||||
import { GEMINI_BASE_URL, ModelProvider } from "@/app/constant";
|
||||
import {
|
||||
ApiPath,
|
||||
GEMINI_BASE_URL,
|
||||
Google,
|
||||
ModelProvider,
|
||||
} from "@/app/constant";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
|
||||
const serverConfig = getServerSideConfig();
|
||||
|
||||
async function handle(
|
||||
req: NextRequest,
|
||||
@@ -13,32 +21,6 @@ async function handle(
|
||||
return NextResponse.json({ body: "OK" }, { status: 200 });
|
||||
}
|
||||
|
||||
const controller = new AbortController();
|
||||
|
||||
const serverConfig = getServerSideConfig();
|
||||
|
||||
let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
|
||||
|
||||
if (!baseUrl.startsWith("http")) {
|
||||
baseUrl = `https://${baseUrl}`;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, -1);
|
||||
}
|
||||
|
||||
let path = `${req.nextUrl.pathname}`.replaceAll("/api/google/", "");
|
||||
|
||||
console.log("[Proxy] ", path);
|
||||
console.log("[Base Url]", baseUrl);
|
||||
|
||||
const timeoutId = setTimeout(
|
||||
() => {
|
||||
controller.abort();
|
||||
},
|
||||
10 * 60 * 1000,
|
||||
);
|
||||
|
||||
const authResult = auth(req, ModelProvider.GeminiPro);
|
||||
if (authResult.error) {
|
||||
return NextResponse.json(authResult, {
|
||||
@@ -49,9 +31,9 @@ async function handle(
|
||||
const bearToken = req.headers.get("x-goog-api-key") ?? "";
|
||||
const token = bearToken.trim().replaceAll("Bearer ", "").trim();
|
||||
|
||||
const key = token ? token : serverConfig.googleApiKey;
|
||||
const apiKey = token ? token : serverConfig.googleApiKey;
|
||||
|
||||
if (!key) {
|
||||
if (!apiKey) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: true,
|
||||
@@ -62,8 +44,63 @@ async function handle(
|
||||
},
|
||||
);
|
||||
}
|
||||
try {
|
||||
const response = await request(req, apiKey);
|
||||
return response;
|
||||
} catch (e) {
|
||||
console.error("[Google] ", e);
|
||||
return NextResponse.json(prettyObject(e));
|
||||
}
|
||||
}
|
||||
|
||||
const fetchUrl = `${baseUrl}/${path}?key=${key}`;
|
||||
export const GET = handle;
|
||||
export const POST = handle;
|
||||
|
||||
export const runtime = "edge";
|
||||
export const preferredRegion = [
|
||||
"bom1",
|
||||
"cle1",
|
||||
"cpt1",
|
||||
"gru1",
|
||||
"hnd1",
|
||||
"iad1",
|
||||
"icn1",
|
||||
"kix1",
|
||||
"pdx1",
|
||||
"sfo1",
|
||||
"sin1",
|
||||
"syd1",
|
||||
];
|
||||
|
||||
async function request(req: NextRequest, apiKey: string) {
|
||||
const controller = new AbortController();
|
||||
|
||||
let baseUrl = serverConfig.googleUrl || GEMINI_BASE_URL;
|
||||
|
||||
let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Google, "");
|
||||
|
||||
if (!baseUrl.startsWith("http")) {
|
||||
baseUrl = `https://${baseUrl}`;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, -1);
|
||||
}
|
||||
|
||||
console.log("[Proxy] ", path);
|
||||
console.log("[Base Url]", baseUrl);
|
||||
|
||||
const timeoutId = setTimeout(
|
||||
() => {
|
||||
controller.abort();
|
||||
},
|
||||
10 * 60 * 1000,
|
||||
);
|
||||
const fetchUrl = `${baseUrl}${path}?key=${apiKey}${
|
||||
req?.nextUrl?.searchParams?.get("alt") === "sse" ? "&alt=sse" : ""
|
||||
}`;
|
||||
|
||||
console.log("[Fetch Url] ", fetchUrl);
|
||||
const fetchOptions: RequestInit = {
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
@@ -95,22 +132,3 @@ async function handle(
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
}
|
||||
|
||||
export const GET = handle;
|
||||
export const POST = handle;
|
||||
|
||||
export const runtime = "edge";
|
||||
export const preferredRegion = [
|
||||
"bom1",
|
||||
"cle1",
|
||||
"cpt1",
|
||||
"gru1",
|
||||
"hnd1",
|
||||
"iad1",
|
||||
"icn1",
|
||||
"kix1",
|
||||
"pdx1",
|
||||
"sfo1",
|
||||
"sin1",
|
||||
"syd1",
|
||||
];
|
||||
|
||||
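The Google route is reorganized so handle() only resolves the API key (from the x-goog-api-key header, falling back to GOOGLE_API_KEY), while a new request() helper builds the upstream URL, appending ?key=... and, when the incoming request carries alt=sse, &alt=sse so Gemini streams server-sent events. A tiny sketch of that URL construction; the example call in the trailing comment is illustrative only.

// Illustrative sketch of the fetch URL assembled in the new request() helper.
function buildGeminiUrl(
  baseUrl: string,
  path: string,
  apiKey: string,
  searchParams: URLSearchParams,
): string {
  const sse = searchParams.get("alt") === "sse" ? "&alt=sse" : "";
  return `${baseUrl}${path}?key=${apiKey}${sse}`;
}
// e.g. buildGeminiUrl(GEMINI_BASE_URL, "/v1beta/models/gemini-pro:streamGenerateContent", key, req.nextUrl.searchParams)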
@@ -1,9 +0,0 @@
export function makeAzurePath(path: string, apiVersion: string) {
  // should omit /v1 prefix
  path = path.replaceAll("v1/", "");

  // should add api-key to query string
  path += `${path.includes("?") ? "&" : "?"}api-version=${apiVersion}`;

  return path;
}
@@ -10,6 +10,10 @@ import { ChatGPTApi } from "./platforms/openai";
|
||||
import { FileApi, FileInfo } from "./platforms/utils";
|
||||
import { GeminiProApi } from "./platforms/google";
|
||||
import { ClaudeApi } from "./platforms/anthropic";
|
||||
import { ErnieApi } from "./platforms/baidu";
|
||||
import { DoubaoApi } from "./platforms/bytedance";
|
||||
import { QwenApi } from "./platforms/alibaba";
|
||||
|
||||
export const ROLES = ["system", "user", "assistant"] as const;
|
||||
export type MessageRole = (typeof ROLES)[number];
|
||||
|
||||
@@ -33,6 +37,7 @@ export interface RequestMessage {
|
||||
|
||||
export interface LLMConfig {
|
||||
model: string;
|
||||
providerName?: string;
|
||||
temperature?: number;
|
||||
top_p?: number;
|
||||
stream?: boolean;
|
||||
@@ -101,6 +106,7 @@ export interface LLMUsage {
|
||||
|
||||
export interface LLMModel {
|
||||
name: string;
|
||||
displayName?: string;
|
||||
available: boolean;
|
||||
provider: LLMModelProvider;
|
||||
}
|
||||
@@ -160,6 +166,15 @@ export class ClientApi {
|
||||
case ModelProvider.Claude:
|
||||
this.llm = new ClaudeApi();
|
||||
break;
|
||||
case ModelProvider.Ernie:
|
||||
this.llm = new ErnieApi();
|
||||
break;
|
||||
case ModelProvider.Doubao:
|
||||
this.llm = new DoubaoApi();
|
||||
break;
|
||||
case ModelProvider.Qwen:
|
||||
this.llm = new QwenApi();
|
||||
break;
|
||||
default:
|
||||
this.llm = new ChatGPTApi();
|
||||
}
|
||||
@@ -214,43 +229,100 @@ export class ClientApi {
|
||||
|
||||
export function getHeaders(ignoreHeaders?: boolean) {
|
||||
const accessStore = useAccessStore.getState();
|
||||
let headers: Record<string, string> = {};
|
||||
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
|
||||
const isGoogle =
|
||||
modelConfig.model.startsWith("gemini") &&
|
||||
!accessStore.isUseOpenAIEndpointForAllModels;
|
||||
if (!ignoreHeaders && !isGoogle) {
|
||||
headers = {
|
||||
"Content-Type": "application/json",
|
||||
"x-requested-with": "XMLHttpRequest",
|
||||
Accept: "application/json",
|
||||
const chatStore = useChatStore.getState();
|
||||
const headers: Record<string, string> = {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json",
|
||||
};
|
||||
|
||||
const clientConfig = getClientConfig();
|
||||
|
||||
function getConfig() {
|
||||
const modelConfig = chatStore.currentSession().mask.modelConfig;
|
||||
const isGoogle = modelConfig.providerName == ServiceProvider.Google;
|
||||
const isAzure = modelConfig.providerName === ServiceProvider.Azure;
|
||||
const isAnthropic = modelConfig.providerName === ServiceProvider.Anthropic;
|
||||
const isBaidu = modelConfig.providerName == ServiceProvider.Baidu;
|
||||
const isByteDance = modelConfig.providerName === ServiceProvider.ByteDance;
|
||||
const isAlibaba = modelConfig.providerName === ServiceProvider.Alibaba;
|
||||
const isEnabledAccessControl = accessStore.enabledAccessControl();
|
||||
const apiKey = isGoogle
|
||||
? accessStore.googleApiKey
|
||||
: isAzure
|
||||
? accessStore.azureApiKey
|
||||
: isAnthropic
|
||||
? accessStore.anthropicApiKey
|
||||
: isByteDance
|
||||
? accessStore.bytedanceApiKey
|
||||
: isAlibaba
|
||||
? accessStore.alibabaApiKey
|
||||
: accessStore.openaiApiKey;
|
||||
return {
|
||||
isGoogle,
|
||||
isAzure,
|
||||
isAnthropic,
|
||||
isBaidu,
|
||||
isByteDance,
|
||||
isAlibaba,
|
||||
apiKey,
|
||||
isEnabledAccessControl,
|
||||
};
|
||||
}
|
||||
const isAzure = accessStore.provider === ServiceProvider.Azure;
|
||||
let authHeader = "Authorization";
|
||||
const apiKey = isGoogle
|
||||
? accessStore.googleApiKey
|
||||
: isAzure
|
||||
? accessStore.azureApiKey
|
||||
: accessStore.openaiApiKey;
|
||||
|
||||
const makeBearer = (s: string) =>
|
||||
`${isGoogle || isAzure ? "" : "Bearer "}${s.trim()}`;
|
||||
const validString = (x: string) => x && x.length > 0;
|
||||
function getAuthHeader(): string {
|
||||
return isAzure ? "api-key" : isAnthropic ? "x-api-key" : "Authorization";
|
||||
}
|
||||
|
||||
// use user's api key first
|
||||
if (validString(apiKey)) {
|
||||
authHeader = isGoogle ? "x-goog-api-key" : authHeader;
|
||||
headers[authHeader] = makeBearer(apiKey);
|
||||
if (isAzure) headers["api-key"] = makeBearer(apiKey);
|
||||
} else if (
|
||||
accessStore.enabledAccessControl() &&
|
||||
validString(accessStore.accessCode)
|
||||
) {
|
||||
headers[authHeader] = makeBearer(
|
||||
function getBearerToken(apiKey: string, noBearer: boolean = false): string {
|
||||
return validString(apiKey)
|
||||
? `${noBearer ? "" : "Bearer "}${apiKey.trim()}`
|
||||
: "";
|
||||
}
|
||||
|
||||
function validString(x: string): boolean {
|
||||
return x?.length > 0;
|
||||
}
|
||||
const {
|
||||
isGoogle,
|
||||
isAzure,
|
||||
isAnthropic,
|
||||
isBaidu,
|
||||
apiKey,
|
||||
isEnabledAccessControl,
|
||||
} = getConfig();
|
||||
// when using google api in app, not set auth header
|
||||
if (isGoogle && clientConfig?.isApp) return headers;
|
||||
// when using baidu api in app, not set auth header
|
||||
if (isBaidu && clientConfig?.isApp) return headers;
|
||||
|
||||
const authHeader = getAuthHeader();
|
||||
|
||||
const bearerToken = getBearerToken(apiKey, isAzure || isAnthropic);
|
||||
|
||||
if (bearerToken) {
|
||||
headers[authHeader] = bearerToken;
|
||||
} else if (isEnabledAccessControl && validString(accessStore.accessCode)) {
|
||||
headers["Authorization"] = getBearerToken(
|
||||
ACCESS_CODE_PREFIX + accessStore.accessCode,
|
||||
);
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
export function getClientApi(provider: ServiceProvider): ClientApi {
|
||||
switch (provider) {
|
||||
case ServiceProvider.Google:
|
||||
return new ClientApi(ModelProvider.GeminiPro);
|
||||
case ServiceProvider.Anthropic:
|
||||
return new ClientApi(ModelProvider.Claude);
|
||||
case ServiceProvider.Baidu:
|
||||
return new ClientApi(ModelProvider.Ernie);
|
||||
case ServiceProvider.ByteDance:
|
||||
return new ClientApi(ModelProvider.Doubao);
|
||||
case ServiceProvider.Alibaba:
|
||||
return new ClientApi(ModelProvider.Qwen);
|
||||
default:
|
||||
return new ClientApi(ModelProvider.GPT);
|
||||
}
|
||||
}
|
||||
|
||||
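On the client, getClientApi() maps each ServiceProvider to its ClientApi (Google to GeminiPro, Anthropic to Claude, Baidu to Ernie, ByteDance to Doubao, Alibaba to Qwen, everything else to GPT), and getHeaders() now picks the auth header per provider: "api-key" for Azure, "x-api-key" forAnthropic, "Authorization" otherwise, with the Bearer prefix dropped for those two. A self-contained restatement of that header selection; the function names below are invented.

// Illustrative restatement of getAuthHeader/getBearerToken from the diff above.
function authHeaderFor(provider: string): string {
  if (provider === "Azure") return "api-key";
  if (provider === "Anthropic") return "x-api-key";
  return "Authorization";
}

function bearerTokenFor(provider: string, apiKey: string): string {
  if (apiKey.trim().length === 0) return "";
  const noBearer = provider === "Azure" || provider === "Anthropic";
  return `${noBearer ? "" : "Bearer "}${apiKey.trim()}`;
}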
app/client/platforms/alibaba.ts  (new file, 284 lines)
@@ -0,0 +1,284 @@
|
||||
"use client";
|
||||
import {
|
||||
ApiPath,
|
||||
Alibaba,
|
||||
ALIBABA_BASE_URL,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
} from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
|
||||
import {
|
||||
AgentChatOptions,
|
||||
ChatOptions,
|
||||
CreateRAGStoreOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
MultimodalContent,
|
||||
SpeechOptions,
|
||||
TranscriptionOptions,
|
||||
} from "../api";
|
||||
import Locale from "../../locales";
|
||||
import {
|
||||
EventStreamContentType,
|
||||
fetchEventSource,
|
||||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent } from "@/app/utils";
|
||||
|
||||
export interface OpenAIListModelResponse {
|
||||
object: string;
|
||||
data: Array<{
|
||||
id: string;
|
||||
object: string;
|
||||
root: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface RequestInput {
|
||||
messages: {
|
||||
role: "system" | "user" | "assistant";
|
||||
content: string | MultimodalContent[];
|
||||
}[];
|
||||
}
|
||||
interface RequestParam {
|
||||
result_format: string;
|
||||
incremental_output?: boolean;
|
||||
temperature: number;
|
||||
repetition_penalty?: number;
|
||||
top_p: number;
|
||||
max_tokens?: number;
|
||||
}
|
||||
interface RequestPayload {
|
||||
model: string;
|
||||
input: RequestInput;
|
||||
parameters: RequestParam;
|
||||
}
|
||||
|
||||
export class QwenApi implements LLMApi {
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
transcription(options: TranscriptionOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
toolAgentChat(options: AgentChatOptions): Promise<void> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.alibabaUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
baseUrl = isApp ? ALIBABA_BASE_URL : ApiPath.Alibaba;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Alibaba)) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
return [baseUrl, path].join("/");
|
||||
}
|
||||
|
||||
extractMessage(res: any) {
|
||||
return res?.output?.choices?.at(0)?.message?.content ?? "";
|
||||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const messages = options.messages.map((v) => ({
|
||||
role: v.role,
|
||||
content: getMessageTextContent(v),
|
||||
}));
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
},
|
||||
};
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
const requestPayload: RequestPayload = {
|
||||
model: modelConfig.model,
|
||||
input: {
|
||||
messages,
|
||||
},
|
||||
parameters: {
|
||||
result_format: "message",
|
||||
incremental_output: shouldStream,
|
||||
temperature: modelConfig.temperature,
|
||||
// max_tokens: modelConfig.max_tokens,
|
||||
top_p: modelConfig.top_p === 1 ? 0.99 : modelConfig.top_p, // qwen top_p should be < 1
|
||||
},
|
||||
};
|
||||
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const chatPath = this.path(Alibaba.ChatPath);
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: {
|
||||
...getHeaders(),
|
||||
"X-DashScope-SSE": shouldStream ? "enable" : "disable",
|
||||
},
|
||||
};
|
||||
|
||||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
|
||||
// animate response to make it look smooth
|
||||
function animateResponseText() {
|
||||
if (finished || controller.signal.aborted) {
|
||||
responseText += remainText;
|
||||
console.log("[Response Animation] finished");
|
||||
if (responseText?.length === 0) {
|
||||
options.onError?.(new Error("empty response from server"));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (remainText.length > 0) {
|
||||
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
|
||||
const fetchText = remainText.slice(0, fetchCount);
|
||||
responseText += fetchText;
|
||||
remainText = remainText.slice(fetchCount);
|
||||
options.onUpdate?.(responseText, fetchText);
|
||||
}
|
||||
|
||||
requestAnimationFrame(animateResponseText);
|
||||
}
|
||||
|
||||
// start animation
|
||||
animateResponseText();
|
||||
|
||||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
}
|
||||
};
|
||||
|
||||
controller.signal.onabort = finish;
|
||||
|
||||
fetchEventSource(chatPath, {
|
||||
...chatPayload,
|
||||
async onopen(res) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log(
|
||||
"[Alibaba] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
}
|
||||
|
||||
if (
|
||||
!res.ok ||
|
||||
!res.headers
|
||||
.get("content-type")
|
||||
?.startsWith(EventStreamContentType) ||
|
||||
res.status !== 200
|
||||
) {
|
||||
const responseTexts = [responseText];
|
||||
let extraInfo = await res.clone().text();
|
||||
try {
|
||||
const resJson = await res.clone().json();
|
||||
extraInfo = prettyObject(resJson);
|
||||
} catch {}
|
||||
|
||||
if (res.status === 401) {
|
||||
responseTexts.push(Locale.Error.Unauthorized);
|
||||
}
|
||||
|
||||
if (extraInfo) {
|
||||
responseTexts.push(extraInfo);
|
||||
}
|
||||
|
||||
responseText = responseTexts.join("\n\n");
|
||||
|
||||
return finish();
|
||||
}
|
||||
},
|
||||
onmessage(msg) {
|
||||
if (msg.data === "[DONE]" || finished) {
|
||||
return finish();
|
||||
}
|
||||
const text = msg.data;
|
||||
try {
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.output.choices as Array<{
|
||||
message: { content: string };
|
||||
}>;
|
||||
const delta = choices[0]?.message?.content;
|
||||
if (delta) {
|
||||
remainText += delta;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[Request] parse error", text, msg);
|
||||
}
|
||||
},
|
||||
onclose() {
|
||||
finish();
|
||||
},
|
||||
onerror(e) {
|
||||
options.onError?.(e);
|
||||
throw e;
|
||||
},
|
||||
openWhenHidden: true,
|
||||
});
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
options.onError?.(e as Error);
|
||||
}
|
||||
}
|
||||
async usage() {
|
||||
return {
|
||||
used: 0,
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
async models(): Promise<LLMModel[]> {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
export { Alibaba };
|
||||
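QwenApi wraps DashScope's request shape, in which messages sit under input and sampling options under parameters, with incremental_output mirroring the stream flag and top_p clamped just below 1. A minimal payload-building sketch using the same field names as the RequestPayload interface above; the values and function name are placeholders.

// Illustrative payload assembly matching the DashScope RequestPayload defined above.
interface QwenMessage {
  role: "system" | "user" | "assistant";
  content: string;
}

function buildQwenPayload(
  model: string,
  messages: QwenMessage[],
  stream: boolean,
  temperature: number,
  topP: number,
) {
  return {
    model,
    input: { messages },
    parameters: {
      result_format: "message",
      incremental_output: stream,      // chunks arrive incrementally over SSE when enabled
      temperature,
      top_p: topP === 1 ? 0.99 : topP, // DashScope requires top_p < 1
    },
  };
}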
@@ -3,6 +3,7 @@ import {
|
||||
AgentChatOptions,
|
||||
ChatOptions,
|
||||
CreateRAGStoreOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
MultimodalContent,
|
||||
SpeechOptions,
|
||||
@@ -11,7 +12,6 @@ import {
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { DEFAULT_API_HOST } from "@/app/constant";
|
||||
import { RequestMessage } from "@/app/typing";
|
||||
import {
|
||||
EventStreamContentType,
|
||||
fetchEventSource,
|
||||
@@ -20,6 +20,8 @@ import {
|
||||
import Locale from "../../locales";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getMessageTextContent, isVisionModel } from "@/app/utils";
|
||||
import { preProcessImageContent } from "@/app/utils/chat";
|
||||
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
|
||||
|
||||
export type MultiBlockContent = {
|
||||
type: "image" | "text";
|
||||
@@ -112,7 +114,12 @@ export class ClaudeApi implements LLMApi {
|
||||
},
|
||||
};
|
||||
|
||||
const messages = [...options.messages];
|
||||
// try get base64image from local cache image_url
|
||||
const messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
const content = await preProcessImageContent(v.content);
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
|
||||
const keys = ["system", "user"];
|
||||
|
||||
@@ -210,11 +217,10 @@ export class ClaudeApi implements LLMApi {
|
||||
body: JSON.stringify(requestBody),
|
||||
signal: controller.signal,
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json",
|
||||
"x-api-key": accessStore.anthropicApiKey,
|
||||
...getHeaders(), // get common headers
|
||||
"anthropic-version": accessStore.anthropicApiVersion,
|
||||
Authorization: getAuthKey(accessStore.anthropicApiKey),
|
||||
// do not send `anthropicApiKey` in browser!!!
|
||||
// Authorization: getAuthKey(accessStore.anthropicApiKey),
|
||||
},
|
||||
};
|
||||
|
||||
@@ -396,7 +402,8 @@ export class ClaudeApi implements LLMApi {
|
||||
|
||||
baseUrl = trimEnd(baseUrl, "/");
|
||||
|
||||
return `${baseUrl}/${path}`;
|
||||
// try rebuild url, when using cloudflare ai gateway in client
|
||||
return cloudflareAIGatewayUrl(`${baseUrl}/${path}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -409,27 +416,3 @@ function trimEnd(s: string, end = " ") {
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
function bearer(value: string) {
|
||||
return `Bearer ${value.trim()}`;
|
||||
}
|
||||
|
||||
function getAuthKey(apiKey = "") {
|
||||
const accessStore = useAccessStore.getState();
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
let authKey = "";
|
||||
|
||||
if (apiKey) {
|
||||
// use user's api key first
|
||||
authKey = bearer(apiKey);
|
||||
} else if (
|
||||
accessStore.enabledAccessControl() &&
|
||||
!isApp &&
|
||||
!!accessStore.accessCode
|
||||
) {
|
||||
// or use access code
|
||||
authKey = bearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
|
||||
}
|
||||
|
||||
return authKey;
|
||||
}
|
||||
|
||||
app/client/platforms/baidu.ts  (new file, 289 lines)
@@ -0,0 +1,289 @@
|
||||
"use client";
|
||||
import {
|
||||
ApiPath,
|
||||
Baidu,
|
||||
BAIDU_BASE_URL,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
} from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
import { getAccessToken } from "@/app/utils/baidu";
|
||||
|
||||
import {
|
||||
AgentChatOptions,
|
||||
ChatOptions,
|
||||
CreateRAGStoreOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
MultimodalContent,
|
||||
SpeechOptions,
|
||||
TranscriptionOptions,
|
||||
} from "../api";
|
||||
import Locale from "../../locales";
|
||||
import {
|
||||
EventStreamContentType,
|
||||
fetchEventSource,
|
||||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent } from "@/app/utils";
|
||||
|
||||
export interface OpenAIListModelResponse {
|
||||
object: string;
|
||||
data: Array<{
|
||||
id: string;
|
||||
object: string;
|
||||
root: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface RequestPayload {
|
||||
messages: {
|
||||
role: "system" | "user" | "assistant";
|
||||
content: string | MultimodalContent[];
|
||||
}[];
|
||||
stream?: boolean;
|
||||
model: string;
|
||||
temperature: number;
|
||||
presence_penalty: number;
|
||||
frequency_penalty: number;
|
||||
top_p: number;
|
||||
max_tokens?: number;
|
||||
}
|
||||
|
||||
export class ErnieApi implements LLMApi {
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
transcription(options: TranscriptionOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
toolAgentChat(options: AgentChatOptions): Promise<void> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.baiduUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
// do not use proxy for baidubce api
|
||||
baseUrl = isApp ? BAIDU_BASE_URL : ApiPath.Baidu;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Baidu)) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
return [baseUrl, path].join("/");
|
||||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const messages = options.messages.map((v) => ({
|
||||
role: v.role,
|
||||
content: getMessageTextContent(v),
|
||||
}));
|
||||
|
||||
// "error_code": 336006, "error_msg": "the length of messages must be an odd number",
|
||||
if (messages.length % 2 === 0) {
|
||||
messages.unshift({
|
||||
role: "user",
|
||||
content: " ",
|
||||
});
|
||||
}
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
},
|
||||
};
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
const requestPayload: RequestPayload = {
|
||||
messages,
|
||||
stream: shouldStream,
|
||||
model: modelConfig.model,
|
||||
temperature: modelConfig.temperature,
|
||||
presence_penalty: modelConfig.presence_penalty,
|
||||
frequency_penalty: modelConfig.frequency_penalty,
|
||||
top_p: modelConfig.top_p,
|
||||
};
|
||||
|
||||
console.log("[Request] Baidu payload: ", requestPayload);
|
||||
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
let chatPath = this.path(Baidu.ChatPath(modelConfig.model));
|
||||
|
||||
// getAccessToken can not run in browser, because cors error
|
||||
if (!!getClientConfig()?.isApp) {
|
||||
const accessStore = useAccessStore.getState();
|
||||
if (accessStore.useCustomConfig) {
|
||||
if (accessStore.isValidBaidu()) {
|
||||
const { access_token } = await getAccessToken(
|
||||
accessStore.baiduApiKey,
|
||||
accessStore.baiduSecretKey,
|
||||
);
|
||||
chatPath = `${chatPath}${
|
||||
chatPath.includes("?") ? "&" : "?"
|
||||
}access_token=${access_token}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: getHeaders(),
|
||||
};
|
||||
|
||||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
|
||||
// animate response to make it look smooth
|
||||
function animateResponseText() {
|
||||
if (finished || controller.signal.aborted) {
|
||||
responseText += remainText;
|
||||
console.log("[Response Animation] finished");
|
||||
if (responseText?.length === 0) {
|
||||
options.onError?.(new Error("empty response from server"));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (remainText.length > 0) {
|
||||
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
|
||||
const fetchText = remainText.slice(0, fetchCount);
|
||||
responseText += fetchText;
|
||||
remainText = remainText.slice(fetchCount);
|
||||
options.onUpdate?.(responseText, fetchText);
|
||||
}
|
||||
|
||||
requestAnimationFrame(animateResponseText);
|
||||
}
|
||||
|
||||
// start animation
|
||||
animateResponseText();
|
||||
|
||||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
}
|
||||
};
|
||||
|
||||
controller.signal.onabort = finish;
|
||||
|
||||
fetchEventSource(chatPath, {
|
||||
...chatPayload,
|
||||
async onopen(res) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log("[Baidu] request response content type: ", contentType);
|
||||
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
}
|
||||
|
||||
if (
|
||||
!res.ok ||
|
||||
!res.headers
|
||||
.get("content-type")
|
||||
?.startsWith(EventStreamContentType) ||
|
||||
res.status !== 200
|
||||
) {
|
||||
const responseTexts = [responseText];
|
||||
let extraInfo = await res.clone().text();
|
||||
try {
|
||||
const resJson = await res.clone().json();
|
||||
extraInfo = prettyObject(resJson);
|
||||
} catch {}
|
||||
|
||||
if (res.status === 401) {
|
||||
responseTexts.push(Locale.Error.Unauthorized);
|
||||
}
|
||||
|
||||
if (extraInfo) {
|
||||
responseTexts.push(extraInfo);
|
||||
}
|
||||
|
||||
responseText = responseTexts.join("\n\n");
|
||||
|
||||
return finish();
|
||||
}
|
||||
},
|
||||
onmessage(msg) {
|
||||
if (msg.data === "[DONE]" || finished) {
|
||||
return finish();
|
||||
}
|
||||
const text = msg.data;
|
||||
try {
|
||||
const json = JSON.parse(text);
|
||||
const delta = json?.result;
|
||||
if (delta) {
|
||||
remainText += delta;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[Request] parse error", text, msg);
|
||||
}
|
||||
},
|
||||
onclose() {
|
||||
finish();
|
||||
},
|
||||
onerror(e) {
|
||||
options.onError?.(e);
|
||||
throw e;
|
||||
},
|
||||
openWhenHidden: true,
|
||||
});
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = resJson?.result;
|
||||
options.onFinish(message);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
options.onError?.(e as Error);
|
||||
}
|
||||
}
|
||||
async usage() {
|
||||
return {
|
||||
used: 0,
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
async models(): Promise<LLMModel[]> {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
export { Baidu };
|
||||
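ErnieApi makes two Baidu-specific adjustments: it pads the history with a blank user turn whenever the message count is even (the API rejects an even-length history with error_code 336006), and in the desktop app it appends the access token derived from the API key/secret pair to the chat URL, since getAccessToken cannot run in the browser because of CORS. A small sketch of both adjustments; the function names are invented for illustration.

// Illustrative sketch of the two adjustments made in ErnieApi.chat above.
type ErnieMessage = { role: "system" | "user" | "assistant"; content: string };

function padToOddLength(messages: ErnieMessage[]): ErnieMessage[] {
  // "error_code": 336006, "error_msg": "the length of messages must be an odd number"
  return messages.length % 2 === 0
    ? [{ role: "user", content: " " }, ...messages]
    : messages;
}

function withAccessToken(chatPath: string, accessToken: string): string {
  return `${chatPath}${chatPath.includes("?") ? "&" : "?"}access_token=${accessToken}`;
}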
app/client/platforms/bytedance.ts  (new file, 271 lines)
@@ -0,0 +1,271 @@
|
||||
"use client";
|
||||
import {
|
||||
ApiPath,
|
||||
ByteDance,
|
||||
BYTEDANCE_BASE_URL,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
} from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
|
||||
import {
|
||||
AgentChatOptions,
|
||||
ChatOptions,
|
||||
CreateRAGStoreOptions,
|
||||
getHeaders,
|
||||
LLMApi,
|
||||
LLMModel,
|
||||
MultimodalContent,
|
||||
SpeechOptions,
|
||||
TranscriptionOptions,
|
||||
} from "../api";
|
||||
import Locale from "../../locales";
|
||||
import {
|
||||
EventStreamContentType,
|
||||
fetchEventSource,
|
||||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { getMessageTextContent } from "@/app/utils";
|
||||
|
||||
export interface OpenAIListModelResponse {
|
||||
object: string;
|
||||
data: Array<{
|
||||
id: string;
|
||||
object: string;
|
||||
root: string;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface RequestPayload {
|
||||
messages: {
|
||||
role: "system" | "user" | "assistant";
|
||||
content: string | MultimodalContent[];
|
||||
}[];
|
||||
stream?: boolean;
|
||||
model: string;
|
||||
temperature: number;
|
||||
presence_penalty: number;
|
||||
frequency_penalty: number;
|
||||
top_p: number;
|
||||
max_tokens?: number;
|
||||
}
|
||||
|
||||
export class DoubaoApi implements LLMApi {
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
transcription(options: TranscriptionOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
toolAgentChat(options: AgentChatOptions): Promise<void> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.bytedanceUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
baseUrl = isApp ? BYTEDANCE_BASE_URL : ApiPath.ByteDance;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ByteDance)) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
return [baseUrl, path].join("/");
|
||||
}
|
||||
|
||||
extractMessage(res: any) {
|
||||
return res.choices?.at(0)?.message?.content ?? "";
|
||||
}
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const messages = options.messages.map((v) => ({
|
||||
role: v.role,
|
||||
content: getMessageTextContent(v),
|
||||
}));
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
},
|
||||
};
|
||||
|
||||
const shouldStream = !!options.config.stream;
|
||||
const requestPayload: RequestPayload = {
|
||||
messages,
|
||||
stream: shouldStream,
|
||||
model: modelConfig.model,
|
||||
temperature: modelConfig.temperature,
|
||||
presence_penalty: modelConfig.presence_penalty,
|
||||
frequency_penalty: modelConfig.frequency_penalty,
|
||||
top_p: modelConfig.top_p,
|
||||
};
|
||||
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const chatPath = this.path(ByteDance.ChatPath);
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
signal: controller.signal,
|
||||
headers: getHeaders(),
|
||||
};
|
||||
|
||||
// make a fetch request
|
||||
const requestTimeoutId = setTimeout(
|
||||
() => controller.abort(),
|
||||
REQUEST_TIMEOUT_MS,
|
||||
);
|
||||
|
||||
if (shouldStream) {
|
||||
let responseText = "";
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
|
||||
// animate response to make it look smooth
|
||||
function animateResponseText() {
|
||||
if (finished || controller.signal.aborted) {
|
||||
responseText += remainText;
|
||||
console.log("[Response Animation] finished");
|
||||
if (responseText?.length === 0) {
|
||||
options.onError?.(new Error("empty response from server"));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (remainText.length > 0) {
|
||||
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
|
||||
const fetchText = remainText.slice(0, fetchCount);
|
||||
responseText += fetchText;
|
||||
remainText = remainText.slice(fetchCount);
|
||||
options.onUpdate?.(responseText, fetchText);
|
||||
}
|
||||
|
||||
requestAnimationFrame(animateResponseText);
|
||||
}
|
||||
|
||||
// start animation
|
||||
animateResponseText();
|
||||
|
||||
const finish = () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
}
|
||||
};
|
||||
|
||||
controller.signal.onabort = finish;
|
||||
|
||||
fetchEventSource(chatPath, {
|
||||
...chatPayload,
|
||||
async onopen(res) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log(
|
||||
"[ByteDance] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
}
|
||||
|
||||
if (
|
||||
!res.ok ||
|
||||
!res.headers
|
||||
.get("content-type")
|
||||
?.startsWith(EventStreamContentType) ||
|
||||
res.status !== 200
|
||||
) {
|
||||
const responseTexts = [responseText];
|
||||
let extraInfo = await res.clone().text();
|
||||
try {
|
||||
const resJson = await res.clone().json();
|
||||
extraInfo = prettyObject(resJson);
|
||||
} catch {}
|
||||
|
||||
if (res.status === 401) {
|
||||
responseTexts.push(Locale.Error.Unauthorized);
|
||||
}
|
||||
|
||||
if (extraInfo) {
|
||||
responseTexts.push(extraInfo);
|
||||
}
|
||||
|
||||
responseText = responseTexts.join("\n\n");
|
||||
|
||||
return finish();
|
||||
}
|
||||
},
|
||||
onmessage(msg) {
|
||||
if (msg.data === "[DONE]" || finished) {
|
||||
return finish();
|
||||
}
|
||||
const text = msg.data;
|
||||
try {
|
||||
const json = JSON.parse(text);
|
||||
const choices = json.choices as Array<{
|
||||
delta: { content: string };
|
||||
}>;
|
||||
const delta = choices[0]?.delta?.content;
|
||||
if (delta) {
|
||||
remainText += delta;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[Request] parse error", text, msg);
|
||||
}
|
||||
},
|
||||
onclose() {
|
||||
finish();
|
||||
},
|
||||
onerror(e) {
|
||||
options.onError?.(e);
|
||||
throw e;
|
||||
},
|
||||
openWhenHidden: true,
|
||||
});
|
||||
} else {
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
const message = this.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
}
|
||||
} catch (e) {
|
||||
console.log("[Request] failed to make a chat request", e);
|
||||
options.onError?.(e as Error);
|
||||
}
|
||||
}
|
||||
async usage() {
|
||||
return {
|
||||
used: 0,
|
||||
total: 0,
|
||||
};
|
||||
}
|
||||
|
||||
async models(): Promise<LLMModel[]> {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
export { ByteDance };
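A standalone sketch of the streaming "typewriter" pattern used in DoubaoApi.chat above. It assumes a browser environment with requestAnimationFrame; the function name and callback shape are illustrative, not part of the codebase:

// Incoming SSE deltas are buffered into remainText and drained a few characters
// per frame, so the UI updates smoothly instead of jumping when a large chunk arrives.
function createTypewriter(onUpdate: (full: string, delta: string) => void) {
  let responseText = "";
  let remainText = "";
  let finished = false;

  function tick() {
    if (finished) {
      // flush whatever is left when the stream ends
      if (remainText.length > 0) {
        onUpdate((responseText += remainText), remainText);
        remainText = "";
      }
      return;
    }
    if (remainText.length > 0) {
      // drain roughly 1/60th of the buffer per frame, at least one character
      const count = Math.max(1, Math.round(remainText.length / 60));
      const chunk = remainText.slice(0, count);
      responseText += chunk;
      remainText = remainText.slice(count);
      onUpdate(responseText, chunk);
    }
    requestAnimationFrame(tick);
  }

  requestAnimationFrame(tick);
  return {
    push: (delta: string) => (remainText += delta),
    finish: () => (finished = true),
  };
}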
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
|
||||
import { ApiPath, Google, REQUEST_TIMEOUT_MS } from "@/app/constant";
|
||||
import {
|
||||
AgentChatOptions,
|
||||
ChatOptions,
|
||||
@@ -13,25 +13,61 @@ import {
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { DEFAULT_API_HOST } from "@/app/constant";
|
||||
import Locale from "../../locales";
|
||||
import {
|
||||
EventStreamContentType,
|
||||
fetchEventSource,
|
||||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import {
|
||||
getMessageTextContent,
|
||||
getMessageImages,
|
||||
isVisionModel,
|
||||
} from "@/app/utils";
|
||||
import { preProcessImageContent } from "@/app/utils/chat";
|
||||
import options from "cheerio/lib/options";
|
||||
|
||||
export class GeminiProApi implements LLMApi {
|
||||
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
transcription(options: TranscriptionOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
speech(options: SpeechOptions): Promise<ArrayBuffer> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
toolAgentChat(options: AgentChatOptions): Promise<void> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
createRAGStore(options: CreateRAGStoreOptions): Promise<string> {
|
||||
throw new Error("Method not implemented.");
|
||||
}
|
||||
path(path: string): string {
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.googleUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
baseUrl = isApp
|
||||
? DEFAULT_API_HOST + `/api/proxy/google?key=${accessStore.googleApiKey}`
|
||||
: ApiPath.Google;
|
||||
}
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Google)) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
let chatPath = [baseUrl, path].join("/");
|
||||
|
||||
chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
|
||||
return chatPath;
|
||||
}
|
||||
extractMessage(res: any) {
|
||||
console.log("[Response] gemini-pro response: ", res);
|
||||
|
||||
@@ -42,9 +78,16 @@ export class GeminiProApi implements LLMApi {
|
||||
);
|
||||
}
|
||||
async chat(options: ChatOptions): Promise<void> {
|
||||
// const apiClient = this;
|
||||
const apiClient = this;
|
||||
let multimodal = false;
|
||||
const messages = options.messages.map((v) => {
|
||||
|
||||
// try get base64image from local cache image_url
|
||||
const _messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
const content = await preProcessImageContent(v.content);
|
||||
_messages.push({ role: v.role, content });
|
||||
}
|
||||
const messages = _messages.map((v) => {
|
||||
let parts: any[] = [{ text: getMessageTextContent(v) }];
|
||||
if (isVisionModel(options.config.model)) {
|
||||
const images = getMessageImages(v);
|
||||
@@ -86,6 +129,9 @@ export class GeminiProApi implements LLMApi {
|
||||
// if (visionModel && messages.length > 1) {
|
||||
// options.onError?.(new Error("Multiturn chat is not enabled for models/gemini-pro-vision"));
|
||||
// }
|
||||
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
@@ -107,50 +153,30 @@ export class GeminiProApi implements LLMApi {
|
||||
safetySettings: [
|
||||
{
|
||||
category: "HARM_CATEGORY_HARASSMENT",
|
||||
threshold: "BLOCK_ONLY_HIGH",
|
||||
threshold: accessStore.googleSafetySettings,
|
||||
},
|
||||
{
|
||||
category: "HARM_CATEGORY_HATE_SPEECH",
|
||||
threshold: "BLOCK_ONLY_HIGH",
|
||||
threshold: accessStore.googleSafetySettings,
|
||||
},
|
||||
{
|
||||
category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
|
||||
threshold: "BLOCK_ONLY_HIGH",
|
||||
threshold: accessStore.googleSafetySettings,
|
||||
},
|
||||
{
|
||||
category: "HARM_CATEGORY_DANGEROUS_CONTENT",
|
||||
threshold: "BLOCK_ONLY_HIGH",
|
||||
threshold: accessStore.googleSafetySettings,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const accessStore = useAccessStore.getState();
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
if (accessStore.useCustomConfig) {
|
||||
baseUrl = accessStore.googleUrl;
|
||||
}
|
||||
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
|
||||
let shouldStream = !!options.config.stream;
|
||||
const controller = new AbortController();
|
||||
options.onController?.(controller);
|
||||
try {
|
||||
// let baseUrl = accessStore.googleUrl;
|
||||
// https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
|
||||
const chatPath = this.path(Google.ChatPath(modelConfig.model));
|
||||
|
||||
if (!baseUrl) {
|
||||
baseUrl = isApp
|
||||
? DEFAULT_API_HOST +
|
||||
"/api/proxy/google/" +
|
||||
Google.ChatPath(modelConfig.model)
|
||||
: this.path(Google.ChatPath(modelConfig.model));
|
||||
}
|
||||
|
||||
if (isApp) {
|
||||
baseUrl += `?key=${accessStore.googleApiKey}`;
|
||||
}
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
@@ -169,10 +195,11 @@ export class GeminiProApi implements LLMApi {
|
||||
let remainText = "";
|
||||
let finished = false;
|
||||
|
||||
let existingTexts: string[] = [];
|
||||
const finish = () => {
|
||||
finished = true;
|
||||
options.onFinish(existingTexts.join(""));
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
options.onFinish(responseText + remainText);
|
||||
}
|
||||
};
|
||||
|
||||
// animate response to make it look smooth
|
||||
@@ -197,75 +224,83 @@ export class GeminiProApi implements LLMApi {
|
||||
// start animation
|
||||
animateResponseText();
|
||||
|
||||
fetch(
|
||||
baseUrl.replace("generateContent", "streamGenerateContent"),
|
||||
chatPayload,
|
||||
)
|
||||
.then((response) => {
|
||||
const reader = response?.body?.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
let partialData = "";
|
||||
controller.signal.onabort = finish;
|
||||
|
||||
return reader?.read().then(function processText({
|
||||
done,
|
||||
value,
|
||||
}): Promise<any> {
|
||||
if (done) {
|
||||
if (response.status !== 200) {
|
||||
try {
|
||||
let data = JSON.parse(ensureProperEnding(partialData));
|
||||
if (data && data[0].error) {
|
||||
options.onError?.(new Error(data[0].error.message));
|
||||
} else {
|
||||
options.onError?.(new Error("Request failed"));
|
||||
}
|
||||
} catch (_) {
|
||||
options.onError?.(new Error("Request failed"));
|
||||
}
|
||||
}
|
||||
fetchEventSource(chatPath, {
|
||||
...chatPayload,
|
||||
async onopen(res) {
|
||||
clearTimeout(requestTimeoutId);
|
||||
const contentType = res.headers.get("content-type");
|
||||
console.log(
|
||||
"[Gemini] request response content type: ",
|
||||
contentType,
|
||||
);
|
||||
|
||||
console.log("Stream complete");
|
||||
// options.onFinish(responseText + remainText);
|
||||
finished = true;
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
partialData += decoder.decode(value, { stream: true });
|
||||
if (contentType?.startsWith("text/plain")) {
|
||||
responseText = await res.clone().text();
|
||||
return finish();
|
||||
}
|
||||
|
||||
if (
|
||||
!res.ok ||
|
||||
!res.headers
|
||||
.get("content-type")
|
||||
?.startsWith(EventStreamContentType) ||
|
||||
res.status !== 200
|
||||
) {
|
||||
const responseTexts = [responseText];
|
||||
let extraInfo = await res.clone().text();
|
||||
try {
|
||||
let data = JSON.parse(ensureProperEnding(partialData));
|
||||
const resJson = await res.clone().json();
|
||||
extraInfo = prettyObject(resJson);
|
||||
} catch {}
|
||||
|
||||
const textArray = data.reduce(
|
||||
(acc: string[], item: { candidates: any[] }) => {
|
||||
const texts = item.candidates.map((candidate) =>
|
||||
candidate.content.parts
|
||||
.map((part: { text: any }) => part.text)
|
||||
.join(""),
|
||||
);
|
||||
return acc.concat(texts);
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
if (textArray.length > existingTexts.length) {
|
||||
const deltaArray = textArray.slice(existingTexts.length);
|
||||
existingTexts = textArray;
|
||||
remainText += deltaArray.join("");
|
||||
}
|
||||
} catch (error) {
|
||||
// console.log("[Response Animation] error: ", error,partialData);
|
||||
// skip error message when parsing json
|
||||
if (res.status === 401) {
|
||||
responseTexts.push(Locale.Error.Unauthorized);
|
||||
}
|
||||
|
||||
return reader.read().then(processText);
|
||||
});
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error:", error);
|
||||
options.onError?.(error as Error);
|
||||
});
|
||||
if (extraInfo) {
|
||||
responseTexts.push(extraInfo);
|
||||
}
|
||||
|
||||
responseText = responseTexts.join("\n\n");
|
||||
|
||||
return finish();
|
||||
}
|
||||
},
|
||||
onmessage(msg) {
|
||||
if (msg.data === "[DONE]" || finished) {
|
||||
return finish();
|
||||
}
|
||||
const text = msg.data;
|
||||
try {
|
||||
const json = JSON.parse(text);
|
||||
const delta = apiClient.extractMessage(json);
|
||||
|
||||
if (delta) {
|
||||
remainText += delta;
|
||||
}
|
||||
|
||||
const blockReason = json?.promptFeedback?.blockReason;
|
||||
if (blockReason) {
|
||||
// being blocked
|
||||
console.log(`[Google] [Safety Ratings] result:`, blockReason);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("[Request] parse error", text, msg);
|
||||
}
|
||||
},
|
||||
onclose() {
|
||||
finish();
|
||||
},
|
||||
onerror(e) {
|
||||
options.onError?.(e);
|
||||
throw e;
|
||||
},
|
||||
openWhenHidden: true,
|
||||
});
|
||||
} else {
|
||||
const res = await fetch(baseUrl, chatPayload);
|
||||
const res = await fetch(chatPath, chatPayload);
|
||||
clearTimeout(requestTimeoutId);
|
||||
|
||||
const resJson = await res.json();
|
||||
@@ -279,7 +314,7 @@ export class GeminiProApi implements LLMApi {
|
||||
),
|
||||
);
|
||||
}
|
||||
const message = this.extractMessage(resJson);
|
||||
const message = apiClient.extractMessage(resJson);
|
||||
options.onFinish(message);
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -293,14 +328,4 @@ export class GeminiProApi implements LLMApi {
|
||||
async models(): Promise<LLMModel[]> {
|
||||
return [];
|
||||
}
|
||||
path(path: string): string {
|
||||
return "/api/google/" + path;
|
||||
}
|
||||
}
|
||||
|
||||
function ensureProperEnding(str: string) {
|
||||
if (str.startsWith("[") && !str.endsWith("]")) {
|
||||
return str + "]";
|
||||
}
|
||||
return str;
|
||||
}
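A quick illustration of why ensureProperEnding exists: the pre-SSE streaming path above accumulated a partial JSON array in partialData, and appending the missing closing bracket lets it be parsed mid-stream. The sample data below is made up:

// A truncated candidates array, as received part-way through the old streaming response.
const partialData = `[{"candidates":[{"content":{"parts":[{"text":"Hel"}]}}]}`;
const parsed = JSON.parse(ensureProperEnding(partialData)) as Array<{
  candidates: { content: { parts: { text: string }[] } }[];
}>;
console.log(parsed[0].candidates[0].content.parts[0].text); // "Hel"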
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
"use client";
|
||||
// azure and openai, using same models. so using same LLMApi.
|
||||
import {
|
||||
ApiPath,
|
||||
DEFAULT_API_HOST,
|
||||
DEFAULT_MODELS,
|
||||
OpenaiPath,
|
||||
Azure,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
ServiceProvider,
|
||||
} from "@/app/constant";
|
||||
import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
|
||||
import { collectModelsWithDefaultModel } from "@/app/utils/model";
|
||||
import { preProcessImageContent } from "@/app/utils/chat";
|
||||
import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
|
||||
|
||||
import {
|
||||
AgentChatOptions,
|
||||
@@ -28,7 +33,6 @@ import {
|
||||
} from "@fortaine/fetch-event-source";
|
||||
import { prettyObject } from "@/app/utils/format";
|
||||
import { getClientConfig } from "@/app/config/client";
|
||||
import { makeAzurePath } from "@/app/azure";
|
||||
import {
|
||||
getMessageTextContent,
|
||||
getMessageImages,
|
||||
@@ -44,7 +48,7 @@ export interface OpenAIListModelResponse {
|
||||
}>;
|
||||
}
|
||||
|
||||
interface RequestPayload {
|
||||
export interface RequestPayload {
|
||||
messages: {
|
||||
role: "system" | "user" | "assistant";
|
||||
content: string | MultimodalContent[];
|
||||
@@ -66,39 +70,38 @@ export class ChatGPTApi implements LLMApi {
|
||||
|
||||
let baseUrl = "";
|
||||
|
||||
const isAzure = path.includes("deployments");
|
||||
if (accessStore.useCustomConfig) {
|
||||
const isAzure = accessStore.provider === ServiceProvider.Azure;
|
||||
|
||||
if (isAzure && !accessStore.isValidAzure()) {
|
||||
throw Error(
|
||||
"incomplete azure config, please check it in your settings page",
|
||||
);
|
||||
}
|
||||
|
||||
if (isAzure) {
|
||||
path = makeAzurePath(path, accessStore.azureApiVersion);
|
||||
}
|
||||
|
||||
baseUrl = isAzure ? accessStore.azureUrl : accessStore.openaiUrl;
|
||||
}
|
||||
|
||||
if (baseUrl.length === 0) {
|
||||
const isApp = !!getClientConfig()?.isApp;
|
||||
baseUrl = isApp
|
||||
? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
|
||||
: ApiPath.OpenAI;
|
||||
const apiPath = isAzure ? ApiPath.Azure : ApiPath.OpenAI;
|
||||
baseUrl = isApp ? DEFAULT_API_HOST + "/proxy" + apiPath : apiPath;
|
||||
}
|
||||
|
||||
if (baseUrl.endsWith("/")) {
|
||||
baseUrl = baseUrl.slice(0, baseUrl.length - 1);
|
||||
}
|
||||
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.OpenAI)) {
|
||||
if (
|
||||
!baseUrl.startsWith("http") &&
|
||||
!isAzure &&
|
||||
!baseUrl.startsWith(ApiPath.OpenAI)
|
||||
) {
|
||||
baseUrl = "https://" + baseUrl;
|
||||
}
|
||||
|
||||
console.log("[Proxy Endpoint] ", baseUrl, path);
|
||||
|
||||
return [baseUrl, path].join("/");
|
||||
// try to rebuild the url when using a cloudflare ai gateway in the client
|
||||
return cloudflareAIGatewayUrl([baseUrl, path].join("/"));
|
||||
}
|
||||
|
||||
extractMessage(res: any) {
|
||||
@@ -186,16 +189,20 @@ export class ChatGPTApi implements LLMApi {
|
||||
|
||||
async chat(options: ChatOptions) {
|
||||
const visionModel = isVisionModel(options.config.model);
|
||||
const messages = options.messages.map((v) => ({
|
||||
role: v.role,
|
||||
content: visionModel ? v.content : getMessageTextContent(v),
|
||||
}));
|
||||
const messages: ChatOptions["messages"] = [];
|
||||
for (const v of options.messages) {
|
||||
const content = visionModel
|
||||
? await preProcessImageContent(v.content)
|
||||
: getMessageTextContent(v);
|
||||
messages.push({ role: v.role, content });
|
||||
}
|
||||
|
||||
const modelConfig = {
|
||||
...useAppConfig.getState().modelConfig,
|
||||
...useChatStore.getState().currentSession().mask.modelConfig,
|
||||
...{
|
||||
model: options.config.model,
|
||||
providerName: options.config.providerName,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -223,7 +230,35 @@ export class ChatGPTApi implements LLMApi {
|
||||
options.onController?.(controller);
|
||||
|
||||
try {
|
||||
const chatPath = this.path(OpenaiPath.ChatPath, modelConfig.model);
|
||||
let chatPath = "";
|
||||
if (modelConfig.providerName === ServiceProvider.Azure) {
|
||||
// find model, and get displayName as deployName
|
||||
const { models: configModels, customModels: configCustomModels } =
|
||||
useAppConfig.getState();
|
||||
const {
|
||||
defaultModel,
|
||||
customModels: accessCustomModels,
|
||||
useCustomConfig,
|
||||
} = useAccessStore.getState();
|
||||
const models = collectModelsWithDefaultModel(
|
||||
configModels,
|
||||
[configCustomModels, accessCustomModels].join(","),
|
||||
defaultModel,
|
||||
);
|
||||
const model = models.find(
|
||||
(model) =>
|
||||
model.name === modelConfig.model &&
|
||||
model?.provider?.providerName === ServiceProvider.Azure,
|
||||
);
|
||||
chatPath = this.path(
|
||||
Azure.ChatPath(
|
||||
(model?.displayName ?? model?.name) as string,
|
||||
useCustomConfig ? useAccessStore.getState().azureApiVersion : "",
|
||||
),
|
||||
);
|
||||
} else {
|
||||
chatPath = this.path(OpenaiPath.ChatPath);
|
||||
}
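A minimal sketch of the deployment-name lookup performed in the Azure branch above; the helper and the LLMModelLike type are illustrative only and not part of this change:

// On Azure, the deployment id is taken from the matching model's displayName,
// falling back to its name (and finally to the requested model name).
type LLMModelLike = {
  name: string;
  displayName?: string;
  provider?: { providerName: string };
};
function resolveAzureDeployName(models: LLMModelLike[], modelName: string): string {
  const match = models.find(
    (m) => m.name === modelName && m.provider?.providerName === "Azure",
  );
  return match?.displayName ?? match?.name ?? modelName;
}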
|
||||
const chatPayload = {
|
||||
method: "POST",
|
||||
body: JSON.stringify(requestPayload),
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { getClientApi } from "@/app/utils";
|
||||
import { ClientApi, getHeaders } from "../api";
|
||||
import { ClientApi, getClientApi, getHeaders } from "../api";
|
||||
import { ChatSession } from "@/app/store";
|
||||
|
||||
export interface FileInfo {
|
||||
@@ -37,7 +36,7 @@ export class FileApi {
|
||||
|
||||
async uploadForRag(file: any, session: ChatSession): Promise<FileInfo> {
|
||||
var fileInfo = await this.upload(file);
|
||||
var api: ClientApi = getClientApi(session.mask.modelConfig.model);
|
||||
var api: ClientApi = getClientApi(session.mask.modelConfig.providerName);
|
||||
let partial = await api.llm.createRAGStore({
|
||||
chatSessionId: session.id,
|
||||
fileInfos: [fileInfo],
|
||||
|
||||
@@ -71,7 +71,7 @@ import {
|
||||
isSupportRAGModel,
|
||||
} from "../utils";
|
||||
|
||||
import { compressImage } from "@/app/utils/chat";
|
||||
import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
|
||||
|
||||
import dynamic from "next/dynamic";
|
||||
|
||||
@@ -102,6 +102,7 @@ import {
|
||||
Path,
|
||||
REQUEST_TIMEOUT_MS,
|
||||
UNFINISHED_INPUT,
|
||||
ServiceProvider,
|
||||
} from "../constant";
|
||||
import { Avatar } from "./emoji";
|
||||
import { ContextPrompts, MaskAvatar, MaskConfig } from "./mask";
|
||||
@@ -269,11 +270,11 @@ function useSubmitHandler() {
|
||||
};
|
||||
}
|
||||
|
||||
export type RenderPompt = Pick<Prompt, "title" | "content">;
|
||||
export type RenderPrompt = Pick<Prompt, "title" | "content">;
|
||||
|
||||
export function PromptHints(props: {
|
||||
prompts: RenderPompt[];
|
||||
onPromptSelect: (prompt: RenderPompt) => void;
|
||||
prompts: RenderPrompt[];
|
||||
onPromptSelect: (prompt: RenderPrompt) => void;
|
||||
}) {
|
||||
const noPrompts = props.prompts.length === 0;
|
||||
const [selectIndex, setSelectIndex] = useState(0);
|
||||
@@ -502,6 +503,9 @@ export function ChatActions(props: {
|
||||
|
||||
// switch model
|
||||
const currentModel = chatStore.currentSession().mask.modelConfig.model;
|
||||
const currentProviderName =
|
||||
chatStore.currentSession().mask.modelConfig?.providerName ||
|
||||
ServiceProvider.OpenAI;
|
||||
const allModels = useAllModels();
|
||||
const models = useMemo(() => {
|
||||
const filteredModels = allModels.filter((m) => m.available);
|
||||
@@ -517,6 +521,14 @@ export function ChatActions(props: {
|
||||
return filteredModels;
|
||||
}
|
||||
}, [allModels]);
|
||||
const currentModelName = useMemo(() => {
|
||||
const model = models.find(
|
||||
(m) =>
|
||||
m.name == currentModel &&
|
||||
m?.provider?.providerName == currentProviderName,
|
||||
);
|
||||
return model?.displayName ?? "";
|
||||
}, [models, currentModel, currentProviderName]);
|
||||
const [showModelSelector, setShowModelSelector] = useState(false);
|
||||
const [showUploadImage, setShowUploadImage] = useState(false);
|
||||
const [showUploadFile, setShowUploadFile] = useState(false);
|
||||
@@ -542,13 +554,17 @@ export function ChatActions(props: {
|
||||
const isUnavaliableModel = !models.some((m) => m.name === currentModel);
|
||||
if (isUnavaliableModel && models.length > 0) {
|
||||
// fall back to the default model if it exists, otherwise the first available one
|
||||
let nextModel: ModelType = (
|
||||
models.find((model) => model.isDefault) || models[0]
|
||||
).name;
|
||||
chatStore.updateCurrentSession(
|
||||
(session) => (session.mask.modelConfig.model = nextModel),
|
||||
let nextModel = models.find((model) => model.isDefault) || models[0];
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
session.mask.modelConfig.model = nextModel.name;
|
||||
session.mask.modelConfig.providerName = nextModel?.provider
|
||||
?.providerName as ServiceProvider;
|
||||
});
|
||||
showToast(
|
||||
nextModel?.provider?.providerName == "ByteDance"
|
||||
? nextModel.displayName
|
||||
: nextModel.name,
|
||||
);
|
||||
showToast(nextModel);
|
||||
}
|
||||
}, [chatStore, currentModel, models]);
|
||||
|
||||
@@ -584,7 +600,6 @@ export function ChatActions(props: {
|
||||
icon={props.uploading ? <LoadingButtonIcon /> : <ImageIcon />}
|
||||
/>
|
||||
)}
|
||||
|
||||
{showUploadFile && (
|
||||
<ChatAction
|
||||
onClick={props.uploadFile}
|
||||
@@ -607,6 +622,7 @@ export function ChatActions(props: {
|
||||
</>
|
||||
}
|
||||
/>
|
||||
|
||||
<ChatAction
|
||||
onClick={props.showPromptHints}
|
||||
text={Locale.Chat.InputActions.Prompt}
|
||||
@@ -620,13 +636,6 @@ export function ChatActions(props: {
|
||||
text={Locale.Chat.InputActions.Masks}
|
||||
icon={<MaskIcon />}
|
||||
/>
|
||||
|
||||
<ChatAction
|
||||
onClick={() => setShowModelSelector(true)}
|
||||
text={currentModel}
|
||||
icon={<RobotIcon />}
|
||||
/>
|
||||
|
||||
{config.pluginConfig.enable &&
|
||||
/^gpt(?!.*03\d{2}$).*$/.test(currentModel) &&
|
||||
currentModel != "gpt-4-vision-preview" && (
|
||||
@@ -641,21 +650,43 @@ export function ChatActions(props: {
|
||||
/>
|
||||
)}
|
||||
|
||||
<ChatAction
|
||||
onClick={() => setShowModelSelector(true)}
|
||||
text={currentModelName}
|
||||
icon={<RobotIcon />}
|
||||
/>
|
||||
|
||||
{showModelSelector && (
|
||||
<Selector
|
||||
defaultSelectedValue={currentModel}
|
||||
defaultSelectedValue={`${currentModel}@${currentProviderName}`}
|
||||
items={models.map((m) => ({
|
||||
title: m.displayName,
|
||||
value: m.name,
|
||||
title: `${m.displayName}${
|
||||
m?.provider?.providerName
|
||||
? "(" + m?.provider?.providerName + ")"
|
||||
: ""
|
||||
}`,
|
||||
value: `${m.name}@${m?.provider?.providerName}`,
|
||||
}))}
|
||||
onClose={() => setShowModelSelector(false)}
|
||||
onSelection={(s) => {
|
||||
if (s.length === 0) return;
|
||||
const [model, providerName] = s[0].split("@");
|
||||
chatStore.updateCurrentSession((session) => {
|
||||
session.mask.modelConfig.model = s[0] as ModelType;
|
||||
session.mask.modelConfig.model = model as ModelType;
|
||||
session.mask.modelConfig.providerName =
|
||||
providerName as ServiceProvider;
|
||||
session.mask.syncGlobalConfig = false;
|
||||
});
|
||||
showToast(s[0]);
|
||||
if (providerName == "ByteDance") {
|
||||
const selectedModel = models.find(
|
||||
(m) =>
|
||||
m.name == model &&
|
||||
m?.provider?.providerName == providerName,
|
||||
);
|
||||
showToast(selectedModel?.displayName ?? "");
|
||||
} else {
|
||||
showToast(model);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
)}
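A small standalone sketch of the `model@providerName` value encoding used by this selector; the helper names are placeholders:

// Model and provider are joined with "@" so a single option value round-trips both fields.
function encodeModelValue(model: string, providerName: string): string {
  return `${model}@${providerName}`;
}
function decodeModelValue(value: string): { model: string; providerName: string } {
  const [model, providerName] = value.split("@");
  return { model, providerName };
}
// e.g. decodeModelValue("gpt-4o@OpenAI") -> { model: "gpt-4o", providerName: "OpenAI" }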
|
||||
@@ -792,7 +823,7 @@ function _Chat() {
|
||||
|
||||
// prompt hints
|
||||
const promptStore = usePromptStore();
|
||||
const [promptHints, setPromptHints] = useState<RenderPompt[]>([]);
|
||||
const [promptHints, setPromptHints] = useState<RenderPrompt[]>([]);
|
||||
const onSearch = useDebouncedCallback(
|
||||
(text: string) => {
|
||||
const matchedPrompts = promptStore.search(text);
|
||||
@@ -905,7 +936,7 @@ function _Chat() {
|
||||
setAutoScroll(true);
|
||||
};
|
||||
|
||||
const onPromptSelect = (prompt: RenderPompt) => {
|
||||
const onPromptSelect = (prompt: RenderPrompt) => {
|
||||
setTimeout(() => {
|
||||
setPromptHints([]);
|
||||
|
||||
@@ -1319,7 +1350,7 @@ function _Chat() {
|
||||
...(await new Promise<string[]>((res, rej) => {
|
||||
setUploading(true);
|
||||
const imagesData: string[] = [];
|
||||
compressImage(file, 256 * 1024)
|
||||
uploadImageRemote(file)
|
||||
.then((dataUrl) => {
|
||||
imagesData.push(dataUrl);
|
||||
setUploading(false);
|
||||
@@ -1361,7 +1392,7 @@ function _Chat() {
|
||||
const imagesData: string[] = [];
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
const file = event.target.files[i];
|
||||
compressImage(file, 256 * 1024)
|
||||
uploadImageRemote(file)
|
||||
.then((dataUrl) => {
|
||||
imagesData.push(dataUrl);
|
||||
if (
|
||||
|
||||
@@ -21,7 +21,6 @@ import { IconButton } from "./button";
|
||||
import {
|
||||
copyToClipboard,
|
||||
downloadAs,
|
||||
getClientApi,
|
||||
getMessageImages,
|
||||
useMobileScreen,
|
||||
} from "../utils";
|
||||
@@ -43,11 +42,10 @@ import { toBlob, toPng } from "html-to-image";
|
||||
import { DEFAULT_MASK_AVATAR } from "../store/mask";
|
||||
|
||||
import { prettyObject } from "../utils/format";
|
||||
import { EXPORT_MESSAGE_CLASS_NAME, ModelProvider } from "../constant";
|
||||
import { EXPORT_MESSAGE_CLASS_NAME } from "../constant";
|
||||
import { getClientConfig } from "../config/client";
|
||||
import { ClientApi } from "../client/api";
|
||||
import { type ClientApi, getClientApi } from "../client/api";
|
||||
import { getMessageTextContent } from "../utils";
|
||||
import { identifyDefaultClaudeModel } from "../utils/checkers";
|
||||
|
||||
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
|
||||
loading: () => <LoadingIcon />,
|
||||
@@ -284,7 +282,7 @@ export function RenderExport(props: {
|
||||
return {
|
||||
id: i.toString(),
|
||||
role: role as any,
|
||||
content: role === "user" ? v.textContent ?? "" : v.innerHTML,
|
||||
content: role === "user" ? (v.textContent ?? "") : v.innerHTML,
|
||||
date: "",
|
||||
};
|
||||
});
|
||||
@@ -320,7 +318,7 @@ export function PreviewActions(props: {
|
||||
const onRenderMsgs = (msgs: ChatMessage[]) => {
|
||||
setShouldExport(false);
|
||||
|
||||
var api: ClientApi = getClientApi(config.modelConfig.model);
|
||||
const api: ClientApi = getClientApi(config.modelConfig.providerName);
|
||||
|
||||
api
|
||||
.share(msgs)
|
||||
|
||||
@@ -9,10 +9,10 @@ import styles from "./home.module.scss";
|
||||
import BotIcon from "../icons/bot.svg";
|
||||
import LoadingIcon from "../icons/three-dots.svg";
|
||||
|
||||
import { getClientApi, useMobileScreen } from "../utils";
|
||||
import { useMobileScreen } from "../utils";
|
||||
|
||||
import dynamic from "next/dynamic";
|
||||
import { ModelProvider, Path, SlotID } from "../constant";
|
||||
import { Path, SlotID } from "../constant";
|
||||
import { ErrorBoundary } from "./error";
|
||||
|
||||
import { getISOLang, getLang } from "../locales";
|
||||
@@ -27,9 +27,8 @@ import { SideBar } from "./sidebar";
|
||||
import { useAppConfig } from "../store/config";
|
||||
import { AuthPage } from "./auth";
|
||||
import { getClientConfig } from "../config/client";
|
||||
import { ClientApi } from "../client/api";
|
||||
import { type ClientApi, getClientApi } from "../client/api";
|
||||
import { useAccessStore } from "../store";
|
||||
import { identifyDefaultClaudeModel } from "../utils/checkers";
|
||||
|
||||
export function Loading(props: { noLogo?: boolean }) {
|
||||
return (
|
||||
@@ -178,7 +177,7 @@ function Screen() {
|
||||
export function useLoadData() {
|
||||
const config = useAppConfig();
|
||||
|
||||
var api: ClientApi = getClientApi(config.modelConfig.model);
|
||||
const api: ClientApi = getClientApi(config.modelConfig.providerName);
|
||||
|
||||
useEffect(() => {
|
||||
(async () => {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { ServiceProvider } from "@/app/constant";
|
||||
import { ModalConfigValidator, ModelConfig } from "../store";
|
||||
|
||||
import Locale from "../locales";
|
||||
@@ -10,25 +11,25 @@ export function ModelConfigList(props: {
|
||||
updateConfig: (updater: (config: ModelConfig) => void) => void;
|
||||
}) {
|
||||
const allModels = useAllModels();
|
||||
const value = `${props.modelConfig.model}@${props.modelConfig?.providerName}`;
|
||||
|
||||
return (
|
||||
<>
|
||||
<ListItem title={Locale.Settings.Model}>
|
||||
<Select
|
||||
value={props.modelConfig.model}
|
||||
value={value}
|
||||
onChange={(e) => {
|
||||
props.updateConfig(
|
||||
(config) =>
|
||||
(config.model = ModalConfigValidator.model(
|
||||
e.currentTarget.value,
|
||||
)),
|
||||
);
|
||||
const [model, providerName] = e.currentTarget.value.split("@");
|
||||
props.updateConfig((config) => {
|
||||
config.model = ModalConfigValidator.model(model);
|
||||
config.providerName = providerName as ServiceProvider;
|
||||
});
|
||||
}}
|
||||
>
|
||||
{allModels
|
||||
.filter((v) => v.available)
|
||||
.map((v, i) => (
|
||||
<option value={v.name} key={i}>
|
||||
<option value={`${v.name}@${v.provider?.providerName}`} key={i}>
|
||||
{v.displayName}({v.provider?.providerName})
|
||||
</option>
|
||||
))}
|
||||
@@ -92,7 +93,7 @@ export function ModelConfigList(props: {
|
||||
></input>
|
||||
</ListItem>
|
||||
|
||||
{props.modelConfig.model.startsWith("gemini") ? null : (
|
||||
{props.modelConfig?.providerName == ServiceProvider.Google ? null : (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.PresencePenalty.Title}
|
||||
|
||||
@@ -53,7 +53,11 @@ import Link from "next/link";
|
||||
import {
|
||||
Anthropic,
|
||||
Azure,
|
||||
Baidu,
|
||||
ByteDance,
|
||||
Alibaba,
|
||||
Google,
|
||||
GoogleSafetySettingsThreshold,
|
||||
OPENAI_BASE_URL,
|
||||
Path,
|
||||
RELEASE_URL,
|
||||
@@ -657,6 +661,389 @@ export function Settings() {
|
||||
const clientConfig = useMemo(() => getClientConfig(), []);
|
||||
const showAccessCode = enabledAccessControl && !clientConfig?.isApp;
|
||||
|
||||
const accessCodeComponent = showAccessCode && (
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.AccessCode.Title}
|
||||
subTitle={Locale.Settings.Access.AccessCode.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.accessCode}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.AccessCode.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.accessCode = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
);
|
||||
|
||||
const useCustomConfigComponent = // Conditionally render the following ListItem based on clientConfig.isApp
|
||||
!clientConfig?.isApp && ( // only show if isApp is false
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.CustomEndpoint.Title}
|
||||
subTitle={Locale.Settings.Access.CustomEndpoint.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={accessStore.useCustomConfig}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.useCustomConfig = e.currentTarget.checked),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
);
|
||||
|
||||
const openAIConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.OpenAI && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.OpenAI.Endpoint.Title}
|
||||
subTitle={Locale.Settings.Access.OpenAI.Endpoint.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.openaiUrl}
|
||||
placeholder={OPENAI_BASE_URL}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.openaiUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.OpenAI.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.OpenAI.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.openaiApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.OpenAI.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.openaiApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const azureConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.Azure && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Azure.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Azure.Endpoint.SubTitle + Azure.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.azureUrl}
|
||||
placeholder={Azure.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.azureUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Azure.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.Azure.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.azureApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.Azure.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.azureApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Azure.ApiVerion.Title}
|
||||
subTitle={Locale.Settings.Access.Azure.ApiVerion.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.azureApiVersion}
|
||||
placeholder="2023-08-01-preview"
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.azureApiVersion = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const googleConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.Google && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Google.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Google.Endpoint.SubTitle +
|
||||
Google.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.googleUrl}
|
||||
placeholder={Google.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.googleUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Google.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.Google.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.googleApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.Google.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.googleApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Google.ApiVersion.Title}
|
||||
subTitle={Locale.Settings.Access.Google.ApiVersion.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.googleApiVersion}
|
||||
placeholder="2023-08-01-preview"
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.googleApiVersion = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Google.GoogleSafetySettings.Title}
|
||||
subTitle={Locale.Settings.Access.Google.GoogleSafetySettings.SubTitle}
|
||||
>
|
||||
<Select
|
||||
value={accessStore.googleSafetySettings}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.googleSafetySettings = e.target
|
||||
.value as GoogleSafetySettingsThreshold),
|
||||
);
|
||||
}}
|
||||
>
|
||||
{Object.entries(GoogleSafetySettingsThreshold).map(([k, v]) => (
|
||||
<option value={v} key={k}>
|
||||
{k}
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const anthropicConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.Anthropic && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Anthropic.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Anthropic.Endpoint.SubTitle +
|
||||
Anthropic.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.anthropicUrl}
|
||||
placeholder={Anthropic.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.anthropicUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Anthropic.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.Anthropic.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.anthropicApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.Anthropic.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.anthropicApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Anthropic.ApiVerion.Title}
|
||||
subTitle={Locale.Settings.Access.Anthropic.ApiVerion.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.anthropicApiVersion}
|
||||
placeholder={Anthropic.Vision}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.anthropicApiVersion = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const baiduConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.Baidu && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Baidu.Endpoint.Title}
|
||||
subTitle={Locale.Settings.Access.Baidu.Endpoint.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.baiduUrl}
|
||||
placeholder={Baidu.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.baiduUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Baidu.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.Baidu.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.baiduApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.Baidu.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.baiduApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Baidu.SecretKey.Title}
|
||||
subTitle={Locale.Settings.Access.Baidu.SecretKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.baiduSecretKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.Baidu.SecretKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.baiduSecretKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const byteDanceConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.ByteDance && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.ByteDance.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.ByteDance.Endpoint.SubTitle +
|
||||
ByteDance.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.bytedanceUrl}
|
||||
placeholder={ByteDance.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.bytedanceUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.ByteDance.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.ByteDance.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.bytedanceApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.ByteDance.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.bytedanceApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
const alibabaConfigComponent = accessStore.provider ===
|
||||
ServiceProvider.Alibaba && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Alibaba.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Alibaba.Endpoint.SubTitle +
|
||||
Alibaba.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.alibabaUrl}
|
||||
placeholder={Alibaba.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) => (access.alibabaUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Alibaba.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.Alibaba.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.alibabaApiKey}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.Alibaba.ApiKey.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.alibabaApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
|
||||
return (
|
||||
<ErrorBoundary>
|
||||
<div className="window-header" data-tauri-drag-region>
|
||||
@@ -907,46 +1294,12 @@ export function Settings() {
|
||||
</List>
|
||||
|
||||
<List id={SlotID.CustomModel}>
|
||||
{showAccessCode && (
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.AccessCode.Title}
|
||||
subTitle={Locale.Settings.Access.AccessCode.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.accessCode}
|
||||
type="text"
|
||||
placeholder={Locale.Settings.Access.AccessCode.Placeholder}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) => (access.accessCode = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
)}
|
||||
{accessCodeComponent}
|
||||
|
||||
{!accessStore.hideUserApiKey && (
|
||||
<>
|
||||
{
|
||||
// Conditionally render the following ListItem based on clientConfig.isApp
|
||||
!clientConfig?.isApp && ( // only show if isApp is false
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.CustomEndpoint.Title}
|
||||
subTitle={Locale.Settings.Access.CustomEndpoint.SubTitle}
|
||||
>
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={accessStore.useCustomConfig}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.useCustomConfig = e.currentTarget.checked),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
)
|
||||
}
|
||||
{useCustomConfigComponent}
|
||||
|
||||
{accessStore.useCustomConfig && (
|
||||
<>
|
||||
<ListItem
|
||||
@@ -971,229 +1324,13 @@ export function Settings() {
|
||||
</Select>
|
||||
</ListItem>
|
||||
|
||||
{accessStore.provider === ServiceProvider.OpenAI && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.OpenAI.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.OpenAI.Endpoint.SubTitle
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.openaiUrl}
|
||||
placeholder={OPENAI_BASE_URL}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.openaiUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.OpenAI.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.OpenAI.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.openaiApiKey}
|
||||
type="text"
|
||||
placeholder={
|
||||
Locale.Settings.Access.OpenAI.ApiKey.Placeholder
|
||||
}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.openaiApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
</>
|
||||
)}
|
||||
{accessStore.provider === ServiceProvider.Azure && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Azure.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Azure.Endpoint.SubTitle +
|
||||
Azure.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.azureUrl}
|
||||
placeholder={Azure.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.azureUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Azure.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.Azure.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.azureApiKey}
|
||||
type="text"
|
||||
placeholder={
|
||||
Locale.Settings.Access.Azure.ApiKey.Placeholder
|
||||
}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.azureApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Azure.ApiVerion.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Azure.ApiVerion.SubTitle
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.azureApiVersion}
|
||||
placeholder="2024-02-15-preview"
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.azureApiVersion =
|
||||
e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
</>
|
||||
)}
|
||||
{accessStore.provider === ServiceProvider.Google && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Google.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Google.Endpoint.SubTitle +
|
||||
Google.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.googleUrl}
|
||||
placeholder={Google.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.googleUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Google.ApiKey.Title}
|
||||
subTitle={Locale.Settings.Access.Google.ApiKey.SubTitle}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.googleApiKey}
|
||||
type="text"
|
||||
placeholder={
|
||||
Locale.Settings.Access.Google.ApiKey.Placeholder
|
||||
}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.googleApiKey = e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Google.ApiVersion.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Google.ApiVersion.SubTitle
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.googleApiVersion}
|
||||
placeholder="2024-02-15-preview"
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.googleApiVersion =
|
||||
e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
</>
|
||||
)}
|
||||
{accessStore.provider === ServiceProvider.Anthropic && (
|
||||
<>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Anthropic.Endpoint.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Anthropic.Endpoint.SubTitle +
|
||||
Anthropic.ExampleEndpoint
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.anthropicUrl}
|
||||
placeholder={Anthropic.ExampleEndpoint}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.anthropicUrl = e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Anthropic.ApiKey.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Anthropic.ApiKey.SubTitle
|
||||
}
|
||||
>
|
||||
<PasswordInput
|
||||
value={accessStore.anthropicApiKey}
|
||||
type="text"
|
||||
placeholder={
|
||||
Locale.Settings.Access.Anthropic.ApiKey.Placeholder
|
||||
}
|
||||
onChange={(e) => {
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.anthropicApiKey =
|
||||
e.currentTarget.value),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.Access.Anthropic.ApiVerion.Title}
|
||||
subTitle={
|
||||
Locale.Settings.Access.Anthropic.ApiVerion.SubTitle
|
||||
}
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={accessStore.anthropicApiVersion}
|
||||
placeholder={Anthropic.Vision}
|
||||
onChange={(e) =>
|
||||
accessStore.update(
|
||||
(access) =>
|
||||
(access.anthropicApiVersion =
|
||||
e.currentTarget.value),
|
||||
)
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
</>
|
||||
)}
|
||||
{openAIConfigComponent}
|
||||
{azureConfigComponent}
|
||||
{googleConfigComponent}
|
||||
{anthropicConfigComponent}
|
||||
{baiduConfigComponent}
|
||||
{byteDanceConfigComponent}
|
||||
{alibabaConfigComponent}
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
|
||||
@@ -21,7 +21,7 @@ declare global {
|
||||
ENABLE_BALANCE_QUERY?: string; // allow user to query balance or not
|
||||
DISABLE_FAST_LINK?: string; // disallow parse settings from url or not
|
||||
CUSTOM_MODELS?: string; // to control custom models
|
||||
DEFAULT_MODEL?: string; // to cnntrol default model in every new chat window
|
||||
DEFAULT_MODEL?: string; // to control default model in every new chat window
|
||||
|
||||
// azure only
|
||||
AZURE_URL?: string; // https://{azure-url}/openai/deployments
|
||||
@@ -35,6 +35,24 @@ declare global {
|
||||
// google tag manager
|
||||
GTM_ID?: string;
|
||||
|
||||
// anthropic only
|
||||
ANTHROPIC_URL?: string;
|
||||
ANTHROPIC_API_KEY?: string;
|
||||
ANTHROPIC_API_VERSION?: string;
|
||||
|
||||
// baidu only
|
||||
BAIDU_URL?: string;
|
||||
BAIDU_API_KEY?: string;
|
||||
BAIDU_SECRET_KEY?: string;
|
||||
|
||||
// bytedance only
|
||||
BYTEDANCE_URL?: string;
|
||||
BYTEDANCE_API_KEY?: string;
|
||||
|
||||
// alibaba only
|
||||
ALIBABA_URL?: string;
|
||||
ALIBABA_API_KEY?: string;
|
||||
|
||||
// custom template for preprocessing user input
|
||||
DEFAULT_INPUT_TEMPLATE?: string;
|
||||
}
|
||||
@@ -93,6 +111,9 @@ export const getServerSideConfig = () => {
|
||||
const isGoogle = !!process.env.GOOGLE_API_KEY;
|
||||
const isAnthropic = !!process.env.ANTHROPIC_API_KEY;
|
||||
|
||||
const isBaidu = !!process.env.BAIDU_API_KEY;
|
||||
const isBytedance = !!process.env.BYTEDANCE_API_KEY;
|
||||
const isAlibaba = !!process.env.ALIBABA_API_KEY;
|
||||
// const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
|
||||
// const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
|
||||
// const randomIndex = Math.floor(Math.random() * apiKeys.length);
|
||||
@@ -124,6 +145,19 @@ export const getServerSideConfig = () => {
|
||||
anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
|
||||
anthropicUrl: process.env.ANTHROPIC_URL,
|
||||
|
||||
isBaidu,
|
||||
baiduUrl: process.env.BAIDU_URL,
|
||||
baiduApiKey: getApiKey(process.env.BAIDU_API_KEY),
|
||||
baiduSecretKey: process.env.BAIDU_SECRET_KEY,
|
||||
|
||||
isBytedance,
|
||||
bytedanceApiKey: getApiKey(process.env.BYTEDANCE_API_KEY),
|
||||
bytedanceUrl: process.env.BYTEDANCE_URL,
|
||||
|
||||
isAlibaba,
|
||||
alibabaUrl: process.env.ALIBABA_URL,
|
||||
alibabaApiKey: getApiKey(process.env.ALIBABA_API_KEY),
|
||||
|
||||
gtmId: process.env.GTM_ID,
|
||||
|
||||
needCode: ACCESS_CODES.size > 0,
|
||||
|
||||
146
app/constant.ts
@@ -13,6 +13,16 @@ export const OPENAI_BASE_URL = "https://api.openai.com";
export const GEMINI_BASE_URL = "https://generativelanguage.googleapis.com/";
export const ANTHROPIC_BASE_URL = "https://api.anthropic.com";

export const BAIDU_BASE_URL = "https://aip.baidubce.com";
export const BAIDU_OATUH_URL = `${BAIDU_BASE_URL}/oauth/2.0/token`;

export const BYTEDANCE_BASE_URL = "https://ark.cn-beijing.volces.com";

export const ALIBABA_BASE_URL = "https://dashscope.aliyuncs.com/api/";

export const CACHE_URL_PREFIX = "/api/cache";
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;

export enum Path {
Home = "/",
Chat = "/chat",

@@ -25,9 +35,14 @@ export enum Path {

export enum ApiPath {
Cors = "",
Azure = "/api/azure",
OpenAI = "/api/openai",
GoogleAI = "/api/google",
Anthropic = "/api/anthropic",
Google = "/api/google",
Baidu = "/api/baidu",
ByteDance = "/api/bytedance",
Alibaba = "/api/alibaba",
}

export enum SlotID {

@@ -73,12 +88,27 @@ export enum ServiceProvider {
Azure = "Azure",
Google = "Google",
Anthropic = "Anthropic",
Baidu = "Baidu",
ByteDance = "ByteDance",
Alibaba = "Alibaba",
}

// Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
// BLOCK_NONE will not block any content, and BLOCK_ONLY_HIGH will block only high-risk content.
export enum GoogleSafetySettingsThreshold {
BLOCK_NONE = "BLOCK_NONE",
BLOCK_ONLY_HIGH = "BLOCK_ONLY_HIGH",
BLOCK_MEDIUM_AND_ABOVE = "BLOCK_MEDIUM_AND_ABOVE",
BLOCK_LOW_AND_ABOVE = "BLOCK_LOW_AND_ABOVE",
}

export enum ModelProvider {
GPT = "GPT",
GeminiPro = "GeminiPro",
Claude = "Claude",
Ernie = "Ernie",
Doubao = "Doubao",
Qwen = "Qwen",
}

export const Anthropic = {

@@ -98,12 +128,48 @@ export const OpenaiPath = {
};

export const Azure = {
ExampleEndpoint: "https://{resource-url}/openai/deployments",
ChatPath: (deployName: string, apiVersion: string) =>
`deployments/${deployName}/chat/completions?api-version=${apiVersion}`,
ExampleEndpoint: "https://{resource-url}/openai/deployments/{deploy-id}",
};

export const Google = {
ExampleEndpoint: "https://generativelanguage.googleapis.com/",
ChatPath: (modelName: string) => `v1beta/models/${modelName}:generateContent`,
ChatPath: (modelName: string) =>
`v1beta/models/${modelName}:streamGenerateContent`,
};

export const Baidu = {
ExampleEndpoint: BAIDU_BASE_URL,
ChatPath: (modelName: string) => {
let endpoint = modelName;
if (modelName === "ernie-4.0-8k") {
endpoint = "completions_pro";
}
if (modelName === "ernie-4.0-8k-preview-0518") {
endpoint = "completions_adv_pro";
}
if (modelName === "ernie-3.5-8k") {
endpoint = "completions";
}
if (modelName === "ernie-speed-128k") {
endpoint = "ernie-speed-128k";
}
if (modelName === "ernie-speed-8k") {
endpoint = "ernie_speed";
}
return `rpc/2.0/ai_custom/v1/wenxinworkshop/chat/${endpoint}`;
},
};

export const ByteDance = {
ExampleEndpoint: "https://ark.cn-beijing.volces.com/api/",
ChatPath: "api/v3/chat/completions",
};

export const Alibaba = {
ExampleEndpoint: ALIBABA_BASE_URL,
ChatPath: "v1/services/aigc/text-generation/generation",
};
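
For reference, a minimal sketch of how these new path constants compose into request URLs. buildBaiduChatUrl is a hypothetical helper used only for illustration; token handling is done elsewhere in the API routes.

// Illustration only: composing provider URLs from the constants above.
import { BAIDU_BASE_URL, Baidu, BYTEDANCE_BASE_URL, ByteDance } from "@/app/constant";

// Baidu maps friendly model names onto wenxinworkshop endpoints:
Baidu.ChatPath("ernie-3.5-8k"); // "rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
Baidu.ChatPath("ernie-speed-8k"); // "rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed"

// Hypothetical helper; the access_token query parameter is appended by the API route.
function buildBaiduChatUrl(modelName: string): string {
  return `${BAIDU_BASE_URL}/${Baidu.ChatPath(modelName)}`;
}

// ByteDance and Alibaba use fixed chat paths:
const bytedanceChatUrl = `${BYTEDANCE_BASE_URL}/${ByteDance.ChatPath}`;
// => "https://ark.cn-beijing.volces.com/api/v3/chat/completions"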

export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang

@@ -124,7 +190,7 @@ Latex inline: \\(x^2\\)
Latex block: $$e=mc^2$$
`;

export const SUMMARIZE_MODEL = "gpt-3.5-turbo";
export const SUMMARIZE_MODEL = "gpt-4o-mini";
export const GEMINI_SUMMARIZE_MODEL = "gemini-pro";

export const KnowledgeCutOffDate: Record<string, string> = {

@@ -134,6 +200,8 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"gpt-4-turbo-preview": "2023-12",
"gpt-4o": "2023-10",
"gpt-4o-2024-05-13": "2023-10",
"gpt-4o-mini": "2023-10",
"gpt-4o-mini-2024-07-18": "2023-10",
"gpt-4-vision-preview": "2023-04",
// After improvements,
// it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.

@@ -170,8 +238,11 @@ const openaiModels = [
"gpt-4-turbo-preview",
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
"gpt-4-vision-preview",
"gpt-4-turbo-2024-04-09",
"gpt-4-1106-preview",
];

const googleModels = [

@@ -191,6 +262,39 @@ const anthropicModels = [
"claude-3-5-sonnet-20240620",
];

const baiduModels = [
"ernie-4.0-turbo-8k",
"ernie-4.0-8k",
"ernie-4.0-8k-preview",
"ernie-4.0-8k-preview-0518",
"ernie-4.0-8k-latest",
"ernie-3.5-8k",
"ernie-3.5-8k-0205",
"ernie-speed-128k",
"ernie-speed-8k",
"ernie-lite-8k",
"ernie-tiny-8k",
];

const bytedanceModels = [
"Doubao-lite-4k",
"Doubao-lite-32k",
"Doubao-lite-128k",
"Doubao-pro-4k",
"Doubao-pro-32k",
"Doubao-pro-128k",
];

const alibabaModes = [
"qwen-turbo",
"qwen-plus",
"qwen-max",
"qwen-max-0428",
"qwen-max-0403",
"qwen-max-0107",
"qwen-max-longcontext",
];

export const DEFAULT_MODELS = [
...openaiModels.map((name) => ({
name,

@@ -201,6 +305,15 @@ export const DEFAULT_MODELS = [
providerType: "openai",
},
})),
...openaiModels.map((name) => ({
name,
available: true,
provider: {
id: "azure",
providerName: "Azure",
providerType: "azure",
},
})),
...googleModels.map((name) => ({
name,
available: true,

@@ -219,6 +332,33 @@ export const DEFAULT_MODELS = [
providerType: "anthropic",
},
})),
...baiduModels.map((name) => ({
name,
available: true,
provider: {
id: "baidu",
providerName: "Baidu",
providerType: "baidu",
},
})),
...bytedanceModels.map((name) => ({
name,
available: true,
provider: {
id: "bytedance",
providerName: "ByteDance",
providerType: "bytedance",
},
})),
...alibabaModes.map((name) => ({
name,
available: true,
provider: {
id: "alibaba",
providerName: "Alibaba",
providerType: "alibaba",
},
})),
] as const;

export const CHAT_PAGE_SIZE = 15;
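
For reference, each entry produced by the map() calls above has the following shape; note that the same model name can now appear once per provider (for example "gpt-4o" exists under both the openai and azure providers). The object below is an illustration, not code from the diff.

const qwenTurboEntry = {
  name: "qwen-turbo",
  available: true,
  provider: {
    id: "alibaba",
    providerName: "Alibaba",
    providerType: "alibaba",
  },
} as const;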

@@ -3,7 +3,7 @@ import "./styles/globals.scss";
import "./styles/markdown.scss";
import "./styles/highlight.scss";
import { getClientConfig } from "./config/client";
import { type Metadata } from "next";
import type { Metadata, Viewport } from "next";
import { SpeedInsights } from "@vercel/speed-insights/next";
import { getServerSideConfig } from "./config/server";
import { GoogleTagManager } from "@next/third-parties/google";

@@ -12,21 +12,22 @@ const serverConfig = getServerSideConfig();
export const metadata: Metadata = {
title: "NextChat",
description: "Your personal ChatGPT Chat Bot.",
viewport: {
width: "device-width",
initialScale: 1,
maximumScale: 1,
},
themeColor: [
{ media: "(prefers-color-scheme: light)", color: "#e7f8ff" },
{ media: "(prefers-color-scheme: dark)", color: "#1b262a" },
],
appleWebApp: {
title: "NextChat",
statusBarStyle: "default",
},
};

export const viewport: Viewport = {
width: "device-width",
initialScale: 1,
maximumScale: 1,
themeColor: [
{ media: "(prefers-color-scheme: light)", color: "#fafafa" },
{ media: "(prefers-color-scheme: dark)", color: "#151515" },
],
};

export default function RootLayout({
children,
}: {

@@ -354,6 +354,48 @@ const cn = {
Title: "API 版本(仅适用于 gemini-pro)",
SubTitle: "选择一个特定的 API 版本",
},
GoogleSafetySettings: {
Title: "Google 安全过滤级别",
SubTitle: "设置内容过滤级别",
},
},
Baidu: {
ApiKey: {
Title: "API Key",
SubTitle: "使用自定义 Baidu API Key",
Placeholder: "Baidu API Key",
},
SecretKey: {
Title: "Secret Key",
SubTitle: "使用自定义 Baidu Secret Key",
Placeholder: "Baidu Secret Key",
},
Endpoint: {
Title: "接口地址",
SubTitle: "不支持自定义前往.env配置",
},
},
ByteDance: {
ApiKey: {
Title: "接口密钥",
SubTitle: "使用自定义 ByteDance API Key",
Placeholder: "ByteDance API Key",
},
Endpoint: {
Title: "接口地址",
SubTitle: "样例:",
},
},
Alibaba: {
ApiKey: {
Title: "接口密钥",
SubTitle: "使用自定义阿里云API Key",
Placeholder: "Alibaba Cloud API Key",
},
Endpoint: {
Title: "接口地址",
SubTitle: "样例:",
},
},
CustomModel: {
Title: "自定义模型名",

@@ -334,7 +334,7 @@ const en: LocaleType = {

Endpoint: {
Title: "Endpoint Address",
SubTitle: "Example:",
SubTitle: "Example: ",
},

ApiVerion: {

@@ -342,6 +342,44 @@ const en: LocaleType = {
SubTitle: "Select and input a specific API version",
},
},
Baidu: {
ApiKey: {
Title: "Baidu API Key",
SubTitle: "Use a custom Baidu API Key",
Placeholder: "Baidu API Key",
},
SecretKey: {
Title: "Baidu Secret Key",
SubTitle: "Use a custom Baidu Secret Key",
Placeholder: "Baidu Secret Key",
},
Endpoint: {
Title: "Endpoint Address",
SubTitle: "not supported, configure in .env",
},
},
ByteDance: {
ApiKey: {
Title: "ByteDance API Key",
SubTitle: "Use a custom ByteDance API Key",
Placeholder: "ByteDance API Key",
},
Endpoint: {
Title: "Endpoint Address",
SubTitle: "Example: ",
},
},
Alibaba: {
ApiKey: {
Title: "Alibaba API Key",
SubTitle: "Use a custom Alibaba Cloud API Key",
Placeholder: "Alibaba Cloud API Key",
},
Endpoint: {
Title: "Endpoint Address",
SubTitle: "Example: ",
},
},
CustomModel: {
Title: "Custom Models",
SubTitle: "Custom model options, seperated by comma",

@@ -355,13 +393,17 @@ const en: LocaleType = {

Endpoint: {
Title: "Endpoint Address",
SubTitle: "Example:",
SubTitle: "Example: ",
},

ApiVersion: {
Title: "API Version (specific to gemini-pro)",
SubTitle: "Select a specific API version",
},
GoogleSafetySettings: {
Title: "Google Safety Settings",
SubTitle: "Select a safety filtering level",
},
},
},

@@ -97,12 +97,47 @@ function setItem(key: string, value: string) {

function getLanguage() {
try {
return navigator.language.toLowerCase();
const locale = new Intl.Locale(navigator.language).maximize();
const region = locale?.region?.toLowerCase();
// 1. check region code in ALL_LANGS
if (AllLangs.includes(region as Lang)) {
return region as Lang;
}
// 2. check language code in ALL_LANGS
if (AllLangs.includes(locale.language as Lang)) {
return locale.language as Lang;
}
return DEFAULT_LANG;
} catch {
return DEFAULT_LANG;
}
}

export function getLang(): Lang {
const savedLang = getItem(LANG_KEY);

if (AllLangs.includes((savedLang ?? "") as Lang)) {
return savedLang as Lang;
}

return getLanguage();
}

export function changeLang(lang: Lang) {
setItem(LANG_KEY, lang);
location.reload();
}

export function getISOLang() {
const isoLangString: Record<string, string> = {
cn: "zh-Hans",
tw: "zh-Hant",
};

const lang = getLang();
return isoLangString[lang] ?? lang;
}

const DEFAULT_STT_LANG = "zh-CN";
export const STT_LANG_MAP: Record<Lang, string> = {
cn: "zh-CN",

@@ -125,6 +160,7 @@ export const STT_LANG_MAP: Record<Lang, string> = {
bn: "bn-BD",
sk: "sk-SK",
};

export function getSTTLang(): string {
try {
return STT_LANG_MAP[getLang()];

@@ -132,36 +168,3 @@ export function getSTTLang(): string {
return DEFAULT_STT_LANG;
}
}

export function getLang(): Lang {
const savedLang = getItem(LANG_KEY);

if (AllLangs.includes((savedLang ?? "") as Lang)) {
return savedLang as Lang;
}

const lang = getLanguage();

for (const option of AllLangs) {
if (lang.includes(option)) {
return option;
}
}

return DEFAULT_LANG;
}

export function changeLang(lang: Lang) {
setItem(LANG_KEY, lang);
location.reload();
}

export function getISOLang() {
const isoLangString: Record<string, string> = {
cn: "zh-Hans",
tw: "zh-Hant",
};

const lang = getLang();
return isoLangString[lang] ?? lang;
}
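
A worked example of the new region-first detection in getLanguage() (the browser language value is illustrative):

// With navigator.language === "zh-TW":
const locale = new Intl.Locale("zh-TW").maximize();
locale.region;   // "TW" -> lowercased to "tw", which is in AllLangs, so "tw" is returned
locale.language; // "zh" -> only consulted when the region is not a known Lang
// Anything unrecognized falls back to DEFAULT_LANG.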

@@ -4,11 +4,11 @@ import { SubmitKey } from "../store/config";
const isApp = !!getClientConfig()?.isApp;

const tw = {
WIP: "該功能仍在開發中……",
WIP: "此功能仍在開發中……",
Error: {
Unauthorized: isApp
? "檢測到無效 API Key,請前往[設定](/#/settings)頁檢查 API Key 是否設定正確。"
: "存取密碼不正確或未填寫,請前往[登入](/#/auth)頁輸入正確的存取密碼,或者在[設定](/#/settings)頁填入你自己的 OpenAI API Key。",
? "偵測到無效的 API Key,請前往[設定](/#/settings)頁面檢查 API Key 是否設定正確。"
: "存取密碼不正確或尚未填寫,請前往[登入](/#/auth)頁面輸入正確的存取密碼,或者在[設定](/#/settings)頁面填入你自己的 OpenAI API Key。",
},

Auth: {

@@ -159,7 +159,7 @@ const tw = {
},
InputTemplate: {
Title: "使用者輸入預處理",
SubTitle: "使用者最新的一條訊息會填充到此範本",
SubTitle: "使用者最新的一則訊息會填充到此範本",
},

Update: {

@@ -194,19 +194,19 @@ const tw = {
},
SyncType: {
Title: "同步類型",
SubTitle: "選擇喜愛的同步伺服器",
SubTitle: "選擇偏好的同步伺服器",
},
Proxy: {
Title: "啟用代理",
SubTitle: "在瀏覽器中同步時,必須啟用代理以避免跨域限制",
Title: "啟用代理伺服器",
SubTitle: "在瀏覽器中同步時,啟用代理伺服器以避免跨域限制",
},
ProxyUrl: {
Title: "代理地址",
SubTitle: "僅適用於本專案自帶的跨域代理",
Title: "代理伺服器位置",
SubTitle: "僅適用於本專案內建的跨域代理",
},

WebDav: {
Endpoint: "WebDAV 地址",
Endpoint: "WebDAV 位置",
UserName: "使用者名稱",
Password: "密碼",
},

@@ -218,9 +218,9 @@ const tw = {
},
},

LocalState: "本地資料",
LocalState: "本機資料",
Overview: (overview: any) => {
return `${overview.chat} 次對話,${overview.message} 條訊息,${overview.prompt} 條提示詞,${overview.mask} 個角色範本`;
return `${overview.chat} 次對話,${overview.message} 則訊息,${overview.prompt} 條提示詞,${overview.mask} 個角色範本`;
},
ImportFailed: "匯入失敗",
},

@@ -239,13 +239,13 @@ const tw = {
Title: "停用提示詞自動補齊",
SubTitle: "在輸入框開頭輸入 / 即可觸發自動補齊",
},
List: "自定義提示詞列表",
List: "自訂提示詞列表",
ListCount: (builtin: number, custom: number) =>
`內建 ${builtin} 條,使用者定義 ${custom} 條`,
`內建 ${builtin} 條,使用者自訂 ${custom} 條`,
Edit: "編輯",
Modal: {
Title: "提示詞列表",
Add: "新增一條",
Add: "新增一則",
Search: "搜尋提示詞",
},
EditModal: {

@@ -278,40 +278,40 @@ const tw = {
Placeholder: "請輸入存取密碼",
},
CustomEndpoint: {
Title: "自定義介面 (Endpoint)",
SubTitle: "是否使用自定義 Azure 或 OpenAI 服務",
Title: "自訂 API 端點 (Endpoint)",
SubTitle: "是否使用自訂 Azure 或 OpenAI 服務",
},
Provider: {
Title: "模型服務商",
SubTitle: "切換不同的服務商",
Title: "模型供應商",
SubTitle: "切換不同的服務供應商",
},
OpenAI: {
ApiKey: {
Title: "API Key",
SubTitle: "使用自定義 OpenAI Key 繞過密碼存取限制",
SubTitle: "使用自訂 OpenAI Key 繞過密碼存取限制",
Placeholder: "OpenAI API Key",
},

Endpoint: {
Title: "介面(Endpoint) 地址",
SubTitle: "除預設地址外,必須包含 http(s)://",
Title: "API 端點 (Endpoint) 位址",
SubTitle: "除預設位址外,必須包含 http(s)://",
},
},
Azure: {
ApiKey: {
Title: "介面金鑰",
SubTitle: "使用自定義 Azure Key 繞過密碼存取限制",
Title: "API 金鑰",
SubTitle: "使用自訂 Azure Key 繞過密碼存取限制",
Placeholder: "Azure API Key",
},

Endpoint: {
Title: "介面(Endpoint) 地址",
SubTitle: "樣例:",
Title: "API 端點 (Endpoint) 位址",
SubTitle: "範例:",
},

ApiVerion: {
Title: "介面版本 (azure api version)",
SubTitle: "選擇指定的部分版本",
Title: "API 版本 (azure api version)",
SubTitle: "指定一個特定的 API 版本",
},
},
Anthropic: {

@@ -322,13 +322,13 @@ const tw = {
},

Endpoint: {
Title: "終端地址",
Title: "端點位址",
SubTitle: "範例:",
},

ApiVerion: {
Title: "API 版本 (claude api version)",
SubTitle: "選擇一個特定的 API 版本輸入",
SubTitle: "指定一個特定的 API 版本",
},
},
Google: {

@@ -339,7 +339,7 @@ const tw = {
},

Endpoint: {
Title: "終端地址",
Title: "端點位址",
SubTitle: "範例:",
},

@@ -349,8 +349,8 @@ const tw = {
},
},
CustomModel: {
Title: "自定義模型名",
SubTitle: "增加自定義模型可選項,使用英文逗號隔開",
Title: "自訂模型名稱",
SubTitle: "增加自訂模型可選擇項目,使用英文逗號隔開",
},
},

@@ -400,7 +400,7 @@ const tw = {
Context: {
Toast: (x: any) => `已設定 ${x} 條前置上下文`,
Edit: "前置上下文和歷史記憶",
Add: "新增一條",
Add: "新增一則",
Clear: "上下文已清除",
Revert: "恢復上下文",
},

@@ -425,16 +425,16 @@ const tw = {
EditModal: {
Title: (readonly: boolean) =>
`編輯預設角色範本 ${readonly ? "(唯讀)" : ""}`,
Download: "下載預設",
Clone: "複製預設",
Download: "下載預設值",
Clone: "以此預設值建立副本",
},
Config: {
Avatar: "角色頭像",
Name: "角色名稱",
Sync: {
Title: "使用全域性設定",
SubTitle: "目前對話是否使用全域性模型設定",
Confirm: "目前對話的自定義設定將會被自動覆蓋,確認啟用全域性設定?",
Title: "使用全域設定",
SubTitle: "目前對話是否使用全域模型設定",
Confirm: "目前對話的自訂設定將會被自動覆蓋,確認啟用全域設定?",
},
HideContext: {
Title: "隱藏預設對話",

@@ -450,15 +450,15 @@ const tw = {
NewChat: {
Return: "返回",
Skip: "跳過",
NotShow: "不再呈現",
NotShow: "不再顯示",
ConfirmNoShow: "確認停用?停用後可以隨時在設定中重新啟用。",
Title: "挑選一個角色範本",
SubTitle: "現在開始,與角色範本背後的靈魂思維碰撞",
More: "搜尋更多",
},
URLCommand: {
Code: "檢測到連結中已經包含存取密碼,是否自動填入?",
Settings: "檢測到連結中包含了預設設定,是否自動填入?",
Code: "偵測到連結中已經包含存取密碼,是否自動填入?",
Settings: "偵測到連結中包含了預設設定,是否自動填入?",
},
UI: {
Confirm: "確認",
25 app/masks/build.ts Normal file
@@ -0,0 +1,25 @@
import fs from "fs";
import path from "path";
import { CN_MASKS } from "./cn";
import { TW_MASKS } from "./tw";
import { EN_MASKS } from "./en";

import { type BuiltinMask } from "./typing";

const BUILTIN_MASKS: Record<string, BuiltinMask[]> = {
cn: CN_MASKS,
tw: TW_MASKS,
en: EN_MASKS,
};

const dirname = path.dirname(__filename);

fs.writeFile(
dirname + "/../../public/masks.json",
JSON.stringify(BUILTIN_MASKS, null, 4),
function (error) {
if (error) {
console.error("[Build] failed to build masks", error);
}
},
);
@@ -1,7 +1,4 @@
import { Mask } from "../store/mask";
import { CN_MASKS } from "./cn";
import { TW_MASKS } from "./tw";
import { EN_MASKS } from "./en";

import { type BuiltinMask } from "./typing";
export { type BuiltinMask } from "./typing";

@@ -22,8 +19,20 @@ export const BUILTIN_MASK_STORE = {
},
};

export const BUILTIN_MASKS: BuiltinMask[] = [
...CN_MASKS,
...TW_MASKS,
...EN_MASKS,
].map((m) => BUILTIN_MASK_STORE.add(m));
export const BUILTIN_MASKS: BuiltinMask[] = [];

if (typeof window != "undefined") {
// run in browser skip in next server
fetch("/masks.json")
.then((res) => res.json())
.catch((error) => {
console.error("[Fetch] failed to fetch masks", error);
return { cn: [], tw: [], en: [] };
})
.then((masks) => {
const { cn = [], tw = [], en = [] } = masks;
return [...cn, ...tw, ...en].map((m) => {
BUILTIN_MASKS.push(BUILTIN_MASK_STORE.add(m));
});
});
}
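
The masks are now pre-built into public/masks.json by app/masks/build.ts and fetched lazily in the browser. A sketch of the JSON shape the fetch above expects (illustrative only):

import { type BuiltinMask } from "./typing"; // relative to app/masks/

// public/masks.json, as written by build.ts:
type MasksJson = {
  cn: BuiltinMask[];
  tw: BuiltinMask[];
  en: BuiltinMask[];
};

// e.g. fetch("/masks.json").then((res) => res.json() as Promise<MasksJson>)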

@@ -1,6 +1,7 @@
import {
ApiPath,
DEFAULT_API_HOST,
GoogleSafetySettingsThreshold,
ServiceProvider,
StoreKey,
} from "../constant";

@@ -12,10 +13,33 @@ import { DEFAULT_CONFIG } from "./config";

let fetchState = 0; // 0 not fetch, 1 fetching, 2 done

const DEFAULT_OPENAI_URL =
getClientConfig()?.buildMode === "export"
? DEFAULT_API_HOST + "/api/proxy/openai"
: ApiPath.OpenAI;
const isApp = getClientConfig()?.buildMode === "export";

const DEFAULT_OPENAI_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/openai"
: ApiPath.OpenAI;

const DEFAULT_GOOGLE_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/google"
: ApiPath.Google;

const DEFAULT_ANTHROPIC_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/anthropic"
: ApiPath.Anthropic;

const DEFAULT_BAIDU_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/baidu"
: ApiPath.Baidu;

const DEFAULT_BYTEDANCE_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/bytedance"
: ApiPath.ByteDance;

const DEFAULT_ALIBABA_URL = isApp
? DEFAULT_API_HOST + "/api/proxy/alibaba"
: ApiPath.Alibaba;

console.log("DEFAULT_ANTHROPIC_URL", DEFAULT_ANTHROPIC_URL);

const DEFAULT_ACCESS_STATE = {
accessCode: "",

@@ -33,14 +57,28 @@ const DEFAULT_ACCESS_STATE = {
azureApiVersion: "2024-02-15-preview",

// google ai studio
googleUrl: "",
googleUrl: DEFAULT_GOOGLE_URL,
googleApiKey: "",
googleApiVersion: "v1",
googleSafetySettings: GoogleSafetySettingsThreshold.BLOCK_ONLY_HIGH,

// anthropic
anthropicUrl: DEFAULT_ANTHROPIC_URL,
anthropicApiKey: "",
anthropicApiVersion: "2023-06-01",
anthropicUrl: "",

// baidu
baiduUrl: DEFAULT_BAIDU_URL,
baiduApiKey: "",
baiduSecretKey: "",

// bytedance
bytedanceUrl: DEFAULT_BYTEDANCE_URL,
bytedanceApiKey: "",

// alibaba
alibabaUrl: DEFAULT_ALIBABA_URL,
alibabaApiKey: "",

// server config
needCode: true,

@@ -102,6 +140,18 @@ export const useAccessStore = createPersistStore(
return ensure(get(), ["anthropicApiKey"]);
},

isValidBaidu() {
return ensure(get(), ["baiduApiKey", "baiduSecretKey"]);
},

isValidByteDance() {
return ensure(get(), ["bytedanceApiKey"]);
},

isValidAlibaba() {
return ensure(get(), ["alibabaApiKey"]);
},

isAuthorized() {
this.fetch();

@@ -111,6 +161,9 @@ export const useAccessStore = createPersistStore(
this.isValidAzure() ||
this.isValidGoogle() ||
this.isValidAnthropic() ||
this.isValidBaidu() ||
this.isValidByteDance() ||
this.isValidAlibaba() ||
!this.enabledAccessControl() ||
(this.enabledAccessControl() && ensure(get(), ["accessCode"]))
);
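
A short illustration of how the new defaults and validity checks behave (the values are examples, not from the diff):

import { useAccessStore } from "@/app/store/access";

// In the packaged app build (buildMode === "export") the store points at the hosted proxy,
// otherwise it points at the in-app API route:
//   DEFAULT_ALIBABA_URL -> DEFAULT_API_HOST + "/api/proxy/alibaba"  (app build)
//   DEFAULT_ALIBABA_URL -> ApiPath.Alibaba, i.e. "/api/alibaba"     (web build)

// isAuthorized() now passes as soon as any single provider credential is present,
// e.g. a Baidu key/secret pair alone is enough:
const access = useAccessStore.getState();
access.isValidBaidu(); // true once baiduApiKey and baiduSecretKey are both set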

@@ -1,4 +1,4 @@
import { trimTopic, getMessageTextContent, getClientApi } from "../utils";
import { trimTopic, getMessageTextContent } from "../utils";

import Locale, { getLang } from "../locales";
import { showToast } from "../components/ui-lib";

@@ -9,13 +9,17 @@ import {
DEFAULT_MODELS,
DEFAULT_SYSTEM_TEMPLATE,
KnowledgeCutOffDate,
ModelProvider,
StoreKey,
SUMMARIZE_MODEL,
GEMINI_SUMMARIZE_MODEL,
MYFILES_BROWSER_TOOLS_SYSTEM_PROMPT,
} from "../constant";
import { ClientApi, RequestMessage, MultimodalContent } from "../client/api";
import { getClientApi } from "../client/api";
import type {
ClientApi,
RequestMessage,
MultimodalContent,
} from "../client/api";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format";
import { estimateTokenLength } from "../utils/token";

@@ -28,7 +32,6 @@ export interface ChatToolMessage {
}
import { createPersistStore } from "../utils/store";
import { FileInfo } from "../client/platforms/utils";
import { identifyDefaultClaudeModel } from "../utils/checkers";
import { collectModelsWithDefaultModel } from "../utils/model";
import { useAccessStore } from "./access";

@@ -101,13 +104,7 @@ function createEmptySession(): ChatSession {
}

function getSummarizeModel(currentModel: string) {
// if the current model does not exist in the default model
// example azure services cannot use SUMMARIZE_MODEL
const model = DEFAULT_MODELS.find((m) => m.name === currentModel);
console.log("model", model);
if (!model) return currentModel;
if (model.provider.providerType === "google") return GEMINI_SUMMARIZE_MODEL;
// if it is using gpt-* models, force to use 3.5 to summarize
// if it is using gpt-* models, force to use 4o-mini to summarize
if (currentModel.startsWith("gpt")) {
const configStore = useAppConfig.getState();
const accessStore = useAccessStore.getState();

@@ -374,7 +371,7 @@ export const useChatStore = createPersistStore(
model: modelConfig.model,
toolMessages: [],
});
var api: ClientApi = getClientApi(modelConfig.model);
const api: ClientApi = getClientApi(modelConfig.providerName);
const isEnableRAG =
session.attachFiles && session.attachFiles.length > 0;
// get recent messages

@@ -671,7 +668,7 @@ ${file.partial}
const session = get().currentSession();
const modelConfig = session.mask.modelConfig;

var api: ClientApi = getClientApi(modelConfig.model);
const api: ClientApi = getClientApi(modelConfig.providerName);

// remove error messages if any
const messages = session.messages;
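
getClientApi is now resolved from the configured provider (modelConfig.providerName) instead of sniffing the model name. The real implementation lives in app/client/api.ts and is not part of this excerpt; a minimal sketch of what such a provider switch could look like:

// Sketch only: the actual implementation in app/client/api.ts may differ.
import { ModelProvider, ServiceProvider } from "@/app/constant";
import { ClientApi } from "@/app/client/api";

function getClientApiSketch(provider: ServiceProvider): ClientApi {
  switch (provider) {
    case ServiceProvider.Google:
      return new ClientApi(ModelProvider.GeminiPro);
    case ServiceProvider.Anthropic:
      return new ClientApi(ModelProvider.Claude);
    case ServiceProvider.Baidu:
      return new ClientApi(ModelProvider.Ernie);
    case ServiceProvider.ByteDance:
      return new ClientApi(ModelProvider.Doubao);
    case ServiceProvider.Alibaba:
      return new ClientApi(ModelProvider.Qwen);
    default:
      return new ClientApi(ModelProvider.GPT);
  }
}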

@@ -13,6 +13,7 @@ import {
DEFAULT_TTS_VOICE,
DEFAULT_TTS_VOICES,
StoreKey,
ServiceProvider,
} from "../constant";
import { createPersistStore } from "../utils/store";

@@ -61,6 +62,7 @@ export const DEFAULT_CONFIG = {

modelConfig: {
model: "gpt-3.5-turbo" as ModelType,
providerName: "OpenAI" as ServiceProvider,
temperature: 0.5,
top_p: 1,
max_tokens: 4000,

@@ -173,12 +175,12 @@ export const useAppConfig = createPersistStore(

for (const model of oldModels) {
model.available = false;
modelMap[model.name] = model;
modelMap[`${model.name}@${model?.provider?.id}`] = model;
}

for (const model of newModels) {
model.available = true;
modelMap[model.name] = model;
modelMap[`${model.name}@${model?.provider?.id}`] = model;
}

set(() => ({

@@ -225,7 +227,7 @@ export const useAppConfig = createPersistStore(
state.modelConfig.template =
state.modelConfig.template !== DEFAULT_INPUT_TEMPLATE
? state.modelConfig.template
: config?.template ?? DEFAULT_INPUT_TEMPLATE;
: (config?.template ?? DEFAULT_INPUT_TEMPLATE);
}

return state as any;
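
The merge step above now keys merged models by name plus provider id, so one model name can coexist under several providers. Illustrative keys (not from the diff):

const key = (name: string, providerId: string) => `${name}@${providerId}`;
key("gpt-4o", "openai");      // "gpt-4o@openai"
key("gpt-4o", "azure");       // "gpt-4o@azure", a separate entry for the same model
key("qwen-turbo", "alibaba"); // "qwen-turbo@alibaba"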

@@ -154,7 +154,7 @@ export const usePromptStore = createPersistStore(
fetch(PROMPT_URL)
.then((res) => res.json())
.then((res) => {
let fetchPrompts = [res.en, res.cn];
let fetchPrompts = [res.en, res.tw, res.cn];
if (getLang() === "cn") {
fetchPrompts = fetchPrompts.reverse();
}

@@ -166,7 +166,7 @@ export const usePromptStore = createPersistStore(
title,
content,
createdAt: Date.now(),
} as Prompt),
}) as Prompt,
);
});

@@ -175,7 +175,8 @@ export const usePromptStore = createPersistStore(
const allPromptsForSearch = builtinPrompts
.reduce((pre, cur) => pre.concat(cur), [])
.filter((v) => !!v.title && !!v.content);
SearchService.count.builtin = res.en.length + res.cn.length;
SearchService.count.builtin =
res.en.length + res.cn.length + res.tw.length;
SearchService.init(allPromptsForSearch, userPrompts);
});
},

@@ -115,7 +115,7 @@ body {
}

::-webkit-scrollbar {
--bar-width: 5px;
--bar-width: 10px;
width: var(--bar-width);
height: var(--bar-width);
}
25 app/utils.ts
@@ -1,10 +1,8 @@
import { useEffect, useState } from "react";
import { showToast } from "./components/ui-lib";
import Locale from "./locales";
import { ClientApi, RequestMessage } from "./client/api";
import { DEFAULT_MODELS, ModelProvider } from "./constant";
import { identifyDefaultClaudeModel } from "./utils/checkers";
import { useAccessStore } from "./store";
import { RequestMessage } from "./client/api";
import { DEFAULT_MODELS } from "./constant";

export function trimTopic(topic: string) {
// Fix an issue where double quotes still show in the Indonesian language

@@ -259,6 +257,7 @@ export function isVisionModel(model: string) {
"gemini-1.5-pro",
"gemini-1.5-flash",
"gpt-4o",
"gpt-4o-mini",
];
const isGpt4Turbo =
model.includes("gpt-4-turbo") && !model.includes("preview");

@@ -274,6 +273,8 @@ export function isSupportRAGModel(modelName: string) {
"gpt-4-turbo-2024-04-09",
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
];
if (specialModels.some((keyword) => modelName === keyword)) return true;
if (isVisionModel(modelName)) return false;

@@ -281,19 +282,3 @@ export function isSupportRAGModel(modelName: string) {
(model) => model.name === modelName,
);
}

export function getClientApi(modelName: string): ClientApi {
const accessStore = useAccessStore.getState();
if (accessStore.isUseOpenAIEndpointForAllModels) {
return new ClientApi(ModelProvider.GPT);
}
var api: ClientApi;
if (modelName.startsWith("gemini")) {
api = new ClientApi(ModelProvider.GeminiPro);
} else if (identifyDefaultClaudeModel(modelName)) {
api = new ClientApi(ModelProvider.Claude);
} else {
api = new ClientApi(ModelProvider.GPT);
}
return api;
}
23 app/utils/baidu.ts Normal file
@@ -0,0 +1,23 @@
import { BAIDU_OATUH_URL } from "../constant";
/**
 * 使用 AK,SK 生成鉴权签名(Access Token)
 * @return 鉴权签名信息
 */
export async function getAccessToken(
clientId: string,
clientSecret: string,
): Promise<{
access_token: string;
expires_in: number;
error?: number;
}> {
const res = await fetch(
`${BAIDU_OATUH_URL}?grant_type=client_credentials&client_id=${clientId}&client_secret=${clientSecret}`,
{
method: "POST",
mode: "cors",
},
);
const resJson = await res.json();
return resJson;
}
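
A minimal usage sketch for getAccessToken (the credentials are placeholders; attaching the token to the chat request happens in the Baidu API route, which is outside this excerpt):

import { getAccessToken } from "@/app/utils/baidu";

async function fetchBaiduToken() {
  // Placeholder credentials, not real keys.
  const { access_token, expires_in, error } = await getAccessToken(
    "my-baidu-api-key",
    "my-baidu-secret-key",
  );
  if (error) {
    throw new Error(`failed to fetch Baidu access token: ${error}`);
  }
  // access_token is appended to the wenxinworkshop chat URL as ?access_token=...,
  // and expires_in (seconds) tells the caller when to refresh it.
  return access_token;
}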

@@ -1,6 +1,7 @@
import heic2any from "heic2any";
import { CACHE_URL_PREFIX, UPLOAD_URL } from "@/app/constant";
import { RequestMessage } from "@/app/client/api";

export function compressImage(file: File, maxSize: number): Promise<string> {
export function compressImage(file: Blob, maxSize: number): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (readerEvent: any) => {

@@ -40,15 +41,104 @@ export function compressImage(file: File, maxSize: number): Promise<string> {
reader.onerror = reject;

if (file.type.includes("heic")) {
heic2any({ blob: file, toType: "image/jpeg" })
.then((blob) => {
reader.readAsDataURL(blob as Blob);
})
.catch((e) => {
reject(e);
});
try {
const heic2any = require("heic2any");
heic2any({ blob: file, toType: "image/jpeg" })
.then((blob: Blob) => {
reader.readAsDataURL(blob);
})
.catch((e: any) => {
reject(e);
});
} catch (e) {
reject(e);
}
}

reader.readAsDataURL(file);
});
}

export async function preProcessImageContent(
content: RequestMessage["content"],
) {
if (typeof content === "string") {
return content;
}
const result = [];
for (const part of content) {
if (part?.type == "image_url" && part?.image_url?.url) {
try {
const url = await cacheImageToBase64Image(part?.image_url?.url);
result.push({ type: part.type, image_url: { url } });
} catch (error) {
console.error("Error processing image URL:", error);
}
} else {
result.push({ ...part });
}
}
return result;
}

const imageCaches: Record<string, string> = {};
export function cacheImageToBase64Image(imageUrl: string) {
if (imageUrl.includes(CACHE_URL_PREFIX)) {
if (!imageCaches[imageUrl]) {
const reader = new FileReader();
return fetch(imageUrl, {
method: "GET",
mode: "cors",
credentials: "include",
})
.then((res) => res.blob())
.then(
async (blob) =>
(imageCaches[imageUrl] = await compressImage(blob, 256 * 1024)),
); // compressImage
}
return Promise.resolve(imageCaches[imageUrl]);
}
return Promise.resolve(imageUrl);
}

export function base64Image2Blob(base64Data: string, contentType: string) {
const byteCharacters = atob(base64Data);
const byteNumbers = new Array(byteCharacters.length);
for (let i = 0; i < byteCharacters.length; i++) {
byteNumbers[i] = byteCharacters.charCodeAt(i);
}
const byteArray = new Uint8Array(byteNumbers);
return new Blob([byteArray], { type: contentType });
}

export function uploadImage(file: File): Promise<string> {
if (!window._SW_ENABLED) {
// if serviceWorker register error, using compressImage
return compressImage(file, 256 * 1024);
}
const body = new FormData();
body.append("file", file);
return fetch(UPLOAD_URL, {
method: "post",
body,
mode: "cors",
credentials: "include",
})
.then((res) => res.json())
.then((res) => {
console.log("res", res);
if (res?.code == 0 && res?.data) {
return res?.data;
}
throw Error(`upload Error: ${res?.msg}`);
});
}

export function removeImage(imageUrl: string) {
return fetch(imageUrl, {
method: "DELETE",
mode: "cors",
credentials: "include",
});
}
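
A usage sketch for the image helpers above (the message content and cache URL are illustrative, and the import path is assumed):

import { preProcessImageContent } from "./chat"; // path assumed for this sketch

async function prepareMessage() {
  const content = [
    { type: "text", text: "What is in this picture?" },
    { type: "image_url", image_url: { url: "/api/cache/upload/example.png" } },
  ];
  // Cached /api/cache URLs are fetched, compressed toward the 256 KB target and
  // inlined as base64 data URLs; text parts and external URLs pass through unchanged.
  return await preProcessImageContent(content as any);
}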

@@ -1,21 +0,0 @@
import { useAccessStore } from "../store/access";
import { useAppConfig } from "../store/config";
import { collectModels } from "./model";

export function identifyDefaultClaudeModel(modelName: string) {
const accessStore = useAccessStore.getState();
const configStore = useAppConfig.getState();

const allModals = collectModels(
configStore.models,
[configStore.customModels, accessStore.customModels].join(","),
);

const modelMeta = allModals.find((m) => m.name === modelName);

return (
modelName.startsWith("claude") &&
modelMeta &&
modelMeta.provider?.providerType === "anthropic"
);
}
26 app/utils/cloudflare.ts Normal file
@@ -0,0 +1,26 @@
export function cloudflareAIGatewayUrl(fetchUrl: string) {
// rebuild fetchUrl, if using cloudflare ai gateway
// document: https://developers.cloudflare.com/ai-gateway/providers/openai/

const paths = fetchUrl.split("/");
if ("gateway.ai.cloudflare.com" == paths[2]) {
// is cloudflare.com ai gateway
// https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/azure-openai/{resource_name}/{deployment_name}/chat/completions?api-version=2023-05-15'
if ("azure-openai" == paths[6]) {
// is azure gateway
return paths.slice(0, 8).concat(paths.slice(-3)).join("/"); // rebuild ai gateway azure_url
}
// https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/openai/chat/completions
if ("openai" == paths[6]) {
// is openai gateway
return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway openai_url
}
// https://gateway.ai.cloudflare.com/v1/{account_id}/{gateway_id}/anthropic/v1/messages \
if ("anthropic" == paths[6]) {
// is anthropic gateway
return paths.slice(0, 7).concat(paths.slice(-2)).join("/"); // rebuild ai gateway anthropic_url
}
// TODO: Amazon Bedrock, Groq, HuggingFace...
}
return fetchUrl;
}
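
A quick example of what cloudflareAIGatewayUrl rewrites (account and gateway IDs are placeholders):

// The app may have concatenated the usual OpenAI path onto a gateway base URL,
// leaving an extra "v1" segment; the helper keeps the gateway prefix and the
// final path segments only.
cloudflareAIGatewayUrl(
  "https://gateway.ai.cloudflare.com/v1/my-account/my-gateway/openai/v1/chat/completions",
);
// => "https://gateway.ai.cloudflare.com/v1/my-account/my-gateway/openai/chat/completions"

// Non-gateway URLs pass through untouched:
cloudflareAIGatewayUrl("https://api.openai.com/v1/chat/completions");
// => "https://api.openai.com/v1/chat/completions"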

@@ -1,6 +1,6 @@
import { useMemo } from "react";
import { useAccessStore, useAppConfig } from "../store";
import { collectModels, collectModelsWithDefaultModel } from "./model";
import { collectModelsWithDefaultModel } from "./model";

export function useAllModels() {
const accessStore = useAccessStore();

@@ -11,7 +11,12 @@ export function useAllModels() {
[configStore.customModels, accessStore.customModels].join(","),
accessStore.defaultModel,
);
}, [accessStore.customModels, configStore.customModels, configStore.models]);
}, [
accessStore.customModels,
accessStore.defaultModel,
configStore.customModels,
configStore.models,
]);

return models;
}

@@ -1,8 +1,9 @@
import { DEFAULT_MODELS } from "../constant";
import { LLMModel } from "../client/api";

const customProvider = (modelName: string) => ({
id: modelName,
providerName: "",
const customProvider = (providerName: string) => ({
id: providerName.toLowerCase(),
providerName: providerName,
providerType: "custom",
});

@@ -23,7 +24,8 @@ export function collectModelTable(

// default models
models.forEach((m) => {
modelTable[m.name] = {
// using <modelName>@<providerId> as fullName
modelTable[`${m.name}@${m?.provider?.id}`] = {
...m,
displayName: m.name, // 'provider' is copied over if it exists
};

@@ -37,7 +39,7 @@ export function collectModelTable(
const available = !m.startsWith("-");
const nameConfig =
m.startsWith("+") || m.startsWith("-") ? m.slice(1) : m;
const [name, displayName] = nameConfig.split("=");
let [name, displayName] = nameConfig.split("=");

// enable or disable all models
if (name === "all") {

@@ -45,12 +47,45 @@ export function collectModelTable(
(model) => (model.available = available),
);
} else {
modelTable[name] = {
name,
displayName: displayName || name,
available,
provider: modelTable[name]?.provider ?? customProvider(name), // Use optional chaining
};
// 1. find model by name, and set available value
const [customModelName, customProviderName] = name.split("@");
let count = 0;
for (const fullName in modelTable) {
const [modelName, providerName] = fullName.split("@");
if (
customModelName == modelName &&
(customProviderName === undefined ||
customProviderName === providerName)
) {
count += 1;
modelTable[fullName]["available"] = available;
// swap name and displayName for bytedance
if (providerName === "bytedance") {
[name, displayName] = [displayName, modelName];
modelTable[fullName]["name"] = name;
}
if (displayName) {
modelTable[fullName]["displayName"] = displayName;
}
}
}
// 2. if model not exists, create new model with available value
if (count === 0) {
let [customModelName, customProviderName] = name.split("@");
const provider = customProvider(
customProviderName || customModelName,
);
// swap name and displayName for bytedance
if (displayName && provider.providerName == "ByteDance") {
[customModelName, displayName] = [displayName, customModelName];
}
modelTable[`${customModelName}@${provider?.id}`] = {
name: customModelName,
displayName: displayName || customModelName,
available,
provider, // Use optional chaining
};
}
}
});

@@ -100,3 +135,13 @@ export function collectModelsWithDefaultModel(
const allModels = Object.values(modelTable);
return allModels;
}

export function isModelAvailableInServer(
customModels: string,
modelName: string,
providerName: string,
) {
const fullName = `${modelName}@${providerName}`;
const modelTable = collectModelTable(DEFAULT_MODELS, customModels);
return modelTable[fullName]?.available === false;
}
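
Finally, a usage sketch of the reworked custom-model syntax and the new server-side check (the CUSTOM_MODELS string is an example):

import { DEFAULT_MODELS } from "@/app/constant";
import { collectModelTable, isModelAvailableInServer } from "@/app/utils/model";

// Example CUSTOM_MODELS value: disable everything, then re-enable one model per provider.
const customModels = "-all,+gpt-4o@openai=GPT-4o,+qwen-turbo@alibaba";

const table = collectModelTable(DEFAULT_MODELS, customModels);
table["gpt-4o@openai"].available;   // true, with displayName "GPT-4o"
table["gpt-4o@azure"].available;    // false, only the openai entry was re-enabled

// Note the inverted sense: this returns true when the model is explicitly
// disabled on the server for that provider.
isModelAvailableInServer(customModels, "gpt-4o", "azure");  // true (blocked)
isModelAvailableInServer(customModels, "gpt-4o", "openai"); // false (allowed)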