加强不可用模型过滤

This commit is contained in:
sijinhui 2023-12-22 21:20:30 +08:00
parent 36d993cc5a
commit 07c48ef7fd
2 changed files with 12 additions and 13 deletions

View File

@@ -10,6 +10,7 @@ export async function requestOpenai(
req: NextRequest,
cloneBody: any,
isAzure: boolean,
current_model: string,
) {
const controller = new AbortController();
@@ -65,26 +66,20 @@ export async function requestOpenai(
signal: controller.signal,
};
// #1815 try to refuse gpt4 request
if (serverConfig.customModels && cloneBody) {
// #1815 try to refuse some model request
if (current_model) {
try {
const modelTable = collectModelTable(
DEFAULT_MODELS,
serverConfig.customModels,
);
// const clonedBody = await req.text();
fetchOptions.body = cloneBody;
const jsonBody = JSON.parse(cloneBody) as {
model?: string;
};
// not undefined and is false
if (!modelTable[jsonBody?.model ?? ""].available) {
if (!modelTable[current_model ?? ""].available) {
return NextResponse.json(
{
error: true,
message: `you are not allowed to use ${jsonBody?.model} model`,
message: `you are not allowed to use ${current_model} model`,
},
{
status: 403,
@@ -92,7 +87,7 @@ export async function requestOpenai(
);
}
} catch (e) {
console.error("[OpenAI] gpt4 filter", e);
console.error("[OpenAI] gpt model filter", e);
}
}

View File

@@ -27,7 +27,6 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
m.id === "gpt-3.5-turbo-16k" ||
m.id === "gpt-4-32k",
);
return remoteModelRes;
}
@@ -105,7 +104,12 @@ async function handle(
// }
try {
const response = await requestOpenai(req, cloneBody, isAzure);
const response = await requestOpenai(
req,
cloneBody,
isAzure,
jsonBody?.model ?? "",
);
// list models
if (subpath === OpenaiPath.ListModelPath && response.status === 200) {