Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-10-13 05:23:42 +08:00)
Merge branch 'main' into main
@@ -40,6 +40,7 @@ export const getBuildConfig = () => {
    buildMode,
    isApp,
    template: process.env.DEFAULT_INPUT_TEMPLATE ?? DEFAULT_INPUT_TEMPLATE,
    visionModels: process.env.VISION_MODELS || "",
  };
};
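The only functional change in this hunk is the new visionModels field, which forwards the raw VISION_MODELS environment variable (defaulting to an empty string) into the build config. A minimal sketch of how a comma-separated value for that variable could be parsed and consulted downstream; the model names and the isEnvVisionModel helper are placeholders, not part of this commit:

// Sketch: consuming a comma-separated VISION_MODELS override (placeholder names).
const raw = process.env.VISION_MODELS || ""; // e.g. "my-custom-vl-model,another-vl-model"
const envVisionModels = raw
  .split(",")
  .map((m) => m.trim())
  .filter((m) => m.length > 0);

// Exact-name match against the override list; regex-based detection happens elsewhere.
function isEnvVisionModel(model: string): boolean {
  return envVisionModels.includes(model);
}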
@@ -303,6 +303,22 @@ export const DEFAULT_TTS_VOICES = [
  "shimmer",
];

export const VISION_MODEL_REGEXES = [
  /vision/,
  /gpt-4o/,
  /claude-3/,
  /gemini-1\.5/,
  /gemini-exp/,
  /gemini-2\.0/,
  /learnlm/,
  /qwen-vl/,
  /qwen2-vl/,
  /gpt-4-turbo(?!.*preview)/, // Matches "gpt-4-turbo" but not "gpt-4-turbo-preview"
  /^dall-e-3$/, // Matches exactly "dall-e-3"
];

export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];

const openaiModels = [
  "gpt-3.5-turbo",
  "gpt-3.5-turbo-1106",
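These two regex lists replace the hard-coded keyword checks that previously lived in isVisionModel (see the app/utils.ts hunk below). A short sketch of how they would classify a few model names, assuming the arrays above; the matchesVision helper is illustrative, not part of the codebase:

// Sketch: classifying model names with the regex lists above.
const matchesVision = (model: string) =>
  !EXCLUDE_VISION_MODEL_REGEXES.some((regex) => regex.test(model)) &&
  VISION_MODEL_REGEXES.some((regex) => regex.test(model));

console.log(matchesVision("gpt-4-turbo")); // true: negative lookahead allows it
console.log(matchesVision("gpt-4-turbo-preview")); // false: (?!.*preview) rejects it
console.log(matchesVision("claude-3-5-haiku-20241022")); // false: explicitly excluded
console.log(matchesVision("dall-e-3")); // true: exact match on /^dall-e-3$/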
@@ -352,12 +368,21 @@ const bedrockModels = [
const googleModels = [
  "gemini-1.0-pro", // Deprecated on 2/15/2025
  "gemini-1.5-pro-latest",
  "gemini-1.5-pro",
  "gemini-1.5-pro-002",
  "gemini-1.5-pro-exp-0827",
  "gemini-1.5-flash-latest",
  "gemini-1.5-flash-8b-latest",
  "gemini-1.5-flash",
  "gemini-1.5-flash-8b",
  "gemini-1.5-flash-002",
  "gemini-1.5-flash-exp-0827",
  "learnlm-1.5-pro-experimental",
  "gemini-exp-1114",
  "gemini-exp-1121",
  "learnlm-1.5-pro-experimental",
  "gemini-exp-1206",
  "gemini-2.0-flash-exp",
  "gemini-2.0-flash-thinking-exp-1219",
];

const anthropicModels = [
34 app/utils.ts
@@ -5,6 +5,8 @@ import { RequestMessage } from "./client/api";
import { ServiceProvider } from "./constant";
// import { fetch as tauriFetch, ResponseType } from "@tauri-apps/api/http";
import { fetch as tauriStreamFetch } from "./utils/stream";
import { VISION_MODEL_REGEXES, EXCLUDE_VISION_MODEL_REGEXES } from "./constant";
import { getClientConfig } from "./config/client";

export function trimTopic(topic: string) {
  // Fix an issue where double quotes still show in the Indonesian language
@@ -252,30 +254,16 @@ export function getMessageImages(message: RequestMessage): string[] {
}

export function isVisionModel(model: string) {
  // Note: This is a better way using the TypeScript feature instead of `&&` or `||` (ts v5.5.0-dev.20240314 I've been using)

  const excludeKeywords = ["claude-3-5-haiku-20241022"];
  const visionKeywords = [
    "vision",
    "gpt-4o",
    "claude-3",
    "gemini-1.5",
    "gemini-exp",
    "gemini-2.0",
    "learnlm",
    "qwen-vl",
    "qwen2-vl",
    "nova-lite",
    "nova-pro",
  ];
  const isGpt4Turbo =
    model.includes("gpt-4-turbo") && !model.includes("preview");

  const clientConfig = getClientConfig();
  const envVisionModels = clientConfig?.visionModels
    ?.split(",")
    .map((m) => m.trim());
  if (envVisionModels?.includes(model)) {
    return true;
  }
  return (
    !excludeKeywords.some((keyword) => model.includes(keyword)) &&
    (visionKeywords.some((keyword) => model.includes(keyword)) ||
      isGpt4Turbo ||
      isDalle3(model))
    !EXCLUDE_VISION_MODEL_REGEXES.some((regex) => regex.test(model)) &&
    VISION_MODEL_REGEXES.some((regex) => regex.test(model))
  );
}
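Reading the removed keyword lists and the added regex/config logic together, the post-merge isVisionModel appears to reduce to the following consolidated sketch (assuming the VISION_MODEL_REGEXES, EXCLUDE_VISION_MODEL_REGEXES, and getClientConfig imports added above; not a verbatim copy of the file):

// Consolidated sketch of isVisionModel after this change.
export function isVisionModel(model: string) {
  // 1. An explicit VISION_MODELS override from the build/client config wins first.
  const clientConfig = getClientConfig();
  const envVisionModels = clientConfig?.visionModels
    ?.split(",")
    .map((m) => m.trim());
  if (envVisionModels?.includes(model)) {
    return true;
  }
  // 2. Otherwise fall back to the shared regex lists from ./constant.
  return (
    !EXCLUDE_VISION_MODEL_REGEXES.some((regex) => regex.test(model)) &&
    VISION_MODEL_REGEXES.some((regex) => regex.test(model))
  );
}

The dall-e-3 and gpt-4-turbo special cases previously handled by isDalle3 and the isGpt4Turbo flag are now covered by the /^dall-e-3$/ and /gpt-4-turbo(?!.*preview)/ entries in VISION_MODEL_REGEXES.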