diff --git a/app/api/openai.ts b/app/api/openai.ts
index bbba69e56..2b5deca8b 100644
--- a/app/api/openai.ts
+++ b/app/api/openai.ts
@@ -14,7 +14,7 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
if (config.disableGPT4) {
remoteModelRes.data = remoteModelRes.data.filter(
(m) =>
- !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
+ !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o") || m.id.startsWith("o1")) ||
m.id.startsWith("gpt-4o-mini"),
);
}
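
// Illustrative sketch, not part of the patch: with "o1" added to the exclusion list, the
// disableGPT4 filter now hides o1 models alongside gpt-4/chatgpt-4o, while still letting
// gpt-4o-mini through. The model IDs below are hypothetical examples.
const exampleIds = ["gpt-4o", "gpt-4o-mini", "o1-preview", "gpt-3.5-turbo"];
const visibleIds = exampleIds.filter(
  (id) =>
    !(id.startsWith("gpt-4") || id.startsWith("chatgpt-4o") || id.startsWith("o1")) ||
    id.startsWith("gpt-4o-mini"),
);
// visibleIds -> ["gpt-4o-mini", "gpt-3.5-turbo"]
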
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 7c1588440..15cfb7ca6 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -224,7 +224,7 @@ export class ChatGPTApi implements LLMApi {
// O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet.
requestPayload = {
messages,
- stream: !isO1 ? options.config.stream : false,
+ stream: options.config.stream,
model: modelConfig.model,
temperature: !isO1 ? modelConfig.temperature : 1,
presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
@@ -247,7 +247,7 @@ export class ChatGPTApi implements LLMApi {
console.log("[Request] openai payload: ", requestPayload);
- const shouldStream = !isDalle3 && !!options.config.stream && !isO1;
+ const shouldStream = !isDalle3 && !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
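
// Illustrative sketch, not part of the patch: after this change the stream setting is no
// longer forced off for o1 models; only DALL·E-3 requests or an explicit stream:false in
// the chat config keep a request non-streaming. The values below are hypothetical.
const isDalle3 = false;
const exampleConfig = { stream: true };
const shouldStreamExample = !isDalle3 && !!exampleConfig.stream; // true, even for an o1 model
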
diff --git a/app/components/emoji.tsx b/app/components/emoji.tsx
index 6db746c46..d75cdda92 100644
--- a/app/components/emoji.tsx
+++ b/app/components/emoji.tsx
@@ -37,7 +37,8 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
return (
<div className="no-dark">
{props.model?.startsWith("gpt-4") ||
- props.model?.startsWith("chatgpt-4o") ? (
+ props.model?.startsWith("chatgpt-4o") ||
+ props.model?.startsWith("o1") ? (
<BlackBotIcon className="user-avatar" />
) : (
<BotIcon className="user-avatar" />
diff --git a/app/config/server.ts b/app/config/server.ts
index 485f950da..289fb4ad5 100644
--- a/app/config/server.ts
+++ b/app/config/server.ts
@@ -129,14 +129,15 @@ export const getServerSideConfig = () => {
if (customModels) customModels += ",";
customModels += DEFAULT_MODELS.filter(
(m) =>
- (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
+ (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o") || m.name.startsWith("o1")) &&
!m.name.startsWith("gpt-4o-mini"),
)
.map((m) => "-" + m.name)
.join(",");
if (
(defaultModel.startsWith("gpt-4") ||
- defaultModel.startsWith("chatgpt-4o")) &&
+ defaultModel.startsWith("chatgpt-4o") ||
+ defaultModel.startsWith("o1")) &&
!defaultModel.startsWith("gpt-4o-mini")
)
defaultModel = "";
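
// Illustrative sketch, not part of the patch: when GPT-4 access is disabled, the server
// config prefixes matching model names with "-" and appends them to customModels so they
// are hidden client-side. The model list below is a hypothetical stand-in for DEFAULT_MODELS.
const exampleModels = [{ name: "gpt-4o" }, { name: "o1-mini" }, { name: "gpt-4o-mini" }];
const hiddenModels = exampleModels
  .filter(
    (m) =>
      (m.name.startsWith("gpt-4") ||
        m.name.startsWith("chatgpt-4o") ||
        m.name.startsWith("o1")) &&
      !m.name.startsWith("gpt-4o-mini"),
  )
  .map((m) => "-" + m.name)
  .join(",");
// hiddenModels -> "-gpt-4o,-o1-mini"
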
diff --git a/app/constant.ts b/app/constant.ts
index f1a1996b8..6d0260fb1 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -264,6 +264,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"gpt-4o": "2023-10",
"gpt-4o-2024-05-13": "2023-10",
"gpt-4o-2024-08-06": "2023-10",
+ "gpt-4o-2024-11-20": "2023-10",
"chatgpt-4o-latest": "2023-10",
"gpt-4o-mini": "2023-10",
"gpt-4o-mini-2024-07-18": "2023-10",
@@ -303,6 +304,7 @@ const openaiModels = [
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-2024-08-06",
+ "gpt-4o-2024-11-20",
"chatgpt-4o-latest",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
@@ -318,6 +320,9 @@ const googleModels = [
"gemini-1.0-pro",
"gemini-1.5-pro-latest",
"gemini-1.5-flash-latest",
+ "gemini-exp-1114",
+ "gemini-exp-1121",
+ "learnlm-1.5-pro-experimental",
"gemini-pro-vision",
];
@@ -554,7 +559,7 @@ export const internalAllowedWebDavEndpoints = [
"https://app.koofr.net/dav/Koofr",
];
-export const DEFAULT_GA_ID = "G-89WN60ZK2E";
+export const DEFAULT_GA_ID = "";
export const PLUGINS = [
{ name: "Plugins", path: Path.Plugins },
{ name: "Stable Diffusion", path: Path.Sd },
diff --git a/app/utils.ts b/app/utils.ts
index e3db9336a..b121e660e 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -265,6 +265,8 @@ export function isVisionModel(model: string) {
"gpt-4o",
"claude-3",
"gemini-1.5",
+ "gemini-exp",
+ "learnlm",
"qwen-vl",
"qwen2-vl",
];
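
// Illustrative sketch, not part of the patch: assuming isVisionModel matches these entries
// as substring keywords, adding "gemini-exp" and "learnlm" makes models such as
// "gemini-exp-1121" and "learnlm-1.5-pro-experimental" count as vision-capable.
const exampleVisionKeywords = [
  "gpt-4o",
  "claude-3",
  "gemini-1.5",
  "gemini-exp",
  "learnlm",
  "qwen-vl",
  "qwen2-vl",
];
const isVisionExample = (model: string) =>
  exampleVisionKeywords.some((keyword) => model.includes(keyword));
// isVisionExample("gemini-exp-1121") -> true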