diff --git a/app/api/openai.ts b/app/api/openai.ts
index 9c55950e5..aaae96d91 100644
--- a/app/api/openai.ts
+++ b/app/api/openai.ts
@@ -19,7 +19,9 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
if (config.disableGPT4) {
remoteModelRes.data = remoteModelRes.data.filter(
- (m) => !m.id.startsWith("gpt-4") || m.id.startsWith("gpt-4o-mini"),
+ (m) =>
+ !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
+ m.id.startsWith("gpt-4o-mini"),
);
}
diff --git a/app/client/platforms/anthropic.ts b/app/client/platforms/anthropic.ts
index fce675a16..7dd39c9cd 100644
--- a/app/client/platforms/anthropic.ts
+++ b/app/client/platforms/anthropic.ts
@@ -203,7 +203,7 @@ export class ClaudeApi implements LLMApi {
const [tools, funcs] = usePluginStore
.getState()
.getAsTools(
- useChatStore.getState().currentSession().mask?.plugin as string[],
+ useChatStore.getState().currentSession().mask?.plugin || [],
);
return stream(
path,
diff --git a/app/client/platforms/moonshot.ts b/app/client/platforms/moonshot.ts
index c38d3317b..cd10d2f6c 100644
--- a/app/client/platforms/moonshot.ts
+++ b/app/client/platforms/moonshot.ts
@@ -125,7 +125,7 @@ export class MoonshotApi implements LLMApi {
const [tools, funcs] = usePluginStore
.getState()
.getAsTools(
- useChatStore.getState().currentSession().mask?.plugin as string[],
+ useChatStore.getState().currentSession().mask?.plugin || [],
);
return stream(
chatPath,
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 8e5da2711..d1b9ab3fd 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -257,7 +257,7 @@ export class ChatGPTApi implements LLMApi {
const [tools, funcs] = usePluginStore
.getState()
.getAsTools(
- useChatStore.getState().currentSession().mask?.plugin as string[],
+ useChatStore.getState().currentSession().mask?.plugin || [],
);
// console.log("getAsTools", tools, funcs);
stream(
@@ -420,8 +420,10 @@ export class ChatGPTApi implements LLMApi {
});
const resJson = (await res.json()) as OpenAIListModelResponse;
- const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-"));
- // console.log("[Models]", chatModels);
+ const chatModels = resJson.data?.filter(
+ (m) => m.id.startsWith("gpt-") || m.id.startsWith("chatgpt-"),
+ );
+ console.log("[Models]", chatModels);
if (!chatModels) {
return [];
diff --git a/app/components/emoji.tsx b/app/components/emoji.tsx
index b3d55a896..40735274f 100644
--- a/app/components/emoji.tsx
+++ b/app/components/emoji.tsx
@@ -38,7 +38,8 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
   if (props.model) {
     return (
       <div className="no-dark">
-        {props.model?.startsWith("gpt-4") ? (
+        {props.model?.startsWith("gpt-4") ||
+        props.model?.startsWith("chatgpt-4o") ? (
           <BlackBotIcon className="user-avatar" />
         ) : (
           <BotIcon className="user-avatar" />
diff --git a/app/config/server.ts b/app/config/server.ts
index 098e4a9aa..315910e00 100644
--- a/app/config/server.ts
+++ b/app/config/server.ts
@@ -122,12 +122,15 @@ export const getServerSideConfig = () => {
if (disableGPT4) {
if (customModels) customModels += ",";
customModels += DEFAULT_MODELS.filter(
- (m) => m.name.startsWith("gpt-4") && !m.name.startsWith("gpt-4o-mini"),
+ (m) =>
+ (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
+ !m.name.startsWith("gpt-4o-mini"),
)
.map((m) => "-" + m.name)
.join(",");
if (
- defaultModel.startsWith("gpt-4") &&
+ (defaultModel.startsWith("gpt-4") ||
+ defaultModel.startsWith("chatgpt-4o")) &&
!defaultModel.startsWith("gpt-4o-mini")
)
defaultModel = "";
diff --git a/app/constant.ts b/app/constant.ts
index 0c482b569..c9aa264c6 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -248,6 +248,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"gpt-4o": "2023-10",
"gpt-4o-2024-05-13": "2023-10",
"gpt-4o-2024-08-06": "2023-10",
+ "chatgpt-4o-latest": "2023-10",
"gpt-4o-mini": "2023-10",
"gpt-4o-mini-2024-07-18": "2023-10",
"gpt-4-vision-preview": "2023-04",
@@ -270,6 +271,7 @@ const openaiModels = [
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-2024-08-06",
+ "chatgpt-4o-latest",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
"gpt-4-vision-preview",
diff --git a/app/store/chat.ts b/app/store/chat.ts
index d25471a65..a66cef0d1 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -109,7 +109,6 @@ function createEmptySession(): ChatSession {
mask: createEmptyMask(),
};
}
-
// if it is using gpt-* models, force to use 4o-mini to summarize
 const ChatFetchTaskPool: Record<string, any> = {};
@@ -117,8 +116,8 @@ function getSummarizeModel(currentModel: string): {
name: string;
providerName: string | undefined;
} {
- // if it is using gpt-* models, force to use 3.5 to summarize
- if (currentModel.startsWith("gpt")) {
+ // if it is using gpt-* models, force to use 4o-mini to summarize
+ if (currentModel.startsWith("gpt") || currentModel.startsWith("chatgpt")) {
const configStore = useAppConfig.getState();
const accessStore = useAccessStore.getState();
const allModel = collectModelsWithDefaultModel(
@@ -831,7 +830,8 @@ export const useChatStore = createPersistStore(
// system prompts, to get close to OpenAI Web ChatGPT
const shouldInjectSystemPrompts =
modelConfig.enableInjectSystemPrompts &&
- session.mask.modelConfig.model.startsWith("gpt-");
+ (session.mask.modelConfig.model.startsWith("gpt-") ||
+ session.mask.modelConfig.model.startsWith("chatgpt-"));
var systemPrompts: ChatMessage[] = [];
systemPrompts = shouldInjectSystemPrompts
diff --git a/app/store/plugin.ts b/app/store/plugin.ts
index 629dbd558..c80ca3755 100644
--- a/app/store/plugin.ts
+++ b/app/store/plugin.ts
@@ -199,7 +199,7 @@ export const usePluginStore = createPersistStore(
getAsTools(ids: string[]) {
const plugins = get().plugins;
- const selected = ids
+ const selected = (ids || [])
.map((id) => plugins[id])
.filter((i) => i)
.map((p) => FunctionToolService.add(p));
diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json
index 0f2a84a53..78835d24d 100644
--- a/src-tauri/tauri.conf.json
+++ b/src-tauri/tauri.conf.json
@@ -9,7 +9,7 @@
},
"package": {
"productName": "NextChat",
- "version": "2.15.0"
+ "version": "2.15.1"
},
"tauri": {
"allowlist": {