Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git, synced 2025-10-01 15:46:39 +08:00.
Merge remote-tracking branch 'upstream/main' into dev

# Conflicts:
#   app/client/platforms/openai.ts
#   app/store/chat.ts

Commit f8da04bfef
@@ -19,7 +19,9 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
   if (config.disableGPT4) {
     remoteModelRes.data = remoteModelRes.data.filter(
-      (m) => !m.id.startsWith("gpt-4") || m.id.startsWith("gpt-4o-mini"),
+      (m) =>
+        !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
+        m.id.startsWith("gpt-4o-mini"),
     );
   }
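
For illustration, a self-contained sketch of how the updated disableGPT4 predicate behaves; the model ids below are sample values, not taken from this commit:

// Standalone sketch of the new filter predicate (illustrative only).
const sampleIds = ["gpt-3.5-turbo", "gpt-4o", "chatgpt-4o-latest", "gpt-4o-mini"];

const visibleWhenGPT4Disabled = sampleIds.filter(
  (id) =>
    !(id.startsWith("gpt-4") || id.startsWith("chatgpt-4o")) ||
    id.startsWith("gpt-4o-mini"),
);

// Logs ["gpt-3.5-turbo", "gpt-4o-mini"]: "gpt-4o" and "chatgpt-4o-latest" are now hidden too.
console.log(visibleWhenGPT4Disabled);
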
@@ -203,7 +203,7 @@ export class ClaudeApi implements LLMApi {
     const [tools, funcs] = usePluginStore
       .getState()
       .getAsTools(
-        useChatStore.getState().currentSession().mask?.plugin as string[],
+        useChatStore.getState().currentSession().mask?.plugin || [],
       );
     return stream(
       path,
@@ -125,7 +125,7 @@ export class MoonshotApi implements LLMApi {
     const [tools, funcs] = usePluginStore
       .getState()
       .getAsTools(
-        useChatStore.getState().currentSession().mask?.plugin as string[],
+        useChatStore.getState().currentSession().mask?.plugin || [],
       );
     return stream(
       chatPath,
@@ -257,7 +257,7 @@ export class ChatGPTApi implements LLMApi {
     const [tools, funcs] = usePluginStore
       .getState()
       .getAsTools(
-        useChatStore.getState().currentSession().mask?.plugin as string[],
+        useChatStore.getState().currentSession().mask?.plugin || [],
       );
     // console.log("getAsTools", tools, funcs);
     stream(
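
The plugin-list changes in ClaudeApi, MoonshotApi, and ChatGPTApi above are the same fix: mask?.plugin can be undefined, and the old "as string[]" cast only hid that from the compiler. A minimal sketch of the pattern, using a stand-in type rather than the app's real mask and store types:

// Stand-in shape for illustration; the app's real Mask type is richer.
interface MaskLike {
  plugin?: string[];
}

function selectedPluginIds(mask?: MaskLike): string[] {
  // `mask?.plugin as string[]` would let `undefined` through at runtime;
  // defaulting with `|| []` hands downstream code (getAsTools) a safe array.
  return mask?.plugin || [];
}

console.log(selectedPluginIds({ plugin: ["web-search"] })); // ["web-search"]
console.log(selectedPluginIds({})); // []
console.log(selectedPluginIds(undefined)); // []
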
@@ -420,8 +420,10 @@ export class ChatGPTApi implements LLMApi {
     });

     const resJson = (await res.json()) as OpenAIListModelResponse;
-    const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-"));
-    // console.log("[Models]", chatModels);
+    const chatModels = resJson.data?.filter(
+      (m) => m.id.startsWith("gpt-") || m.id.startsWith("chatgpt-"),
+    );
+    console.log("[Models]", chatModels);

     if (!chatModels) {
       return [];
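
For comparison, a small sketch of the listing filter before and after this hunk; the response ids below are hypothetical, and only the prefix logic mirrors the change:

// Hypothetical ids returned by the models endpoint (illustrative only).
const fetchedIds = ["gpt-4o", "chatgpt-4o-latest", "dall-e-3", "text-embedding-3-small"];

// Old filter: ids not starting with "gpt-" were dropped, so "chatgpt-4o-latest" never appeared.
const before = fetchedIds.filter((id) => id.startsWith("gpt-"));

// New filter: "chatgpt-" ids are kept as well.
const after = fetchedIds.filter(
  (id) => id.startsWith("gpt-") || id.startsWith("chatgpt-"),
);

console.log(before); // ["gpt-4o"]
console.log(after); // ["gpt-4o", "chatgpt-4o-latest"]
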
@@ -38,7 +38,8 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
   if (props.model) {
     return (
       <div className="no-dark">
-        {props.model?.startsWith("gpt-4") ? (
+        {props.model?.startsWith("gpt-4") ||
+        props.model?.startsWith("chatgpt-4o") ? (
           <BlackBotIcon className="user-avatar" />
         ) : (
           <BotIcon className="user-avatar" />
@@ -122,12 +122,15 @@ export const getServerSideConfig = () => {
   if (disableGPT4) {
     if (customModels) customModels += ",";
     customModels += DEFAULT_MODELS.filter(
-      (m) => m.name.startsWith("gpt-4") && !m.name.startsWith("gpt-4o-mini"),
+      (m) =>
+        (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
+        !m.name.startsWith("gpt-4o-mini"),
     )
       .map((m) => "-" + m.name)
       .join(",");
     if (
-      defaultModel.startsWith("gpt-4") &&
+      (defaultModel.startsWith("gpt-4") ||
+        defaultModel.startsWith("chatgpt-4o")) &&
       !defaultModel.startsWith("gpt-4o-mini")
     )
       defaultModel = "";
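
A standalone sketch of how this block builds the comma-separated exclusion list appended to customModels when GPT-4 is disabled; the model objects below are made up and only carry the name field used here:

// Made-up stand-in for DEFAULT_MODELS (illustrative only).
const defaultModels = [
  { name: "gpt-4o" },
  { name: "chatgpt-4o-latest" },
  { name: "gpt-4o-mini" },
  { name: "gpt-3.5-turbo" },
];

const exclusions = defaultModels
  .filter(
    (m) =>
      (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
      !m.name.startsWith("gpt-4o-mini"),
  )
  .map((m) => "-" + m.name)
  .join(",");

console.log(exclusions); // "-gpt-4o,-chatgpt-4o-latest"
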
@@ -248,6 +248,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4o": "2023-10",
   "gpt-4o-2024-05-13": "2023-10",
   "gpt-4o-2024-08-06": "2023-10",
+  "chatgpt-4o-latest": "2023-10",
   "gpt-4o-mini": "2023-10",
   "gpt-4o-mini-2024-07-18": "2023-10",
   "gpt-4-vision-preview": "2023-04",
@@ -270,6 +271,7 @@ const openaiModels = [
   "gpt-4o",
   "gpt-4o-2024-05-13",
   "gpt-4o-2024-08-06",
+  "chatgpt-4o-latest",
   "gpt-4o-mini",
   "gpt-4o-mini-2024-07-18",
   "gpt-4-vision-preview",
@@ -109,7 +109,6 @@ function createEmptySession(): ChatSession {
     mask: createEmptyMask(),
   };
 }

-// if it is using gpt-* models, force to use 4o-mini to summarize
 const ChatFetchTaskPool: Record<string, any> = {};

@@ -117,8 +116,8 @@ function getSummarizeModel(currentModel: string): {
   name: string;
   providerName: string | undefined;
 } {
-  // if it is using gpt-* models, force to use 3.5 to summarize
-  if (currentModel.startsWith("gpt")) {
+  // if it is using gpt-* models, force to use 4o-mini to summarize
+  if (currentModel.startsWith("gpt") || currentModel.startsWith("chatgpt")) {
     const configStore = useAppConfig.getState();
     const accessStore = useAccessStore.getState();
     const allModel = collectModelsWithDefaultModel(
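
A simplified, self-contained sketch of the summarize-model routing after this change; pickSummarizeModel and the hard-coded "gpt-4o-mini" are stand-ins for the real getSummarizeModel, which additionally resolves a provider name from the config and access stores:

// Simplified stand-in for getSummarizeModel (store lookups omitted).
const SUMMARIZE_MODEL = "gpt-4o-mini"; // assumed summarizer for the gpt family

function pickSummarizeModel(currentModel: string): string {
  // gpt-* and chatgpt-* sessions are summarized with the cheaper 4o-mini model;
  // other models are left as they are in this sketch.
  if (currentModel.startsWith("gpt") || currentModel.startsWith("chatgpt")) {
    return SUMMARIZE_MODEL;
  }
  return currentModel;
}

console.log(pickSummarizeModel("chatgpt-4o-latest")); // "gpt-4o-mini"
console.log(pickSummarizeModel("claude-3-5-sonnet")); // unchanged
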
@@ -831,7 +830,8 @@ export const useChatStore = createPersistStore(
         // system prompts, to get close to OpenAI Web ChatGPT
         const shouldInjectSystemPrompts =
           modelConfig.enableInjectSystemPrompts &&
-          session.mask.modelConfig.model.startsWith("gpt-");
+          (session.mask.modelConfig.model.startsWith("gpt-") ||
+            session.mask.modelConfig.model.startsWith("chatgpt-"));

         var systemPrompts: ChatMessage[] = [];
         systemPrompts = shouldInjectSystemPrompts
@@ -199,7 +199,7 @@ export const usePluginStore = createPersistStore(

     getAsTools(ids: string[]) {
       const plugins = get().plugins;
-      const selected = ids
+      const selected = (ids || [])
         .map((id) => plugins[id])
         .filter((i) => i)
         .map((p) => FunctionToolService.add(p));
@@ -9,7 +9,7 @@
   },
   "package": {
     "productName": "NextChat",
-    "version": "2.15.0"
+    "version": "2.15.1"
   },
   "tauri": {
     "allowlist": {