Merge branch 'ChatGPTNextWeb:main' into main

diff --git a/app/client/platforms/alibaba.ts b/app/client/platforms/alibaba.ts
@@ -7,7 +7,10 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import { streamWithThink } from "@/app/utils/chat";
+import {
+  preProcessImageContentForAlibabaDashScope,
+  streamWithThink,
+} from "@/app/utils/chat";
 import {
   ChatOptions,
   getHeaders,
@@ -15,12 +18,14 @@ import {
   LLMModel,
   SpeechOptions,
   MultimodalContent,
+  MultimodalContentForAlibaba,
 } from "../api";
 import { getClientConfig } from "@/app/config/client";
 import {
   getMessageTextContent,
   getMessageTextContentWithoutThinking,
   getTimeoutMSByModel,
+  isVisionModel,
 } from "@/app/utils";
 import { fetch } from "@/app/utils/stream";
@@ -89,14 +94,6 @@ export class QwenApi implements LLMApi {
   }

   async chat(options: ChatOptions) {
-    const messages = options.messages.map((v) => ({
-      role: v.role,
-      content:
-        v.role === "assistant"
-          ? getMessageTextContentWithoutThinking(v)
-          : getMessageTextContent(v),
-    }));
-
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -105,6 +102,21 @@ export class QwenApi implements LLMApi {
       },
     };

+    const visionModel = isVisionModel(options.config.model);
+
+    const messages: ChatOptions["messages"] = [];
+    for (const v of options.messages) {
+      const content = (
+        visionModel
+          ? await preProcessImageContentForAlibabaDashScope(v.content)
+          : v.role === "assistant"
+            ? getMessageTextContentWithoutThinking(v)
+            : getMessageTextContent(v)
+      ) as any;
+
+      messages.push({ role: v.role, content });
+    }
+
     const shouldStream = !!options.config.stream;
     const requestPayload: RequestPayload = {
       model: modelConfig.model,
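
Note: the loop above replaces the old one-shot map so that image parts can be preprocessed asynchronously for vision models. A minimal self-contained sketch of the same branching, with stand-in types and a stubbed preprocessor (the real preProcessImageContentForAlibabaDashScope lives in @/app/utils/chat; the output shape used here is an assumption):

// Stand-ins for the repo's types; MultimodalPart mirrors MultimodalContentForAlibaba.
type MultimodalPart = { text?: string; image?: string };
type Msg = { role: "system" | "user" | "assistant"; content: string | MultimodalPart[] };

// Hypothetical stub: the real helper also converts image data for DashScope.
async function preProcessImageContentStub(
  content: Msg["content"],
): Promise<MultimodalPart[]> {
  return typeof content === "string" ? [{ text: content }] : content;
}

// Same shape as the new chat() loop: async image preprocessing for vision
// models, plain text extraction otherwise.
async function buildMessages(msgs: Msg[], visionModel: boolean): Promise<Msg[]> {
  const out: Msg[] = [];
  for (const v of msgs) {
    const content = visionModel
      ? await preProcessImageContentStub(v.content)
      : typeof v.content === "string"
        ? v.content
        : v.content.map((p) => p.text ?? "").join("");
    out.push({ role: v.role, content });
  }
  return out;
}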
@@ -129,7 +141,7 @@ export class QwenApi implements LLMApi {
       "X-DashScope-SSE": shouldStream ? "enable" : "disable",
     };

-    const chatPath = this.path(Alibaba.ChatPath);
+    const chatPath = this.path(Alibaba.ChatPath(modelConfig.model));
     const chatPayload = {
       method: "POST",
       body: JSON.stringify(requestPayload),
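
Alibaba.ChatPath changes here from a fixed string to a function of the model name, so vision models can be routed to a different DashScope endpoint. A hedged sketch of such a selector (the endpoint paths and the "vl" check are assumptions about app/constant.ts, not copied from it):

// Hypothetical selector; the real one is defined on the Alibaba constant
// in app/constant.ts.
const ChatPath = (modelName: string): string =>
  modelName.includes("vl")
    ? "v1/services/aigc/multimodal-generation/generation" // assumed multimodal route
    : "v1/services/aigc/text-generation/generation"; // assumed text route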
@@ -162,7 +174,7 @@ export class QwenApi implements LLMApi {
             const json = JSON.parse(text);
             const choices = json.output.choices as Array<{
               message: {
-                content: string | null;
+                content: string | null | MultimodalContentForAlibaba[];
                 tool_calls: ChatMessageTool[];
                 reasoning_content: string | null;
               };
@@ -212,7 +224,9 @@ export class QwenApi implements LLMApi {
             } else if (content && content.length > 0) {
               return {
                 isThinking: false,
-                content: content,
+                content: Array.isArray(content)
+                  ? content.map((item) => item.text).join(",")
+                  : content,
               };
             }
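
Because content may now arrive as an array of multimodal parts instead of a plain string, the streaming handler flattens arrays by joining their text fields. Equivalent standalone logic:

type MultimodalContentForAlibaba = { text?: string; image?: string };

// Mirrors the change above: arrays of parts are collapsed into one string.
function flattenContent(content: string | MultimodalContentForAlibaba[]): string {
  return Array.isArray(content)
    ? content.map((item) => item.text).join(",")
    : content;
}

// flattenContent("hello")                        -> "hello"
// flattenContent([{ text: "a" }, { text: "b" }]) -> "a,b"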
diff --git a/app/client/platforms/deepseek.ts b/app/client/platforms/deepseek.ts
@@ -75,6 +75,25 @@ export class DeepSeekApi implements LLMApi {
       }
     }

+    // Detect and fix the message order: ensure the first non-system message is a user message
+    const filteredMessages: ChatOptions["messages"] = [];
+    let hasFoundFirstUser = false;
+
+    for (const msg of messages) {
+      if (msg.role === "system") {
+        // Keep all system messages
+        filteredMessages.push(msg);
+      } else if (msg.role === "user") {
+        // User messages are added directly
+        filteredMessages.push(msg);
+        hasFoundFirstUser = true;
+      } else if (hasFoundFirstUser) {
+        // After the first user message is found, all subsequent non-system messages are retained
+        filteredMessages.push(msg);
+      }
+      // If hasFoundFirstUser is false and this is not a system message, it is skipped
+    }
+
     const modelConfig = {
       ...useAppConfig.getState().modelConfig,
       ...useChatStore.getState().currentSession().mask.modelConfig,
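
The new pass drops any assistant messages that precede the first user message, since the DeepSeek API apparently expects the first non-system message to come from the user (an assumption inferred from the diff's comment, not from DeepSeek's docs). The same logic as a standalone function:

type Role = "system" | "user" | "assistant";
type Msg = { role: Role; content: string };

// Keep system messages, keep user messages, and keep everything else only
// once the first user message has been seen.
function fixMessageOrder(messages: Msg[]): Msg[] {
  const filtered: Msg[] = [];
  let hasFoundFirstUser = false;
  for (const msg of messages) {
    if (msg.role === "system" || msg.role === "user" || hasFoundFirstUser) {
      filtered.push(msg);
      if (msg.role === "user") hasFoundFirstUser = true;
    }
  }
  return filtered;
}

// Example: a leading assistant greeting is removed.
// fixMessageOrder([
//   { role: "system", content: "You are helpful." },
//   { role: "assistant", content: "Hi there!" }, // dropped
//   { role: "user", content: "Hello" },          // kept
// ]) -> the system and user messages only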
@@ -85,7 +104,7 @@ export class DeepSeekApi implements LLMApi {
     };

     const requestPayload: RequestPayload = {
-      messages,
+      messages: filteredMessages,
       stream: options.config.stream,
       model: modelConfig.model,
       temperature: modelConfig.temperature,