Switch to the DashScope compatible-mode API - optimize vision handling for qwen-vl
parent 4aaa9db666
commit e3fc9eef8f
@@ -36,7 +36,9 @@ async function request(req: NextRequest) {
  const controller = new AbortController();

  // alibaba use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.Alibaba, "");
  let path = `${req.nextUrl.pathname}`
    .replaceAll(ApiPath.Alibaba, "")
    .replace("/api", "");

  let baseUrl = serverConfig.alibabaUrl || ALIBABA_BASE_URL;
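For illustration only (assuming ApiPath.Alibaba resolves to "/api/alibaba", as in upstream NextChat; the sample pathname is hypothetical), the rewritten path strips the proxy prefix before it is appended to the DashScope base URL:

  // hypothetical incoming Next.js route path, not taken from this commit
  const pathname = "/api/alibaba/compatible-mode/v1/chat/completions";
  const path = pathname.replaceAll("/api/alibaba", "").replace("/api", "");
  // path === "/compatible-mode/v1/chat/completions"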
@@ -59,6 +61,9 @@ async function request(req: NextRequest) {
  );

  const fetchUrl = `${baseUrl}${path}`;

  console.log("[Alibaba] fetchUrl", fetchUrl);

  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
@@ -81,13 +86,54 @@ async function request(req: NextRequest) {

    try {
      jsonBody = JSON.parse(clonedBody);
      delete jsonBody.model; // Remove the model key

      // Move input.messages to messages at the root level if present
      if (jsonBody.input && Array.isArray(jsonBody.input.messages)) {
        jsonBody.messages = jsonBody.input.messages;

        // Remove input.messages to avoid duplication
        delete jsonBody.input;

        jsonBody.stream = true;
      }

      const current_model = jsonBody?.model;
      console.log("[Alibaba] custom models", current_model);

      // check whether the model is qwen-vl (a vision model)
      if (current_model && current_model.startsWith("qwen-vl")) {
        console.log("[Alibaba] current model is qwen-vl");
        console.log("xu ly hinh anh trong message"); // i.e. process images in the message

        // Reformat image objects in messages
        if (Array.isArray(jsonBody.messages)) {
          jsonBody.messages = jsonBody.messages.map((msg: any) => {
            if (Array.isArray(msg.content)) {
              msg.content = msg.content.map((item: any) => {
                if (item && typeof item === "object" && "image" in item) {
                  return {
                    type: "image_url",
                    image_url: {
                      url: item.image,
                    },
                  };
                }
                return item;
              });
            }
            return msg;
          });
        }
      }

      // console.log("[Alibaba] request body json", jsonBody);

      fetchOptions.body = JSON.stringify(jsonBody);
    } catch (e) {
      fetchOptions.body = clonedBody; // fallback if not JSON
    }

    console.log("[Alibaba] request body", fetchOptions.body);
    // console.log("[Alibaba] request body", fetchOptions.body);

    // not undefined and is false
    // if (
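As a rough sketch of the reshaping above (the sample client item is an assumption, not taken from this commit), a DashScope-native vision content item is rewritten into the OpenAI-compatible image_url form expected by the compatible-mode endpoint:

  // hypothetical item produced by the client for a qwen-vl message
  const item = { image: "https://example.com/cat.png" };
  // after the map() above it becomes:
  // { type: "image_url", image_url: { url: "https://example.com/cat.png" } }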
@@ -169,57 +169,29 @@ export class QwenApi implements LLMApi {
        tools as any,
        funcs,
        controller,
        // Updated SSE parse callback for new JSON structure
        // SSE parse callback for OpenAI-style streaming
        (text: string, runTools: ChatMessageTool[]) => {
          // Parse the JSON response
          const json = JSON.parse(text);

          console.log("[Alibaba] SSE response", json);

          // Extract content from the new structure
          const output = json.output;
          const content = output?.text ?? "";
          const reasoning = output?.reasoning_content ?? ""; // if it exists in the new structure

          // If both are empty, return default
          if (
            (!reasoning || reasoning.length === 0) &&
            (!content || content.length === 0)
          ) {
            return {
              isThinking: false,
              content: "",
            };
          // Each `text` is a line like: data: {...}
          let json: any;
          try {
            json = JSON.parse(text);
          } catch {
            return { isThinking: false, content: "" };
          }
          const delta = json.choices?.[0]?.delta;
          const content = delta?.content ?? "";

          // If reasoning_content exists, treat as "thinking"
          if (reasoning && reasoning.length > 0) {
            return {
              isThinking: true,
              content: reasoning,
            };
          }
          // Otherwise, return the main content
          else if (content && content.length > 0) {
            return {
              isThinking: false,
              content: content,
            };
          }

          // Fallback
          // You can accumulate content outside if needed
          return {
            isThinking: false,
            content: "",
            content,
          };
        },
        // Handler for tool_call messages and the results returned from tool calls
        (
          requestPayload: RequestPayload,
          toolCallMessage: any,
          toolCallResult: any[],
        ) => {
          // Append the tool-call message and its results to the end of the messages array in the payload sent to the API
          requestPayload?.input?.messages?.splice(
            requestPayload?.input?.messages?.length,
            0,
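A minimal sketch of what the new parse callback receives per streamed chunk (the sample payload is illustrative, and it assumes the streaming helper has already stripped the leading "data: " prefix): the text is OpenAI-style, so content is read from choices[0].delta.content:

  const text = '{"choices":[{"delta":{"content":"Hello"}}]}';
  const json = JSON.parse(text);
  const content = json.choices?.[0]?.delta?.content ?? "";
  // the callback then returns { isThinking: false, content: "Hello" }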
@@ -227,7 +199,20 @@ export class QwenApi implements LLMApi {
            ...toolCallResult,
          );
        },
        options, // other options passed through to streamWithThink
        {
          ...options,
          // Accumulate and render result as it streams
          onUpdate: (() => {
            let accumulated = "";
            return (chunk: string) => {
              accumulated += chunk;
              options.onUpdate?.(accumulated, chunk);
            };
          })(),
          onFinish: (final: string, res: any) => {
            options.onFinish?.(final, res);
          },
        },
      );
    } else {
      const res = await fetch(chatPath, chatPayload);
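A quick sketch of how the wrapped onUpdate behaves (the chunk values are hypothetical): because `accumulated` lives in the closure, the original options.onUpdate always receives the full text so far plus the latest delta:

  // hypothetical stand-in for options.onUpdate
  const onUpdate = (full: string, delta: string) =>
    console.log("render:", full, "| new chunk:", delta);

  let accumulated = "";
  for (const chunk of ["Hel", "lo"]) {
    accumulated += chunk;
    onUpdate(accumulated, chunk);
  }
  // render: Hel | new chunk: Hel
  // render: Hello | new chunk: lo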
@@ -224,12 +224,19 @@ export const ByteDance = {

export const Alibaba = {
  ExampleEndpoint: ALIBABA_BASE_URL,
  ChatPath: (modelName: string) => {
    const URL = `api/v1/apps/${ALIBABA_APP_ID}/completion`;

    if (modelName.includes("vl") || modelName.includes("omni")) {
      return "v1/services/aigc/multimodal-generation/generation";
    }
  ChatPath: (modelName: string) => {
    // specific to the ALIBABA_APP_ID flow
    // const URL = `api/v1/apps/${ALIBABA_APP_ID}/completion`;
    console.log("[Alibaba] modelName", modelName);

    // https://dashscope-intl.aliyuncs.com/compatible-mode/v1/chat/completions

    const URL = "compatible-mode/v1/chat/completions";

    // if (modelName.includes("vl") || modelName.includes("omni")) {
    //   return "v1/services/aigc/multimodal-generation/generation";
    // }
    // return `v1/services/aigc/text-generation/generation`;
    return URL;
  },
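Putting the route change and the new ChatPath together (the host below comes from the comment in the diff; the exact value of ALIBABA_BASE_URL in this fork is an assumption), every model now ends up on the single DashScope compatible-mode chat endpoint:

  const baseUrl = "https://dashscope-intl.aliyuncs.com"; // assumed base URL
  const path = "/compatible-mode/v1/chat/completions";   // from ChatPath above
  const fetchUrl = `${baseUrl}${path}`;
  // https://dashscope-intl.aliyuncs.com/compatible-mode/v1/chat/completions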