Optimize the buffering mechanism for Bedrock streaming message handling, and simplify the logic for distinguishing and handling app vs. backend API calls

glay
2024-12-07 12:18:15 +08:00
parent a75b9f7fbe
commit 0c55850641
3 changed files with 619 additions and 349 deletions


@@ -7,13 +7,21 @@ import {
useAccessStore,
ChatMessageTool,
} from "@/app/store";
import { preProcessImageContent, stream } from "@/app/utils/chat";
import { preProcessImageContent } from "@/app/utils/chat";
import { getMessageTextContent, isVisionModel } from "@/app/utils";
import { ApiPath, BEDROCK_BASE_URL } from "@/app/constant";
import { getClientConfig } from "@/app/config/client";
import { extractMessage } from "@/app/utils/aws";
import {
extractMessage,
processMessage,
processChunks,
parseEventData,
sign,
} from "@/app/utils/aws";
import { RequestPayload } from "./openai";
import { fetch } from "@/app/utils/stream";
import { REQUEST_TIMEOUT_MS } from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import Locale from "@/app/locales";
const ClaudeMapper = {
assistant: "assistant",
@@ -26,18 +34,7 @@ const MistralMapper = {
user: "user",
assistant: "assistant",
} as const;
type ClaudeRole = keyof typeof ClaudeMapper;
type MistralRole = keyof typeof MistralMapper;
interface Tool {
function?: {
name?: string;
description?: string;
parameters?: any;
};
}
export class BedrockApi implements LLMApi {
speech(options: SpeechOptions): Promise<ArrayBuffer> {
throw new Error("Speech not implemented for Bedrock.");
@@ -47,6 +44,24 @@ export class BedrockApi implements LLMApi {
const model = modelConfig.model;
const visionModel = isVisionModel(modelConfig.model);
// Handle Nova models
if (model.startsWith("us.amazon.nova")) {
return {
inferenceConfig: {
max_tokens: modelConfig.max_tokens || 1000,
},
messages: messages.map((message) => ({
role: message.role,
content: [
{
type: "text",
text: getMessageTextContent(message),
},
],
})),
};
}
// Handle Titan models
if (model.startsWith("amazon.titan")) {
const inputText = messages
@@ -223,11 +238,34 @@ export class BedrockApi implements LLMApi {
);
}
let finalRequestBody = this.formatRequestBody(messages, modelConfig);
try {
const chatPath = this.path("chat");
const headers = getHeaders();
headers.XModelID = modelConfig.model;
headers.XEncryptionKey = accessStore.encryptionKey;
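// In the desktop app, call the Bedrock endpoint directly with SigV4-signed
// headers; in the browser, go through the backend proxy and pass the model id,
// encryption key, and stream flag as custom headers instead.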
const isApp = !!getClientConfig()?.isApp;
const bedrockAPIPath = `${BEDROCK_BASE_URL}/model/${
modelConfig.model
}/invoke${shouldStream ? "-with-response-stream" : ""}`;
const chatPath = isApp ? bedrockAPIPath : ApiPath.Bedrock + "/chat";
const headers = isApp
? await sign({
method: "POST",
url: chatPath,
region: accessStore.awsRegion,
accessKeyId: accessStore.awsAccessKey,
secretAccessKey: accessStore.awsSecretKey,
body: finalRequestBody,
service: "bedrock",
isStreaming: shouldStream,
})
: getHeaders();
if (!isApp) {
headers.XModelID = modelConfig.model;
headers.XEncryptionKey = accessStore.encryptionKey;
headers.ShouldStream = shouldStream + "";
}
if (process.env.NODE_ENV !== "production") {
console.debug("[Bedrock Client] Request:", {
path: chatPath,
@@ -236,20 +274,14 @@ export class BedrockApi implements LLMApi {
stream: shouldStream,
});
}
const finalRequestBody = this.formatRequestBody(messages, modelConfig);
console.log(
"[Bedrock Client] Request Body:",
JSON.stringify(finalRequestBody, null, 2),
);
if (shouldStream) {
let index = -1;
const [tools, funcs] = usePluginStore
.getState()
.getAsTools(
useChatStore.getState().currentSession().mask?.plugin || [],
);
return stream(
return bedrockStream(
chatPath,
finalRequestBody,
headers,
@@ -261,59 +293,12 @@ export class BedrockApi implements LLMApi {
})),
funcs,
controller,
// parseSSE
(text: string, runTools: ChatMessageTool[]) => {
// console.log("parseSSE", text, runTools);
let chunkJson:
| undefined
| {
type: "content_block_delta" | "content_block_stop";
content_block?: {
type: "tool_use";
id: string;
name: string;
};
delta?: {
type: "text_delta" | "input_json_delta";
text?: string;
partial_json?: string;
};
index: number;
};
chunkJson = JSON.parse(text);
if (chunkJson?.content_block?.type == "tool_use") {
index += 1;
const id = chunkJson?.content_block.id;
const name = chunkJson?.content_block.name;
runTools.push({
id,
type: "function",
function: {
name,
arguments: "",
},
});
}
if (
chunkJson?.delta?.type == "input_json_delta" &&
chunkJson?.delta?.partial_json
) {
// @ts-ignore
runTools[index]["function"]["arguments"] +=
chunkJson?.delta?.partial_json;
}
return chunkJson?.delta?.text;
},
// processToolMessage: includes the tool_calls message and the tool call results
(
requestPayload: RequestPayload,
toolCallMessage: any,
toolCallResult: any[],
) => {
// reset index value
index = -1;
const modelId = modelConfig.model;
const isMistral = modelId.startsWith("mistral.mistral");
const isClaude = modelId.includes("anthropic.claude");
@@ -384,30 +369,26 @@ export class BedrockApi implements LLMApi {
options,
);
} else {
headers.ShouldStream = "false";
const res = await fetch(chatPath, {
method: "POST",
headers,
body: JSON.stringify(finalRequestBody),
});
if (!res.ok) {
const errorText = await res.text();
console.error("[Bedrock Client] Error response:", errorText);
throw new Error(`Request failed: ${errorText}`);
try {
controller.signal.onabort = () =>
options.onFinish("", new Response(null, { status: 400 }));
const res = await fetch(chatPath, {
method: "POST",
headers: headers,
body: JSON.stringify(finalRequestBody),
});
const contentType = res.headers.get("content-type");
console.log(
"[Bedrock Not Stream Request] response content type: ",
contentType,
);
const resJson = await res.json();
const message = extractMessage(resJson);
options.onFinish(message, res);
} catch (e) {
console.error("failed to chat", e);
options.onError?.(e as Error);
}
const resJson = await res.json();
if (!resJson) {
throw new Error("Empty response from server");
}
const message = extractMessage(resJson, modelConfig.model);
if (!message) {
throw new Error("Failed to extract message from response");
}
options.onFinish(message, res);
}
} catch (e) {
console.error("[Bedrock Client] Chat error:", e);
@@ -415,26 +396,6 @@ export class BedrockApi implements LLMApi {
}
}
path(path: string): string {
const accessStore = useAccessStore.getState();
let baseUrl = accessStore.useCustomConfig ? accessStore.bedrockUrl : "";
if (baseUrl.length === 0) {
const isApp = !!getClientConfig()?.isApp;
const apiPath = ApiPath.Bedrock;
baseUrl = isApp ? BEDROCK_BASE_URL : apiPath;
}
baseUrl = baseUrl.endsWith("/") ? baseUrl.slice(0, -1) : baseUrl;
if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.Bedrock)) {
baseUrl = "https://" + baseUrl;
}
console.log("[Bedrock Client] API Endpoint:", baseUrl, path);
return [baseUrl, path].join("/");
}
async usage() {
return { used: 0, total: 0 };
}
@@ -443,3 +404,256 @@ export class BedrockApi implements LLMApi {
return [];
}
}
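// Bedrock-specific replacement for the generic stream() helper: instead of
// parsing SSE, it buffers raw eventstream chunks via processChunks/parseEventData,
// and re-issues the request once queued tool calls have produced results.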
function bedrockStream(
chatPath: string,
requestPayload: any,
headers: any,
tools: any[],
funcs: Record<string, Function>,
controller: AbortController,
processToolMessage: (
requestPayload: any,
toolCallMessage: any,
toolCallResult: any[],
) => void,
options: any,
) {
let responseText = "";
let remainText = "";
let finished = false;
let running = false;
let runTools: any[] = [];
let responseRes: Response;
let index = -1;
let chunks: Uint8Array[] = []; // binary data chunks stored in an array
let pendingChunk: Uint8Array | null = null; // holds an incomplete chunk between reads
// Animate the response to make it look smooth
function animateResponseText() {
if (finished || controller.signal.aborted) {
responseText += remainText;
console.log("[Response Animation] finished");
if (responseText?.length === 0) {
options.onError?.(new Error("empty response from server"));
}
return;
}
if (remainText.length > 0) {
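// Emit about 1/60th of the buffered text per frame (at least one character),
// so at ~60 fps the backlog drains in roughly a second regardless of length.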
const fetchCount = Math.max(1, Math.round(remainText.length / 60));
const fetchText = remainText.slice(0, fetchCount);
responseText += fetchText;
remainText = remainText.slice(fetchCount);
options.onUpdate?.(responseText, fetchText);
}
requestAnimationFrame(animateResponseText);
}
// Start animation
animateResponseText();
const finish = () => {
if (!finished) {
if (!running && runTools.length > 0) {
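// Before finishing, flush any queued tool calls: run each one through its
// registered function, collect the results, then hand them to
// processToolMessage and restart the request so the model sees the output.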
const toolCallMessage = {
role: "assistant",
tool_calls: [...runTools],
};
running = true;
runTools.splice(0, runTools.length); // empty runTools
return Promise.all(
toolCallMessage.tool_calls.map((tool) => {
options?.onBeforeTool?.(tool);
return Promise.resolve(
funcs[tool.function.name](
tool?.function?.arguments
? JSON.parse(tool?.function?.arguments)
: {},
),
)
.then((res) => {
let content = res.data || res?.statusText;
content =
typeof content === "string"
? content
: JSON.stringify(content);
if (res.status >= 300) {
return Promise.reject(content);
}
return content;
})
.then((content) => {
options?.onAfterTool?.({
...tool,
content,
isError: false,
});
return content;
})
.catch((e) => {
options?.onAfterTool?.({
...tool,
isError: true,
errorMsg: e.toString(),
});
return e.toString();
})
.then((content) => ({
name: tool.function.name,
role: "tool",
content,
tool_call_id: tool.id,
}));
}),
).then((toolCallResult) => {
processToolMessage(requestPayload, toolCallMessage, toolCallResult);
setTimeout(() => {
// call again
console.debug("[BedrockAPI for toolCallResult] restart");
running = false;
bedrockChatApi(chatPath, headers, requestPayload, tools);
}, 60);
});
}
if (running) {
return;
}
console.debug("[BedrockAPI] end");
finished = true;
options.onFinish(responseText + remainText, responseRes);
}
};
controller.signal.onabort = finish;
async function bedrockChatApi(
chatPath: string,
headers: any,
requestPayload: any,
tools: any,
) {
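// Abort the request if Bedrock does not respond within REQUEST_TIMEOUT_MS.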
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
try {
const res = await fetch(chatPath, {
method: "POST",
headers,
body: JSON.stringify({
...requestPayload,
tools: tools && tools.length ? tools : undefined,
}),
redirect: "manual",
// @ts-ignore
duplex: "half",
signal: controller.signal,
});
clearTimeout(requestTimeoutId);
responseRes = res;
const contentType = res.headers.get("content-type");
console.log(
"[Bedrock Stream Request] response content type: ",
contentType,
);
// Handle non-stream responses
if (contentType?.startsWith("text/plain")) {
responseText = await res.text();
return finish();
}
// Handle error responses
if (
!res.ok ||
res.status !== 200 ||
!contentType?.startsWith("application/vnd.amazon.eventstream")
) {
const responseTexts = [responseText];
let extraInfo = await res.clone().text();
try {
const resJson = await res.clone().json();
extraInfo = prettyObject(resJson);
} catch {}
if (res.status === 401) {
responseTexts.push(Locale.Error.Unauthorized);
}
if (extraInfo) {
responseTexts.push(extraInfo);
}
responseText = responseTexts.join("\n\n");
return finish();
}
// Process the stream using chunks
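// Frames can be split across reads; processChunks consumes whole frames and
// returns any incomplete tail as pendingChunk for the next iteration.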
const reader = res.body?.getReader();
if (!reader) {
throw new Error("No response body reader available");
}
try {
while (true) {
const { done, value } = await reader.read();
if (done) {
// Process final pending chunk
if (pendingChunk) {
try {
const parsed = parseEventData(pendingChunk);
if (parsed) {
const result = processMessage(
parsed,
remainText,
runTools,
index,
);
remainText = result.remainText;
index = result.index;
}
} catch (e) {
console.error("[Final Chunk Process Error]:", e);
}
}
break;
}
// Add new chunk to queue
chunks.push(value);
// Process chunk queue
const result = processChunks(
chunks,
pendingChunk,
remainText,
runTools,
index,
);
chunks = result.chunks;
pendingChunk = result.pendingChunk;
remainText = result.remainText;
index = result.index;
}
} catch (err) {
console.error("[Bedrock Stream Error]:", err);
throw err;
} finally {
reader.releaseLock();
finish();
}
} catch (e) {
console.error("[Bedrock Request] error", e);
options.onError?.(e);
throw e;
}
}
console.debug("[BedrockAPI] start");
bedrockChatApi(chatPath, headers, requestPayload, tools);
}
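Note: parseEventData and processChunks live in @/app/utils/aws and are not shown in this diff. The following is a minimal sketch of the buffering idea the commit message describes, assuming the helpers re-frame raw AWS eventstream bytes (the real implementation may differ). Each eventstream frame starts with a 4-byte big-endian total length, so a frame that is split across network reads can be held back as pendingChunk until the rest arrives:

// Hypothetical sketch, not the actual @/app/utils/aws implementation.
function processChunksSketch(
  chunks: Uint8Array[],
  pendingChunk: Uint8Array | null,
): { frames: Uint8Array[]; pendingChunk: Uint8Array | null } {
  // Merge the carry-over from the previous read with the newly received chunks.
  let buffer = concatBytes(pendingChunk ? [pendingChunk, ...chunks] : chunks);
  const frames: Uint8Array[] = [];
  while (buffer.length >= 4) {
    // The first 4 bytes of an eventstream frame hold its total length (big-endian).
    const frameLength = new DataView(
      buffer.buffer,
      buffer.byteOffset,
      4,
    ).getUint32(0, false);
    if (buffer.length < frameLength) break; // incomplete frame, wait for more bytes
    frames.push(buffer.slice(0, frameLength));
    buffer = buffer.slice(frameLength);
  }
  // Whatever remains is an unfinished frame, carried into the next read.
  return { frames, pendingChunk: buffer.length > 0 ? buffer : null };
}

function concatBytes(parts: Uint8Array[]): Uint8Array {
  const out = new Uint8Array(parts.reduce((n, p) => n + p.length, 0));
  let offset = 0;
  for (const p of parts) {
    out.set(p, offset);
    offset += p.length;
  }
  return out;
}

The real helpers presumably also validate frame CRCs and decode each frame's JSON payload (parseEventData) before processMessage extracts text deltas and tool calls; the sketch covers only the re-framing step.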