feat: address CR (code review) feedback, improve audio context management, fix the PCM data conversion logic, and ensure the request timeout is cleared once the connection succeeds

EvanWu 2025-08-05 18:35:02 +08:00
parent 044298e18a
commit 86f2c67784


@@ -67,7 +67,7 @@ interface RequestPayload {
}
export class QwenApi implements LLMApi {
- private audioContext?: AudioContext;
+ private static audioContext: AudioContext | null = null;
path(path: string): string {
const accessStore = useAccessStore.getState();
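Making the field `static` means all `QwenApi` instances share a single lazily created `AudioContext` rather than each instance allocating its own; browsers limit the number of concurrent contexts, so a shared one is the usual choice. Note that `window.webkitAudioContext` only type-checks if the prefixed constructor is declared somewhere; a minimal ambient-declaration sketch, assuming the project does not already provide one:

```ts
// Hypothetical ambient declaration for the prefixed constructor used as a
// fallback for older Safari; skip this if the project already declares it.
declare global {
  interface Window {
    webkitAudioContext?: typeof AudioContext;
  }
}

export {}; // keep this file a module so the global augmentation applies
```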
@@ -133,6 +133,7 @@ export class QwenApi implements LLMApi {
);
const res = await fetch(speechPath, speechPayload);
+ clearTimeout(requestTimeoutId); // Clear timeout on successful connection
const reader = res.body!.getReader();
const decoder = new TextDecoder();
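Previously the timeout was only cleared after the whole stream had been consumed (the `clearTimeout` removed further down), so a long-running audio stream could still be aborted by the timer. Clearing it immediately after `fetch` resolves disarms it as soon as the connection is established. A minimal sketch of the assumed pattern, with illustrative names (the timer is presumably armed with `setTimeout` around an `AbortController` before the request):

```ts
// Sketch of the timeout handling assumed around this hunk; names other than
// requestTimeoutId are illustrative, not the actual ones in the file.
async function fetchSpeechStream(
  url: string,
  init: RequestInit,
  timeoutMs = 60_000,
): Promise<ReadableStreamDefaultReader<Uint8Array>> {
  const controller = new AbortController();
  const requestTimeoutId = setTimeout(() => controller.abort(), timeoutMs);

  const res = await fetch(url, { ...init, signal: controller.signal });
  // Clear as soon as the connection is established; otherwise the timer
  // could abort the request while the audio stream is still being read.
  clearTimeout(requestTimeoutId);
  return res.body!.getReader();
}
```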
@@ -150,13 +151,12 @@
if (line.startsWith("data:")) {
const data = line.slice(5);
const json = JSON.parse(data);
- if (json.output.audio.data) {
+ if (json.output?.audio?.data) {
yield this.PCMBase64ToAudioBuffer(json.output.audio.data);
}
}
}
}
- clearTimeout(requestTimeoutId);
reader.releaseLock();
} catch (e) {
console.log("[Request] failed to make a speech request", e);
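Switching to `json.output?.audio?.data` keeps events that carry no audio payload (for example final finish or usage events) from throwing inside the loop. A hedged, self-contained sketch of the kind of read loop these lines sit in; `readSpeechEvents` and `decodeBase64Pcm` are illustrative stand-ins, not the actual method names:

```ts
// Reads SSE-style "data:" lines from the response stream and yields decoded
// audio only when a base64 PCM payload is actually present.
async function* readSpeechEvents(
  reader: ReadableStreamDefaultReader<Uint8Array>,
  decodeBase64Pcm: (b64: string) => Promise<AudioBuffer>,
) {
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    for (const line of decoder.decode(value, { stream: true }).split("\n")) {
      if (!line.startsWith("data:")) continue;
      const json = JSON.parse(line.slice(5));
      // Optional chaining: finish/usage events carry no audio payload.
      if (json.output?.audio?.data) {
        yield decodeBase64Pcm(json.output.audio.data);
      }
    }
  }
  reader.releaseLock();
}
```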
@@ -371,13 +371,17 @@ export class QwenApi implements LLMApi {
}
}
- // Convert PCM byte data to an AudioBuffer
- private convertToAudioBuffer(pcmData: Uint8Array) {
- if (!this.audioContext) {
- this.audioContext = new (window.AudioContext ||
+ private static getAudioContext(): AudioContext {
+ if (!QwenApi.audioContext) {
+ QwenApi.audioContext = new (window.AudioContext ||
window.webkitAudioContext)();
}
- const audioContext = this.audioContext;
+ return QwenApi.audioContext;
+ }
+ // Convert PCM byte data to an AudioBuffer
+ private convertToAudioBuffer(pcmData: Uint8Array) {
+ const audioContext = QwenApi.getAudioContext();
const channels = 1;
const sampleRate = 24000;
return new Promise<AudioBuffer>((resolve, reject) => {
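The diff cuts off inside the returned `Promise`, so the actual conversion body is not visible here. As a hedged sketch of what converting mono, 16-bit little-endian PCM at 24 kHz into an `AudioBuffer` typically looks like (a standalone stand-in, not the project's exact method):

```ts
// Assumes 16-bit little-endian PCM samples; scales them into [-1, 1] floats
// and copies them into a single-channel AudioBuffer.
function pcm16ToAudioBuffer(
  pcmData: Uint8Array,
  audioContext: AudioContext,
  channels = 1,
  sampleRate = 24000,
): Promise<AudioBuffer> {
  return new Promise<AudioBuffer>((resolve, reject) => {
    try {
      const view = new DataView(
        pcmData.buffer,
        pcmData.byteOffset,
        pcmData.byteLength,
      );
      const frameCount = Math.floor(pcmData.byteLength / 2); // 2 bytes per sample
      const audioBuffer = audioContext.createBuffer(
        channels,
        frameCount,
        sampleRate,
      );
      const channelData = audioBuffer.getChannelData(0);
      for (let i = 0; i < frameCount; i++) {
        channelData[i] = view.getInt16(i * 2, true) / 32768;
      }
      resolve(audioBuffer);
    } catch (e) {
      reject(e);
    }
  });
}
```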