mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
Commit: feat: optimize animation
@@ -815,3 +815,34 @@
    }
  }
}

.chat-message-checkmark {
  display: inline-block;
  margin-right: 5px;
  height: 12px;
  width: 12px;
  color: #13a10e;
  fill: #13a10e;
  user-select: none;
  backface-visibility: hidden;
  transform: translateZ(0px);
}

.chat-message-tools-status {
  display: flex;
  justify-content: center;
  align-items: center;
  font-size: 12px;
  margin-top: 5px;
  line-height: 1.5;
}

.chat-message-tools-name {
  color: #aaa;
}

.chat-message-tools-details {
  margin-left: 5px;
  font-weight: bold;
  color: #999;
}
@@ -2039,6 +2039,30 @@ function _Chat() {
                </div>
              )}
            </div>
            {!isUser &&
              message.toolMessages &&
              message.toolMessages.map((tool, index) => (
                <div
                  className={styles["chat-message-tools-status"]}
                  key={index}
                >
                  <div
                    className={styles["chat-message-tools-name"]}
                  >
                    <CheckmarkIcon
                      className={styles["chat-message-checkmark"]}
                    />
                    {tool.toolName}:
                    <code
                      className={
                        styles["chat-message-tools-details"]
                      }
                    >
                      {tool.toolInput}
                    </code>
                  </div>
                </div>
              ))}
            {message?.tools?.length == 0 && showTyping && (
              <div className={styles["chat-message-status"]}>
                {Locale.Chat.Typing}
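The toolMessages array this JSX maps over is populated by the onToolUpdate handler added to app/store/chat.ts later in this diff. Restated as a standalone type for readers tracing the data flow (a hypothetical declaration for illustration; the fork keeps this shape on its chat message object):

// Shape implied by the JSX above and by onToolUpdate below.
interface ToolMessage {
  toolName: string; // rendered after the checkmark icon
  toolInput: string; // rendered inside the <code> element
}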
app/components/openai-voice-visualizer/index.ts (new file, 1 line)
@@ -0,0 +1 @@
export * from "./openai-voice-visualizer";
@@ -0,0 +1,9 @@
.openai-voice-visualizer {
  width: 100%;
  height: 100%;

  canvas {
    width: 100%;
    height: 100%;
  }
}
@@ -0,0 +1,372 @@
import { useEffect, useRef, useCallback, useState } from "react";
import styles from "./openai-voice-visualizer.module.scss";
import { initWebGL } from "../../utils/webgl";
import vertexShaderSource from "../../shaders/vertex.glsl";
import fragmentShaderSource from "../../shaders/fragment.glsl";
import { loadImage } from "canvas";

const CANVAS_SIZE = 208;
const DEFAULT_VIEWPORT_SIZE: [number, number] = [300, 300];
const NOISE_TEXTURE_OPTIONS = {
  format: "webp",
  width: 512,
  height: 512,
  space: "srgb",
  channels: 3,
  depth: "uchar",
  density: 72,
  isProgressive: false,
  paletteBitDepth: 8,
  hasProfile: false,
  hasAlpha: false,
  src: "./noise-texture.webp",
} as const;

interface ColorTheme {
  bloopColorMain: Float32Array;
  bloopColorLow: Float32Array;
  bloopColorMid: Float32Array;
  bloopColorHigh: Float32Array;
}

export interface AudioData {
  avgMag: Float32Array;
  micLevel: number;
  cumulativeAudio: Float32Array;
}

const hexToFloatArray = (hex: string): Float32Array => {
  const hexWithoutHash = hex.replace("#", "");
  const red = parseInt(hexWithoutHash.substring(0, 2), 16) / 255;
  const green = parseInt(hexWithoutHash.substring(2, 4), 16) / 255;
  const blue = parseInt(hexWithoutHash.substring(4, 6), 16) / 255;
  return new Float32Array([red, green, blue]);
};
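A quick worked example of hexToFloatArray, reusing the checkmark green from the stylesheet hunk above (the call is illustrative, not part of the diff):

// 0x13 = 19, 0xA1 = 161, 0x0E = 14; each channel is normalized by 255.
const checkmarkGreen = hexToFloatArray("#13A10E");
console.log(checkmarkGreen); // Float32Array(3) [ ~0.0745, ~0.6314, ~0.0549 ]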
const colorThemes = {
  BLUE: {
    bloopColorMain: hexToFloatArray("#DCF7FF"),
    bloopColorLow: hexToFloatArray("#0181FE"),
    bloopColorMid: hexToFloatArray("#A4EFFF"),
    bloopColorHigh: hexToFloatArray("#FFFDEF"),
  },
  DARK_BLUE: {
    bloopColorMain: hexToFloatArray("#DAF5FF"),
    bloopColorLow: hexToFloatArray("#0066CC"),
    bloopColorMid: hexToFloatArray("#2EC6F5"),
    bloopColorHigh: hexToFloatArray("#72EAF5"),
  },
  GREYSCALE: {
    bloopColorMain: hexToFloatArray("#D7D7D7"),
    bloopColorLow: hexToFloatArray("#303030"),
    bloopColorMid: hexToFloatArray("#989898"),
    bloopColorHigh: hexToFloatArray("#FFFFFF"),
  },
  WHITE: {
    bloopColorMain: hexToFloatArray("#FFFFFF"),
    bloopColorLow: hexToFloatArray("#FFFFFF"),
    bloopColorMid: hexToFloatArray("#FFFFFF"),
    bloopColorHigh: hexToFloatArray("#FFFFFF"),
  },
  BLACK: {
    bloopColorMain: hexToFloatArray("#000000"),
    bloopColorLow: hexToFloatArray("#000000"),
    bloopColorMid: hexToFloatArray("#000000"),
    bloopColorHigh: hexToFloatArray("#000000"),
  },
} as const;

interface OpenAIVoiceVisualizerProps {
  audioData?: AudioData;
  isActive?: boolean;
}
export class NormalBlorpUniformsSetter {
  static uniformBlockName = "BlorbUniformsObject";
  private gl: WebGL2RenderingContext;
  private uniformBuffer: WebGLBuffer;
  private uniformNames: string[];
  private uniformOffsets: { [key: string]: number };
  private dataBuffer: ArrayBuffer;
  private floatView: Float32Array;
  private intView: Int32Array;

  constructor(gl: WebGL2RenderingContext, program: WebGLProgram) {
    this.gl = gl;
    const uniformBlockIndex = gl.getUniformBlockIndex(
      program,
      NormalBlorpUniformsSetter.uniformBlockName,
    );
    const uniformBlockSize = gl.getActiveUniformBlockParameter(
      program,
      uniformBlockIndex,
      gl.UNIFORM_BLOCK_DATA_SIZE,
    );

    this.uniformBuffer = gl.createBuffer()!;
    gl.bindBuffer(gl.UNIFORM_BUFFER, this.uniformBuffer);
    gl.bufferData(gl.UNIFORM_BUFFER, uniformBlockSize, gl.DYNAMIC_DRAW);

    const bindingPoint = 0;
    gl.bindBufferBase(gl.UNIFORM_BUFFER, bindingPoint, this.uniformBuffer);
    gl.uniformBlockBinding(program, uniformBlockIndex, bindingPoint);

    const uniformIndices = gl.getActiveUniformBlockParameter(
      program,
      uniformBlockIndex,
      gl.UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES,
    );

    this.uniformNames = [];
    this.uniformOffsets = {};
    for (let i = 0; i < uniformIndices.length; i++) {
      const uniformIndex = uniformIndices[i];
      const uniformInfo = gl.getActiveUniform(program, uniformIndex);
      if (!uniformInfo) {
        throw new Error("No uniformInfo for index " + uniformIndex);
      }
      let uniformName = uniformInfo.name;
      uniformName = uniformName.replace(/\[0\]$/, "");
      const uniformOffset = gl.getActiveUniforms(
        program,
        [uniformIndex],
        gl.UNIFORM_OFFSET,
      )[0];
      this.uniformNames.push(uniformName);
      this.uniformOffsets[uniformName] = uniformOffset;
    }

    this.dataBuffer = new ArrayBuffer(uniformBlockSize);
    this.floatView = new Float32Array(this.dataBuffer);
    this.intView = new Int32Array(this.dataBuffer);
  }

  setVariablesAndRender(variables: {
    [key: string]: number | boolean | number[];
  }) {
    for (const uniformName of this.uniformNames) {
      const [, name] = uniformName.split(".");
      const offset = this.uniformOffsets[uniformName] / 4;
      const value = variables[name];

      if (typeof value === "number") {
        this.floatView[offset] = value;
      } else if (typeof value === "boolean") {
        this.intView[offset] = value ? 1 : 0;
      } else if (Array.isArray(value)) {
        this.floatView.set(value, offset);
      }
    }

    this.gl.bindBuffer(this.gl.UNIFORM_BUFFER, this.uniformBuffer);
    this.gl.bufferSubData(this.gl.UNIFORM_BUFFER, 0, this.dataBuffer);
    this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 6);
  }
}
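The point of this class is that it never hand-computes std140 padding: UNIFORM_BLOCK_DATA_SIZE sizes the CPU-side mirror buffer, and UNIFORM_OFFSET places each field, so the JS object keys only have to match the field names inside the shader's BlorbUniformsObject block (the split(".") strips the introspected block prefix). A hedged usage sketch, assuming the shader sources added in this commit:

// Usage sketch (assumes a fragment shader declaring
// `uniform BlorbUniformsObject { ... }`, as this commit's shaders do).
const canvas = document.createElement("canvas");
const { gl, program } = initWebGL(canvas, vertexShaderSource, fragmentShaderSource);
if (gl && program) {
  const uniforms = new NormalBlorpUniformsSetter(gl, program);
  uniforms.setVariablesAndRender({
    time: performance.now() / 1000, // numbers land in the float view
    isNewBloop: true, // booleans are written as int 0 / 1
    avgMag: [0.1, 0.2, 0.1, 0.05], // arrays are copied at their std140 offset
  });
}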
export function OpenAIVoiceVisualizer({
  audioData,
  isActive,
}: OpenAIVoiceVisualizerProps) {
  const canvasRef = useRef<HTMLCanvasElement>(null);
  const glRef = useRef<WebGL2RenderingContext | null>(null);
  const programRef = useRef<WebGLProgram | null>(null);
  const animationFrameRef = useRef<number>(0);
  const uniformSetterRef = useRef<NormalBlorpUniformsSetter | null>(null);
  const startTimeRef = useRef<number>(performance.now() / 1000);
  const readyTimeRef = useRef<number>(performance.now() / 1000);

  const variablesRef = useRef({
    time: 0,
    micLevel: 0,
    stateListen: 0,
    listenTimestamp: 0,
    stateThink: 0.0,
    thinkTimestamp: 0.0,
    stateSpeak: 1,
    speakTimestamp: 0,
    readyTimestamp: 0,
    stateHalt: 0.0,
    haltTimestamp: 0.0,
    touchDownTimestamp: 0.0,
    touchUpTimestamp: 0.0,
    stateFailedToConnect: 0.0,
    failedToConnectTimestamp: 0.0,
    avgMag: new Array(4).fill(0),
    cumulativeAudio: new Array(4).fill(0),
    isNewBloop: true,
    isAdvancedBloop: true,
    bloopColorMain: [0, 0, 0],
    bloopColorLow: [0, 0, 0],
    bloopColorMid: [0, 0, 0],
    bloopColorHigh: [0, 0, 0],
    isDarkMode: false,
    screenScaleFactor: 1.0,
    viewport: DEFAULT_VIEWPORT_SIZE,
    silenceAmount: 0.0,
    silenceTimestamp: 0.0,
    fadeBloopWhileListening: false,
  });

  const audioDataRef = useRef<AudioData>({
    avgMag: new Float32Array(4),
    micLevel: 0,
    cumulativeAudio: new Float32Array(4),
  });

  const handleAudioData = useCallback((data: AudioData) => {
    audioDataRef.current = data;
  }, []);

  const [viewportSize] = useState<[number, number]>(DEFAULT_VIEWPORT_SIZE);
  const [noiseTextureImage, setNoiseTextureImage] =
    useState<HTMLImageElement | null>(null);
  const getColorTheme = useCallback((isAdvanced: boolean): ColorTheme => {
    return colorThemes.BLUE;
  }, []);

  const initializeWebGL = useCallback(() => {
    if (!canvasRef.current) return;

    canvasRef.current.width = CANVAS_SIZE;
    canvasRef.current.height = CANVAS_SIZE;

    const { gl, program } = initWebGL(
      canvasRef.current,
      vertexShaderSource,
      fragmentShaderSource,
    );

    if (!gl || !program) {
      console.error("WebGL initialization failed");
      return;
    }

    glRef.current = gl;
    programRef.current = program;
    uniformSetterRef.current = new NormalBlorpUniformsSetter(gl, program);

    return { gl, program };
  }, []);

  const initializeNoiseTexture = useCallback(
    (gl: WebGL2RenderingContext, program: WebGLProgram) => {
      const noiseTexture = gl.createTexture();
      if (!noiseTexture) {
        console.error("Failed to create noise texture");
        return;
      }

      gl.bindTexture(gl.TEXTURE_2D, noiseTexture);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);

      if (noiseTextureImage) {
        gl.texImage2D(
          gl.TEXTURE_2D,
          0,
          gl.RGBA,
          gl.RGBA,
          gl.UNSIGNED_BYTE,
          noiseTextureImage,
        );
      }

      const location = gl.getUniformLocation(program, "uTextureNoise");
      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, noiseTexture);
      gl.uniform1i(location, 0);

      return noiseTexture;
    },
    [noiseTextureImage],
  );

  const renderFrame = useCallback(() => {
    if (!glRef.current || !uniformSetterRef.current) return;

    if (!audioData) {
      handleAudioData({
        avgMag: new Float32Array(4),
        micLevel: 0,
        cumulativeAudio: new Float32Array(4),
      });
      // return;
    } else {
      handleAudioData(audioData);
    }

    const currentFrameTime = performance.now() / 1000;
    const colorTheme = getColorTheme(true);

    const variables = variablesRef.current;
    variables.time = currentFrameTime;
    variables.micLevel = audioDataRef.current.micLevel;
    variables.speakTimestamp = readyTimeRef.current;
    variables.readyTimestamp = startTimeRef.current;
    variables.avgMag = Array.from(audioDataRef.current.avgMag);
    variables.cumulativeAudio = Array.from(
      audioDataRef.current.cumulativeAudio,
    );
    variables.bloopColorMain = Array.from(colorTheme.bloopColorMain);
    variables.bloopColorLow = Array.from(colorTheme.bloopColorLow);
    variables.bloopColorMid = Array.from(colorTheme.bloopColorMid);
    variables.bloopColorHigh = Array.from(colorTheme.bloopColorHigh);
    variables.screenScaleFactor = window.devicePixelRatio || 1.0;
    variables.viewport = viewportSize;

    uniformSetterRef.current.setVariablesAndRender(variables);
    animationFrameRef.current = requestAnimationFrame(renderFrame);
  }, [audioData, getColorTheme, handleAudioData, viewportSize]);

  useEffect(() => {
    const loadNoiseTexture = async () => {
      try {
        const image = await loadImage(NOISE_TEXTURE_OPTIONS.src);
        setNoiseTextureImage(image as unknown as HTMLImageElement);
      } catch (error) {
        console.error("Failed to load noise texture:", error);
      }
    };
    loadNoiseTexture();
  }, []);

  useEffect(() => {
    const canvas = canvasRef.current;
    if (!canvas) return;

    const webglContext = initializeWebGL();
    if (!webglContext) return;

    const { gl, program } = webglContext;
    const noiseTexture = initializeNoiseTexture(gl, program);

    renderFrame();

    return () => {
      if (animationFrameRef.current) {
        cancelAnimationFrame(animationFrameRef.current);
      }
      if (gl) {
        if (noiseTexture) {
          gl.deleteTexture(noiseTexture);
        }
        gl.deleteProgram(program);
      }
    };
  }, [
    initializeWebGL,
    initializeNoiseTexture,
    renderFrame,
    audioData,
    isActive,
  ]);

  return (
    <div className={styles["openai-voice-visualizer"]}>
      <canvas ref={canvasRef} />
    </div>
  );
}
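A minimal mount sketch (a hypothetical file, not part of the commit): the component is self-driving — it boots WebGL, loads the noise texture, and runs its own requestAnimationFrame loop — so both props are optional. One caveat worth flagging: loadImage is imported from the node-oriented "canvas" package, which is unusual in browser component code; a plain Image()/onload pair would be the browser-native equivalent.

import { OpenAIVoiceVisualizer } from "@/app/components/openai-voice-visualizer";

export function VisualizerDemo() {
  // With no audioData prop, renderFrame feeds itself zeroed AudioData each
  // frame, so the idle "bloop" animation still plays.
  return <OpenAIVoiceVisualizer isActive={false} />;
}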
@@ -8,15 +8,15 @@
  height: 100%;
  padding: 20px;
  box-sizing: border-box;
  .circle-mic {
    width: 150px;
    height: 150px;
    border-radius: 50%;
    background: linear-gradient(to bottom right, #a0d8ef, #f0f8ff);
    display: flex;
    justify-content: center;
    align-items: center;
  }
  // .circle-mic {
  //   width: 150px;
  //   height: 150px;
  //   border-radius: 50%;
  //   background: linear-gradient(to bottom right, #a0d8ef, #f0f8ff);
  //   display: flex;
  //   justify-content: center;
  //   align-items: center;
  // }
  .icon-center {
    font-size: 24px;
  }
@@ -21,6 +21,10 @@ import {
import { AudioHandler } from "@/app/lib/audio";
import { uploadImage } from "@/app/utils/chat";
import { VoicePrint } from "@/app/components/voice-print";
import {
  OpenAIVoiceVisualizer,
  AudioData,
} from "../openai-voice-visualizer/openai-voice-visualizer";

interface RealtimeChatProps {
  onClose?: () => void;
@@ -43,6 +47,7 @@ export function RealtimeChat({
  const [modality, setModality] = useState("audio");
  const [useVAD, setUseVAD] = useState(true);
  const [frequencies, setFrequencies] = useState<Uint8Array | undefined>();
  const [audioData, setAudioData] = useState<AudioData | undefined>();

  const clientRef = useRef<RTClient | null>(null);
  const audioHandlerRef = useRef<AudioHandler | null>(null);
@@ -292,6 +297,9 @@ export function RealtimeChat({
      if (audioHandlerRef.current) {
        const freqData = audioHandlerRef.current.getByteFrequencyData();
        setFrequencies(freqData);

        const audioData = audioHandlerRef.current.getAudioData();
        setAudioData(audioData);
      }
      animationFrameId = requestAnimationFrame(animationFrame);
    };
@@ -299,6 +307,7 @@ export function RealtimeChat({
      animationFrameId = requestAnimationFrame(animationFrame);
    } else {
      setFrequencies(undefined);
      setAudioData(undefined);
    }

    return () => {
@@ -327,11 +336,11 @@ export function RealtimeChat({
  return (
    <div className={styles["realtime-chat"]}>
      <div
        className={clsx(styles["circle-mic"], {
          [styles["pulse"]]: isRecording,
        })}
        // className={clsx(styles["circle-mic"], {
        //   [styles["pulse"]]: isRecording,
        // })}
      >
        <VoicePrint frequencies={frequencies} isActive={isRecording} />
        <OpenAIVoiceVisualizer audioData={audioData} isActive={isRecording} />
      </div>

      <div className={styles["bottom-icons"]}>
app/global.d.ts (vendored, 5 lines)
@@ -10,6 +10,11 @@ declare module "*.scss" {

declare module "*.svg";

declare module "*.glsl" {
  const content: string;
  export default content;
}

declare interface Window {
  __TAURI__?: {
    writeText(text: string): Promise<void>;
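The *.glsl ambient module only satisfies the type checker for imports like `import vertexShaderSource from "../../shaders/vertex.glsl"`; the build still needs a rule that turns the file into a string at bundle time. This commit does not show that wiring, so the following is an assumption — one conventional way to do it in a Next.js next.config.js:

// Hypothetical bundler wiring (an assumption, not part of this diff):
module.exports = {
  webpack(config) {
    config.module.rules.push({
      test: /\.glsl$/,
      type: "asset/source", // the import then resolves to the raw shader text
    });
    return config;
  },
};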
@@ -21,6 +21,9 @@ export class AudioHandler {
    this.analyser = new AnalyserNode(this.context, { fftSize: 256 });
    this.analyserData = new Uint8Array(this.analyser.frequencyBinCount);
    this.mergeNode.connect(this.analyser);

    this.dataArray = new Float32Array(this.analyser.frequencyBinCount);
    this.initializeBands(this.analyser.frequencyBinCount);
  }

  getByteFrequencyData() {
@@ -94,6 +97,7 @@ export class AudioHandler {
    this.source.disconnect();
    this.stream.getTracks().forEach((track) => track.stop());
  }

  startStreamingPlayback() {
    this.isPlaying = true;
    this.nextPlayTime = this.context.currentTime;
@@ -148,6 +152,7 @@ export class AudioHandler {
      this.nextPlayTime = this.context.currentTime;
    }
  }

  _saveData(data: Int16Array, bytesPerSample = 16): Blob {
    const headerLength = 44;
    const numberOfChannels = 1;
@@ -171,10 +176,12 @@ export class AudioHandler {
    // using data.buffer, so no need to setUint16 to view.
    return new Blob([view, data.buffer], { type: "audio/mpeg" });
  }

  savePlayFile() {
    // @ts-ignore
    return this._saveData(new Int16Array(this.playBuffer));
  }

  saveRecordFile(
    audioStartMillis: number | undefined,
    audioEndMillis: number | undefined,
@@ -190,6 +197,7 @@ export class AudioHandler {
      new Int16Array(this.recordBuffer.slice(startIndex, endIndex)),
    );
  }

  async close() {
    this.recordBuffer = [];
    this.workletNode?.disconnect();
@@ -197,4 +205,75 @@ export class AudioHandler {
    this.stream?.getTracks().forEach((track) => track.stop());
    await this.context.close();
  }

  private readonly NUM_BANDS = 4;
  private avgMag: Float32Array = new Float32Array(this.NUM_BANDS);
  private dataArray: Float32Array | null = null;
  private cumulativeAudio: Float32Array = new Float32Array(this.NUM_BANDS);
  private binSize: number = 0;

  private initializeBands = (frequencyBinCount: number) => {
    this.binSize = Math.floor(frequencyBinCount / this.NUM_BANDS);
  };

  private createMagnitudeLookupTable = () => {
    const GAIN_MULTIPLIER = 1.2;
    const table = new Float32Array(100);
    for (let i = 0; i < 100; i++) {
      const db = -100 + i;
      let magnitude = 1 - (Math.max(-100, Math.min(-10, db)) * -1) / 100;
      magnitude = Math.pow(magnitude, 0.7) * GAIN_MULTIPLIER;
      table[i] = Math.min(1, magnitude);
    }
    return table;
  };

  private magnitudeLookupTable = this.createMagnitudeLookupTable();

  decibelToMagnitude = (db: number): number => {
    if (db === -Infinity) return 0;
    const index = Math.floor(db + 100);
    if (index < 0) return 0;
    if (index >= 100) return 1;
    return this.magnitudeLookupTable[index];
  };

  getAudioData() {
    if (!this.analyser || !this.dataArray) return;

    const SMOOTHING_FACTOR = 0.2;
    const FREQUENCY_WEIGHTS = [1.2, 1.0, 0.8, 0.6];

    this.analyser.getFloatFrequencyData(this.dataArray);

    let totalMagnitude = 0;

    for (let i = 0; i < this.NUM_BANDS; i++) {
      const startBin = i * this.binSize;
      const endBin = startBin + this.binSize;
      let sum = 0;

      const bandData = this.dataArray.subarray(startBin, endBin);
      for (let j = 0; j < bandData.length; j++) {
        const magnitude =
          this.decibelToMagnitude(bandData[j]) * FREQUENCY_WEIGHTS[i];
        sum += magnitude;
      }

      this.avgMag[i] = sum / this.binSize;
      totalMagnitude += this.avgMag[i];

      this.cumulativeAudio[i] =
        this.cumulativeAudio[i] * (1 - SMOOTHING_FACTOR) +
        this.avgMag[i] * SMOOTHING_FACTOR;
    }

    const micLevel = Math.min(1, (totalMagnitude / this.NUM_BANDS) * 1.2);

    return {
      avgMag: this.avgMag,
      micLevel,
      cumulativeAudio: this.cumulativeAudio,
    };
  }
}
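Some worked numbers for the band pipeline above: with fftSize 256 the analyser exposes frequencyBinCount = 128 bins, so initializeBands sets binSize = Math.floor(128 / 4) = 32 bins per band. The lookup-table shaping can be re-derived by hand, e.g. for a -50 dB bin:

// Standalone re-derivation of table[50] (db = -100 + 50 = -50):
const db = -50;
const linear = 1 - (Math.max(-100, Math.min(-10, db)) * -1) / 100; // 0.5
const shaped = Math.pow(linear, 0.7) * 1.2; // ≈ 0.616 * 1.2 ≈ 0.739
console.log(Math.min(1, shaped)); // ≈ 0.739, what decibelToMagnitude(-50) returns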
app/shaders/fragment.glsl (new file, 1326 lines — diff suppressed because it is too large)
app/shaders/vertex.glsl (new file, 20 lines)
@@ -0,0 +1,20 @@
#version 300 es

out vec4 out_position;
out vec2 out_uv;

const vec4 blitFullscreenTrianglePositions[6] = vec4[](
  vec4(-1.0, -1.0, 0.0, 1.0),
  vec4(3.0, -1.0, 0.0, 1.0),
  vec4(-1.0, 3.0, 0.0, 1.0),
  vec4(-1.0, -1.0, 0.0, 1.0),
  vec4(3.0, -1.0, 0.0, 1.0),
  vec4(-1.0, 3.0, 0.0, 1.0)
);

void main() {
  out_position = blitFullscreenTrianglePositions[gl_VertexID];
  out_uv = out_position.xy * 0.5 + 0.5;
  out_uv.y = 1.0 - out_uv.y;
  gl_Position = out_position;
}
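This vertex shader uses the attribute-less fullscreen-triangle trick: positions are indexed by gl_VertexID, so no vertex buffer is ever bound, and a triangle with corners (-1,-1), (3,-1), (-1,3) already covers the whole [-1,1] clip square. (The array holds the same triangle twice because setVariablesAndRender draws six vertices.) A small sanity check of the coverage claim:

// Verifies every clip-space corner lies inside the oversized triangle.
const tri = [[-1, -1], [3, -1], [-1, 3]];
const corners = [[-1, -1], [1, -1], [-1, 1], [1, 1]];
const edge = (a: number[], b: number[], p: number[]) =>
  (b[0] - a[0]) * (p[1] - a[1]) - (b[1] - a[1]) * (p[0] - a[0]);
const inside = (p: number[]) =>
  edge(tri[0], tri[1], p) >= 0 &&
  edge(tri[1], tri[2], p) >= 0 &&
  edge(tri[2], tri[0], p) >= 0;
console.log(corners.every(inside)); // true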
@@ -1,4 +1,8 @@
import { getMessageTextContent, trimTopic } from "../utils";
import {
  getMessageTextContent,
  isFunctionCallModel,
  trimTopic,
} from "../utils";

import { indexedDBStorage } from "@/app/utils/indexedDB-storage";
import { nanoid } from "nanoid";
@@ -448,74 +452,157 @@ export const useChatStore = createPersistStore(
      });

      const api: ClientApi = getClientApi(modelConfig.providerName);
      // make request
      api.llm.chat({
        messages: sendMessages,
        config: { ...modelConfig, stream: true },
        onUpdate(message) {
          botMessage.streaming = true;
          if (message) {
            botMessage.content = message;
          }
          get().updateTargetSession(session, (session) => {
            session.messages = session.messages.concat();
          });
        },
        onFinish(message) {
          botMessage.streaming = false;
          if (message) {
            botMessage.content = message;
            botMessage.date = new Date().toLocaleString();
            get().onNewMessage(botMessage, session);
          }
          ChatControllerPool.remove(session.id, botMessage.id);
        },
        onBeforeTool(tool: ChatMessageTool) {
          (botMessage.tools = botMessage?.tools || []).push(tool);
          get().updateTargetSession(session, (session) => {
            session.messages = session.messages.concat();
          });
        },
        onAfterTool(tool: ChatMessageTool) {
          botMessage?.tools?.forEach((t, i, tools) => {
            if (tool.id == t.id) {
              tools[i] = { ...tool };
            }
          });
          get().updateTargetSession(session, (session) => {
            session.messages = session.messages.concat();
          });
        },
        onError(error) {
          const isAborted = error.message?.includes?.("aborted");
          botMessage.content +=
            "\n\n" +
            prettyObject({
              error: true,
              message: error.message,
            });
          botMessage.streaming = false;
          userMessage.isError = !isAborted;
          botMessage.isError = !isAborted;
          get().updateTargetSession(session, (session) => {
            session.messages = session.messages.concat();
          });
          ChatControllerPool.remove(
            session.id,
            botMessage.id ?? messageIndex,
          );
      if (
        config.pluginConfig.enable &&
        session.mask.usePlugins &&
        (allPlugins.length > 0 || isEnableRAG) &&
        isFunctionCallModel(modelConfig.model)
      ) {
        console.log("[ToolAgent] start");
        let pluginToolNames = allPlugins.map((m) => m.toolName);
        if (isEnableRAG) {
          // other plugins will affect rag
          // clear existing plugins here
          pluginToolNames = [];
          pluginToolNames.push("myfiles_browser");
        }
        const agentCall = () => {
          api.llm.toolAgentChat({
            chatSessionId: session.id,
            messages: sendMessages,
            config: { ...modelConfig, stream: true },
            agentConfig: { ...pluginConfig, useTools: pluginToolNames },
            onUpdate(message) {
              botMessage.streaming = true;
              if (message) {
                botMessage.content = message;
              }
              get().updateTargetSession(session, (session) => {
                session.messages = session.messages.concat();
              });
            },
            onToolUpdate(toolName, toolInput) {
              botMessage.streaming = true;
              if (toolName && toolInput) {
                botMessage.toolMessages!.push({
                  toolName,
                  toolInput,
                });
              }
              get().updateTargetSession(session, (session) => {
                session.messages = session.messages.concat();
              });
            },
            onFinish(message) {
              botMessage.streaming = false;
              if (message) {
                botMessage.content = message;
                get().onNewMessage(botMessage, session);
              }
              ChatControllerPool.remove(session.id, botMessage.id);
            },
            onError(error) {
              const isAborted = error.message.includes("aborted");
              botMessage.content +=
                "\n\n" +
                prettyObject({
                  error: true,
                  message: error.message,
                });
              botMessage.streaming = false;
              userMessage.isError = !isAborted;
              botMessage.isError = !isAborted;
              get().updateTargetSession(session, (session) => {
                session.messages = session.messages.concat();
              });
              ChatControllerPool.remove(
                session.id,
                botMessage.id ?? messageIndex,
              );

              console.error("[Chat] failed ", error);
            },
            onController(controller) {
              // collect controller for stop/retry
              ChatControllerPool.addController(
                session.id,
                botMessage.id ?? messageIndex,
                controller,
              );
            },
          });
          console.error("[Chat] failed ", error);
        },
        onController(controller) {
          // collect controller for stop/retry
          ChatControllerPool.addController(
            session.id,
            botMessage.id ?? messageIndex,
            controller,
          );
        },
      });
        };
        agentCall();
      } else {
        // make request
        api.llm.chat({
          messages: sendMessages,
          config: { ...modelConfig, stream: true },
          onUpdate(message) {
            botMessage.streaming = true;
            if (message) {
              botMessage.content = message;
            }
            get().updateTargetSession(session, (session) => {
              session.messages = session.messages.concat();
            });
          },
          onFinish(message) {
            botMessage.streaming = false;
            if (message) {
              botMessage.content = message;
              botMessage.date = new Date().toLocaleString();
              get().onNewMessage(botMessage, session);
            }
            ChatControllerPool.remove(session.id, botMessage.id);
          },
          onBeforeTool(tool: ChatMessageTool) {
            (botMessage.tools = botMessage?.tools || []).push(tool);
            get().updateTargetSession(session, (session) => {
              session.messages = session.messages.concat();
            });
          },
          onAfterTool(tool: ChatMessageTool) {
            botMessage?.tools?.forEach((t, i, tools) => {
              if (tool.id == t.id) {
                tools[i] = { ...tool };
              }
            });
            get().updateTargetSession(session, (session) => {
              session.messages = session.messages.concat();
            });
          },
          onError(error) {
            const isAborted = error.message?.includes?.("aborted");
            botMessage.content +=
              "\n\n" +
              prettyObject({
                error: true,
                message: error.message,
              });
            botMessage.streaming = false;
            userMessage.isError = !isAborted;
            botMessage.isError = !isAborted;
            get().updateTargetSession(session, (session) => {
              session.messages = session.messages.concat();
            });
            ChatControllerPool.remove(
              session.id,
              botMessage.id ?? messageIndex,
            );

            console.error("[Chat] failed ", error);
          },
          onController(controller) {
            // collect controller for stop/retry
            ChatControllerPool.addController(
              session.id,
              botMessage.id ?? messageIndex,
              controller,
            );
          },
        });
      }
    },

    getMemoryPrompt() {
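The heart of this hunk: onUserInput now branches. When plugins are enabled, the session mask allows them, and isFunctionCallModel approves the model, the store routes through api.llm.toolAgentChat; otherwise it falls back to the plain api.llm.chat path. The agent path's callback surface, reconstructed from the hunk (a hypothetical standalone interface for illustration; the real type lives in the fork's client API):

// Callback surface of toolAgentChat as exercised above.
interface ToolAgentChatCallbacks {
  onUpdate(message?: string): void;
  onToolUpdate(toolName?: string, toolInput?: string): void; // feeds botMessage.toolMessages
  onFinish(message?: string): void;
  onError(error: Error): void;
  onController(controller: AbortController): void;
}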
app/utils/webgl.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
export function initWebGL(
  canvas: HTMLCanvasElement,
  vertexShaderSource: string,
  fragmentShaderSource: string,
) {
  // Get the WebGL context
  const gl = canvas.getContext("webgl2", {
    premultipliedAlpha: true,
  });
  if (!gl) {
    console.error("Failed to initialize WebGL2 context");
    return { gl: null, program: null };
  }

  // Create the shader program
  const program = createShaderProgram(
    gl,
    vertexShaderSource,
    fragmentShaderSource,
  );
  if (!program) {
    console.error("Failed to create shader program");
    return { gl: null, program: null };
  }

  // Set the viewport
  gl.viewport(0, 0, canvas.width, canvas.height);

  // Use the shader program
  gl.useProgram(program);

  return { gl, program };
}

function createShaderProgram(
  gl: WebGL2RenderingContext,
  vertexShaderSource: string,
  fragmentShaderSource: string,
): WebGLProgram | null {
  // Create the vertex shader
  const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
  if (!vertexShader) return null;

  // Create the fragment shader
  const fragmentShader = createShader(
    gl,
    gl.FRAGMENT_SHADER,
    fragmentShaderSource,
  );
  if (!fragmentShader) return null;

  // Create the shader program
  const program = gl.createProgram();
  if (!program) return null;

  // Attach the shaders
  gl.attachShader(program, vertexShader);
  gl.attachShader(program, fragmentShader);

  // Link the program
  gl.linkProgram(program);

  // Check link status
  if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
    console.error(
      "Unable to initialize the shader program:",
      gl.getProgramInfoLog(program),
    );
    return null;
  }

  return program;
}

function createShader(
  gl: WebGL2RenderingContext,
  type: number,
  source: string,
): WebGLShader | null {
  // Create the shader
  const shader = gl.createShader(type);
  if (!shader) return null;

  // Set the shader source
  gl.shaderSource(shader, source);

  // Compile the shader
  gl.compileShader(shader);

  // Check compile status
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    console.error("Shader compile error:", gl.getShaderInfoLog(shader));
    gl.deleteShader(shader);
    return null;
  }

  return shader;
}