Compare commits


5 Commits

11 changed files with 463 additions and 146 deletions

.yarnrc.yml Normal file

@@ -0,0 +1 @@
nodeLinker: node-modules


@@ -107,7 +107,8 @@ export interface LLMModelProvider {
export abstract class LLMApi {
abstract chat(options: ChatOptions): Promise<void>;
abstract speech(options: SpeechOptions): Promise<ArrayBuffer>;
abstract speech(options: SpeechOptions): Promise<ArrayBuffer | AudioBuffer>;
abstract streamSpeech?(options: SpeechOptions): AsyncGenerator<AudioBuffer>;
abstract usage(): Promise<LLMUsage>;
abstract models(): Promise<LLMModel[]>;
}
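A caller is expected to try the new optional streamSpeech generator first and fall back to the buffered speech call. A minimal consumption sketch — not part of this diff, using only the types declared in the hunk above:

// Sketch only: prefer streaming synthesis when a provider implements it.
async function synthesize(
  api: LLMApi,
  options: SpeechOptions,
): Promise<(ArrayBuffer | AudioBuffer)[]> {
  const chunks: (ArrayBuffer | AudioBuffer)[] = [];
  if (api.streamSpeech) {
    for await (const chunk of api.streamSpeech(options)) {
      chunks.push(chunk); // each chunk arrives already decoded as an AudioBuffer
    }
  } else {
    chunks.push(await api.speech(options)); // one-shot ArrayBuffer/AudioBuffer fallback
  }
  return chunks;
}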


@@ -1,5 +1,10 @@
"use client";
import { ApiPath, Alibaba, ALIBABA_BASE_URL } from "@/app/constant";
import {
ApiPath,
Alibaba,
ALIBABA_BASE_URL,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import {
useAccessStore,
useAppConfig,
@@ -59,6 +64,7 @@ interface RequestPayload {
}
export class QwenApi implements LLMApi {
private audioContext?: AudioContext;
path(path: string): string {
const accessStore = useAccessStore.getState();
@@ -89,10 +95,72 @@ export class QwenApi implements LLMApi {
return res?.output?.choices?.at(0)?.message?.content ?? "";
}
speech(options: SpeechOptions): Promise<ArrayBuffer> {
async speech(options: SpeechOptions): Promise<ArrayBuffer> {
throw new Error("Method not implemented.");
}
async *streamSpeech(options: SpeechOptions): AsyncGenerator<AudioBuffer> {
const requestPayload = {
model: options.model,
input: {
text: options.input,
voice: options.voice,
},
speed: options.speed,
response_format: options.response_format,
};
const controller = new AbortController();
options.onController?.(controller);
try {
const speechPath = this.path(Alibaba.SpeechPath);
const speechPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: {
...getHeaders(),
"X-DashScope-SSE": "enable",
},
};
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
const res = await fetch(speechPath, speechPayload);
const reader = res.body!.getReader();
const decoder = new TextDecoder();
let buffer = "";
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split("\n");
buffer = lines.pop() || "";
for (const line of lines) {
if (line.startsWith("data:")) {
const data = line.slice(5);
const json = JSON.parse(data);
if (json.output.audio.data) {
yield this.PCMBase64ToAudioBuffer(json.output.audio.data);
}
}
}
}
clearTimeout(requestTimeoutId);
reader.releaseLock();
} catch (e) {
console.log("[Request] failed to make a speech request", e);
throw e;
}
}
async chat(options: ChatOptions) {
const modelConfig = {
...useAppConfig.getState().modelConfig,
@@ -273,5 +341,75 @@ export class QwenApi implements LLMApi {
async models(): Promise<LLMModel[]> {
return [];
}
// Decode base64-encoded PCM data into an AudioBuffer
private async PCMBase64ToAudioBuffer(base64Data: string) {
try {
// Decode the base64 payload
const binaryString = atob(base64Data);
const bytes = new Uint8Array(binaryString.length);
for (let i = 0; i < binaryString.length; i++) {
bytes[i] = binaryString.charCodeAt(i);
}
// Convert to an AudioBuffer
const audioBuffer = await this.convertToAudioBuffer(bytes);
return audioBuffer;
} catch (error) {
console.error("播放 PCM 数据失败:", error);
throw error;
}
}
// Convert PCM byte data into an AudioBuffer
private convertToAudioBuffer(pcmData: Uint8Array) {
if (!this.audioContext) {
this.audioContext = new (window.AudioContext ||
window.webkitAudioContext)();
}
const audioContext = this.audioContext;
const channels = 1;
const sampleRate = 24000;
return new Promise<AudioBuffer>((resolve, reject) => {
try {
// Convert 16-bit PCM samples to 32-bit floats
const float32Array = this.pcm16ToFloat32(pcmData);
// Create the AudioBuffer
const audioBuffer = audioContext.createBuffer(
channels,
float32Array.length / channels,
sampleRate,
);
// Copy the samples into the AudioBuffer
for (let channel = 0; channel < channels; channel++) {
const channelData = audioBuffer.getChannelData(channel);
for (let i = 0; i < channelData.length; i++) {
channelData[i] = float32Array[i * channels + channel];
}
}
resolve(audioBuffer);
} catch (error) {
reject(error);
}
});
}
// Convert 16-bit little-endian PCM to 32-bit float
private pcm16ToFloat32(pcmData: Uint8Array) {
const length = pcmData.length / 2;
const float32Array = new Float32Array(length);
for (let i = 0; i < length; i++) {
const int16 = (pcmData[i * 2 + 1] << 8) | pcmData[i * 2];
const int16Signed = int16 > 32767 ? int16 - 65536 : int16;
float32Array[i] = int16Signed / 32768;
}
return float32Array;
}
}
export { Alibaba };
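The SSE parser above assumes each data: line is JSON with the synthesized audio at output.audio.data as base64-encoded 16-bit little-endian mono PCM at 24 kHz (the channel count and sample rate hard-coded in convertToAudioBuffer). A worked example of the pcm16ToFloat32 step, with purely illustrative byte values:

// Illustrative only: two little-endian 16-bit samples, 0x1234 and 0xFEDC.
const pcm = new Uint8Array([0x34, 0x12, 0xdc, 0xfe]);
// Sample 0: (0x12 << 8) | 0x34 = 4660            -> 4660 / 32768  ≈  0.1422
// Sample 1: (0xfe << 8) | 0xdc = 65244 (> 32767) -> 65244 - 65536 = -292 -> -292 / 32768 ≈ -0.0089
// With channels = 1 and sampleRate = 24000, these two floats fill a 2-frame AudioBuffer.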


@@ -200,7 +200,6 @@ export class ChatGPTApi implements LLMApi {
options.config.model.startsWith("o1") ||
options.config.model.startsWith("o3") ||
options.config.model.startsWith("o4-mini");
const isGpt5 = options.config.model.startsWith("gpt-5");
if (isDalle3) {
const prompt = getMessageTextContent(
options.messages.slice(-1)?.pop() as any,
@@ -231,7 +230,7 @@ export class ChatGPTApi implements LLMApi {
messages,
stream: options.config.stream,
model: modelConfig.model,
temperature: (!isO1OrO3 && !isGpt5) ? modelConfig.temperature : 1,
temperature: !isO1OrO3 ? modelConfig.temperature : 1,
presence_penalty: !isO1OrO3 ? modelConfig.presence_penalty : 0,
frequency_penalty: !isO1OrO3 ? modelConfig.frequency_penalty : 0,
top_p: !isO1OrO3 ? modelConfig.top_p : 1,
@@ -239,13 +238,7 @@ export class ChatGPTApi implements LLMApi {
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
};
if (isGpt5) {
// Remove max_tokens if present
delete requestPayload.max_tokens;
// Add max_completion_tokens (or max_completion_tokens if that's what you meant)
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
} else if (isO1OrO3) {
if (isO1OrO3) {
// by default the o1/o3 models will not attempt to produce output that includes markdown formatting
// manually add "Formatting re-enabled" developer message to encourage markdown inclusion in model responses
// (https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/reasoning?tabs=python-secure#markdown-output)
@@ -258,9 +251,8 @@ export class ChatGPTApi implements LLMApi {
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
}
// add max_tokens to vision model
if (visionModel && !isO1OrO3 && ! isGpt5) {
if (visionModel && !isO1OrO3) {
requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
}
}


@@ -101,8 +101,6 @@ import {
import { useNavigate } from "react-router-dom";
import {
CHAT_PAGE_SIZE,
DEFAULT_TTS_ENGINE,
ModelProvider,
Path,
REQUEST_TIMEOUT_MS,
ServiceProvider,
@@ -1286,6 +1284,7 @@ function _Chat() {
const accessStore = useAccessStore();
const [speechStatus, setSpeechStatus] = useState(false);
const [speechLoading, setSpeechLoading] = useState(false);
const [speechCooldown, setSpeechCooldown] = useState(false);
async function openaiSpeech(text: string) {
if (speechStatus) {
@@ -1293,14 +1292,14 @@ function _Chat() {
setSpeechStatus(false);
} else {
var api: ClientApi;
api = new ClientApi(ModelProvider.GPT);
const config = useAppConfig.getState();
api = new ClientApi(config.ttsConfig.modelProvider);
setSpeechLoading(true);
ttsPlayer.init();
let audioBuffer: ArrayBuffer;
let audioBuffer: ArrayBuffer | AudioBuffer;
const { markdownToTxt } = require("markdown-to-txt");
const textContent = markdownToTxt(text);
if (config.ttsConfig.engine !== DEFAULT_TTS_ENGINE) {
if (config.ttsConfig.engine === "Edge") {
const edgeVoiceName = accessStore.edgeVoiceName();
const tts = new MsEdgeTTS();
await tts.setMetadata(
@@ -1308,28 +1307,60 @@ function _Chat() {
OUTPUT_FORMAT.AUDIO_24KHZ_96KBITRATE_MONO_MP3,
);
audioBuffer = await tts.toArrayBuffer(textContent);
playSpeech(audioBuffer);
} else {
audioBuffer = await api.llm.speech({
model: config.ttsConfig.model,
input: textContent,
voice: config.ttsConfig.voice,
speed: config.ttsConfig.speed,
});
if (api.llm.streamSpeech) {
// Use streaming playback: play chunks as they arrive
setSpeechStatus(true);
ttsPlayer.startStreamPlay(() => {
setSpeechStatus(false);
});
try {
for await (const chunk of api.llm.streamSpeech({
model: config.ttsConfig.model,
input: textContent,
voice: config.ttsConfig.voice,
speed: config.ttsConfig.speed,
})) {
ttsPlayer.addToQueue(chunk);
}
ttsPlayer.finishStreamPlay();
} catch (e) {
console.error("[Stream Speech]", e);
showToast(prettyObject(e));
setSpeechStatus(false);
ttsPlayer.stop();
} finally {
setSpeechLoading(false);
}
} else {
audioBuffer = await api.llm.speech({
model: config.ttsConfig.model,
input: textContent,
voice: config.ttsConfig.voice,
speed: config.ttsConfig.speed,
});
playSpeech(audioBuffer);
}
}
setSpeechStatus(true);
ttsPlayer
.play(audioBuffer, () => {
setSpeechStatus(false);
})
.catch((e) => {
console.error("[OpenAI Speech]", e);
showToast(prettyObject(e));
setSpeechStatus(false);
})
.finally(() => setSpeechLoading(false));
}
}
function playSpeech(audioBuffer: ArrayBuffer | AudioBuffer) {
setSpeechStatus(true);
ttsPlayer
.play(audioBuffer, () => {
setSpeechStatus(false);
})
.catch((e) => {
console.error("[OpenAI Speech]", e);
showToast(prettyObject(e));
setSpeechStatus(false);
})
.finally(() => setSpeechLoading(false));
}
const context: RenderMessage[] = useMemo(() => {
return session.mask.hideContext ? [] : session.mask.context.slice();
}, [session.mask.context, session.mask.hideContext]);


@@ -3,10 +3,9 @@ import { TTSConfig, TTSConfigValidator } from "../store";
import Locale from "../locales";
import { ListItem, Select } from "./ui-lib";
import {
DEFAULT_TTS_ENGINE,
DEFAULT_TTS_ENGINES,
DEFAULT_TTS_MODELS,
DEFAULT_TTS_VOICES,
ServiceProvider,
TTS_CONFIGS,
TTSEngineType
} from "../constant";
import { InputRange } from "./input-range";
@@ -48,22 +47,33 @@ export function TTSConfigList(props: {
<Select
value={props.ttsConfig.engine}
onChange={(e) => {
const newEngine = e.currentTarget.value as TTSEngineType;
props.updateConfig(
(config) =>
(config.engine = TTSConfigValidator.engine(
e.currentTarget.value,
)),
(config) => {
config.engine = TTSConfigValidator.engine(newEngine);
const engineConfig = TTS_CONFIGS[newEngine];
config.model = TTSConfigValidator.model(
engineConfig.Model[0] || ""
);
config.voice = TTSConfigValidator.voice(
engineConfig.Voices[0] || ""
);
config.modelProvider = TTSConfigValidator.modelProvider(
engineConfig.ModelProvider
);
}
);
}}
>
{DEFAULT_TTS_ENGINES.map((v, i) => (
{Object.keys(TTS_CONFIGS).map((v, i) => (
<option value={v} key={i}>
{v}
{v}-TTS
</option>
))}
</Select>
</ListItem>
{props.ttsConfig.engine === DEFAULT_TTS_ENGINE && (
{(props.ttsConfig.engine === ServiceProvider.OpenAI ||
props.ttsConfig.engine === ServiceProvider.Alibaba) && (
<>
<ListItem title={Locale.Settings.TTS.Model}>
<Select
@@ -77,7 +87,7 @@ export function TTSConfigList(props: {
);
}}
>
{DEFAULT_TTS_MODELS.map((v, i) => (
{TTS_CONFIGS[props.ttsConfig.engine]!.Model.map((v, i) => (
<option value={v} key={i}>
{v}
</option>
@@ -99,7 +109,7 @@ export function TTSConfigList(props: {
);
}}
>
{DEFAULT_TTS_VOICES.map((v, i) => (
{TTS_CONFIGS[props.ttsConfig.engine]!.Voices.map((v, i) => (
<option value={v} key={i}>
{v}
</option>


@@ -232,6 +232,7 @@ export const Alibaba = {
}
return `v1/services/aigc/text-generation/generation`;
},
SpeechPath: "v1/services/aigc/multimodal-generation/generation",
};
export const Tencent = {
@@ -461,19 +462,49 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"deepseek-coder": "2024-07",
};
export const DEFAULT_TTS_ENGINE = "OpenAI-TTS";
export const DEFAULT_TTS_ENGINES = ["OpenAI-TTS", "Edge-TTS"];
export const DEFAULT_TTS_ENGINE = ServiceProvider.OpenAI;
export const DEFAULT_TTS_MODEL = "tts-1";
export const DEFAULT_TTS_VOICE = "alloy";
export const DEFAULT_TTS_MODELS = ["tts-1", "tts-1-hd"];
export const DEFAULT_TTS_VOICES = [
"alloy",
"echo",
"fable",
"onyx",
"nova",
"shimmer",
];
export const OPENAI_TTS = {
Provider: ServiceProvider.OpenAI,
ModelProvider: ModelProvider.GPT,
Model: ["tts-1", "tts-1-hd"],
Voices: ["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
} as const;
export const ALIBABA_TTS = {
Provider: ServiceProvider.Alibaba,
ModelProvider: ModelProvider.Qwen,
Model: ["qwen-tts", "qwen-tts-latest"],
Voices: ["Chelsie", "Cherry", "Ethan", "Serena", "Dylan", "Jada", "Sunny"],
} as const;
export const EDGE_TTS = {
Provider: "Edge" as const,
ModelProvider: ModelProvider.GPT,
Model: [] as string[],
Voices: [] as string[],
} as const;
export type TTSEngineType = ServiceProvider.OpenAI | ServiceProvider.Alibaba | "Edge";
export const DEFAULT_TTS_ENGINES = [ServiceProvider.OpenAI, ServiceProvider.Alibaba, "Edge"] as const;
export const DEFAULT_TTS_MODELS = [...OPENAI_TTS.Model, ...ALIBABA_TTS.Model] as const;
export const DEFAULT_TTS_VOICES = [...OPENAI_TTS.Voices, ...ALIBABA_TTS.Voices] as const;
interface TTSConfigItem {
Provider: ServiceProvider | "Edge";
Model: readonly string[];
Voices: readonly string[];
ModelProvider: ModelProvider;
}
export const TTS_CONFIGS: Record<TTSEngineType, TTSConfigItem> = {
[ServiceProvider.OpenAI]: OPENAI_TTS,
[ServiceProvider.Alibaba]: ALIBABA_TTS,
Edge: EDGE_TTS,
} as const;
export const VISION_MODEL_REGEXES = [
/vision/,
@@ -493,7 +524,6 @@ export const VISION_MODEL_REGEXES = [
/o3/,
/o4-mini/,
/grok-4/i,
/gpt-5/
];
export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
@@ -518,11 +548,6 @@ const openaiModels = [
"gpt-4.1-nano-2025-04-14",
"gpt-4.5-preview",
"gpt-4.5-preview-2025-02-27",
"gpt-5-chat",
"gpt-5-mini",
"gpt-5-nano",
"gpt-5",
"gpt-5-chat-2025-01-01-preview",
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-2024-08-06",
@@ -926,3 +951,4 @@ export const DEFAULT_GA_ID = "G-89WN60ZK2E";
export const SAAS_CHAT_URL = "https://nextchat.club";
export const SAAS_CHAT_UTM_URL = "https://nextchat.club?utm=github";
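For reference, the new TTS_CONFIGS map above keys everything on the engine, so provider, models, and voices resolve in a single lookup. A small illustrative sketch using values copied from the constants above:

// Example lookup against the constants defined above.
const engine: TTSEngineType = ServiceProvider.Alibaba;
const cfg = TTS_CONFIGS[engine];
// cfg.ModelProvider === ModelProvider.Qwen
// cfg.Model[0]      === "qwen-tts"
// cfg.Voices[0]     === "Chelsie"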


@@ -13,6 +13,8 @@ import {
DEFAULT_TTS_VOICES,
StoreKey,
ServiceProvider,
TTSEngineType,
ModelProvider,
} from "../constant";
import { createPersistStore } from "../utils/store";
import type { Voice } from "rt-client";
@@ -20,7 +22,6 @@ import type { Voice } from "rt-client";
export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];
export type TTSModelType = (typeof DEFAULT_TTS_MODELS)[number];
export type TTSVoiceType = (typeof DEFAULT_TTS_VOICES)[number];
export type TTSEngineType = (typeof DEFAULT_TTS_ENGINES)[number];
export enum SubmitKey {
Enter = "Enter",
@@ -86,7 +87,8 @@ export const DEFAULT_CONFIG = {
ttsConfig: {
enable: false,
autoplay: false,
engine: DEFAULT_TTS_ENGINE,
modelProvider: ModelProvider.GPT,
engine: DEFAULT_TTS_ENGINE as TTSEngineType,
model: DEFAULT_TTS_MODEL,
voice: DEFAULT_TTS_VOICE,
speed: 1.0,
@@ -126,18 +128,21 @@ export function limitNumber(
}
export const TTSConfigValidator = {
engine(x: string) {
engine(x: string | TTSEngineType): TTSEngineType {
return x as TTSEngineType;
},
model(x: string) {
model(x: string): TTSModelType {
return x as TTSModelType;
},
voice(x: string) {
voice(x: string): TTSVoiceType {
return x as TTSVoiceType;
},
speed(x: number) {
speed(x: number): number {
return limitNumber(x, 0.25, 4.0, 1.0);
},
modelProvider(x: string): ModelProvider {
return x as ModelProvider;
},
};
export const ModalConfigValidator = {


@@ -1,25 +1,48 @@
type TTSPlayer = {
init: () => void;
play: (audioBuffer: ArrayBuffer, onended: () => void | null) => Promise<void>;
play: (
audioBuffer: ArrayBuffer | AudioBuffer,
onended: () => void | null,
) => Promise<void>;
playQueue: (
audioBuffers: (ArrayBuffer | AudioBuffer)[],
onended: () => void | null,
) => Promise<void>;
addToQueue: (audioBuffer: ArrayBuffer | AudioBuffer) => void;
startStreamPlay: (onended: () => void | null) => void;
finishStreamPlay: () => void;
stop: () => void;
};
export function createTTSPlayer(): TTSPlayer {
let audioContext: AudioContext | null = null;
let audioBufferSourceNode: AudioBufferSourceNode | null = null;
let isPlaying = false;
let playQueue: (ArrayBuffer | AudioBuffer)[] = [];
let currentOnended: (() => void | null) | null = null;
let isStreamMode = false;
let streamFinished = false;
const init = () => {
console.log("[TTSPlayer] init");
audioContext = new (window.AudioContext || window.webkitAudioContext)();
audioContext.suspend();
};
const play = async (audioBuffer: ArrayBuffer, onended: () => void | null) => {
const play = async (
audioBuffer: ArrayBuffer | AudioBuffer,
onended: () => void | null,
) => {
if (audioBufferSourceNode) {
audioBufferSourceNode.stop();
audioBufferSourceNode.disconnect();
}
const buffer = await audioContext!.decodeAudioData(audioBuffer);
let buffer: AudioBuffer;
if (audioBuffer instanceof AudioBuffer) {
buffer = audioBuffer;
} else {
buffer = await audioContext!.decodeAudioData(audioBuffer);
}
audioBufferSourceNode = audioContext!.createBufferSource();
audioBufferSourceNode.buffer = buffer;
audioBufferSourceNode.connect(audioContext!.destination);
@@ -29,17 +52,109 @@ export function createTTSPlayer(): TTSPlayer {
audioBufferSourceNode.onended = onended;
};
const stop = () => {
const playNext = async () => {
if (playQueue.length === 0) {
// In stream mode, wait if the queue is empty but the stream has not finished yet
if (isStreamMode && !streamFinished) {
setTimeout(() => playNext(), 100);
return;
}
isPlaying = false;
isStreamMode = false;
streamFinished = false;
if (currentOnended) {
currentOnended();
currentOnended = null;
}
return;
}
const nextBuffer = playQueue.shift()!;
let buffer: AudioBuffer;
if (nextBuffer instanceof AudioBuffer) {
buffer = nextBuffer;
} else {
buffer = await audioContext!.decodeAudioData(nextBuffer);
}
if (audioBufferSourceNode) {
audioBufferSourceNode.stop();
audioBufferSourceNode.disconnect();
}
audioBufferSourceNode = audioContext!.createBufferSource();
audioBufferSourceNode.buffer = buffer;
audioBufferSourceNode.connect(audioContext!.destination);
audioBufferSourceNode.onended = () => {
playNext();
};
await audioContext!.resume();
audioBufferSourceNode.start();
};
const playQueueMethod = async (
audioBuffers: (ArrayBuffer | AudioBuffer)[],
onended: () => void | null,
) => {
playQueue = [...audioBuffers];
currentOnended = onended;
if (!isPlaying) {
isPlaying = true;
await playNext();
}
};
const addToQueue = (audioBuffer: ArrayBuffer | AudioBuffer) => {
if (streamFinished) {
return;
}
playQueue.push(audioBuffer);
};
const startStreamPlay = (onended: () => void | null) => {
isStreamMode = true;
streamFinished = false;
playQueue = [];
currentOnended = onended;
if (!isPlaying) {
isPlaying = true;
playNext();
}
};
const finishStreamPlay = () => {
streamFinished = true;
};
const stop = async () => {
console.log("[TTSPlayer] stop");
playQueue = [];
isPlaying = false;
isStreamMode = false;
streamFinished = true;
currentOnended = null;
if (audioBufferSourceNode) {
audioBufferSourceNode.stop();
audioBufferSourceNode.disconnect();
audioBufferSourceNode = null;
}
if (audioContext) {
audioContext.close();
await audioContext.close();
audioContext = null;
}
};
return { init, play, stop };
return {
init,
play,
playQueue: playQueueMethod,
addToQueue,
startStreamPlay,
finishStreamPlay,
stop,
};
}
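Taken together, the streaming path is driven by three calls: startStreamPlay arms the queue, addToQueue feeds it one decoded chunk at a time, and finishStreamPlay lets the internal playNext loop drain the queue and then fire the onended callback. A minimal sketch mirroring the chat component usage above (the generator passed in is a placeholder):

// Minimal streaming-playback sketch against the player API defined above.
const player = createTTSPlayer();
player.init();

async function playStream(chunks: AsyncGenerator<AudioBuffer>) {
  player.startStreamPlay(() => console.log("[TTSPlayer] stream finished")); // onended callback
  for await (const chunk of chunks) {
    player.addToQueue(chunk); // playback starts as soon as the first chunk lands
  }
  player.finishStreamPlay(); // signal that no more chunks will arrive
}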


@@ -39,7 +39,7 @@
"markdown-to-txt": "^2.0.1",
"mermaid": "^10.6.1",
"nanoid": "^5.0.3",
"next": "^14.2.32",
"next": "^14.1.1",
"node-fetch": "^3.3.1",
"openapi-client-axios": "^7.5.5",
"react": "^18.2.0",
@@ -93,5 +93,9 @@
"resolutions": {
"lint-staged/yaml": "^2.2.2"
},
"packageManager": "yarn@1.22.19"
"packageManager": "yarn@1.22.19",
"volta": {
"node": "20.19.4",
"yarn": "1.22.22"
}
}

yarn.lock

@@ -1806,10 +1806,10 @@
raw-body "^3.0.0"
zod "^3.23.8"
"@next/env@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/env/-/env-14.2.32.tgz#6d1107e2b7cc8649ff3730b8b46deb4e8a6d38fa"
integrity sha512-n9mQdigI6iZ/DF6pCTwMKeWgF2e8lg7qgt5M7HXMLtyhZYMnf/u905M18sSpPmHL9MKp9JHo56C6jrD2EvWxng==
"@next/env@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/env/-/env-14.1.1.tgz#80150a8440eb0022a73ba353c6088d419b908bac"
integrity sha512-7CnQyD5G8shHxQIIg3c7/pSeYFeMhsNbpU/bmvH7ZnDql7mNRgg8O2JZrhrc/soFnfBnKP4/xXNiiSIPn2w8gA==
"@next/eslint-plugin-next@13.4.19":
version "13.4.19"
@@ -1818,50 +1818,50 @@
dependencies:
glob "7.1.7"
"@next/swc-darwin-arm64@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.32.tgz#83482a7282df899b73d916e02b02a189771e706c"
integrity sha512-osHXveM70zC+ilfuFa/2W6a1XQxJTvEhzEycnjUaVE8kpUS09lDpiDDX2YLdyFCzoUbvbo5r0X1Kp4MllIOShw==
"@next/swc-darwin-arm64@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.1.1.tgz#b74ba7c14af7d05fa2848bdeb8ee87716c939b64"
integrity sha512-yDjSFKQKTIjyT7cFv+DqQfW5jsD+tVxXTckSe1KIouKk75t1qZmj/mV3wzdmFb0XHVGtyRjDMulfVG8uCKemOQ==
"@next/swc-darwin-x64@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.32.tgz#1a9eb676a014e1fc999251f10288c25a0f81d6d1"
integrity sha512-P9NpCAJuOiaHHpqtrCNncjqtSBi1f6QUdHK/+dNabBIXB2RUFWL19TY1Hkhu74OvyNQEYEzzMJCMQk5agjw1Qg==
"@next/swc-darwin-x64@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.1.1.tgz#82c3e67775e40094c66e76845d1a36cc29c9e78b"
integrity sha512-KCQmBL0CmFmN8D64FHIZVD9I4ugQsDBBEJKiblXGgwn7wBCSe8N4Dx47sdzl4JAg39IkSN5NNrr8AniXLMb3aw==
"@next/swc-linux-arm64-gnu@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.32.tgz#7713a49abd555d6f698e766b1631b67d881b4ee4"
integrity sha512-v7JaO0oXXt6d+cFjrrKqYnR2ubrD+JYP7nQVRZgeo5uNE5hkCpWnHmXm9vy3g6foMO8SPwL0P3MPw1c+BjbAzA==
"@next/swc-linux-arm64-gnu@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.1.1.tgz#4f4134457b90adc5c3d167d07dfb713c632c0caa"
integrity sha512-YDQfbWyW0JMKhJf/T4eyFr4b3tceTorQ5w2n7I0mNVTFOvu6CGEzfwT3RSAQGTi/FFMTFcuspPec/7dFHuP7Eg==
"@next/swc-linux-arm64-musl@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.32.tgz#327efdffe97e56f5389a7889cdedbd676fdbb519"
integrity sha512-tA6sIKShXtSJBTH88i0DRd6I9n3ZTirmwpwAqH5zdJoQF7/wlJXR8DkPmKwYl5mFWhEKr5IIa3LfpMW9RRwKmQ==
"@next/swc-linux-arm64-musl@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.1.1.tgz#594bedafaeba4a56db23a48ffed2cef7cd09c31a"
integrity sha512-fiuN/OG6sNGRN/bRFxRvV5LyzLB8gaL8cbDH5o3mEiVwfcMzyE5T//ilMmaTrnA8HLMS6hoz4cHOu6Qcp9vxgQ==
"@next/swc-linux-x64-gnu@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.32.tgz#a3e7444613d0fe5c8ea4ead08d6a9c818246758c"
integrity sha512-7S1GY4TdnlGVIdeXXKQdDkfDysoIVFMD0lJuVVMeb3eoVjrknQ0JNN7wFlhCvea0hEk0Sd4D1hedVChDKfV2jw==
"@next/swc-linux-x64-gnu@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.1.1.tgz#cb4e75f1ff2b9bcadf2a50684605928ddfc58528"
integrity sha512-rv6AAdEXoezjbdfp3ouMuVqeLjE1Bin0AuE6qxE6V9g3Giz5/R3xpocHoAi7CufRR+lnkuUjRBn05SYJ83oKNQ==
"@next/swc-linux-x64-musl@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.32.tgz#a2ec5b0a06c740d6740c938b1d4a614f1a13f018"
integrity sha512-OHHC81P4tirVa6Awk6eCQ6RBfWl8HpFsZtfEkMpJ5GjPsJ3nhPe6wKAJUZ/piC8sszUkAgv3fLflgzPStIwfWg==
"@next/swc-linux-x64-musl@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.1.1.tgz#15f26800df941b94d06327f674819ab64b272e25"
integrity sha512-YAZLGsaNeChSrpz/G7MxO3TIBLaMN8QWMr3X8bt6rCvKovwU7GqQlDu99WdvF33kI8ZahvcdbFsy4jAFzFX7og==
"@next/swc-win32-arm64-msvc@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.32.tgz#b4d3e47c6b276fc4711deb978d04015d029d198d"
integrity sha512-rORQjXsAFeX6TLYJrCG5yoIDj+NKq31Rqwn8Wpn/bkPNy5rTHvOXkW8mLFonItS7QC6M+1JIIcLe+vOCTOYpvg==
"@next/swc-win32-arm64-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.1.1.tgz#060c134fa7fa843666e3e8574972b2b723773dd9"
integrity sha512-1L4mUYPBMvVDMZg1inUYyPvFSduot0g73hgfD9CODgbr4xiTYe0VOMTZzaRqYJYBA9mana0x4eaAaypmWo1r5A==
"@next/swc-win32-ia32-msvc@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.32.tgz#d1f1f854a1fbbaeefa8f81271437448653f33494"
integrity sha512-jHUeDPVHrgFltqoAqDB6g6OStNnFxnc7Aks3p0KE0FbwAvRg6qWKYF5mSTdCTxA3axoSAUwxYdILzXJfUwlHhA==
"@next/swc-win32-ia32-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.1.1.tgz#5c06889352b1f77e3807834a0d0afd7e2d2d1da2"
integrity sha512-jvIE9tsuj9vpbbXlR5YxrghRfMuG0Qm/nZ/1KDHc+y6FpnZ/apsgh+G6t15vefU0zp3WSpTMIdXRUsNl/7RSuw==
"@next/swc-win32-x64-msvc@14.2.32":
version "14.2.32"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.32.tgz#8212d681cf6858a9e3204728f8f2b161000683ed"
integrity sha512-2N0lSoU4GjfLSO50wvKpMQgKd4HdI2UHEhQPPPnlgfBJlOgJxkjpkYBqzk08f1gItBB6xF/n+ykso2hgxuydsA==
"@next/swc-win32-x64-msvc@14.1.1":
version "14.1.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.1.1.tgz#d38c63a8f9b7f36c1470872797d3735b4a9c5c52"
integrity sha512-S6K6EHDU5+1KrBDLko7/c1MNy/Ya73pIAmvKeFwsF4RmBFJSO7/7YeD4FnZ4iBdzE69PpQ4sOMU9ORKeNuxe8A==
"@next/third-parties@^14.1.0":
version "14.1.0"
@@ -2031,17 +2031,11 @@
"@svgr/plugin-jsx" "^6.5.1"
"@svgr/plugin-svgo" "^6.5.1"
"@swc/counter@^0.1.3":
version "0.1.3"
resolved "https://registry.yarnpkg.com/@swc/counter/-/counter-0.1.3.tgz#cc7463bd02949611c6329596fccd2b0ec782b0e9"
integrity sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==
"@swc/helpers@0.5.5":
version "0.5.5"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.5.tgz#12689df71bfc9b21c4f4ca00ae55f2f16c8b77c0"
integrity sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==
"@swc/helpers@0.5.2":
version "0.5.2"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.5.2.tgz#85ea0c76450b61ad7d10a37050289eded783c27d"
integrity sha512-E4KcWTpoLHqwPHLxidpOqQbcrZVgi0rsmmZXUle1jXmJfuIf/UWpczUJ7MZZ5tlxytgJXyp0w4PGkkeLiuIdZw==
dependencies:
"@swc/counter" "^0.1.3"
tslib "^2.4.0"
"@tauri-apps/api@^2.1.1":
@@ -6727,28 +6721,28 @@ neo-async@^2.6.2:
resolved "https://registry.npmmirror.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f"
integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==
next@^14.2.32:
version "14.2.32"
resolved "https://registry.yarnpkg.com/next/-/next-14.2.32.tgz#279b544f0c8ed023c33454ce4d563d3e05c2f3fb"
integrity sha512-fg5g0GZ7/nFc09X8wLe6pNSU8cLWbLRG3TZzPJ1BJvi2s9m7eF991se67wliM9kR5yLHRkyGKU49MMx58s3LJg==
next@^14.1.1:
version "14.1.1"
resolved "https://registry.yarnpkg.com/next/-/next-14.1.1.tgz#92bd603996c050422a738e90362dff758459a171"
integrity sha512-McrGJqlGSHeaz2yTRPkEucxQKe5Zq7uPwyeHNmJaZNY4wx9E9QdxmTp310agFRoMuIYgQrCrT3petg13fSVOww==
dependencies:
"@next/env" "14.2.32"
"@swc/helpers" "0.5.5"
"@next/env" "14.1.1"
"@swc/helpers" "0.5.2"
busboy "1.6.0"
caniuse-lite "^1.0.30001579"
graceful-fs "^4.2.11"
postcss "8.4.31"
styled-jsx "5.1.1"
optionalDependencies:
"@next/swc-darwin-arm64" "14.2.32"
"@next/swc-darwin-x64" "14.2.32"
"@next/swc-linux-arm64-gnu" "14.2.32"
"@next/swc-linux-arm64-musl" "14.2.32"
"@next/swc-linux-x64-gnu" "14.2.32"
"@next/swc-linux-x64-musl" "14.2.32"
"@next/swc-win32-arm64-msvc" "14.2.32"
"@next/swc-win32-ia32-msvc" "14.2.32"
"@next/swc-win32-x64-msvc" "14.2.32"
"@next/swc-darwin-arm64" "14.1.1"
"@next/swc-darwin-x64" "14.1.1"
"@next/swc-linux-arm64-gnu" "14.1.1"
"@next/swc-linux-arm64-musl" "14.1.1"
"@next/swc-linux-x64-gnu" "14.1.1"
"@next/swc-linux-x64-musl" "14.1.1"
"@next/swc-win32-arm64-msvc" "14.1.1"
"@next/swc-win32-ia32-msvc" "14.1.1"
"@next/swc-win32-x64-msvc" "14.1.1"
node-domexception@^1.0.0:
version "1.0.0"