Compare commits

..

9 Commits

Author SHA1 Message Date
dependabot[bot]
c53e471a38 chore(deps-dev): bump @tauri-apps/cli from 1.5.11 to 2.9.1
Bumps [@tauri-apps/cli](https://github.com/tauri-apps/tauri) from 1.5.11 to 2.9.1.
- [Release notes](https://github.com/tauri-apps/tauri/releases)
- [Commits](https://github.com/tauri-apps/tauri/compare/@tauri-apps/cli-v1.5.11...@tauri-apps/cli-v2.9.1)

---
updated-dependencies:
- dependency-name: "@tauri-apps/cli"
  dependency-version: 2.9.1
  dependency-type: direct:development
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-10-27 13:04:24 +00:00
RiverRay
c3b8c1587c Merge pull request #6637 from princeaden1/feat-xai-new-models
Some checks failed
Run Tests / test (push) Has been cancelled
Upstream Sync / Sync latest commits from upstream repo (push) Has been cancelled
feat: new models for xAI (#6559)
2025-09-29 19:37:17 +08:00
Adekunle
12ec11ed8a feat: new models for xAI (#6559) 2025-09-20 00:09:59 +01:00
RiverRay
995bef73de Merge pull request #6599 from DreamRivulet/add-support-GPT5
Some checks failed
Run Tests / test (push) Has been cancelled
add: model gpt-5
2025-08-10 17:21:12 +08:00
Sam
38ac502d80 Add support for GPT5 2025-08-09 17:03:49 +08:00
Sam
0511808900 use max_completion_tokens 2025-08-09 17:03:49 +08:00
Sam
42eff644b4 use max_completion_tokens 2025-08-09 17:03:49 +08:00
Sam
8ae6883784 add gpt-5 2025-08-09 17:03:49 +08:00
Sam
c0f2ab6de3 add gpt-5 2025-08-09 17:03:06 +08:00
34 changed files with 153 additions and 595 deletions

View File

@@ -1 +0,0 @@
nodeLinker: node-modules

View File

@@ -107,8 +107,7 @@ export interface LLMModelProvider {
export abstract class LLMApi {
abstract chat(options: ChatOptions): Promise<void>;
abstract speech(options: SpeechOptions): Promise<ArrayBuffer | AudioBuffer>;
abstract streamSpeech?(options: SpeechOptions): AsyncGenerator<AudioBuffer>;
abstract speech(options: SpeechOptions): Promise<ArrayBuffer>;
abstract usage(): Promise<LLMUsage>;
abstract models(): Promise<LLMModel[]>;
}

View File

@@ -1,17 +1,11 @@
"use client";
import {
ApiPath,
Alibaba,
ALIBABA_BASE_URL,
REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import { ApiPath, Alibaba, ALIBABA_BASE_URL } from "@/app/constant";
import {
useAccessStore,
useAppConfig,
useChatStore,
ChatMessageTool,
usePluginStore,
FunctionToolItem,
} from "@/app/store";
import {
preProcessImageContentForAlibabaDashScope,
@@ -57,8 +51,6 @@ interface RequestParam {
repetition_penalty?: number;
top_p: number;
max_tokens?: number;
tools?: FunctionToolItem[];
enable_search?: boolean;
}
interface RequestPayload {
model: string;
@@ -67,7 +59,6 @@ interface RequestPayload {
}
export class QwenApi implements LLMApi {
private static audioContext: AudioContext | null = null;
path(path: string): string {
const accessStore = useAccessStore.getState();
@@ -98,72 +89,10 @@ export class QwenApi implements LLMApi {
return res?.output?.choices?.at(0)?.message?.content ?? "";
}
async speech(options: SpeechOptions): Promise<ArrayBuffer> {
speech(options: SpeechOptions): Promise<ArrayBuffer> {
throw new Error("Method not implemented.");
}
async *streamSpeech(options: SpeechOptions): AsyncGenerator<AudioBuffer> {
const requestPayload = {
model: options.model,
input: {
text: options.input,
voice: options.voice,
},
speed: options.speed,
response_format: options.response_format,
};
const controller = new AbortController();
options.onController?.(controller);
try {
const speechPath = this.path(Alibaba.SpeechPath);
const speechPayload = {
method: "POST",
body: JSON.stringify(requestPayload),
signal: controller.signal,
headers: {
...getHeaders(),
"X-DashScope-SSE": "enable",
},
};
// make a fetch request
const requestTimeoutId = setTimeout(
() => controller.abort(),
REQUEST_TIMEOUT_MS,
);
const res = await fetch(speechPath, speechPayload);
clearTimeout(requestTimeoutId); // Clear timeout on successful connection
const reader = res.body!.getReader();
const decoder = new TextDecoder();
let buffer = "";
while (true) {
const { done, value } = await reader.read();
if (done) {
break;
}
buffer += decoder.decode(value, { stream: true });
const lines = buffer.split("\n");
buffer = lines.pop() || "";
for (const line of lines) {
if (line.startsWith("data:")) {
const data = line.slice(5);
const json = JSON.parse(data);
if (json.output?.audio?.data) {
yield this.PCMBase64ToAudioBuffer(json.output.audio.data);
}
}
}
}
reader.releaseLock();
} catch (e) {
console.log("[Request] failed to make a speech request", e);
throw e;
}
}
async chat(options: ChatOptions) {
const modelConfig = {
...useAppConfig.getState().modelConfig,
@@ -200,7 +129,6 @@ export class QwenApi implements LLMApi {
temperature: modelConfig.temperature,
// max_tokens: modelConfig.max_tokens,
top_p: modelConfig.top_p === 1 ? 0.99 : modelConfig.top_p, // qwen requires top_p to be < 1
enable_search: modelConfig.enableNetWork,
},
};
@@ -233,16 +161,11 @@ export class QwenApi implements LLMApi {
.getAsTools(
useChatStore.getState().currentSession().mask?.plugin || [],
);
// console.log("getAsTools", tools, funcs);
const _tools = tools as unknown as FunctionToolItem[];
if (_tools && _tools.length > 0) {
requestPayload.parameters.tools = _tools;
}
return streamWithThink(
chatPath,
requestPayload,
headers,
[],
tools as any,
funcs,
controller,
// parseSSE
@@ -275,7 +198,7 @@ export class QwenApi implements LLMApi {
});
} else {
// @ts-ignore
runTools[index]["function"]["arguments"] += args || "";
runTools[index]["function"]["arguments"] += args;
}
}
@@ -350,79 +273,5 @@ export class QwenApi implements LLMApi {
async models(): Promise<LLMModel[]> {
return [];
}
// 播放 PCM base64 数据
private async PCMBase64ToAudioBuffer(base64Data: string) {
try {
// 解码 base64
const binaryString = atob(base64Data);
const bytes = new Uint8Array(binaryString.length);
for (let i = 0; i < binaryString.length; i++) {
bytes[i] = binaryString.charCodeAt(i);
}
// 转换为 AudioBuffer
const audioBuffer = await this.convertToAudioBuffer(bytes);
return audioBuffer;
} catch (error) {
console.error("播放 PCM 数据失败:", error);
throw error;
}
}
private static getAudioContext(): AudioContext {
if (!QwenApi.audioContext) {
QwenApi.audioContext = new (window.AudioContext ||
window.webkitAudioContext)();
}
return QwenApi.audioContext;
}
// 将 PCM 字节数据转换为 AudioBuffer
private convertToAudioBuffer(pcmData: Uint8Array) {
const audioContext = QwenApi.getAudioContext();
const channels = 1;
const sampleRate = 24000;
return new Promise<AudioBuffer>((resolve, reject) => {
try {
let float32Array;
// 16位 PCM 转换为 32位浮点数
float32Array = this.pcm16ToFloat32(pcmData);
// 创建 AudioBuffer
const audioBuffer = audioContext.createBuffer(
channels,
float32Array.length / channels,
sampleRate,
);
// 复制数据到 AudioBuffer
for (let channel = 0; channel < channels; channel++) {
const channelData = audioBuffer.getChannelData(channel);
for (let i = 0; i < channelData.length; i++) {
channelData[i] = float32Array[i * channels + channel];
}
}
resolve(audioBuffer);
} catch (error) {
reject(error);
}
});
}
// 16位 PCM 转 32位浮点数
private pcm16ToFloat32(pcmData: Uint8Array) {
const length = pcmData.length / 2;
const float32Array = new Float32Array(length);
for (let i = 0; i < length; i++) {
const int16 = (pcmData[i * 2 + 1] << 8) | pcmData[i * 2];
const int16Signed = int16 > 32767 ? int16 - 65536 : int16;
float32Array[i] = int16Signed / 32768;
}
return float32Array;
}
}
export { Alibaba };

View File

@@ -200,6 +200,7 @@ export class ChatGPTApi implements LLMApi {
options.config.model.startsWith("o1") ||
options.config.model.startsWith("o3") ||
options.config.model.startsWith("o4-mini");
const isGpt5 = options.config.model.startsWith("gpt-5");
if (isDalle3) {
const prompt = getMessageTextContent(
options.messages.slice(-1)?.pop() as any,
@@ -230,7 +231,7 @@ export class ChatGPTApi implements LLMApi {
messages,
stream: options.config.stream,
model: modelConfig.model,
temperature: !isO1OrO3 ? modelConfig.temperature : 1,
temperature: (!isO1OrO3 && !isGpt5) ? modelConfig.temperature : 1,
presence_penalty: !isO1OrO3 ? modelConfig.presence_penalty : 0,
frequency_penalty: !isO1OrO3 ? modelConfig.frequency_penalty : 0,
top_p: !isO1OrO3 ? modelConfig.top_p : 1,
@@ -238,7 +239,13 @@ export class ChatGPTApi implements LLMApi {
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
};
if (isO1OrO3) {
if (isGpt5) {
// Remove max_tokens if present
delete requestPayload.max_tokens;
// GPT-5 models expect max_completion_tokens instead of max_tokens
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
} else if (isO1OrO3) {
// by default the o1/o3 models will not attempt to produce output that includes markdown formatting
// manually add "Formatting re-enabled" developer message to encourage markdown inclusion in model responses
// (https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/reasoning?tabs=python-secure#markdown-output)
@@ -251,8 +258,9 @@ export class ChatGPTApi implements LLMApi {
requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
}
// add max_tokens to vision model
if (visionModel && !isO1OrO3) {
if (visionModel && !isO1OrO3 && ! isGpt5) {
requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
}
}

View File

@@ -48,7 +48,6 @@ import PluginIcon from "../icons/plugin.svg";
import ShortcutkeyIcon from "../icons/shortcutkey.svg";
import McpToolIcon from "../icons/tool.svg";
import HeadphoneIcon from "../icons/headphone.svg";
import NetWorkIcon from "../icons/network.svg";
import {
BOT_HELLO,
ChatMessage,
@@ -76,7 +75,6 @@ import {
useMobileScreen,
selectOrCopy,
showPlugins,
canUseNetWork,
} from "../utils";
import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
@@ -103,6 +101,8 @@ import {
import { useNavigate } from "react-router-dom";
import {
CHAT_PAGE_SIZE,
DEFAULT_TTS_ENGINE,
ModelProvider,
Path,
REQUEST_TIMEOUT_MS,
ServiceProvider,
@@ -512,7 +512,6 @@ export function ChatActions(props: {
// switch themes
const theme = config.theme;
const enableNetWork = session.mask.modelConfig.enableNetWork || false;
function nextTheme() {
const themes = [Theme.Auto, Theme.Light, Theme.Dark];
@@ -522,13 +521,6 @@ export function ChatActions(props: {
config.update((config) => (config.theme = nextTheme));
}
function nextNetWork() {
chatStore.updateTargetSession(session, (session) => {
session.mask.modelConfig.enableNetWork =
!session.mask.modelConfig.enableNetWork;
});
}
// stop all responses
const couldStop = ChatControllerPool.hasPending();
const stopAll = () => ChatControllerPool.stopAll();
@@ -841,16 +833,6 @@ export function ChatActions(props: {
/>
)}
{!isMobileScreen && <MCPAction />}
{canUseNetWork(currentModel) && (
<ChatAction
onClick={nextNetWork}
text={
Locale.Chat.InputActions.NetWork[enableNetWork ? "on" : "off"]
}
icon={<NetWorkIcon />}
/>
)}
</>
<div className={styles["chat-input-actions-end"]}>
{config.realtimeConfig.enable && (
@@ -1304,7 +1286,6 @@ function _Chat() {
const accessStore = useAccessStore();
const [speechStatus, setSpeechStatus] = useState(false);
const [speechLoading, setSpeechLoading] = useState(false);
const [speechCooldown, setSpeechCooldown] = useState(false);
async function openaiSpeech(text: string) {
if (speechStatus) {
@@ -1312,14 +1293,14 @@ function _Chat() {
setSpeechStatus(false);
} else {
var api: ClientApi;
api = new ClientApi(ModelProvider.GPT);
const config = useAppConfig.getState();
api = new ClientApi(config.ttsConfig.modelProvider);
setSpeechLoading(true);
ttsPlayer.init();
let audioBuffer: ArrayBuffer | AudioBuffer;
let audioBuffer: ArrayBuffer;
const { markdownToTxt } = require("markdown-to-txt");
const textContent = markdownToTxt(text);
if (config.ttsConfig.engine === "Edge") {
if (config.ttsConfig.engine !== DEFAULT_TTS_ENGINE) {
const edgeVoiceName = accessStore.edgeVoiceName();
const tts = new MsEdgeTTS();
await tts.setMetadata(
@@ -1327,60 +1308,28 @@ function _Chat() {
OUTPUT_FORMAT.AUDIO_24KHZ_96KBITRATE_MONO_MP3,
);
audioBuffer = await tts.toArrayBuffer(textContent);
playSpeech(audioBuffer);
} else {
if (api.llm.streamSpeech) {
// 使用流式播放,边接收边播放
setSpeechStatus(true);
ttsPlayer.startStreamPlay(() => {
setSpeechStatus(false);
});
try {
for await (const chunk of api.llm.streamSpeech({
model: config.ttsConfig.model,
input: textContent,
voice: config.ttsConfig.voice,
speed: config.ttsConfig.speed,
})) {
ttsPlayer.addToQueue(chunk);
}
ttsPlayer.finishStreamPlay();
} catch (e) {
console.error("[Stream Speech]", e);
showToast(prettyObject(e));
setSpeechStatus(false);
ttsPlayer.stop();
} finally {
setSpeechLoading(false);
}
} else {
audioBuffer = await api.llm.speech({
model: config.ttsConfig.model,
input: textContent,
voice: config.ttsConfig.voice,
speed: config.ttsConfig.speed,
});
playSpeech(audioBuffer);
}
audioBuffer = await api.llm.speech({
model: config.ttsConfig.model,
input: textContent,
voice: config.ttsConfig.voice,
speed: config.ttsConfig.speed,
});
}
setSpeechStatus(true);
ttsPlayer
.play(audioBuffer, () => {
setSpeechStatus(false);
})
.catch((e) => {
console.error("[OpenAI Speech]", e);
showToast(prettyObject(e));
setSpeechStatus(false);
})
.finally(() => setSpeechLoading(false));
}
}
function playSpeech(audioBuffer: ArrayBuffer | AudioBuffer) {
setSpeechStatus(true);
ttsPlayer
.play(audioBuffer, () => {
setSpeechStatus(false);
})
.catch((e) => {
console.error("[OpenAI Speech]", e);
showToast(prettyObject(e));
setSpeechStatus(false);
})
.finally(() => setSpeechLoading(false));
}
const context: RenderMessage[] = useMemo(() => {
return session.mask.hideContext ? [] : session.mask.context.slice();
}, [session.mask.context, session.mask.hideContext]);

View File

@@ -3,9 +3,10 @@ import { TTSConfig, TTSConfigValidator } from "../store";
import Locale from "../locales";
import { ListItem, Select } from "./ui-lib";
import {
ServiceProvider,
TTS_CONFIGS,
TTSEngineType
DEFAULT_TTS_ENGINE,
DEFAULT_TTS_ENGINES,
DEFAULT_TTS_MODELS,
DEFAULT_TTS_VOICES,
} from "../constant";
import { InputRange } from "./input-range";
@@ -47,33 +48,22 @@ export function TTSConfigList(props: {
<Select
value={props.ttsConfig.engine}
onChange={(e) => {
const newEngine = e.currentTarget.value as TTSEngineType;
props.updateConfig(
(config) => {
config.engine = TTSConfigValidator.engine(newEngine);
const engineConfig = TTS_CONFIGS[newEngine];
config.model = TTSConfigValidator.model(
engineConfig.Model[0] || ""
);
config.voice = TTSConfigValidator.voice(
engineConfig.Voices[0] || ""
);
config.modelProvider = TTSConfigValidator.modelProvider(
engineConfig.ModelProvider
);
}
(config) =>
(config.engine = TTSConfigValidator.engine(
e.currentTarget.value,
)),
);
}}
>
{Object.keys(TTS_CONFIGS).map((v, i) => (
{DEFAULT_TTS_ENGINES.map((v, i) => (
<option value={v} key={i}>
{v}-TTS
{v}
</option>
))}
</Select>
</ListItem>
{(props.ttsConfig.engine === ServiceProvider.OpenAI ||
props.ttsConfig.engine === ServiceProvider.Alibaba) && (
{props.ttsConfig.engine === DEFAULT_TTS_ENGINE && (
<>
<ListItem title={Locale.Settings.TTS.Model}>
<Select
@@ -87,7 +77,7 @@ export function TTSConfigList(props: {
);
}}
>
{TTS_CONFIGS[props.ttsConfig.engine]!.Model.map((v, i) => (
{DEFAULT_TTS_MODELS.map((v, i) => (
<option value={v} key={i}>
{v}
</option>
@@ -109,7 +99,7 @@ export function TTSConfigList(props: {
);
}}
>
{TTS_CONFIGS[props.ttsConfig.engine]!.Voices.map((v, i) => (
{DEFAULT_TTS_VOICES.map((v, i) => (
<option value={v} key={i}>
{v}
</option>

View File

@@ -232,7 +232,6 @@ export const Alibaba = {
}
return `v1/services/aigc/text-generation/generation`;
},
SpeechPath: "v1/services/aigc/multimodal-generation/generation",
};
export const Tencent = {
@@ -462,49 +461,19 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"deepseek-coder": "2024-07",
};
export const DEFAULT_TTS_ENGINE = ServiceProvider.OpenAI;
export const DEFAULT_TTS_ENGINE = "OpenAI-TTS";
export const DEFAULT_TTS_ENGINES = ["OpenAI-TTS", "Edge-TTS"];
export const DEFAULT_TTS_MODEL = "tts-1";
export const DEFAULT_TTS_VOICE = "alloy";
export const OPENAI_TTS = {
Provider: ServiceProvider.OpenAI,
ModelProvider: ModelProvider.GPT,
Model: ["tts-1", "tts-1-hd"],
Voices: ["alloy", "echo", "fable", "onyx", "nova", "shimmer"],
} as const;
export const ALIBABA_TTS = {
Provider: ServiceProvider.Alibaba,
ModelProvider: ModelProvider.Qwen,
Model: ["qwen-tts", "qwen-tts-latest"],
Voices: ["Chelsie", "Cherry", "Ethan", "Serena", "Dylan", "Jada", "Sunny"],
} as const;
export const EDGE_TTS = {
Provider: "Edge" as const,
ModelProvider: ModelProvider.GPT,
Model: [] as string[],
Voices: [] as string[],
} as const;
export type TTSEngineType = ServiceProvider.OpenAI | ServiceProvider.Alibaba | "Edge";
export const DEFAULT_TTS_ENGINES = [ServiceProvider.OpenAI, ServiceProvider.Alibaba, "Edge"] as const;
export const DEFAULT_TTS_MODELS = [...OPENAI_TTS.Model, ...ALIBABA_TTS.Model] as const;
export const DEFAULT_TTS_VOICES = [...OPENAI_TTS.Voices, ...ALIBABA_TTS.Voices] as const;
interface TTSConfigItem {
Provider: ServiceProvider | "Edge";
Model: readonly string[];
Voices: readonly string[];
ModelProvider: ModelProvider;
}
export const TTS_CONFIGS: Record<TTSEngineType, TTSConfigItem> = {
[ServiceProvider.OpenAI]: OPENAI_TTS,
[ServiceProvider.Alibaba]: ALIBABA_TTS,
Edge: EDGE_TTS,
} as const;
export const DEFAULT_TTS_MODELS = ["tts-1", "tts-1-hd"];
export const DEFAULT_TTS_VOICES = [
"alloy",
"echo",
"fable",
"onyx",
"nova",
"shimmer",
];
export const VISION_MODEL_REGEXES = [
/vision/,
@@ -524,6 +493,7 @@ export const VISION_MODEL_REGEXES = [
/o3/,
/o4-mini/,
/grok-4/i,
/gpt-5/
];
export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
@@ -548,6 +518,11 @@ const openaiModels = [
"gpt-4.1-nano-2025-04-14",
"gpt-4.5-preview",
"gpt-4.5-preview-2025-02-27",
"gpt-5-chat",
"gpt-5-mini",
"gpt-5-nano",
"gpt-5",
"gpt-5-chat-2025-01-01-preview",
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-2024-08-06",
@@ -698,6 +673,11 @@ const xAIModes = [
"grok-3-beta",
"grok-3",
"grok-3-latest",
"grok-4",
"grok-4-0709",
"grok-4-fast-non-reasoning",
"grok-4-fast-reasoning",
"grok-code-fast-1",
];
const chatglmModels = [
@@ -951,4 +931,3 @@ export const DEFAULT_GA_ID = "G-89WN60ZK2E";
export const SAAS_CHAT_URL = "https://nextchat.club";
export const SAAS_CHAT_UTM_URL = "https://nextchat.club?utm=github";

View File

@@ -1 +0,0 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1754388361314" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1734" xmlns:xlink="http://www.w3.org/1999/xlink" width="16" height="16"><path d="M522.666667 42.666667c3.776 0 7.530667 0.170667 11.242666 0.490666C782.954667 54.613333 981.333333 260.138667 981.333333 512c0 251.861333-198.4 457.386667-447.424 468.821333-3.712 0.341333-7.466667 0.512-11.242666 0.512l-3.285334-0.064C516.906667 981.333333 514.474667 981.333333 512 981.333333 252.8 981.333333 42.666667 771.2 42.666667 512S252.8 42.666667 512 42.666667l7.658667 0.042666L522.666667 42.666667zM490.666667 533.333333h-149.056c4.842667 191.082667 74.069333 342.08 149.056 376.576V533.333333z m213.056 0H554.666667v376.576c74.986667-34.517333 144.213333-185.514667 149.056-376.554666z m-426.133334 0H107.221333c8.746667 168.853333 120.853333 310.4 274.261334 362.517334-60.16-81.109333-100.394667-212.650667-103.893334-362.496z m639.189334 0h-149.034667c-3.349333 143.104-40.170667 269.504-95.872 351.253334C810.048 825.216 908.586667 691.221333 916.778667 533.333333zM381.482667 128.128c-146.986667 50.069333-255.936 181.909333-272.597334 341.226667h169.450667c6.634667-140.970667 45.866667-263.978667 103.146667-341.226667zM342.4 469.333333H490.666667V114.090667C418.496 147.285333 351.637333 288.426667 342.4 469.333333zM554.666667 114.090667L554.666667 469.333333h148.266666C693.674667 288.448 626.837333 147.306667 554.666667 114.090667z m117.184 25.322666l1.834666 2.730667c51.904 77.674667 87.04 194.474667 93.290667 327.189333h148.117333c-15.530667-148.565333-111.317333-273.237333-243.242666-329.92z" fill="#333333" p-id="1735"></path></svg>

Before

Width:  |  Height:  |  Size: 1.8 KiB

View File

@@ -72,10 +72,6 @@ const ar: PartialLocaleType = {
light: "الوضع الفاتح",
dark: "الوضع الداكن",
},
NetWork: {
on: "تفعيل البحث عبر الإنترنت",
off: "إيقاف البحث عبر الإنترنت",
},
Prompt: "الأوامر السريعة",
Masks: "جميع الأقنعة",
Clear: "مسح الدردشة",

View File

@@ -72,10 +72,6 @@ const bn: PartialLocaleType = {
light: "আলোর মোড",
dark: "অন্ধকার মোড",
},
NetWork: {
on: "ওয়েব অনুসন্ধান সক্রিয় করুন",
off: "ওয়েব অনুসন্ধান নিষ্ক্রিয় করুন",
},
Prompt: "সংক্ষিপ্ত নির্দেশনা",
Masks: "সমস্ত মাস্ক",
Clear: "চ্যাট পরিষ্কার করুন",

View File

@@ -76,10 +76,6 @@ const cn = {
light: "亮色模式",
dark: "深色模式",
},
NetWork: {
on: "开启联网搜索",
off: "关闭联网搜索",
},
Prompt: "快捷指令",
Masks: "所有面具",
Clear: "清除聊天",

View File

@@ -72,10 +72,6 @@ const cs: PartialLocaleType = {
light: "Světelný režim",
dark: "Tmavý režim",
},
NetWork: {
on: "Povolit webové vyhledávání",
off: "Zakázat webové vyhledávání",
},
Prompt: "Rychlé příkazy",
Masks: "Všechny masky",
Clear: "Vymazat konverzaci",

View File

@@ -74,10 +74,6 @@ const da: PartialLocaleType = {
light: "Lyst tema",
dark: "Mørkt tema",
},
NetWork: {
on: "Aktivér web-søgning",
off: "Deaktivér web-søgning",
},
Prompt: "Prompts",
Masks: "Personaer",
Clear: "Ryd kontekst",

View File

@@ -73,10 +73,6 @@ const de: PartialLocaleType = {
light: "Helles Thema",
dark: "Dunkles Thema",
},
NetWork: {
on: "Web-Suche aktivieren",
off: "Web-Suche deaktivieren",
},
Prompt: "Schnellbefehle",
Masks: "Alle Masken",
Clear: "Chat löschen",
@@ -441,8 +437,7 @@ const de: PartialLocaleType = {
AI302: {
ApiKey: {
Title: "Schnittstellenschlüssel",
SubTitle:
"Verwenden Sie einen benutzerdefinierten 302.AI API-Schlüssel",
SubTitle: "Verwenden Sie einen benutzerdefinierten 302.AI API-Schlüssel",
Placeholder: "302.AI API-Schlüssel",
},
Endpoint: {

View File

@@ -77,10 +77,6 @@ const en: LocaleType = {
light: "Light Theme",
dark: "Dark Theme",
},
NetWork: {
on: "Enable Web Search",
off: "Disable Web Search",
},
Prompt: "Prompts",
Masks: "Masks",
Clear: "Clear Context",

View File

@@ -74,10 +74,6 @@ const es: PartialLocaleType = {
light: "Modo claro",
dark: "Modo oscuro",
},
NetWork: {
on: "Habilitar búsqueda web",
off: "Deshabilitar búsqueda web",
},
Prompt: "Comandos rápidos",
Masks: "Todas las máscaras",
Clear: "Limpiar chat",

View File

@@ -73,10 +73,6 @@ const fr: PartialLocaleType = {
light: "Mode clair",
dark: "Mode sombre",
},
NetWork: {
on: "Activer la recherche web",
off: "Désactiver la recherche web",
},
Prompt: "Commandes rapides",
Masks: "Tous les masques",
Clear: "Effacer la discussion",

View File

@@ -72,10 +72,6 @@ const id: PartialLocaleType = {
light: "Mode Terang",
dark: "Mode Gelap",
},
NetWork: {
on: "Aktifkan pencarian web",
off: "Nonaktifkan pencarian web",
},
Prompt: "Perintah Cepat",
Masks: "Semua Masker",
Clear: "Hapus Obrolan",

View File

@@ -73,10 +73,6 @@ const it: PartialLocaleType = {
light: "Tema chiaro",
dark: "Tema scuro",
},
NetWork: {
on: "Abilita ricerca web",
off: "Disabilita ricerca web",
},
Prompt: "Comandi rapidi",
Masks: "Tutte le maschere",
Clear: "Pulisci chat",

View File

@@ -72,10 +72,6 @@ const jp: PartialLocaleType = {
light: "ライトモード",
dark: "ダークモード",
},
NetWork: {
on: "ウェブ検索を有効化",
off: "ウェブ検索を無効化",
},
Prompt: "クイックコマンド",
Masks: "すべてのマスク",
Clear: "チャットをクリア",

View File

@@ -76,10 +76,6 @@ const ko: PartialLocaleType = {
light: "라이트 모드",
dark: "다크 모드",
},
NetWork: {
on: "웹 검색 활성화",
off: "웹 검색 비활성화",
},
Prompt: "빠른 명령",
Masks: "모든 마스크",
Clear: "채팅 지우기",

View File

@@ -74,10 +74,6 @@ const no: PartialLocaleType = {
light: "Lyst tema",
dark: "Mørkt tema",
},
NetWork: {
on: "Aktiver web-søk",
off: "Deaktiver web-søk",
},
Prompt: "Hurtigkommando",
Masks: "Alle masker",
Clear: "Rydd samtale",

View File

@@ -72,10 +72,6 @@ const pt: PartialLocaleType = {
light: "Tema Claro",
dark: "Tema Escuro",
},
NetWork: {
on: "Ativar pesquisa web",
off: "Desativar pesquisa web",
},
Prompt: "Prompts",
Masks: "Máscaras",
Clear: "Limpar Contexto",

View File

@@ -72,10 +72,6 @@ const ru: PartialLocaleType = {
light: "Светлая тема",
dark: "Темная тема",
},
NetWork: {
on: "Включить веб-поиск",
off: "Отключить веб-поиск",
},
Prompt: "Быстрая команда",
Masks: "Все маски",
Clear: "Очистить чат",

View File

@@ -73,10 +73,6 @@ const sk: PartialLocaleType = {
light: "Svetlý motív",
dark: "Tmavý motív",
},
NetWork: {
on: "Povoliť webové vyhľadávanie",
off: "Zakázať webové vyhľadávanie",
},
Prompt: "Výzvy",
Masks: "Masky",
Clear: "Vymazať kontext",

View File

@@ -72,10 +72,6 @@ const tr: PartialLocaleType = {
light: "Açık mod",
dark: "Koyu mod",
},
NetWork: {
on: "Web aramasını etkinleştir",
off: "Web aramasını devre dışı bırak",
},
Prompt: "Kısayol komutu",
Masks: "Tüm maskeler",
Clear: "Sohbeti temizle",

View File

@@ -72,10 +72,6 @@ const tw = {
light: "亮色模式",
dark: "深色模式",
},
NetWork: {
on: "開啟網路搜尋",
off: "關閉網路搜尋",
},
Prompt: "快捷指令",
Masks: "所有角色範本",
Clear: "清除聊天",

View File

@@ -72,10 +72,6 @@ const vi: PartialLocaleType = {
light: "Chế độ sáng",
dark: "Chế độ tối",
},
NetWork: {
on: "Bật tìm kiếm web",
off: "Tắt tìm kiếm web",
},
Prompt: "Lệnh tắt",
Masks: "Tất cả mặt nạ",
Clear: "Xóa cuộc trò chuyện",

View File

@@ -6,14 +6,13 @@ import {
DEFAULT_MODELS,
DEFAULT_SIDEBAR_WIDTH,
DEFAULT_TTS_ENGINE,
DEFAULT_TTS_ENGINES,
DEFAULT_TTS_MODEL,
DEFAULT_TTS_MODELS,
DEFAULT_TTS_VOICE,
DEFAULT_TTS_VOICES,
StoreKey,
ServiceProvider,
TTSEngineType,
ModelProvider,
} from "../constant";
import { createPersistStore } from "../utils/store";
import type { Voice } from "rt-client";
@@ -21,6 +20,7 @@ import type { Voice } from "rt-client";
export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];
export type TTSModelType = (typeof DEFAULT_TTS_MODELS)[number];
export type TTSVoiceType = (typeof DEFAULT_TTS_VOICES)[number];
export type TTSEngineType = (typeof DEFAULT_TTS_ENGINES)[number];
export enum SubmitKey {
Enter = "Enter",
@@ -81,14 +81,12 @@ export const DEFAULT_CONFIG = {
size: "1024x1024" as ModelSize,
quality: "standard" as DalleQuality,
style: "vivid" as DalleStyle,
enableNetWork: false,
},
ttsConfig: {
enable: false,
autoplay: false,
modelProvider: ModelProvider.GPT,
engine: DEFAULT_TTS_ENGINE as TTSEngineType,
engine: DEFAULT_TTS_ENGINE,
model: DEFAULT_TTS_MODEL,
voice: DEFAULT_TTS_VOICE,
speed: 1.0,
@@ -128,21 +126,18 @@ export function limitNumber(
}
export const TTSConfigValidator = {
engine(x: string | TTSEngineType): TTSEngineType {
engine(x: string) {
return x as TTSEngineType;
},
model(x: string): TTSModelType {
model(x: string) {
return x as TTSModelType;
},
voice(x: string): TTSVoiceType {
voice(x: string) {
return x as TTSVoiceType;
},
speed(x: number): number {
speed(x: number) {
return limitNumber(x, 0.25, 4.0, 1.0);
},
modelProvider(x: string): ModelProvider {
return x as ModelProvider;
},
};
export const ModalConfigValidator = {

View File

@@ -296,15 +296,6 @@ export function isDalle3(model: string) {
return "dall-e-3" === model;
}
export function canUseNetWork(model: string) {
return (
model.includes("qwen-max") ||
model.includes("qwen-plus") ||
model.includes("qwen-turbo") ||
model.includes("qwq")
);
}
export function getTimeoutMSByModel(model: string) {
model = model.toLowerCase();
if (
@@ -356,12 +347,6 @@ export function showPlugins(provider: ServiceProvider, model: string) {
if (provider == ServiceProvider.Google && !model.includes("vision")) {
return true;
}
if (
provider == ServiceProvider.Alibaba &&
(model.includes("qwen") || model.includes("deepseek"))
) {
return true;
}
return false;
}

View File

@@ -1,48 +1,25 @@
type TTSPlayer = {
init: () => void;
play: (
audioBuffer: ArrayBuffer | AudioBuffer,
onended: () => void | null,
) => Promise<void>;
playQueue: (
audioBuffers: (ArrayBuffer | AudioBuffer)[],
onended: () => void | null,
) => Promise<void>;
addToQueue: (audioBuffer: ArrayBuffer | AudioBuffer) => void;
startStreamPlay: (onended: () => void | null) => void;
finishStreamPlay: () => void;
play: (audioBuffer: ArrayBuffer, onended: () => void | null) => Promise<void>;
stop: () => void;
};
export function createTTSPlayer(): TTSPlayer {
let audioContext: AudioContext | null = null;
let audioBufferSourceNode: AudioBufferSourceNode | null = null;
let isPlaying = false;
let playQueue: (ArrayBuffer | AudioBuffer)[] = [];
let currentOnended: (() => void | null) | null = null;
let isStreamMode = false;
let streamFinished = false;
const init = () => {
console.log("[TTSPlayer] init");
audioContext = new (window.AudioContext || window.webkitAudioContext)();
audioContext.suspend();
};
const play = async (
audioBuffer: ArrayBuffer | AudioBuffer,
onended: () => void | null,
) => {
const play = async (audioBuffer: ArrayBuffer, onended: () => void | null) => {
if (audioBufferSourceNode) {
audioBufferSourceNode.stop();
audioBufferSourceNode.disconnect();
}
let buffer: AudioBuffer;
if (audioBuffer instanceof AudioBuffer) {
buffer = audioBuffer;
} else {
buffer = await audioContext!.decodeAudioData(audioBuffer);
}
const buffer = await audioContext!.decodeAudioData(audioBuffer);
audioBufferSourceNode = audioContext!.createBufferSource();
audioBufferSourceNode.buffer = buffer;
audioBufferSourceNode.connect(audioContext!.destination);
@@ -52,109 +29,17 @@ export function createTTSPlayer(): TTSPlayer {
audioBufferSourceNode.onended = onended;
};
const playNext = async () => {
if (playQueue.length === 0) {
// 在流模式下,如果队列为空但流还没结束,等待
if (isStreamMode && !streamFinished) {
setTimeout(() => playNext(), 100);
return;
}
isPlaying = false;
isStreamMode = false;
streamFinished = false;
if (currentOnended) {
currentOnended();
currentOnended = null;
}
return;
}
const nextBuffer = playQueue.shift()!;
let buffer: AudioBuffer;
if (nextBuffer instanceof AudioBuffer) {
buffer = nextBuffer;
} else {
buffer = await audioContext!.decodeAudioData(nextBuffer);
}
if (audioBufferSourceNode) {
audioBufferSourceNode.stop();
audioBufferSourceNode.disconnect();
}
audioBufferSourceNode = audioContext!.createBufferSource();
audioBufferSourceNode.buffer = buffer;
audioBufferSourceNode.connect(audioContext!.destination);
audioBufferSourceNode.onended = () => {
playNext();
};
await audioContext!.resume();
audioBufferSourceNode.start();
};
const playQueueMethod = async (
audioBuffers: (ArrayBuffer | AudioBuffer)[],
onended: () => void | null,
) => {
playQueue = [...audioBuffers];
currentOnended = onended;
if (!isPlaying) {
isPlaying = true;
await playNext();
}
};
const addToQueue = (audioBuffer: ArrayBuffer | AudioBuffer) => {
if (streamFinished) {
return;
}
playQueue.push(audioBuffer);
};
const startStreamPlay = (onended: () => void | null) => {
isStreamMode = true;
streamFinished = false;
playQueue = [];
currentOnended = onended;
if (!isPlaying) {
isPlaying = true;
playNext();
}
};
const finishStreamPlay = () => {
streamFinished = true;
};
const stop = async () => {
console.log("[TTSPlayer] stop");
playQueue = [];
isPlaying = false;
isStreamMode = false;
streamFinished = true;
currentOnended = null;
const stop = () => {
if (audioBufferSourceNode) {
audioBufferSourceNode.stop();
audioBufferSourceNode.disconnect();
audioBufferSourceNode = null;
}
if (audioContext) {
await audioContext.close();
audioContext.close();
audioContext = null;
}
};
return {
init,
play,
playQueue: playQueueMethod,
addToQueue,
startStreamPlay,
finishStreamPlay,
stop,
};
return { init, play, stop };
}

View File

@@ -60,7 +60,7 @@
},
"devDependencies": {
"@tauri-apps/api": "^2.1.1",
"@tauri-apps/cli": "1.5.11",
"@tauri-apps/cli": "2.9.1",
"@testing-library/dom": "^10.4.0",
"@testing-library/jest-dom": "^6.6.3",
"@testing-library/react": "^16.1.0",
@@ -93,9 +93,5 @@
"resolutions": {
"lint-staged/yaml": "^2.2.2"
},
"packageManager": "yarn@1.22.19",
"volta": {
"node": "20.19.4",
"yarn": "1.22.22"
}
"packageManager": "yarn@1.22.19"
}

View File

@@ -2,16 +2,16 @@
{
"id": "dalle3",
"name": "Dalle3",
"schema": "https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/dalle/openapi.json"
"schema": "https://ghp.ci/https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/dalle/openapi.json"
},
{
"id": "arxivsearch",
"name": "ArxivSearch",
"schema": "https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/arxivsearch/openapi.json"
"schema": "https://ghp.ci/https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/arxivsearch/openapi.json"
},
{
"id": "duckduckgolite",
"name": "DuckDuckGoLiteSearch",
"schema": "https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/duckduckgolite/openapi.json"
"schema": "https://ghp.ci/https://raw.githubusercontent.com/ChatGPTNextWeb/NextChat-Awesome-Plugins/main/plugins/duckduckgolite/openapi.json"
}
]

114
yarn.lock
View File

@@ -2043,71 +2043,77 @@
resolved "https://registry.yarnpkg.com/@tauri-apps/api/-/api-2.1.1.tgz#77d4ddb683d31072de4e6a47c8613d9db011652b"
integrity sha512-fzUfFFKo4lknXGJq8qrCidkUcKcH2UHhfaaCNt4GzgzGaW2iS26uFOg4tS3H4P8D6ZEeUxtiD5z0nwFF0UN30A==
"@tauri-apps/cli-darwin-arm64@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-1.5.11.tgz#a831f98f685148e46e8050dbdddbf4bcdda9ddc6"
integrity sha512-2NLSglDb5VfvTbMtmOKWyD+oaL/e8Z/ZZGovHtUFyUSFRabdXc6cZOlcD1BhFvYkHqm+TqGaz5qtPR5UbqDs8A==
"@tauri-apps/cli-darwin-arm64@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.9.1.tgz#60bbf0a34098c1feb865d7483433c6e4a0208142"
integrity sha512-sdwhtsE/6njD0AjgfYEj1JyxZH4SBmCJSXpRm6Ph5fQeuZD6MyjzjdVOrrtFguyREVQ7xn0Ujkwvbo01ULthNg==
"@tauri-apps/cli-darwin-x64@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-1.5.11.tgz#0afae17fe1e84b9699a6b9824cd83b60c6ebfa59"
integrity sha512-/RQllHiJRH2fJOCudtZlaUIjofkHzP3zZgxi71ZUm7Fy80smU5TDfwpwOvB0wSVh0g/ciDjMArCSTo0MRvL+ag==
"@tauri-apps/cli-darwin-x64@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.9.1.tgz#e0bdb2e29201dd0b46bffa61cc7dccb5f0c6ae29"
integrity sha512-c86g+67wTdI4TUCD7CaSd/13+oYuLQxVST4ZNJ5C+6i1kdnU3Us1L68N9MvbDLDQGJc9eo0pvuK6sCWkee+BzA==
"@tauri-apps/cli-linux-arm-gnueabihf@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-1.5.11.tgz#c46166d7f6c1022105a13d530b1d1336f628981f"
integrity sha512-IlBuBPKmMm+a5LLUEK6a21UGr9ZYd6zKuKLq6IGM4tVweQa8Sf2kP2Nqs74dMGIUrLmMs0vuqdURpykQg+z4NQ==
"@tauri-apps/cli-linux-arm-gnueabihf@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.9.1.tgz#15ad573c32b8941d10c2ebd2ecfe399ea85ed023"
integrity sha512-IrB3gFQmueQKJjjisOcMktW/Gh6gxgqYO419doA3YZ7yIV5rbE8ZW52Q3I4AO+SlFEyVYer5kpi066p0JBlLGw==
"@tauri-apps/cli-linux-arm64-gnu@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-1.5.11.tgz#fd5c539a03371e0ab6cd00563dced1610ceb8943"
integrity sha512-w+k1bNHCU/GbmXshtAhyTwqosThUDmCEFLU4Zkin1vl2fuAtQry2RN7thfcJFepblUGL/J7yh3Q/0+BCjtspKQ==
"@tauri-apps/cli-linux-arm64-gnu@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.9.1.tgz#f7a5dd6f46c5a2b52f20a61308cd61dd04694435"
integrity sha512-Ke7TyXvu6HbWSkmVkFbbH19D3cLsd117YtXP/u9NIvSpYwKeFtnbpirrIUfPm44Q+PZFZ2Hvg8X9qoUiAK0zKw==
"@tauri-apps/cli-linux-arm64-musl@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-1.5.11.tgz#bf7f940c3aca981d7c240857a86568d5b6e8310f"
integrity sha512-PN6/dl+OfYQ/qrAy4HRAfksJ2AyWQYn2IA/2Wwpaa7SDRz2+hzwTQkvajuvy0sQ5L2WCG7ymFYRYMbpC6Hk9Pg==
"@tauri-apps/cli-linux-arm64-musl@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.9.1.tgz#011caad0d6222f5c18221d7b413e1b4750eef5f4"
integrity sha512-sGvy75sv55oeMulR5ArwPD28DsDQxqTzLhXCrpU9/nbFg/JImmI7k994YE9fr3V0qE3Cjk5gjLldRNv7I9sjwQ==
"@tauri-apps/cli-linux-x64-gnu@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-1.5.11.tgz#17323105e3863a3f36d51771e642e489037ba59b"
integrity sha512-MTVXLi89Nj7Apcvjezw92m7ZqIDKT5SFKZtVPCg6RoLUBTzko/BQoXYIRWmdoz2pgkHDUHgO2OMJ8oKzzddXbw==
"@tauri-apps/cli-linux-riscv64-gnu@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-riscv64-gnu/-/cli-linux-riscv64-gnu-2.9.1.tgz#0328624bf798d653c49bc0455d1cc111e631846b"
integrity sha512-tEKbJydV3BdIxpAx8aGHW6VDg1xW4LlQuRD/QeFZdZNTreHJpMbJEcdvAcI+Hg6vgQpVpaoEldR9W4F6dYSLqQ==
"@tauri-apps/cli-linux-x64-musl@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-1.5.11.tgz#83e22026771ec8ab094922ab114a7385532aa16c"
integrity sha512-kwzAjqFpz7rvTs7WGZLy/a5nS5t15QKr3E9FG95MNF0exTl3d29YoAUAe1Mn0mOSrTJ9Z+vYYAcI/QdcsGBP+w==
"@tauri-apps/cli-linux-x64-gnu@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.9.1.tgz#25e0f95615ddede953af4d29ad1c0d1f7c160c3f"
integrity sha512-mg5msXHagtHpyCVWgI01M26JeSrgE/otWyGdYcuTwyRYZYEJRTbcNt7hscOkdNlPBe7isScW7PVKbxmAjJJl4g==
"@tauri-apps/cli-win32-arm64-msvc@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-1.5.11.tgz#817874d230fdb09e7211013006a9a22f66ace573"
integrity sha512-L+5NZ/rHrSUrMxjj6YpFYCXp6wHnq8c8SfDTBOX8dO8x+5283/vftb4vvuGIsLS4UwUFXFnLt3XQr44n84E67Q==
"@tauri-apps/cli-linux-x64-musl@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.9.1.tgz#ccb819101a225947f42ebb4f1f6902514aa902e3"
integrity sha512-lFZEXkpDreUe3zKilvnMsrnKP9gwQudaEjDnOz/GMzbzNceIuPfFZz0cR/ky1Aoq4eSvZonPKHhROq4owz4fzg==
"@tauri-apps/cli-win32-ia32-msvc@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-1.5.11.tgz#dee1a00eb9e216415d9d6ab9386c35849613c560"
integrity sha512-oVlD9IVewrY0lZzTdb71kNXkjdgMqFq+ohb67YsJb4Rf7o8A9DTlFds1XLCe3joqLMm4M+gvBKD7YnGIdxQ9vA==
"@tauri-apps/cli-win32-arm64-msvc@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.9.1.tgz#c0be0555d2b3686d4b3ea702039d41ed6017e4eb"
integrity sha512-ejc5RAp/Lm1Aj0EQHaT+Wdt5PHfdgQV5hIDV00MV6HNbIb5W4ZUFxMDaRkAg65gl9MvY2fH396riePW3RoKXDw==
"@tauri-apps/cli-win32-x64-msvc@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-1.5.11.tgz#c003ce00b36d056a8b08e0ecf4633c2bba00c497"
integrity sha512-1CexcqUFCis5ypUIMOKllxUBrna09McbftWENgvVXMfA+SP+yPDPAVb8fIvUcdTIwR/yHJwcIucmTB4anww4vg==
"@tauri-apps/cli-win32-ia32-msvc@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.9.1.tgz#b637cd436f129ef3ff6e83a4095c37b85f308a27"
integrity sha512-fSATtJDc0fNjVB6ystyi8NbwhNFk8i8E05h6KrsC8Fio5eaJIJvPCbC9pdrPl6kkxN1X7fj25ErBbgfqgcK8Fg==
"@tauri-apps/cli@1.5.11":
version "1.5.11"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-1.5.11.tgz#02beb559b3b55836c90a1ba9121b3fc50e3760cd"
integrity sha512-B475D7phZrq5sZ3kDABH4g2mEoUIHtnIO+r4ZGAAfsjMbZCwXxR/jlMGTEL+VO3YzjpF7gQe38IzB4vLBbVppw==
"@tauri-apps/cli-win32-x64-msvc@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.9.1.tgz#d7c8512dd70f9aa8203f2011322542ec11f76b66"
integrity sha512-/JHlOzpUDhjBOO9w167bcYxfJbcMQv7ykS/Y07xjtcga8np0rzUzVGWYmLMH7orKcDMC7wjhheEW1x8cbGma/Q==
"@tauri-apps/cli@2.9.1":
version "2.9.1"
resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-2.9.1.tgz#cdc1cc6a005fef017b2a8f1a96d32537afc74579"
integrity sha512-kKi2/WWsNXKoMdatBl4xrT7e1Ce27JvsetBVfWuIb6D3ep/Y0WO5SIr70yarXOSWam8NyDur4ipzjZkg6m7VDg==
optionalDependencies:
"@tauri-apps/cli-darwin-arm64" "1.5.11"
"@tauri-apps/cli-darwin-x64" "1.5.11"
"@tauri-apps/cli-linux-arm-gnueabihf" "1.5.11"
"@tauri-apps/cli-linux-arm64-gnu" "1.5.11"
"@tauri-apps/cli-linux-arm64-musl" "1.5.11"
"@tauri-apps/cli-linux-x64-gnu" "1.5.11"
"@tauri-apps/cli-linux-x64-musl" "1.5.11"
"@tauri-apps/cli-win32-arm64-msvc" "1.5.11"
"@tauri-apps/cli-win32-ia32-msvc" "1.5.11"
"@tauri-apps/cli-win32-x64-msvc" "1.5.11"
"@tauri-apps/cli-darwin-arm64" "2.9.1"
"@tauri-apps/cli-darwin-x64" "2.9.1"
"@tauri-apps/cli-linux-arm-gnueabihf" "2.9.1"
"@tauri-apps/cli-linux-arm64-gnu" "2.9.1"
"@tauri-apps/cli-linux-arm64-musl" "2.9.1"
"@tauri-apps/cli-linux-riscv64-gnu" "2.9.1"
"@tauri-apps/cli-linux-x64-gnu" "2.9.1"
"@tauri-apps/cli-linux-x64-musl" "2.9.1"
"@tauri-apps/cli-win32-arm64-msvc" "2.9.1"
"@tauri-apps/cli-win32-ia32-msvc" "2.9.1"
"@tauri-apps/cli-win32-x64-msvc" "2.9.1"
"@testing-library/dom@^10.4.0":
version "10.4.0"