Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-11-04 00:03:46 +08:00)
Compare commits: feat/markd...72c76bbc57 (28 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 72c76bbc57 |  |
|  | 814fd2786e |  |
|  | 29dbffac3e |  |
|  | 92532b2c74 |  |
|  | 4f16ca1320 |  |
|  | 4d43fac12a |  |
|  | d3e164f23e |  |
|  | 31129ba213 |  |
|  | 673f907ea4 |  |
|  | fb3af2a08f |  |
|  | eb193ac0ff |  |
|  | c30ddfbb07 |  |
|  | a2f0149786 |  |
|  | 03d36f96ed |  |
|  | 705dffc664 |  |
|  | 02f7e6de98 |  |
|  | 843dc52efa |  |
|  | 3809375694 |  |
|  | 1b0de25986 |  |
|  | 865c45dd29 |  |
|  | 1f5d8e6d9c |  |
|  | c9ef6d58ed |  |
|  | 2d7229d2b8 |  |
|  | 11b37c15bd |  |
|  | 1d0038f17d |  |
|  | 619fa519c0 |  |
|  | c261ebc82c |  |
|  | f7c747c65f |  |
```diff
@@ -81,3 +81,9 @@ SILICONFLOW_API_KEY=
 
 ### siliconflow Api url (optional)
 SILICONFLOW_URL=
+
+### 302.AI Api key (optional)
+AI302_API_KEY=
+
+### 302.AI Api url (optional)
+AI302_URL=
```
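The hunk above adds two optional variables, AI302_API_KEY and AI302_URL, next to the existing SiliconFlow entries; judging by the keys it belongs to the environment template (the file name is not shown in this extract). A minimal sketch of how they are expected to be read on the server, assuming the usual process.env lookup; the real wiring lives in getServerSideConfig(), which this compare does not show:

```ts
// Hypothetical sketch only, not part of the diff.
// The proxy route added below falls back to AI302_BASE_URL when no custom URL is set.
const ai302Url = process.env.AI302_URL || "";        // optional custom endpoint
const ai302ApiKey = process.env.AI302_API_KEY || ""; // server-side 302.AI key
```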
							
								
								
									
README.md (27 lines changed)

```diff
@@ -6,7 +6,6 @@
 
 
 
-
 <h1 align="center">NextChat</h1>
 
 English / [简体中文](./README_CN.md)
@@ -22,12 +21,12 @@ English / [简体中文](./README_CN.md)
 [![MacOS][MacOS-image]][download-url]
 [![Linux][Linux-image]][download-url]
 
-[NextChatAI](https://nextchat.club?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)
+[NextChatAI](https://nextchat.club?utm_source=readme) / [iOS APP](https://apps.apple.com/us/app/nextchat-ai/id6743085599) / [Web App Demo](https://app.nextchat.club) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Enterprise Edition](#enterprise-edition)
 
 
 [saas-url]: https://nextchat.club?utm_source=readme
 [saas-image]: https://img.shields.io/badge/NextChat-Saas-green?logo=microsoftedge
-[web-url]: https://app.nextchat.dev/
+[web-url]: https://app.nextchat.club/
 [download-url]: https://github.com/Yidadaa/ChatGPT-Next-Web/releases
 [Web-image]: https://img.shields.io/badge/Web-PWA-orange?logo=microsoftedge
 [Windows-image]: https://img.shields.io/badge/-Windows-blue?logo=windows
@@ -36,17 +35,27 @@ English / [简体中文](./README_CN.md)
 
 [<img src="https://zeabur.com/button.svg" alt="Deploy on Zeabur" height="30">](https://zeabur.com/templates/ZBUEFA) [<img src="https://vercel.com/button" alt="Deploy on Vercel" height="30">](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2FChatGPTNextWeb%2FChatGPT-Next-Web&env=OPENAI_API_KEY&env=CODE&project-name=nextchat&repository-name=NextChat)  [<img src="https://gitpod.io/button/open-in-gitpod.svg" alt="Open in Gitpod" height="30">](https://gitpod.io/#https://github.com/ChatGPTNextWeb/NextChat)
 
 
 [<img src="https://github.com/user-attachments/assets/903482d4-3e87-4134-9af1-f2588fa90659" height="50" width="" >](https://monica.im/?utm=nxcrp)
 
 </div>
 
-## 🥳 Cheer for DeepSeek, China's AI star!
- > Purpose-Built UI for DeepSeek Reasoner Model
+## ❤️ Sponsor AI API
+<a href='https://302.ai/'>
+  <img src="https://github.com/user-attachments/assets/a03edf82-2031-4f23-bdb8-bfc0bfd168a4" width="100%" alt="icon"/>
+</a>
+
+[302.AI](https://302.ai/) is a pay-as-you-go AI application platform that offers the most comprehensive AI APIs and online applications available.
+
+
+## 🥳 Cheer for NextChat iOS Version Online!
+> [👉 Click Here to Install Now](https://apps.apple.com/us/app/nextchat-ai/id6743085599)
+
+> [❤️ Source Code Coming Soon](https://github.com/ChatGPTNextWeb/NextChat-iOS)
+
+
+
 
-<img src="https://github.com/user-attachments/assets/f3952210-3af1-4dc0-9b81-40eaa4847d9a"/>
-
-
-
 ## 🫣 NextChat Support MCP  ! 
 > Before build, please set env ENABLE_MCP=true
 
```
```diff
@@ -4,6 +4,7 @@
   <img src="./docs/images/ent.svg" alt="icon"/>
 </a>
 
+
 <h1 align="center">NextChat</h1>
 
 一键免费部署你的私人 ChatGPT 网页应用,支持 Claude, GPT4 & Gemini Pro 模型。
@@ -14,6 +15,14 @@
 
 </div>
 
+## Sponsor AI API
+<a href='https://302.ai/'>
+  <img src="https://github.com/user-attachments/assets/d8c0c513-1e18-4d3b-a2a9-ff3696aec0d4" width="100%" alt="icon"/>
+</a>
+
+[302.AI](https://302.ai/) 是一个按需付费的AI应用平台,提供市面上最全的AI API和AI在线应用。
+
+
 ## 企业版
 
 满足您公司私有化部署和定制需求
```
```diff
@@ -1,6 +1,7 @@
 <div align="center">
 <img src="./docs/images/ent.svg" alt="プレビュー"/>
 
+
 <h1 align="center">NextChat</h1>
 
 ワンクリックで無料であなた専用の ChatGPT ウェブアプリをデプロイ。GPT3、GPT4 & Gemini Pro モデルをサポート。
@@ -12,6 +13,14 @@
 
 </div>
 
+## Sponsor AI API
+<a href='https://302.ai/'>
+  <img src="https://github.com/user-attachments/assets/6cf24233-1010-43e0-9a83-a11159866175" width="100%" alt="icon"/>
+</a>
+
+[302.AI](https://302.ai/) は、オンデマンドで支払うAIアプリケーションプラットフォームで、最も安全なAI APIとAIオンラインアプリケーションを提供します。
+
+
 ## 企業版
 
 あなたの会社のプライベートデプロイとカスタマイズのニーズに応える
```
							
								
								
									
app/api/302ai.ts (new file, 128 lines)

```ts
import { getServerSideConfig } from "@/app/config/server";
import {
  AI302_BASE_URL,
  ApiPath,
  ModelProvider,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[302.AI Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider["302.AI"]);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[302.AI] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

async function request(req: NextRequest) {
  const controller = new AbortController();

  // alibaba use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath["302.AI"], "");

  let baseUrl = serverConfig.ai302Url || AI302_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider["302.AI"] as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[302.AI] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
```
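The new route is a thin pass-through: it answers OPTIONS, authorizes the request against ModelProvider["302.AI"], optionally rejects models excluded via customModels, and forwards everything else to the configured 302.AI base URL with a ten-minute abort timeout. A hedged usage sketch; the concrete prefix (assumed here to be /api/302ai, i.e. the value of ApiPath["302.AI"]) and the OpenAI-style chat path are assumptions, not confirmed by this compare:

```ts
// Hypothetical client-side call through the new proxy route.
const res = await fetch("/api/302ai/v1/chat/completions", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    // auth() accepts whatever Authorization header the app's access control expects.
    Authorization: "Bearer <access code or API key>",
  },
  body: JSON.stringify({
    model: "gpt-4o-mini", // illustrative; any model exposed by 302.AI
    stream: false,
    messages: [{ role: "user", content: "Hello" }],
  }),
});
console.log(await res.json());
```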
```diff
@@ -15,6 +15,7 @@ import { handle as siliconflowHandler } from "../../siliconflow";
 import { handle as xaiHandler } from "../../xai";
 import { handle as chatglmHandler } from "../../glm";
 import { handle as proxyHandler } from "../../proxy";
+import { handle as ai302Handler } from "../../302ai";
 
 async function handle(
   req: NextRequest,
@@ -52,6 +53,8 @@ async function handle(
       return siliconflowHandler(req, { params });
     case ApiPath.OpenAI:
       return openaiHandler(req, { params });
+    case ApiPath["302.AI"]:
+      return ai302Handler(req, { params });
     default:
       return proxyHandler(req, { params });
   }
```
```diff
@@ -24,6 +24,7 @@ import { DeepSeekApi } from "./platforms/deepseek";
 import { XAIApi } from "./platforms/xai";
 import { ChatGLMApi } from "./platforms/glm";
 import { SiliconflowApi } from "./platforms/siliconflow";
+import { Ai302Api } from "./platforms/ai302";
 
 export const ROLES = ["system", "user", "assistant"] as const;
 export type MessageRole = (typeof ROLES)[number];
@@ -173,6 +174,9 @@ export class ClientApi {
       case ModelProvider.SiliconFlow:
         this.llm = new SiliconflowApi();
         break;
+      case ModelProvider["302.AI"]:
+        this.llm = new Ai302Api();
+        break;
       default:
         this.llm = new ChatGPTApi();
     }
@@ -265,6 +269,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
     const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
     const isSiliconFlow =
       modelConfig.providerName === ServiceProvider.SiliconFlow;
+    const isAI302 = modelConfig.providerName === ServiceProvider["302.AI"];
     const isEnabledAccessControl = accessStore.enabledAccessControl();
     const apiKey = isGoogle
       ? accessStore.googleApiKey
@@ -290,6 +295,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
       ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
         ? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret
         : ""
+      : isAI302
+      ? accessStore.ai302ApiKey
       : accessStore.openaiApiKey;
     return {
       isGoogle,
@@ -304,6 +311,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
       isXAI,
       isChatGLM,
       isSiliconFlow,
+      isAI302,
       apiKey,
       isEnabledAccessControl,
     };
@@ -332,6 +340,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
     isXAI,
     isChatGLM,
     isSiliconFlow,
+    isAI302,
     apiKey,
     isEnabledAccessControl,
   } = getConfig();
@@ -382,6 +391,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
       return new ClientApi(ModelProvider.ChatGLM);
     case ServiceProvider.SiliconFlow:
       return new ClientApi(ModelProvider.SiliconFlow);
+    case ServiceProvider["302.AI"]:
+      return new ClientApi(ModelProvider["302.AI"]);
     default:
       return new ClientApi(ModelProvider.GPT);
   }
```
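With these cases in place, choosing 302.AI as the provider flows through the new platform client. A minimal sketch using only symbols visible in this compare; the module paths and the public visibility of llm are assumptions:

```ts
import { getClientApi } from "./api";             // the file patched above (app/client/api.ts is assumed)
import { ServiceProvider } from "@/app/constant";

async function list302Models() {
  // getClientApi maps ServiceProvider["302.AI"] to new ClientApi(ModelProvider["302.AI"]),
  // whose constructor assigns this.llm = new Ai302Api().
  const client = getClientApi(ServiceProvider["302.AI"]);
  const models = await client.llm.models();       // Ai302Api.models() fetches AI302.ListModelPath
  return models.map((m) => m.name);
}
```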
							
								
								
									
app/client/platforms/ai302.ts (new file, 287 lines)

```ts
"use client";

import {
  ApiPath,
  AI302_BASE_URL,
  DEFAULT_MODELS,
  AI302,
} from "@/app/constant";
import {
  useAccessStore,
  useAppConfig,
  useChatStore,
  ChatMessageTool,
  usePluginStore,
} from "@/app/store";
import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
import {
  ChatOptions,
  getHeaders,
  LLMApi,
  LLMModel,
  SpeechOptions,
} from "../api";
import { getClientConfig } from "@/app/config/client";
import {
  getMessageTextContent,
  getMessageTextContentWithoutThinking,
  isVisionModel,
  getTimeoutMSByModel,
} from "@/app/utils";
import { RequestPayload } from "./openai";

import { fetch } from "@/app/utils/stream";
export interface Ai302ListModelResponse {
  object: string;
  data: Array<{
    id: string;
    object: string;
    root: string;
  }>;
}

export class Ai302Api implements LLMApi {
  private disableListModels = false;

  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl = "";

    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.ai302Url;
    }

    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      const apiPath = ApiPath["302.AI"];
      baseUrl = isApp ? AI302_BASE_URL : apiPath;
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (
      !baseUrl.startsWith("http") &&
      !baseUrl.startsWith(ApiPath["302.AI"])
    ) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    return [baseUrl, path].join("/");
  }

  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }

  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  async chat(options: ChatOptions) {
    const visionModel = isVisionModel(options.config.model);
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      if (v.role === "assistant") {
        const content = getMessageTextContentWithoutThinking(v);
        messages.push({ role: v.role, content });
      } else {
        const content = visionModel
          ? await preProcessImageContent(v.content)
          : getMessageTextContent(v);
        messages.push({ role: v.role, content });
      }
    }

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
        providerName: options.config.providerName,
      },
    };

    const requestPayload: RequestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
    };

    console.log("[Request] openai payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    options.onController?.(controller);

    try {
      const chatPath = this.path(AI302.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // console.log(chatPayload);

      // Use extended timeout for thinking models as they typically require more processing time
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        getTimeoutMSByModel(options.config.model),
      );

      if (shouldStream) {
        const [tools, funcs] = usePluginStore
          .getState()
          .getAsTools(
            useChatStore.getState().currentSession().mask?.plugin || [],
          );
        return streamWithThink(
          chatPath,
          requestPayload,
          getHeaders(),
          tools as any,
          funcs,
          controller,
          // parseSSE
          (text: string, runTools: ChatMessageTool[]) => {
            // console.log("parseSSE", text, runTools);
            const json = JSON.parse(text);
            const choices = json.choices as Array<{
              delta: {
                content: string | null;
                tool_calls: ChatMessageTool[];
                reasoning_content: string | null;
              };
            }>;
            const tool_calls = choices[0]?.delta?.tool_calls;
            if (tool_calls?.length > 0) {
              const index = tool_calls[0]?.index;
              const id = tool_calls[0]?.id;
              const args = tool_calls[0]?.function?.arguments;
              if (id) {
                runTools.push({
                  id,
                  type: tool_calls[0]?.type,
                  function: {
                    name: tool_calls[0]?.function?.name as string,
                    arguments: args,
                  },
                });
              } else {
                // @ts-ignore
                runTools[index]["function"]["arguments"] += args;
              }
            }
            const reasoning = choices[0]?.delta?.reasoning_content;
            const content = choices[0]?.delta?.content;

            // Skip if both content and reasoning_content are empty or null
            if (
              (!reasoning || reasoning.length === 0) &&
              (!content || content.length === 0)
            ) {
              return {
                isThinking: false,
                content: "",
              };
            }

            if (reasoning && reasoning.length > 0) {
              return {
                isThinking: true,
                content: reasoning,
              };
            } else if (content && content.length > 0) {
              return {
                isThinking: false,
                content: content,
              };
            }

            return {
              isThinking: false,
              content: "",
            };
          },
          // processToolMessage, include tool_calls message and tool call results
          (
            requestPayload: RequestPayload,
            toolCallMessage: any,
            toolCallResult: any[],
          ) => {
            // @ts-ignore
            requestPayload?.messages?.splice(
              // @ts-ignore
              requestPayload?.messages?.length,
              0,
              toolCallMessage,
              ...toolCallResult,
            );
          },
          options,
        );
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message, res);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  async models(): Promise<LLMModel[]> {
    if (this.disableListModels) {
      return DEFAULT_MODELS.slice();
    }

    const res = await fetch(this.path(AI302.ListModelPath), {
      method: "GET",
      headers: {
        ...getHeaders(),
      },
    });

    const resJson = (await res.json()) as Ai302ListModelResponse;
    const chatModels = resJson.data;
    console.log("[Models]", chatModels);

    if (!chatModels) {
      return [];
    }

    let seq = 1000; //同 Constant.ts 中的排序保持一致
    return chatModels.map((m) => ({
      name: m.id,
      available: true,
      sorted: seq++,
      provider: {
        id: "ai302",
        providerName: "302.AI",
        providerType: "ai302",
        sorted: 15,
      },
    }));
  }
}
```
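The streaming path hands the request to streamWithThink with a parseSSE callback that splits reasoning_content from ordinary content. Purely illustrative, using only the field names from the code above: one decoded chunk and how the callback maps it:

```ts
// One streamed chunk as the parseSSE callback receives it (after JSON.parse):
const chunk = {
  choices: [
    {
      delta: {
        content: null,
        reasoning_content: "Weighing the options...",
        tool_calls: [],
      },
    },
  ],
};
// reasoning_content non-empty, content empty -> { isThinking: true,  content: "Weighing the options..." }
// content non-empty                          -> { isThinking: false, content }
// both empty                                 -> { isThinking: false, content: "" } (the chunk is skipped)
```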
```diff
@@ -56,7 +56,7 @@ export interface OpenAIListModelResponse {
 
 export interface RequestPayload {
   messages: {
-    role: "system" | "user" | "assistant";
+    role: "developer" | "system" | "user" | "assistant";
     content: string | MultimodalContent[];
   }[];
   stream?: boolean;
@@ -198,7 +198,8 @@ export class ChatGPTApi implements LLMApi {
     const isDalle3 = _isDalle3(options.config.model);
     const isO1OrO3 =
       options.config.model.startsWith("o1") ||
-      options.config.model.startsWith("o3");
+      options.config.model.startsWith("o3") ||
+      options.config.model.startsWith("o4-mini");
     if (isDalle3) {
       const prompt = getMessageTextContent(
         options.messages.slice(-1)?.pop() as any,
@@ -237,13 +238,21 @@ export class ChatGPTApi implements LLMApi {
         // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
       };
 
-      // O1 使用 max_completion_tokens 控制token数 (https://platform.openai.com/docs/guides/reasoning#controlling-costs)
       if (isO1OrO3) {
+        // by default the o1/o3 models will not attempt to produce output that includes markdown formatting
+        // manually add "Formatting re-enabled" developer message to encourage markdown inclusion in model responses
+        // (https://learn.microsoft.com/en-us/azure/ai-services/openai/how-to/reasoning?tabs=python-secure#markdown-output)
+        requestPayload["messages"].unshift({
+          role: "developer",
+          content: "Formatting re-enabled",
+        });
+
+        // o1/o3 uses max_completion_tokens to control the number of tokens (https://platform.openai.com/docs/guides/reasoning#controlling-costs)
         requestPayload["max_completion_tokens"] = modelConfig.max_tokens;
       }
 
       // add max_tokens to vision model
-      if (visionModel) {
+      if (visionModel && !isO1OrO3) {
         requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
       }
     }
```
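For models matching the o1/o3/o4-mini prefixes, the payload now leads with a "developer" message and keeps max_completion_tokens instead of max_tokens, and the vision-model max_tokens fallback is skipped for them. An illustrative payload under those branches (the model name is only an example):

```ts
const requestPayload = {
  model: "o3-mini", // any model whose name starts with o1, o3, or o4-mini
  stream: true,
  messages: [
    { role: "developer", content: "Formatting re-enabled" }, // unshifted by the hunk above
    { role: "user", content: "Summarize this compare as a bullet list." },
  ],
  max_completion_tokens: 4000, // used instead of max_tokens for these models
};
```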
```diff
@@ -18,6 +18,7 @@ import ReturnIcon from "../icons/return.svg";
 import CopyIcon from "../icons/copy.svg";
 import SpeakIcon from "../icons/speak.svg";
 import SpeakStopIcon from "../icons/speak-stop.svg";
+import LoadingIcon from "../icons/three-dots.svg";
 import LoadingButtonIcon from "../icons/loading.svg";
 import PromptIcon from "../icons/prompt.svg";
 import MaskIcon from "../icons/mask.svg";
@@ -78,6 +79,8 @@ import {
 
 import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
 
+import dynamic from "next/dynamic";
+
 import { ChatControllerPool } from "../client/controller";
 import { DalleQuality, DalleStyle, ModelSize } from "../typing";
 import { Prompt, usePromptStore } from "../store/prompt";
@@ -122,15 +125,14 @@ import { getModelProvider } from "../utils/model";
 import { RealtimeChat } from "@/app/components/realtime-chat";
 import clsx from "clsx";
 import { getAvailableClientsCount, isMcpEnabled } from "../mcp/actions";
-import { Markdown } from "./markdown";
 
 const localStorage = safeLocalStorage();
 
 const ttsPlayer = createTTSPlayer();
 
-// const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
-//   loading: () => <LoadingIcon />,
-// });
+const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
+  loading: () => <LoadingIcon />,
+});
 
 const MCPAction = () => {
   const navigate = useNavigate();
@@ -1982,8 +1984,6 @@ function _Chat() {
                               fontFamily={fontFamily}
                               parentRef={scrollRef}
                               defaultShow={i >= messages.length - 6}
-                              immediatelyRender={i >= messages.length - 3}
-                              streaming={message.streaming}
                             />
                             {getMessageImages(message).length == 1 && (
                               <img
```
@@ -267,136 +267,6 @@ function tryWrapHtmlCode(text: string) {
 | 
				
			|||||||
    );
 | 
					    );
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
// Split content into paragraphs while preserving code blocks
 | 
					 | 
				
			||||||
function splitContentIntoParagraphs(content: string) {
 | 
					 | 
				
			||||||
  // Check for unclosed code blocks
 | 
					 | 
				
			||||||
  const codeBlockStartCount = (content.match(/```/g) || []).length;
 | 
					 | 
				
			||||||
  let processedContent = content;
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Add closing tag if there's an odd number of code block markers
 | 
					 | 
				
			||||||
  if (codeBlockStartCount % 2 !== 0) {
 | 
					 | 
				
			||||||
    processedContent = content + "\n```";
 | 
					 | 
				
			||||||
  }
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Extract code blocks
 | 
					 | 
				
			||||||
  const codeBlockRegex = /```[\s\S]*?```/g;
 | 
					 | 
				
			||||||
  const codeBlocks: string[] = [];
 | 
					 | 
				
			||||||
  let codeBlockCounter = 0;
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Replace code blocks with placeholders
 | 
					 | 
				
			||||||
  const contentWithPlaceholders = processedContent.replace(
 | 
					 | 
				
			||||||
    codeBlockRegex,
 | 
					 | 
				
			||||||
    (match) => {
 | 
					 | 
				
			||||||
      codeBlocks.push(match);
 | 
					 | 
				
			||||||
      const placeholder = `__CODE_BLOCK_${codeBlockCounter++}__`;
 | 
					 | 
				
			||||||
      return placeholder;
 | 
					 | 
				
			||||||
    },
 | 
					 | 
				
			||||||
  );
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Split by double newlines
 | 
					 | 
				
			||||||
  const paragraphs = contentWithPlaceholders
 | 
					 | 
				
			||||||
    .split(/\n\n+/)
 | 
					 | 
				
			||||||
    .filter((p) => p.trim());
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Restore code blocks
 | 
					 | 
				
			||||||
  return paragraphs.map((p) => {
 | 
					 | 
				
			||||||
    if (p.match(/__CODE_BLOCK_\d+__/)) {
 | 
					 | 
				
			||||||
      return p.replace(/__CODE_BLOCK_\d+__/g, (match) => {
 | 
					 | 
				
			||||||
        const index = parseInt(match.match(/\d+/)?.[0] || "0");
 | 
					 | 
				
			||||||
        return codeBlocks[index] || match;
 | 
					 | 
				
			||||||
      });
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
    return p;
 | 
					 | 
				
			||||||
  });
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
// Lazy-loaded paragraph component
 | 
					 | 
				
			||||||
function MarkdownParagraph({
 | 
					 | 
				
			||||||
  content,
 | 
					 | 
				
			||||||
  onLoad,
 | 
					 | 
				
			||||||
}: {
 | 
					 | 
				
			||||||
  content: string;
 | 
					 | 
				
			||||||
  onLoad?: () => void;
 | 
					 | 
				
			||||||
}) {
 | 
					 | 
				
			||||||
  const [isLoaded, setIsLoaded] = useState(false);
 | 
					 | 
				
			||||||
  const placeholderRef = useRef<HTMLDivElement>(null);
 | 
					 | 
				
			||||||
  const [isVisible, setIsVisible] = useState(false);
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  useEffect(() => {
 | 
					 | 
				
			||||||
    let observer: IntersectionObserver;
 | 
					 | 
				
			||||||
    if (placeholderRef.current) {
 | 
					 | 
				
			||||||
      observer = new IntersectionObserver(
 | 
					 | 
				
			||||||
        (entries) => {
 | 
					 | 
				
			||||||
          if (entries[0].isIntersecting) {
 | 
					 | 
				
			||||||
            setIsVisible(true);
 | 
					 | 
				
			||||||
          }
 | 
					 | 
				
			||||||
        },
 | 
					 | 
				
			||||||
        { threshold: 0.1, rootMargin: "200px 0px" },
 | 
					 | 
				
			||||||
      );
 | 
					 | 
				
			||||||
      observer.observe(placeholderRef.current);
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
    return () => observer?.disconnect();
 | 
					 | 
				
			||||||
  }, []);
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  useEffect(() => {
 | 
					 | 
				
			||||||
    if (isVisible && !isLoaded) {
 | 
					 | 
				
			||||||
      setIsLoaded(true);
 | 
					 | 
				
			||||||
      onLoad?.();
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
  }, [isVisible, isLoaded, onLoad]);
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
  // Generate preview content
 | 
					 | 
				
			||||||
  const previewContent = useMemo(() => {
 | 
					 | 
				
			||||||
    if (content.startsWith("```")) {
 | 
					 | 
				
			||||||
      return "```" + (content.split("\n")[0] || "").slice(3) + "...```";
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
-    return content.length > 60 ? content.slice(0, 60) + "..." : content;
-  }, [content]);
-
-  return (
-    <div className="markdown-paragraph" ref={placeholderRef}>
-      {!isLoaded ? (
-        <div className="markdown-paragraph-placeholder">{previewContent}</div>
-      ) : (
-        <_MarkDownContent content={content} />
-      )}
-    </div>
-  );
-}
-
-// Memoized paragraph component to prevent unnecessary re-renders
-const MemoizedMarkdownParagraph = React.memo(
-  ({ content }: { content: string }) => {
-    return <_MarkDownContent content={content} />;
-  },
-  (prevProps, nextProps) => prevProps.content === nextProps.content,
-);
-
-MemoizedMarkdownParagraph.displayName = "MemoizedMarkdownParagraph";
-
-// Specialized component for streaming content
-function StreamingMarkdownContent({ content }: { content: string }) {
-  const paragraphs = useMemo(
-    () => splitContentIntoParagraphs(content),
-    [content],
-  );
-  const lastParagraphRef = useRef<HTMLDivElement>(null);
-
-  return (
-    <div className="markdown-streaming-content">
-      {paragraphs.map((paragraph, index) => (
-        <div
-          key={`p-${index}-${paragraph.substring(0, 20)}`}
-          className="markdown-paragraph markdown-streaming-paragraph"
-          ref={index === paragraphs.length - 1 ? lastParagraphRef : null}
-        >
-          <MemoizedMarkdownParagraph content={paragraph} />
-        </div>
-      ))}
-    </div>
-  );
-}
-
 function _MarkDownContent(props: { content: string }) {
   const escapedContent = useMemo(() => {
     return tryWrapHtmlCode(escapeBrackets(props.content));
@@ -456,27 +326,9 @@ export function Markdown(
     fontFamily?: string;
     parentRef?: RefObject<HTMLDivElement>;
     defaultShow?: boolean;
-    immediatelyRender?: boolean;
-    streaming?: boolean; // Whether this is a streaming response
   } & React.DOMAttributes<HTMLDivElement>,
 ) {
   const mdRef = useRef<HTMLDivElement>(null);
-  const paragraphs = useMemo(
-    () => splitContentIntoParagraphs(props.content),
-    [props.content],
-  );
-  const [loadedCount, setLoadedCount] = useState(0);
-
-  // Determine rendering strategy based on props
-  const shouldAsyncRender =
-    !props.immediatelyRender && !props.streaming && paragraphs.length > 1;
-
-  useEffect(() => {
-    // Immediately render all paragraphs if specified
-    if (props.immediatelyRender) {
-      setLoadedCount(paragraphs.length);
-    }
-  }, [props.immediatelyRender, paragraphs.length]);

   return (
     <div
@@ -492,24 +344,6 @@ export function Markdown(
     >
       {props.loading ? (
         <LoadingIcon />
-      ) : props.streaming ? (
-        // Use specialized component for streaming content
-        <StreamingMarkdownContent content={props.content} />
-      ) : shouldAsyncRender ? (
-        <div className="markdown-content">
-          {paragraphs.map((paragraph, index) => (
-            <MarkdownParagraph
-              key={index}
-              content={paragraph}
-              onLoad={() => setLoadedCount((prev) => prev + 1)}
-            />
-          ))}
-          {loadedCount < paragraphs.length && loadedCount > 0 && (
-            <div className="markdown-paragraph-loading">
-              <LoadingIcon />
-            </div>
-          )}
-        </div>
       ) : (
         <MarkdownContent content={props.content} />
       )}
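The hunks above remove the async-paragraph / streaming render path from the Markdown component and fall back to a single `<MarkdownContent>`. For orientation, a minimal usage sketch of the simplified component follows; the prop names (`content`, `loading`, `defaultShow`, `fontFamily`, `parentRef`) are taken from the signature visible in the diff, while the wrapping component and its state are hypothetical.

```tsx
// Illustrative only: how a caller might render the simplified Markdown component
// after `immediatelyRender` and `streaming` are removed. `MessageBody` and its
// props are assumptions for this sketch, not code from the repository.
import { useRef } from "react";
import { Markdown } from "./markdown";

function MessageBody(props: { text: string; isLoading: boolean }) {
  const scrollRef = useRef<HTMLDivElement>(null);
  return (
    <div ref={scrollRef}>
      <Markdown
        content={props.text}
        loading={props.isLoading} // shows LoadingIcon while true
        defaultShow
        fontFamily="inherit"
        parentRef={scrollRef}
      />
    </div>
  );
}
```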
@@ -75,6 +75,7 @@ import {
   ChatGLM,
   DeepSeek,
   SiliconFlow,
+  AI302,
 } from "../constant";
 import { Prompt, SearchService, usePromptStore } from "../store/prompt";
 import { ErrorBoundary } from "./error";
@@ -1458,6 +1459,46 @@ export function Settings() {
     </>
   );

+  const ai302ConfigComponent = accessStore.provider === ServiceProvider["302.AI"] && (
+    <>
+      <ListItem
+          title={Locale.Settings.Access.AI302.Endpoint.Title}
+          subTitle={
+            Locale.Settings.Access.AI302.Endpoint.SubTitle +
+            AI302.ExampleEndpoint
+          }
+        >
+          <input
+            aria-label={Locale.Settings.Access.AI302.Endpoint.Title}
+            type="text"
+            value={accessStore.ai302Url}
+            placeholder={AI302.ExampleEndpoint}
+            onChange={(e) =>
+              accessStore.update(
+                (access) => (access.ai302Url = e.currentTarget.value),
+              )
+            }
+          ></input>
+        </ListItem>
+        <ListItem
+          title={Locale.Settings.Access.AI302.ApiKey.Title}
+          subTitle={Locale.Settings.Access.AI302.ApiKey.SubTitle}
+        >
+          <PasswordInput
+            aria-label={Locale.Settings.Access.AI302.ApiKey.Title}
+            value={accessStore.ai302ApiKey}
+            type="text"
+            placeholder={Locale.Settings.Access.AI302.ApiKey.Placeholder}
+            onChange={(e) => {
+              accessStore.update(
+                (access) => (access.ai302ApiKey = e.currentTarget.value),
+              );
+            }}
+          />
+        </ListItem>
+      </>
+  );
+
   return (
     <ErrorBoundary>
       <div className="window-header" data-tauri-drag-region>
@@ -1822,6 +1863,7 @@ export function Settings() {
                   {XAIConfigComponent}
                   {chatglmConfigComponent}
                   {siliconflowConfigComponent}
+                  {ai302ConfigComponent}
                 </>
               )}
             </>
@@ -88,6 +88,10 @@ declare global {
       SILICONFLOW_URL?: string;
       SILICONFLOW_API_KEY?: string;

+      // 302.AI only
+      AI302_URL?: string;
+      AI302_API_KEY?: string;
+
       // custom template for preprocessing user input
       DEFAULT_INPUT_TEMPLATE?: string;

@@ -163,6 +167,7 @@ export const getServerSideConfig = () => {
   const isXAI = !!process.env.XAI_API_KEY;
   const isChatGLM = !!process.env.CHATGLM_API_KEY;
   const isSiliconFlow = !!process.env.SILICONFLOW_API_KEY;
+  const isAI302 = !!process.env.AI302_API_KEY;
   // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
   // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
   // const randomIndex = Math.floor(Math.random() * apiKeys.length);
@@ -246,6 +251,10 @@ export const getServerSideConfig = () => {
     siliconFlowUrl: process.env.SILICONFLOW_URL,
     siliconFlowApiKey: getApiKey(process.env.SILICONFLOW_API_KEY),

+    isAI302,
+    ai302Url: process.env.AI302_URL,
+    ai302ApiKey: getApiKey(process.env.AI302_API_KEY),
+
     gtmId: process.env.GTM_ID,
     gaId: process.env.GA_ID || DEFAULT_GA_ID,
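A sketch of how these new server-side fields might be consumed is below; `getServerSideConfig`, `isAI302`, `ai302Url`, and `ai302ApiKey` come from the hunks above, while the import path and the logging handler are assumptions.

```ts
// Hypothetical server-side consumer of the new 302.AI config fields.
// The import path is an assumption; adjust to wherever getServerSideConfig lives.
import { getServerSideConfig } from "../config/server";

const serverConfig = getServerSideConfig();

if (serverConfig.isAI302) {
  // AI302_URL is optional, so a real handler would fall back to the
  // provider's default base URL when ai302Url is undefined.
  console.log("[302.AI] endpoint override:", serverConfig.ai302Url ?? "(default)");
}
```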
@@ -36,6 +36,8 @@ export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";

 export const SILICONFLOW_BASE_URL = "https://api.siliconflow.cn";

+export const AI302_BASE_URL = "https://api.302.ai";
+
 export const CACHE_URL_PREFIX = "/api/cache";
 export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;

@@ -72,6 +74,7 @@ export enum ApiPath {
   ChatGLM = "/api/chatglm",
   DeepSeek = "/api/deepseek",
   SiliconFlow = "/api/siliconflow",
+  "302.AI" = "/api/302ai",
 }

 export enum SlotID {
@@ -130,6 +133,7 @@ export enum ServiceProvider {
   ChatGLM = "ChatGLM",
   DeepSeek = "DeepSeek",
   SiliconFlow = "SiliconFlow",
+  "302.AI" = "302.AI",
 }

 // Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
@@ -156,6 +160,7 @@ export enum ModelProvider {
   ChatGLM = "ChatGLM",
   DeepSeek = "DeepSeek",
   SiliconFlow = "SiliconFlow",
+  "302.AI" = "302.AI",
 }

 export const Stability = {
@@ -266,6 +271,13 @@ export const SiliconFlow = {
   ListModelPath: "v1/models?&sub_type=chat",
 };

+export const AI302 = {
+  ExampleEndpoint: AI302_BASE_URL,
+  ChatPath: "v1/chat/completions",
+  EmbeddingsPath: "jina/v1/embeddings",
+  ListModelPath: "v1/models?llm=1",
+};
+
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
 // export const DEFAULT_SYSTEM_TEMPLATE = `
 // You are ChatGPT, a large language model trained by {{ServiceProvider}}.
@@ -417,6 +429,14 @@ export const KnowledgeCutOffDate: Record<string, string> = {
   "gpt-4-turbo": "2023-12",
   "gpt-4-turbo-2024-04-09": "2023-12",
   "gpt-4-turbo-preview": "2023-12",
+  "gpt-4.1": "2024-06",
+  "gpt-4.1-2025-04-14": "2024-06",
+  "gpt-4.1-mini": "2024-06",
+  "gpt-4.1-mini-2025-04-14": "2024-06",
+  "gpt-4.1-nano": "2024-06",
+  "gpt-4.1-nano-2025-04-14": "2024-06",
+  "gpt-4.5-preview": "2023-10",
+  "gpt-4.5-preview-2025-02-27": "2023-10",
   "gpt-4o": "2023-10",
   "gpt-4o-2024-05-13": "2023-10",
   "gpt-4o-2024-08-06": "2023-10",
@@ -458,6 +478,7 @@ export const DEFAULT_TTS_VOICES = [
 export const VISION_MODEL_REGEXES = [
   /vision/,
   /gpt-4o/,
+  /gpt-4\.1/,
   /claude-3/,
   /gemini-1\.5/,
   /gemini-exp/,
@@ -469,6 +490,8 @@ export const VISION_MODEL_REGEXES = [
   /^dall-e-3$/, // Matches exactly "dall-e-3"
   /glm-4v/,
   /vl/i,
+  /o3/,
+  /o4-mini/,
 ];

 export const EXCLUDE_VISION_MODEL_REGEXES = [/claude-3-5-haiku-20241022/];
@@ -485,6 +508,14 @@ const openaiModels = [
   "gpt-4-32k-0613",
   "gpt-4-turbo",
   "gpt-4-turbo-preview",
+  "gpt-4.1",
+  "gpt-4.1-2025-04-14",
+  "gpt-4.1-mini",
+  "gpt-4.1-mini-2025-04-14",
+  "gpt-4.1-nano",
+  "gpt-4.1-nano-2025-04-14",
+  "gpt-4.5-preview",
+  "gpt-4.5-preview-2025-02-27",
   "gpt-4o",
   "gpt-4o-2024-05-13",
   "gpt-4o-2024-08-06",
@@ -499,23 +530,20 @@ const openaiModels = [
   "o1-mini",
   "o1-preview",
   "o3-mini",
+  "o3",
+  "o4-mini",
 ];

 const googleModels = [
-  "gemini-1.0-pro", // Deprecated on 2/15/2025
   "gemini-1.5-pro-latest",
   "gemini-1.5-pro",
   "gemini-1.5-pro-002",
-  "gemini-1.5-pro-exp-0827",
   "gemini-1.5-flash-latest",
   "gemini-1.5-flash-8b-latest",
   "gemini-1.5-flash",
   "gemini-1.5-flash-8b",
   "gemini-1.5-flash-002",
-  "gemini-1.5-flash-exp-0827",
   "learnlm-1.5-pro-experimental",
-  "gemini-exp-1114",
-  "gemini-exp-1121",
   "gemini-exp-1206",
   "gemini-2.0-flash",
   "gemini-2.0-flash-exp",
@@ -525,6 +553,7 @@ const googleModels = [
   "gemini-2.0-flash-thinking-exp-01-21",
   "gemini-2.0-pro-exp",
   "gemini-2.0-pro-exp-02-05",
+  "gemini-2.5-pro-preview-06-05",
 ];

 const anthropicModels = [
@@ -611,6 +640,18 @@ const xAIModes = [
   "grok-2-vision-1212",
   "grok-2-vision",
   "grok-2-vision-latest",
+  "grok-3-mini-fast-beta",
+  "grok-3-mini-fast",
+  "grok-3-mini-fast-latest",
+  "grok-3-mini-beta",
+  "grok-3-mini",
+  "grok-3-mini-latest",
+  "grok-3-fast-beta",
+  "grok-3-fast",
+  "grok-3-fast-latest",
+  "grok-3-beta",
+  "grok-3",
+  "grok-3-latest",
 ];

 const chatglmModels = [
@@ -650,6 +691,31 @@ const siliconflowModels = [
   "Pro/deepseek-ai/DeepSeek-V3",
 ];

+const ai302Models = [
+  "deepseek-chat",
+  "gpt-4o",
+  "chatgpt-4o-latest",
+  "llama3.3-70b",
+  "deepseek-reasoner",
+  "gemini-2.0-flash",
+  "claude-3-7-sonnet-20250219",
+  "claude-3-7-sonnet-latest",
+  "grok-3-beta",
+  "grok-3-mini-beta",
+  "gpt-4.1",
+  "gpt-4.1-mini",
+  "o3",
+  "o4-mini",
+  "qwen3-235b-a22b",
+  "qwen3-32b",
+  "gemini-2.5-pro-preview-05-06",
+  "llama-4-maverick",
+  "gemini-2.5-flash",
+  "claude-sonnet-4-20250514",
+  "claude-opus-4-20250514",
+  "gemini-2.5-pro",
+];
+
 let seq = 1000; // 内置的模型序号生成器从1000开始
 export const DEFAULT_MODELS = [
   ...openaiModels.map((name) => ({
@@ -806,6 +872,17 @@ export const DEFAULT_MODELS = [
       sorted: 14,
     },
   })),
+  ...ai302Models.map((name) => ({
+    name,
+    available: true,
+    sorted: seq++,
+    provider: {
+      id: "ai302",
+      providerName: "302.AI",
+      providerType: "ai302",
+      sorted: 15,
+    },
+  })),
 ] as const;

 export const CHAT_PAGE_SIZE = 15;
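The `AI302` constant above only carries path fragments; a short sketch of assembling a chat-completions URL from it follows. Only `AI302` and `AI302_BASE_URL` are from the diff; the helper itself is illustrative.

```ts
// Sketch: building a 302.AI chat URL from the constants introduced above.
import { AI302, AI302_BASE_URL } from "./constant"; // path assumed

export function ai302ChatUrl(baseUrl: string = AI302_BASE_URL): string {
  const base = baseUrl.endsWith("/") ? baseUrl.slice(0, -1) : baseUrl;
  // e.g. "https://api.302.ai" + "/" + "v1/chat/completions"
  return [base, AI302.ChatPath].join("/");
}
```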
@@ -416,6 +416,17 @@ const ar: PartialLocaleType = {
           SubTitle: "مثال:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "مفتاح 302.AI API",
+          SubTitle: "استخدم مفتاح 302.AI API مخصص",
+          Placeholder: "مفتاح 302.AI API",
+        },
+        Endpoint: {
+          Title: "عنوان الواجهة",
+          SubTitle: "مثال:",
+        },
+      },
       CustomModel: {
         Title: "اسم النموذج المخصص",
         SubTitle: "أضف خيارات نموذج مخصص، مفصولة بفواصل إنجليزية",

@@ -423,6 +423,17 @@ const bn: PartialLocaleType = {
           SubTitle: "উদাহরণ:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "ইন্টারফেস কী",
+          SubTitle: "স্বনির্ধারিত 302.AI API কী ব্যবহার করুন",
+          Placeholder: "302.AI API কী",
+        },
+        Endpoint: {
+          Title: "ইন্টারফেস ঠিকানা",
+          SubTitle: "উদাহরণ:",
+        },
+      },
       CustomModel: {
         Title: "স্বনির্ধারিত মডেল নাম",
         SubTitle:

@@ -538,6 +538,17 @@ const cn = {
         Title: "自定义模型名",
         SubTitle: "增加自定义模型可选项,使用英文逗号隔开",
       },
+      AI302: {
+        ApiKey: {
+          Title: "接口密钥",
+          SubTitle: "使用自定义302.AI API Key",
+          Placeholder: "302.AI API Key",
+        },
+        Endpoint: {
+          Title: "接口地址",
+          SubTitle: "样例:",
+        },
+      },
     },

     Model: "模型 (model)",

@@ -423,6 +423,17 @@ const cs: PartialLocaleType = {
           SubTitle: "Příklad:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Rozhraní klíč",
+          SubTitle: "Použijte vlastní 302.AI API Key",
+          Placeholder: "302.AI API Key",
+        },
+        Endpoint: {
+          Title: "Adresa rozhraní",
+          SubTitle: "Příklad:",
+        },
+      },
       CustomModel: {
         Title: "Vlastní názvy modelů",
         SubTitle: "Přidejte možnosti vlastních modelů, oddělené čárkami",

@@ -517,6 +517,17 @@ const da: PartialLocaleType = {
           SubTitle: "Vælg et niveau for indholdskontrol",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "302.AI API Key",
+          SubTitle: "Brug en custom 302.AI API Key",
+          Placeholder: "302.AI API Key",
+        },
+        Endpoint: {
+          Title: "Endpoint-adresse",
+          SubTitle: "Eksempel: ",
+        },
+      },
     },
     Model: "Model",
     CompressModel: {

@@ -434,6 +434,17 @@ const de: PartialLocaleType = {
           SubTitle: "Beispiel:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Schnittstellenschlüssel",
+          SubTitle: "Verwenden Sie einen benutzerdefinierten 302.AI API-Schlüssel",
+          Placeholder: "302.AI API-Schlüssel",
+        },
+        Endpoint: {
+          Title: "Endpunktadresse",
+          SubTitle: "Beispiel:",
+        },
+      },
       CustomModel: {
         Title: "Benutzerdefinierter Modellname",
         SubTitle:

@@ -543,6 +543,17 @@ const en: LocaleType = {
           SubTitle: "Select a safety filtering level",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "302.AI API Key",
+          SubTitle: "Use a custom 302.AI API Key",
+          Placeholder: "302.AI API Key",
+        },
+        Endpoint: {
+          Title: "Endpoint Address",
+          SubTitle: "Example: ",
+        },
+      },
     },

     Model: "Model",

@@ -436,6 +436,17 @@ const es: PartialLocaleType = {
           SubTitle: "Ejemplo:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Clave de interfaz",
+          SubTitle: "Usa una clave API de 302.AI personalizada",
+          Placeholder: "Clave API de 302.AI",
+        },
+        Endpoint: {
+          Title: "Dirección del endpoint",
+          SubTitle: "Ejemplo:",
+        },
+      },
       CustomModel: {
         Title: "Nombre del modelo personalizado",
         SubTitle:

@@ -435,6 +435,17 @@ const fr: PartialLocaleType = {
           SubTitle: "Exemple :",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Clé d'interface",
+          SubTitle: "Utiliser une clé API 302.AI personnalisée",
+          Placeholder: "Clé API 302.AI",
+        },
+        Endpoint: {
+          Title: "Adresse de l'endpoint",
+          SubTitle: "Exemple :",
+        },
+      },
       CustomModel: {
         Title: "Nom du modèle personnalisé",
         SubTitle:

@@ -424,6 +424,17 @@ const id: PartialLocaleType = {
           SubTitle: "Contoh:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Kunci Antarmuka",
+          SubTitle: "Gunakan 302.AI API Key kustom",
+          Placeholder: "302.AI API Key",
+        },
+        Endpoint: {
+          Title: "Alamat Antarmuka",
+          SubTitle: "Contoh:",
+        },
+      },
       CustomModel: {
         Title: "Nama Model Kustom",
         SubTitle: "Tambahkan opsi model kustom, pisahkan dengan koma",

@@ -436,6 +436,17 @@ const it: PartialLocaleType = {
           SubTitle: "Esempio:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Chiave dell'interfaccia",
+          SubTitle: "Utilizza una chiave API 302.AI personalizzata",
+          Placeholder: "Chiave API 302.AI",
+        },
+        Endpoint: {
+          Title: "Indirizzo dell'interfaccia",
+          SubTitle: "Esempio:",
+        },
+      },
       CustomModel: {
         Title: "Nome del modello personalizzato",
         SubTitle:

@@ -420,6 +420,17 @@ const jp: PartialLocaleType = {
           SubTitle: "例:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "APIキー",
+          SubTitle: "カスタム302.AI APIキーを使用",
+          Placeholder: "302.AI APIキー",
+        },
+        Endpoint: {
+          Title: "エンドポイント",
+          SubTitle: "例:",
+        },
+      },
       CustomModel: {
         Title: "カスタムモデル名",
         SubTitle: "カスタムモデルの選択肢を追加、英語のカンマで区切る",

@@ -421,6 +421,17 @@ const ko: PartialLocaleType = {
         Title: "커스텀 모델 이름",
         SubTitle: "커스텀 모델 옵션 추가, 영어 쉼표로 구분",
       },
+      AI302: {
+        ApiKey: {
+          Title: "엔드포인트 키",
+          SubTitle: "커스텀 302.AI API 키 사용",
+          Placeholder: "302.AI API 키",
+        },
+        Endpoint: {
+          Title: "엔드포인트 주소",
+          SubTitle: "예: ",
+        },
+      },
     },

     Model: "모델 (model)",

@@ -433,6 +433,17 @@ const no: PartialLocaleType = {
         Title: "Egendefinert modellnavn",
         SubTitle: "Legg til egendefinerte modellalternativer, skill med komma",
       },
+      AI302: {
+        ApiKey: {
+          Title: "API-nøkkel",
+          SubTitle: "Bruk egendefinert 302.AI API-nøkkel",
+          Placeholder: "302.AI API-nøkkel",
+        },
+        Endpoint: {
+          Title: "API-adresse",
+          SubTitle: "Eksempel:",
+        },
+      },
     },

     Model: "Modell",

@@ -359,6 +359,17 @@ const pt: PartialLocaleType = {
           SubTitle: "Verifique sua versão API do console Anthropic",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Chave API 302.AI",
+          SubTitle: "Use uma chave API 302.AI personalizada",
+          Placeholder: "302.AI API Key",
+        },
+        Endpoint: {
+          Title: "Endpoint Address",
+          SubTitle: "Exemplo: ",
+        },
+      },
       CustomModel: {
         Title: "Modelos Personalizados",
         SubTitle: "Opções de modelo personalizado, separados por vírgula",

@@ -426,6 +426,17 @@ const ru: PartialLocaleType = {
           SubTitle: "Пример:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Ключ интерфейса",
+          SubTitle: "Использовать пользовательский 302.AI API-ключ",
+          Placeholder: "302.AI API-ключ",
+        },
+        Endpoint: {
+          Title: "Адрес интерфейса",
+          SubTitle: "Пример:",
+        },
+      },
       CustomModel: {
         Title: "Название пользовательской модели",
         SubTitle:

@@ -381,6 +381,17 @@ const sk: PartialLocaleType = {
           SubTitle: "Vyberte špecifickú verziu časti",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "API kľúč",
+          SubTitle: "Použiť vlastný API kľúč 302.AI",
+          Placeholder: "302.AI API kľúč",
+        },
+        Endpoint: {
+          Title: "Adresa koncového bodu",
+          SubTitle: "Príklad:",
+        },
+      },
     },

     Model: "Model",

@@ -426,6 +426,17 @@ const tr: PartialLocaleType = {
           SubTitle: "Örnek:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "API Anahtarı",
+          SubTitle: "Özelleştirilmiş 302.AI API Anahtarı kullanın",
+          Placeholder: "302.AI API Anahtarı",
+        },
+        Endpoint: {
+          Title: "API Adresi",
+          SubTitle: "Örnek:",
+        },
+      },
       CustomModel: {
         Title: "Özelleştirilmiş Model Adı",
         SubTitle:

@@ -382,6 +382,17 @@ const tw = {
           SubTitle: "選擇一個特定的 API 版本",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "API 金鑰",
+          SubTitle: "使用自訂 302.AI API 金鑰",
+          Placeholder: "302.AI API 金鑰",
+        },
+        Endpoint: {
+          Title: "端點位址",
+          SubTitle: "範例:",
+        },
+      },
       CustomModel: {
         Title: "自訂模型名稱",
         SubTitle: "增加自訂模型可選擇項目,使用英文逗號隔開",

@@ -422,6 +422,17 @@ const vi: PartialLocaleType = {
           SubTitle: "Ví dụ:",
         },
       },
+      AI302: {
+        ApiKey: {
+          Title: "Khóa API 302.AI",
+          SubTitle: "Sử dụng khóa API 302.AI tùy chỉnh",
+          Placeholder: "302.AI API Key",
+        },
+        Endpoint: {
+          Title: "Địa chỉ giao diện",
+          SubTitle: "Ví dụ:",
+        },
+      },
       CustomModel: {
         Title: "Tên mô hình tùy chỉnh",
         SubTitle:
@@ -17,6 +17,7 @@ import {
   XAI_BASE_URL,
   CHATGLM_BASE_URL,
   SILICONFLOW_BASE_URL,
+  AI302_BASE_URL,
 } from "../constant";
 import { getHeaders } from "../client/api";
 import { getClientConfig } from "../config/client";
@@ -59,6 +60,8 @@ const DEFAULT_SILICONFLOW_URL = isApp
   ? SILICONFLOW_BASE_URL
   : ApiPath.SiliconFlow;

+const DEFAULT_AI302_URL = isApp ? AI302_BASE_URL : ApiPath["302.AI"];
+
 const DEFAULT_ACCESS_STATE = {
   accessCode: "",
   useCustomConfig: false,
@@ -132,6 +135,10 @@ const DEFAULT_ACCESS_STATE = {
   siliconflowUrl: DEFAULT_SILICONFLOW_URL,
   siliconflowApiKey: "",

+  // 302.AI
+  ai302Url: DEFAULT_AI302_URL,
+  ai302ApiKey: "",
+
   // server config
   needCode: true,
   hideUserApiKey: false,
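The store hunks above default `ai302Url` to the raw base URL in the desktop app and to the `/api/302ai` proxy path in the browser. A minimal read of those fields is sketched below; `useAccessStore` is assumed to be the zustand hook this store file exports.

```ts
// Illustrative read of the new 302.AI fields from the access store.
import { useAccessStore } from "./access"; // hook name and path assumed

export function getAi302Config() {
  const access = useAccessStore.getState();
  return {
    baseUrl: access.ai302Url, // DEFAULT_AI302_URL unless the user overrides it
    hasKey: access.ai302ApiKey.length > 0,
  };
}
```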
@@ -99,7 +99,6 @@
   font-size: 14px;
   line-height: 1.5;
   word-wrap: break-word;
-  margin-bottom: 0;
 }

 .light {
@@ -359,14 +358,8 @@
 .markdown-body kbd {
   display: inline-block;
   padding: 3px 5px;
-  font:
-    11px ui-monospace,
-    SFMono-Regular,
-    SF Mono,
-    Menlo,
-    Consolas,
-    Liberation Mono,
-    monospace;
+  font: 11px ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
+    Liberation Mono, monospace;
   line-height: 10px;
   color: var(--color-fg-default);
   vertical-align: middle;
@@ -455,28 +448,16 @@
 .markdown-body tt,
 .markdown-body code,
 .markdown-body samp {
-  font-family:
-    ui-monospace,
-    SFMono-Regular,
-    SF Mono,
-    Menlo,
-    Consolas,
-    Liberation Mono,
-    monospace;
+  font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
+    Liberation Mono, monospace;
   font-size: 12px;
 }

 .markdown-body pre {
   margin-top: 0;
   margin-bottom: 0;
-  font-family:
-    ui-monospace,
-    SFMono-Regular,
-    SF Mono,
-    Menlo,
-    Consolas,
-    Liberation Mono,
-    monospace;
+  font-family: ui-monospace, SFMono-Regular, SF Mono, Menlo, Consolas,
+    Liberation Mono, monospace;
   font-size: 12px;
   word-wrap: normal;
 }
@@ -1149,87 +1130,3 @@
 #dmermaid {
   display: none;
 }
-
-.markdown-content {
-  width: 100%;
-}
-
-.markdown-paragraph {
-  transition: opacity 0.3s ease;
-  margin-bottom: 0.5em;
-
-  &.markdown-paragraph-visible {
-    opacity: 1;
-  }
-
-  &.markdown-paragraph-hidden {
-    opacity: 0.7;
-  }
-}
-
-.markdown-paragraph-placeholder {
-  padding: 8px;
-  color: var(--color-fg-subtle);
-  background-color: var(--color-canvas-subtle);
-  border-radius: 6px;
-  border-left: 3px solid var(--color-border-muted);
-  white-space: nowrap;
-  overflow: hidden;
-  text-overflow: ellipsis;
-  font-family: var(--font-family-sans);
-  font-size: 14px;
-  min-height: 1.2em;
-}
-
-.markdown-paragraph-loading {
-  height: 20px;
-  background-color: var(--color-canvas-subtle);
-  border-radius: 6px;
-  margin-bottom: 8px;
-  position: relative;
-  overflow: hidden;
-
-  &::after {
-    content: "";
-    position: absolute;
-    top: 0;
-    left: 0;
-    width: 30%;
-    height: 100%;
-    background: linear-gradient(
-      90deg,
-      transparent,
-      rgba(255, 255, 255, 0.1),
-      transparent
-    );
-    animation: shimmer 1.5s infinite;
-  }
-}
-
-@keyframes shimmer {
-  0% {
-    transform: translateX(-100%);
-  }
-  100% {
-    transform: translateX(200%);
-  }
-}
-
-.markdown-streaming-content {
-  width: 100%;
-}
-
-.markdown-streaming-paragraph {
-  opacity: 1;
-  animation: fadeIn 0.3s ease-in-out;
-  margin-bottom: 0.5em;
-}
-
-@keyframes fadeIn {
-  from {
-    opacity: 0.5;
-  }
-  to {
-    opacity: 1;
-  }
-}
@@ -81,7 +81,7 @@
     "eslint-plugin-unused-imports": "^3.2.0",
     "husky": "^8.0.0",
     "jest": "^29.7.0",
-    "jest-environment-jsdom": "^29.7.0",
+    "jest-environment-jsdom": "^30.0.2",
     "lint-staged": "^13.2.2",
     "prettier": "^3.0.2",
     "ts-node": "^10.9.2",