	feat: add 302.AI provider
@@ -81,3 +81,9 @@ SILICONFLOW_API_KEY=

### siliconflow Api url (optional)
SILICONFLOW_URL=

### 302.AI Api key (optional)
AI302_API_KEY=

### 302.AI Api url (optional)
AI302_URL=
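For local testing, a minimal sketch of how these optional variables might be filled in (the key below is a placeholder; AI302_URL can be left empty to fall back to the default base URL https://api.302.ai defined in constant.ts):

### hypothetical local values
AI302_API_KEY=sk-your-302ai-key
AI302_URL=https://api.302.ai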
@@ -22,12 +22,12 @@ English / [简体中文](./README_CN.md)
[![MacOS][MacOS-image]][download-url]
[![Linux][Linux-image]][download-url]

[NextChatAI](https://nextchat.club?utm_source=readme) / [iOS APP](https://apps.apple.com/us/app/nextchat-ai/id6743085599) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Enterprise Edition](#enterprise-edition)
[NextChatAI](https://nextchat.club?utm_source=readme) / [iOS APP](https://apps.apple.com/us/app/nextchat-ai/id6743085599) / [Web App Demo](https://app.nextchat.club) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Enterprise Edition](#enterprise-edition)

[saas-url]: https://nextchat.club?utm_source=readme
[saas-image]: https://img.shields.io/badge/NextChat-Saas-green?logo=microsoftedge
[web-url]: https://app.nextchat.dev/
[web-url]: https://app.nextchat.club/
[download-url]: https://github.com/Yidadaa/ChatGPT-Next-Web/releases
[Web-image]: https://img.shields.io/badge/Web-PWA-orange?logo=microsoftedge
[Windows-image]: https://img.shields.io/badge/-Windows-blue?logo=windows
app/api/302ai.ts (new file, 128 lines)
@@ -0,0 +1,128 @@
import { getServerSideConfig } from "@/app/config/server";
import {
  AI302_BASE_URL,
  ApiPath,
  ModelProvider,
  ServiceProvider,
} from "@/app/constant";
import { prettyObject } from "@/app/utils/format";
import { NextRequest, NextResponse } from "next/server";
import { auth } from "@/app/api/auth";
import { isModelNotavailableInServer } from "@/app/utils/model";

const serverConfig = getServerSideConfig();

export async function handle(
  req: NextRequest,
  { params }: { params: { path: string[] } },
) {
  console.log("[302.AI Route] params ", params);

  if (req.method === "OPTIONS") {
    return NextResponse.json({ body: "OK" }, { status: 200 });
  }

  const authResult = auth(req, ModelProvider["302.AI"]);
  if (authResult.error) {
    return NextResponse.json(authResult, {
      status: 401,
    });
  }

  try {
    const response = await request(req);
    return response;
  } catch (e) {
    console.error("[302.AI] ", e);
    return NextResponse.json(prettyObject(e));
  }
}

async function request(req: NextRequest) {
  const controller = new AbortController();

  // use base url or just remove the path
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath["302.AI"], "");

  let baseUrl = serverConfig.ai302Url || AI302_BASE_URL;

  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    body: req.body,
    redirect: "manual",
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // not undefined and is false
      if (
        isModelNotavailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider["302.AI"] as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      console.error(`[302.AI] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    clearTimeout(timeoutId);
  }
}
@@ -15,6 +15,7 @@ import { handle as siliconflowHandler } from "../../siliconflow";
import { handle as xaiHandler } from "../../xai";
import { handle as chatglmHandler } from "../../glm";
import { handle as proxyHandler } from "../../proxy";
import { handle as ai302Handler } from "../../302ai";

async function handle(
  req: NextRequest,
@@ -52,6 +53,8 @@ async function handle(
      return siliconflowHandler(req, { params });
    case ApiPath.OpenAI:
      return openaiHandler(req, { params });
    case ApiPath["302.AI"]:
      return ai302Handler(req, { params });
    default:
      return proxyHandler(req, { params });
  }
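With the new case in place, a request to the proxy path is matched on ApiPath["302.AI"] and handed to ai302Handler, which forwards it to the 302.AI base URL with the caller's Authorization header. A rough sketch of such a call, assuming a local dev server and no server-side access code (the key is a placeholder):

// hypothetical client-side call through the new route
const res = await fetch("/api/302ai/v1/chat/completions", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer sk-placeholder-302ai-key",
  },
  body: JSON.stringify({
    model: "gpt-4o",
    stream: false,
    messages: [{ role: "user", content: "hello" }],
  }),
});
console.log(await res.json());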
@@ -24,6 +24,7 @@ import { DeepSeekApi } from "./platforms/deepseek";
import { XAIApi } from "./platforms/xai";
import { ChatGLMApi } from "./platforms/glm";
import { SiliconflowApi } from "./platforms/siliconflow";
import { Ai302Api } from "./platforms/ai302";

export const ROLES = ["system", "user", "assistant"] as const;
export type MessageRole = (typeof ROLES)[number];
@@ -173,6 +174,9 @@ export class ClientApi {
      case ModelProvider.SiliconFlow:
        this.llm = new SiliconflowApi();
        break;
      case ModelProvider["302.AI"]:
        this.llm = new Ai302Api();
        break;
      default:
        this.llm = new ChatGPTApi();
    }
@@ -265,6 +269,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
    const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
    const isSiliconFlow =
      modelConfig.providerName === ServiceProvider.SiliconFlow;
    const isAI302 = modelConfig.providerName === ServiceProvider["302.AI"];
    const isEnabledAccessControl = accessStore.enabledAccessControl();
    const apiKey = isGoogle
      ? accessStore.googleApiKey
@@ -290,6 +295,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
      ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
        ? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret
        : ""
      : isAI302
      ? accessStore.ai302ApiKey
      : accessStore.openaiApiKey;
    return {
      isGoogle,
@@ -304,6 +311,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
      isXAI,
      isChatGLM,
      isSiliconFlow,
      isAI302,
      apiKey,
      isEnabledAccessControl,
    };
@@ -332,6 +340,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
    isXAI,
    isChatGLM,
    isSiliconFlow,
    isAI302,
    apiKey,
    isEnabledAccessControl,
  } = getConfig();
@@ -382,6 +391,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
      return new ClientApi(ModelProvider.ChatGLM);
    case ServiceProvider.SiliconFlow:
      return new ClientApi(ModelProvider.SiliconFlow);
    case ServiceProvider["302.AI"]:
      return new ClientApi(ModelProvider["302.AI"]);
    default:
      return new ClientApi(ModelProvider.GPT);
  }
app/client/platforms/ai302.ts (new file, 287 lines)
@@ -0,0 +1,287 @@
"use client";
 | 
			
		||||
 | 
			
		||||
import {
 | 
			
		||||
  ApiPath,
 | 
			
		||||
  AI302_BASE_URL,
 | 
			
		||||
  DEFAULT_MODELS,
 | 
			
		||||
  AI302,
 | 
			
		||||
} from "@/app/constant";
 | 
			
		||||
import {
 | 
			
		||||
  useAccessStore,
 | 
			
		||||
  useAppConfig,
 | 
			
		||||
  useChatStore,
 | 
			
		||||
  ChatMessageTool,
 | 
			
		||||
  usePluginStore,
 | 
			
		||||
} from "@/app/store";
 | 
			
		||||
import { preProcessImageContent, streamWithThink } from "@/app/utils/chat";
 | 
			
		||||
import {
 | 
			
		||||
  ChatOptions,
 | 
			
		||||
  getHeaders,
 | 
			
		||||
  LLMApi,
 | 
			
		||||
  LLMModel,
 | 
			
		||||
  SpeechOptions,
 | 
			
		||||
} from "../api";
 | 
			
		||||
import { getClientConfig } from "@/app/config/client";
 | 
			
		||||
import {
 | 
			
		||||
  getMessageTextContent,
 | 
			
		||||
  getMessageTextContentWithoutThinking,
 | 
			
		||||
  isVisionModel,
 | 
			
		||||
  getTimeoutMSByModel,
 | 
			
		||||
} from "@/app/utils";
 | 
			
		||||
import { RequestPayload } from "./openai";
 | 
			
		||||
 | 
			
		||||
import { fetch } from "@/app/utils/stream";
 | 
			
		||||
export interface Ai302ListModelResponse {
 | 
			
		||||
  object: string;
 | 
			
		||||
  data: Array<{
 | 
			
		||||
    id: string;
 | 
			
		||||
    object: string;
 | 
			
		||||
    root: string;
 | 
			
		||||
  }>;
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export class Ai302Api implements LLMApi {
 | 
			
		||||
  private disableListModels = false;
 | 
			
		||||
 | 
			
		||||
  path(path: string): string {
 | 
			
		||||
    const accessStore = useAccessStore.getState();
 | 
			
		||||
 | 
			
		||||
    let baseUrl = "";
 | 
			
		||||
 | 
			
		||||
    if (accessStore.useCustomConfig) {
 | 
			
		||||
      baseUrl = accessStore.ai302Url;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    if (baseUrl.length === 0) {
 | 
			
		||||
      const isApp = !!getClientConfig()?.isApp;
 | 
			
		||||
      const apiPath = ApiPath["302.AI"];
 | 
			
		||||
      baseUrl = isApp ? AI302_BASE_URL : apiPath;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    if (baseUrl.endsWith("/")) {
 | 
			
		||||
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
 | 
			
		||||
    }
 | 
			
		||||
    if (
 | 
			
		||||
      !baseUrl.startsWith("http") &&
 | 
			
		||||
      !baseUrl.startsWith(ApiPath["302.AI"])
 | 
			
		||||
    ) {
 | 
			
		||||
      baseUrl = "https://" + baseUrl;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    console.log("[Proxy Endpoint] ", baseUrl, path);
 | 
			
		||||
 | 
			
		||||
    return [baseUrl, path].join("/");
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  extractMessage(res: any) {
 | 
			
		||||
    return res.choices?.at(0)?.message?.content ?? "";
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  speech(options: SpeechOptions): Promise<ArrayBuffer> {
 | 
			
		||||
    throw new Error("Method not implemented.");
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async chat(options: ChatOptions) {
 | 
			
		||||
    const visionModel = isVisionModel(options.config.model);
 | 
			
		||||
    const messages: ChatOptions["messages"] = [];
 | 
			
		||||
    for (const v of options.messages) {
 | 
			
		||||
      if (v.role === "assistant") {
 | 
			
		||||
        const content = getMessageTextContentWithoutThinking(v);
 | 
			
		||||
        messages.push({ role: v.role, content });
 | 
			
		||||
      } else {
 | 
			
		||||
        const content = visionModel
 | 
			
		||||
          ? await preProcessImageContent(v.content)
 | 
			
		||||
          : getMessageTextContent(v);
 | 
			
		||||
        messages.push({ role: v.role, content });
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    const modelConfig = {
 | 
			
		||||
      ...useAppConfig.getState().modelConfig,
 | 
			
		||||
      ...useChatStore.getState().currentSession().mask.modelConfig,
 | 
			
		||||
      ...{
 | 
			
		||||
        model: options.config.model,
 | 
			
		||||
        providerName: options.config.providerName,
 | 
			
		||||
      },
 | 
			
		||||
    };
 | 
			
		||||
 | 
			
		||||
    const requestPayload: RequestPayload = {
 | 
			
		||||
      messages,
 | 
			
		||||
      stream: options.config.stream,
 | 
			
		||||
      model: modelConfig.model,
 | 
			
		||||
      temperature: modelConfig.temperature,
 | 
			
		||||
      presence_penalty: modelConfig.presence_penalty,
 | 
			
		||||
      frequency_penalty: modelConfig.frequency_penalty,
 | 
			
		||||
      top_p: modelConfig.top_p,
 | 
			
		||||
      // max_tokens: Math.max(modelConfig.max_tokens, 1024),
 | 
			
		||||
      // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
 | 
			
		||||
    };
 | 
			
		||||
 | 
			
		||||
    console.log("[Request] openai payload: ", requestPayload);
 | 
			
		||||
 | 
			
		||||
    const shouldStream = !!options.config.stream;
 | 
			
		||||
    const controller = new AbortController();
 | 
			
		||||
    options.onController?.(controller);
 | 
			
		||||
 | 
			
		||||
    try {
 | 
			
		||||
      const chatPath = this.path(AI302.ChatPath);
 | 
			
		||||
      const chatPayload = {
 | 
			
		||||
        method: "POST",
 | 
			
		||||
        body: JSON.stringify(requestPayload),
 | 
			
		||||
        signal: controller.signal,
 | 
			
		||||
        headers: getHeaders(),
 | 
			
		||||
      };
 | 
			
		||||
 | 
			
		||||
      // console.log(chatPayload);
 | 
			
		||||
 | 
			
		||||
      // Use extended timeout for thinking models as they typically require more processing time
 | 
			
		||||
      const requestTimeoutId = setTimeout(
 | 
			
		||||
        () => controller.abort(),
 | 
			
		||||
        getTimeoutMSByModel(options.config.model),
 | 
			
		||||
      );
 | 
			
		||||
 | 
			
		||||
      if (shouldStream) {
 | 
			
		||||
        const [tools, funcs] = usePluginStore
 | 
			
		||||
          .getState()
 | 
			
		||||
          .getAsTools(
 | 
			
		||||
            useChatStore.getState().currentSession().mask?.plugin || [],
 | 
			
		||||
          );
 | 
			
		||||
        return streamWithThink(
 | 
			
		||||
          chatPath,
 | 
			
		||||
          requestPayload,
 | 
			
		||||
          getHeaders(),
 | 
			
		||||
          tools as any,
 | 
			
		||||
          funcs,
 | 
			
		||||
          controller,
 | 
			
		||||
          // parseSSE
 | 
			
		||||
          (text: string, runTools: ChatMessageTool[]) => {
 | 
			
		||||
            // console.log("parseSSE", text, runTools);
 | 
			
		||||
            const json = JSON.parse(text);
 | 
			
		||||
            const choices = json.choices as Array<{
 | 
			
		||||
              delta: {
 | 
			
		||||
                content: string | null;
 | 
			
		||||
                tool_calls: ChatMessageTool[];
 | 
			
		||||
                reasoning_content: string | null;
 | 
			
		||||
              };
 | 
			
		||||
            }>;
 | 
			
		||||
            const tool_calls = choices[0]?.delta?.tool_calls;
 | 
			
		||||
            if (tool_calls?.length > 0) {
 | 
			
		||||
              const index = tool_calls[0]?.index;
 | 
			
		||||
              const id = tool_calls[0]?.id;
 | 
			
		||||
              const args = tool_calls[0]?.function?.arguments;
 | 
			
		||||
              if (id) {
 | 
			
		||||
                runTools.push({
 | 
			
		||||
                  id,
 | 
			
		||||
                  type: tool_calls[0]?.type,
 | 
			
		||||
                  function: {
 | 
			
		||||
                    name: tool_calls[0]?.function?.name as string,
 | 
			
		||||
                    arguments: args,
 | 
			
		||||
                  },
 | 
			
		||||
                });
 | 
			
		||||
              } else {
 | 
			
		||||
                // @ts-ignore
 | 
			
		||||
                runTools[index]["function"]["arguments"] += args;
 | 
			
		||||
              }
 | 
			
		||||
            }
 | 
			
		||||
            const reasoning = choices[0]?.delta?.reasoning_content;
 | 
			
		||||
            const content = choices[0]?.delta?.content;
 | 
			
		||||
 | 
			
		||||
            // Skip if both content and reasoning_content are empty or null
 | 
			
		||||
            if (
 | 
			
		||||
              (!reasoning || reasoning.length === 0) &&
 | 
			
		||||
              (!content || content.length === 0)
 | 
			
		||||
            ) {
 | 
			
		||||
              return {
 | 
			
		||||
                isThinking: false,
 | 
			
		||||
                content: "",
 | 
			
		||||
              };
 | 
			
		||||
            }
 | 
			
		||||
 | 
			
		||||
            if (reasoning && reasoning.length > 0) {
 | 
			
		||||
              return {
 | 
			
		||||
                isThinking: true,
 | 
			
		||||
                content: reasoning,
 | 
			
		||||
              };
 | 
			
		||||
            } else if (content && content.length > 0) {
 | 
			
		||||
              return {
 | 
			
		||||
                isThinking: false,
 | 
			
		||||
                content: content,
 | 
			
		||||
              };
 | 
			
		||||
            }
 | 
			
		||||
 | 
			
		||||
            return {
 | 
			
		||||
              isThinking: false,
 | 
			
		||||
              content: "",
 | 
			
		||||
            };
 | 
			
		||||
          },
 | 
			
		||||
          // processToolMessage, include tool_calls message and tool call results
 | 
			
		||||
          (
 | 
			
		||||
            requestPayload: RequestPayload,
 | 
			
		||||
            toolCallMessage: any,
 | 
			
		||||
            toolCallResult: any[],
 | 
			
		||||
          ) => {
 | 
			
		||||
            // @ts-ignore
 | 
			
		||||
            requestPayload?.messages?.splice(
 | 
			
		||||
              // @ts-ignore
 | 
			
		||||
              requestPayload?.messages?.length,
 | 
			
		||||
              0,
 | 
			
		||||
              toolCallMessage,
 | 
			
		||||
              ...toolCallResult,
 | 
			
		||||
            );
 | 
			
		||||
          },
 | 
			
		||||
          options,
 | 
			
		||||
        );
 | 
			
		||||
      } else {
 | 
			
		||||
        const res = await fetch(chatPath, chatPayload);
 | 
			
		||||
        clearTimeout(requestTimeoutId);
 | 
			
		||||
 | 
			
		||||
        const resJson = await res.json();
 | 
			
		||||
        const message = this.extractMessage(resJson);
 | 
			
		||||
        options.onFinish(message, res);
 | 
			
		||||
      }
 | 
			
		||||
    } catch (e) {
 | 
			
		||||
      console.log("[Request] failed to make a chat request", e);
 | 
			
		||||
      options.onError?.(e as Error);
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
  async usage() {
 | 
			
		||||
    return {
 | 
			
		||||
      used: 0,
 | 
			
		||||
      total: 0,
 | 
			
		||||
    };
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  async models(): Promise<LLMModel[]> {
 | 
			
		||||
    if (this.disableListModels) {
 | 
			
		||||
      return DEFAULT_MODELS.slice();
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    const res = await fetch(this.path(AI302.ListModelPath), {
 | 
			
		||||
      method: "GET",
 | 
			
		||||
      headers: {
 | 
			
		||||
        ...getHeaders(),
 | 
			
		||||
      },
 | 
			
		||||
    });
 | 
			
		||||
 | 
			
		||||
    const resJson = (await res.json()) as Ai302ListModelResponse;
 | 
			
		||||
    const chatModels = resJson.data;
 | 
			
		||||
    console.log("[Models]", chatModels);
 | 
			
		||||
 | 
			
		||||
    if (!chatModels) {
 | 
			
		||||
      return [];
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    let seq = 1000; // keep the ordering consistent with constant.ts
    return chatModels.map((m) => ({
      name: m.id,
      available: true,
      sorted: seq++,
      provider: {
        id: "ai302",
        providerName: "302.AI",
        providerType: "ai302",
        sorted: 15,
      },
    }));
  }
}
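A short usage sketch, run from an async context and assuming a 302.AI key is configured in the settings: getClientApi resolves ServiceProvider["302.AI"] to ModelProvider["302.AI"], whose ClientApi constructor instantiates this Ai302Api class.

import { getClientApi } from "@/app/client/api";
import { ServiceProvider } from "@/app/constant";

// hypothetical caller; uses only identifiers added or touched in this commit
const api = getClientApi(ServiceProvider["302.AI"]);
const models = await api.llm.models(); // GETs AI302.ListModelPath through the proxy route
console.log(models.map((m) => m.name));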
@@ -75,6 +75,7 @@ import {
  ChatGLM,
  DeepSeek,
  SiliconFlow,
  AI302,
} from "../constant";
import { Prompt, SearchService, usePromptStore } from "../store/prompt";
import { ErrorBoundary } from "./error";
@@ -1458,6 +1459,46 @@ export function Settings() {
    </>
  );

  const ai302ConfigComponent = accessStore.provider === ServiceProvider["302.AI"] && (
    <>
      <ListItem
          title={Locale.Settings.Access.AI302.Endpoint.Title}
          subTitle={
            Locale.Settings.Access.AI302.Endpoint.SubTitle +
            AI302.ExampleEndpoint
          }
        >
          <input
            aria-label={Locale.Settings.Access.AI302.Endpoint.Title}
            type="text"
            value={accessStore.ai302Url}
            placeholder={AI302.ExampleEndpoint}
            onChange={(e) =>
              accessStore.update(
                (access) => (access.ai302Url = e.currentTarget.value),
              )
            }
          ></input>
        </ListItem>
        <ListItem
          title={Locale.Settings.Access.AI302.ApiKey.Title}
          subTitle={Locale.Settings.Access.AI302.ApiKey.SubTitle}
        >
          <PasswordInput
            aria-label={Locale.Settings.Access.AI302.ApiKey.Title}
            value={accessStore.ai302ApiKey}
            type="text"
            placeholder={Locale.Settings.Access.AI302.ApiKey.Placeholder}
            onChange={(e) => {
              accessStore.update(
                (access) => (access.ai302ApiKey = e.currentTarget.value),
              );
            }}
          />
        </ListItem>
      </>
  );

  return (
    <ErrorBoundary>
      <div className="window-header" data-tauri-drag-region>
@@ -1822,6 +1863,7 @@ export function Settings() {
                  {XAIConfigComponent}
                  {chatglmConfigComponent}
                  {siliconflowConfigComponent}
                  {ai302ConfigComponent}
                </>
              )}
            </>
@@ -88,6 +88,10 @@ declare global {
      SILICONFLOW_URL?: string;
      SILICONFLOW_API_KEY?: string;

      // 302.AI only
      AI302_URL?: string;
      AI302_API_KEY?: string;

      // custom template for preprocessing user input
      DEFAULT_INPUT_TEMPLATE?: string;

@@ -163,6 +167,7 @@ export const getServerSideConfig = () => {
  const isXAI = !!process.env.XAI_API_KEY;
  const isChatGLM = !!process.env.CHATGLM_API_KEY;
  const isSiliconFlow = !!process.env.SILICONFLOW_API_KEY;
  const isAI302 = !!process.env.AI302_API_KEY;
  // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
  // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
  // const randomIndex = Math.floor(Math.random() * apiKeys.length);
@@ -246,6 +251,10 @@ export const getServerSideConfig = () => {
    siliconFlowUrl: process.env.SILICONFLOW_URL,
    siliconFlowApiKey: getApiKey(process.env.SILICONFLOW_API_KEY),

    isAI302,
    ai302Url: process.env.AI302_URL,
    ai302ApiKey: getApiKey(process.env.AI302_API_KEY),

    gtmId: process.env.GTM_ID,
    gaId: process.env.GA_ID || DEFAULT_GA_ID,
@@ -36,6 +36,8 @@ export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";

export const SILICONFLOW_BASE_URL = "https://api.siliconflow.cn";

export const AI302_BASE_URL = "https://api.302.ai";

export const CACHE_URL_PREFIX = "/api/cache";
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;

@@ -72,6 +74,7 @@ export enum ApiPath {
  ChatGLM = "/api/chatglm",
  DeepSeek = "/api/deepseek",
  SiliconFlow = "/api/siliconflow",
  "302.AI" = "/api/302ai",
}

export enum SlotID {
@@ -130,6 +133,7 @@ export enum ServiceProvider {
  ChatGLM = "ChatGLM",
  DeepSeek = "DeepSeek",
  SiliconFlow = "SiliconFlow",
  "302.AI" = "302.AI",
}

// Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
@@ -156,6 +160,7 @@ export enum ModelProvider {
  ChatGLM = "ChatGLM",
  DeepSeek = "DeepSeek",
  SiliconFlow = "SiliconFlow",
  "302.AI" = "302.AI",
}

export const Stability = {
@@ -266,6 +271,13 @@ export const SiliconFlow = {
  ListModelPath: "v1/models?&sub_type=chat",
};

export const AI302 = {
  ExampleEndpoint: AI302_BASE_URL,
  ChatPath: "v1/chat/completions",
  EmbeddingsPath: "jina/v1/embeddings",
  ListModelPath: "v1/models?llm=1",
};

export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
// export const DEFAULT_SYSTEM_TEMPLATE = `
// You are ChatGPT, a large language model trained by {{ServiceProvider}}.
@@ -679,6 +691,31 @@ const siliconflowModels = [
  "Pro/deepseek-ai/DeepSeek-V3",
];

const ai302Models = [
  "deepseek-chat",
  "gpt-4o",
  "chatgpt-4o-latest",
  "llama3.3-70b",
  "deepseek-reasoner",
  "gemini-2.0-flash",
  "claude-3-7-sonnet-20250219",
  "claude-3-7-sonnet-latest",
  "grok-3-beta",
  "grok-3-mini-beta",
  "gpt-4.1",
  "gpt-4.1-mini",
  "o3",
  "o4-mini",
  "qwen3-235b-a22b",
  "qwen3-32b",
  "gemini-2.5-pro-preview-05-06",
  "llama-4-maverick",
  "gemini-2.5-flash",
  "claude-sonnet-4-20250514",
  "claude-opus-4-20250514",
  "gemini-2.5-pro",
];

let seq = 1000; // the built-in model sequence generator starts at 1000
export const DEFAULT_MODELS = [
  ...openaiModels.map((name) => ({
@@ -835,6 +872,17 @@ export const DEFAULT_MODELS = [
      sorted: 14,
    },
  })),
  ...ai302Models.map((name) => ({
    name,
    available: true,
    sorted: seq++,
    provider: {
      id: "ai302",
      providerName: "302.AI",
      providerType: "ai302",
      sorted: 15,
    },
  })),
] as const;

export const CHAT_PAGE_SIZE = 15;
@@ -416,6 +416,17 @@ const ar: PartialLocaleType = {
          SubTitle: "مثال:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "مفتاح 302.AI API",
          SubTitle: "استخدم مفتاح 302.AI API مخصص",
          Placeholder: "مفتاح 302.AI API",
        },
        Endpoint: {
          Title: "عنوان الواجهة",
          SubTitle: "مثال:",
        },
      },
      CustomModel: {
        Title: "اسم النموذج المخصص",
        SubTitle: "أضف خيارات نموذج مخصص، مفصولة بفواصل إنجليزية",
@@ -423,6 +423,17 @@ const bn: PartialLocaleType = {
          SubTitle: "উদাহরণ:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "ইন্টারফেস কী",
          SubTitle: "স্বনির্ধারিত 302.AI API কী ব্যবহার করুন",
          Placeholder: "302.AI API কী",
        },
        Endpoint: {
          Title: "ইন্টারফেস ঠিকানা",
          SubTitle: "উদাহরণ:",
        },
      },
      CustomModel: {
        Title: "স্বনির্ধারিত মডেল নাম",
        SubTitle:
@@ -538,6 +538,17 @@ const cn = {
        Title: "自定义模型名",
        SubTitle: "增加自定义模型可选项,使用英文逗号隔开",
      },
      AI302: {
        ApiKey: {
          Title: "接口密钥",
          SubTitle: "使用自定义302.AI API Key",
          Placeholder: "302.AI API Key",
        },
        Endpoint: {
          Title: "接口地址",
          SubTitle: "样例:",
        },
      },
    },

    Model: "模型 (model)",
@@ -423,6 +423,17 @@ const cs: PartialLocaleType = {
          SubTitle: "Příklad:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Rozhraní klíč",
          SubTitle: "Použijte vlastní 302.AI API Key",
          Placeholder: "302.AI API Key",
        },
        Endpoint: {
          Title: "Adresa rozhraní",
          SubTitle: "Příklad:",
        },
      },
      CustomModel: {
        Title: "Vlastní názvy modelů",
        SubTitle: "Přidejte možnosti vlastních modelů, oddělené čárkami",
@@ -517,6 +517,17 @@ const da: PartialLocaleType = {
          SubTitle: "Vælg et niveau for indholdskontrol",
        },
      },
      AI302: {
        ApiKey: {
          Title: "302.AI API Key",
          SubTitle: "Brug en custom 302.AI API Key",
          Placeholder: "302.AI API Key",
        },
        Endpoint: {
          Title: "Endpoint-adresse",
          SubTitle: "Eksempel: ",
        },
      },
    },
    Model: "Model",
    CompressModel: {
@@ -434,6 +434,17 @@ const de: PartialLocaleType = {
          SubTitle: "Beispiel:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Schnittstellenschlüssel",
          SubTitle: "Verwenden Sie einen benutzerdefinierten 302.AI API-Schlüssel",
          Placeholder: "302.AI API-Schlüssel",
        },
        Endpoint: {
          Title: "Endpunktadresse",
          SubTitle: "Beispiel:",
        },
      },
      CustomModel: {
        Title: "Benutzerdefinierter Modellname",
        SubTitle:
@@ -543,6 +543,17 @@ const en: LocaleType = {
          SubTitle: "Select a safety filtering level",
        },
      },
      AI302: {
        ApiKey: {
          Title: "302.AI API Key",
          SubTitle: "Use a custom 302.AI API Key",
          Placeholder: "302.AI API Key",
        },
        Endpoint: {
          Title: "Endpoint Address",
          SubTitle: "Example: ",
        },
      },
    },

    Model: "Model",
@@ -436,6 +436,17 @@ const es: PartialLocaleType = {
          SubTitle: "Ejemplo:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Clave de interfaz",
          SubTitle: "Usa una clave API de 302.AI personalizada",
          Placeholder: "Clave API de 302.AI",
        },
        Endpoint: {
          Title: "Dirección del endpoint",
          SubTitle: "Ejemplo:",
        },
      },
      CustomModel: {
        Title: "Nombre del modelo personalizado",
        SubTitle:
@@ -435,6 +435,17 @@ const fr: PartialLocaleType = {
          SubTitle: "Exemple :",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Clé d'interface",
          SubTitle: "Utiliser une clé API 302.AI personnalisée",
          Placeholder: "Clé API 302.AI",
        },
        Endpoint: {
          Title: "Adresse de l'endpoint",
          SubTitle: "Exemple :",
        },
      },
      CustomModel: {
        Title: "Nom du modèle personnalisé",
        SubTitle:
@@ -424,6 +424,17 @@ const id: PartialLocaleType = {
          SubTitle: "Contoh:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Kunci Antarmuka",
          SubTitle: "Gunakan 302.AI API Key kustom",
          Placeholder: "302.AI API Key",
        },
        Endpoint: {
          Title: "Alamat Antarmuka",
          SubTitle: "Contoh:",
        },
      },
      CustomModel: {
        Title: "Nama Model Kustom",
        SubTitle: "Tambahkan opsi model kustom, pisahkan dengan koma",
@@ -436,6 +436,17 @@ const it: PartialLocaleType = {
          SubTitle: "Esempio:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Chiave dell'interfaccia",
          SubTitle: "Utilizza una chiave API 302.AI personalizzata",
          Placeholder: "Chiave API 302.AI",
        },
        Endpoint: {
          Title: "Indirizzo dell'interfaccia",
          SubTitle: "Esempio:",
        },
      },
      CustomModel: {
        Title: "Nome del modello personalizzato",
        SubTitle:
@@ -420,6 +420,17 @@ const jp: PartialLocaleType = {
          SubTitle: "例:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "APIキー",
          SubTitle: "カスタム302.AI APIキーを使用",
          Placeholder: "302.AI APIキー",
        },
        Endpoint: {
          Title: "エンドポイント",
          SubTitle: "例:",
        },
      },
      CustomModel: {
        Title: "カスタムモデル名",
        SubTitle: "カスタムモデルの選択肢を追加、英語のカンマで区切る",
@@ -421,6 +421,17 @@ const ko: PartialLocaleType = {
        Title: "커스텀 모델 이름",
        SubTitle: "커스텀 모델 옵션 추가, 영어 쉼표로 구분",
      },
      AI302: {
        ApiKey: {
          Title: "엔드포인트 키",
          SubTitle: "커스텀 302.AI API 키 사용",
          Placeholder: "302.AI API 키",
        },
        Endpoint: {
          Title: "엔드포인트 주소",
          SubTitle: "예: ",
        },
      },
    },

    Model: "모델 (model)",
@@ -433,6 +433,17 @@ const no: PartialLocaleType = {
        Title: "Egendefinert modellnavn",
        SubTitle: "Legg til egendefinerte modellalternativer, skill med komma",
      },
      AI302: {
        ApiKey: {
          Title: "API-nøkkel",
          SubTitle: "Bruk egendefinert 302.AI API-nøkkel",
          Placeholder: "302.AI API-nøkkel",
        },
        Endpoint: {
          Title: "API-adresse",
          SubTitle: "Eksempel:",
        },
      },
    },

    Model: "Modell",
@@ -359,6 +359,17 @@ const pt: PartialLocaleType = {
          SubTitle: "Verifique sua versão API do console Anthropic",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Chave API 302.AI",
          SubTitle: "Use uma chave API 302.AI personalizada",
          Placeholder: "302.AI API Key",
        },
        Endpoint: {
          Title: "Endpoint Address",
          SubTitle: "Exemplo: ",
        },
      },
      CustomModel: {
        Title: "Modelos Personalizados",
        SubTitle: "Opções de modelo personalizado, separados por vírgula",
@@ -426,6 +426,17 @@ const ru: PartialLocaleType = {
          SubTitle: "Пример:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Ключ интерфейса",
          SubTitle: "Использовать пользовательский 302.AI API-ключ",
          Placeholder: "302.AI API-ключ",
        },
        Endpoint: {
          Title: "Адрес интерфейса",
          SubTitle: "Пример:",
        },
      },
      CustomModel: {
        Title: "Название пользовательской модели",
        SubTitle:
@@ -381,6 +381,17 @@ const sk: PartialLocaleType = {
          SubTitle: "Vyberte špecifickú verziu časti",
        },
      },
      AI302: {
        ApiKey: {
          Title: "API kľúč",
          SubTitle: "Použiť vlastný API kľúč 302.AI",
          Placeholder: "302.AI API kľúč",
        },
        Endpoint: {
          Title: "Adresa koncového bodu",
          SubTitle: "Príklad:",
        },
      },
    },

    Model: "Model",
@@ -426,6 +426,17 @@ const tr: PartialLocaleType = {
          SubTitle: "Örnek:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "API Anahtarı",
          SubTitle: "Özelleştirilmiş 302.AI API Anahtarı kullanın",
          Placeholder: "302.AI API Anahtarı",
        },
        Endpoint: {
          Title: "API Adresi",
          SubTitle: "Örnek:",
        },
      },
      CustomModel: {
        Title: "Özelleştirilmiş Model Adı",
        SubTitle:
@@ -382,6 +382,17 @@ const tw = {
          SubTitle: "選擇一個特定的 API 版本",
        },
      },
      AI302: {
        ApiKey: {
          Title: "API 金鑰",
          SubTitle: "使用自訂 302.AI API 金鑰",
          Placeholder: "302.AI API 金鑰",
        },
        Endpoint: {
          Title: "端點位址",
          SubTitle: "範例:",
        },
      },
      CustomModel: {
        Title: "自訂模型名稱",
        SubTitle: "增加自訂模型可選擇項目,使用英文逗號隔開",
@@ -422,6 +422,17 @@ const vi: PartialLocaleType = {
          SubTitle: "Ví dụ:",
        },
      },
      AI302: {
        ApiKey: {
          Title: "Khóa API 302.AI",
          SubTitle: "Sử dụng khóa API 302.AI tùy chỉnh",
          Placeholder: "302.AI API Key",
        },
        Endpoint: {
          Title: "Địa chỉ giao diện",
          SubTitle: "Ví dụ:",
        },
      },
      CustomModel: {
        Title: "Tên mô hình tùy chỉnh",
        SubTitle:
@@ -17,6 +17,7 @@ import {
  XAI_BASE_URL,
  CHATGLM_BASE_URL,
  SILICONFLOW_BASE_URL,
  AI302_BASE_URL,
} from "../constant";
import { getHeaders } from "../client/api";
import { getClientConfig } from "../config/client";
@@ -59,6 +60,8 @@ const DEFAULT_SILICONFLOW_URL = isApp
  ? SILICONFLOW_BASE_URL
  : ApiPath.SiliconFlow;

const DEFAULT_AI302_URL = isApp ? AI302_BASE_URL : ApiPath["302.AI"];

const DEFAULT_ACCESS_STATE = {
  accessCode: "",
  useCustomConfig: false,
@@ -132,6 +135,10 @@ const DEFAULT_ACCESS_STATE = {
  siliconflowUrl: DEFAULT_SILICONFLOW_URL,
  siliconflowApiKey: "",

  // 302.AI
  ai302Url: DEFAULT_AI302_URL,
  ai302ApiKey: "",

  // server config
  needCode: true,
  hideUserApiKey: false,
yarn.lock (12 changed lines)
@@ -3077,9 +3077,9 @@ camelcase@^6.2.0:
  integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==

caniuse-lite@^1.0.30001449, caniuse-lite@^1.0.30001503, caniuse-lite@^1.0.30001579, caniuse-lite@^1.0.30001646:
  version "1.0.30001692"
  resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001692.tgz"
  integrity sha512-A95VKan0kdtrsnMubMKxEKUKImOPSuCpYgxSQBo036P5YYgVIcOYJEgt/txJWqObiRQeISNCfef9nvlQ0vbV7A==
  version "1.0.30001724"
  resolved "https://mirrors.huaweicloud.com/repository/npm/caniuse-lite/-/caniuse-lite-1.0.30001724.tgz"
  integrity sha512-WqJo7p0TbHDOythNTqYujmaJTvtYRZrjpP8TCvH6Vb9CYJerJNKamKzIWOM4BkQatWj9H2lYulpdAQNBe7QhNA==

ccount@^2.0.0:
  version "2.0.1"
@@ -4334,14 +4334,14 @@ eslint-plugin-react@^7.31.7:

eslint-plugin-unused-imports@^3.2.0:
  version "3.2.0"
  resolved "https://registry.yarnpkg.com/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-3.2.0.tgz#63a98c9ad5f622cd9f830f70bc77739f25ccfe0d"
  resolved "https://mirrors.huaweicloud.com/repository/npm/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-3.2.0.tgz#63a98c9ad5f622cd9f830f70bc77739f25ccfe0d"
  integrity sha512-6uXyn6xdINEpxE1MtDjxQsyXB37lfyO2yKGVVgtD7WEWQGORSOZjgrD6hBhvGv4/SO+TOlS+UnC6JppRqbuwGQ==
  dependencies:
    eslint-rule-composer "^0.3.0"

eslint-rule-composer@^0.3.0:
  version "0.3.0"
  resolved "https://registry.yarnpkg.com/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz#79320c927b0c5c0d3d3d2b76c8b4a488f25bbaf9"
  resolved "https://mirrors.huaweicloud.com/repository/npm/eslint-rule-composer/-/eslint-rule-composer-0.3.0.tgz#79320c927b0c5c0d3d3d2b76c8b4a488f25bbaf9"
  integrity sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==

eslint-scope@5.1.1:
@@ -8156,7 +8156,7 @@ typed-array-length@^1.0.4:

typescript@5.2.2:
  version "5.2.2"
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
  resolved "https://mirrors.huaweicloud.com/repository/npm/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
  integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==

unbox-primitive@^1.0.2: