mirror of
				https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
				synced 2025-11-04 16:23:41 +08:00 
			
		
		
		
	@@ -11,6 +11,7 @@ import { handle as moonshotHandler } from "../../moonshot";
 | 
			
		||||
import { handle as stabilityHandler } from "../../stability";
 | 
			
		||||
import { handle as iflytekHandler } from "../../iflytek";
 | 
			
		||||
import { handle as xaiHandler } from "../../xai";
 | 
			
		||||
import { handle as chatglmHandler } from "../../glm";
 | 
			
		||||
import { handle as proxyHandler } from "../../proxy";
 | 
			
		||||
 | 
			
		||||
async function handle(
 | 
			
		||||
@@ -41,6 +42,8 @@ async function handle(
 | 
			
		||||
      return iflytekHandler(req, { params });
 | 
			
		||||
    case ApiPath.XAI:
 | 
			
		||||
      return xaiHandler(req, { params });
 | 
			
		||||
    case ApiPath.ChatGLM:
 | 
			
		||||
      return chatglmHandler(req, { params });
 | 
			
		||||
    case ApiPath.OpenAI:
 | 
			
		||||
      return openaiHandler(req, { params });
 | 
			
		||||
    default:
 | 
			
		||||
 
 | 
			
		||||
@@ -95,6 +95,9 @@ export function auth(req: NextRequest, modelProvider: ModelProvider) {
 | 
			
		||||
      case ModelProvider.XAI:
 | 
			
		||||
        systemApiKey = serverConfig.xaiApiKey;
 | 
			
		||||
        break;
 | 
			
		||||
      case ModelProvider.ChatGLM:
 | 
			
		||||
        systemApiKey = serverConfig.chatglmApiKey;
 | 
			
		||||
        break;
 | 
			
		||||
      case ModelProvider.GPT:
 | 
			
		||||
      default:
 | 
			
		||||
        if (req.nextUrl.pathname.includes("azure/deployments")) {
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										129
									
								
								app/api/glm.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										129
									
								
								app/api/glm.ts
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,129 @@
 | 
			
		||||
import { getServerSideConfig } from "@/app/config/server";
 | 
			
		||||
import {
 | 
			
		||||
  CHATGLM_BASE_URL,
 | 
			
		||||
  ApiPath,
 | 
			
		||||
  ModelProvider,
 | 
			
		||||
  ServiceProvider,
 | 
			
		||||
} from "@/app/constant";
 | 
			
		||||
import { prettyObject } from "@/app/utils/format";
 | 
			
		||||
import { NextRequest, NextResponse } from "next/server";
 | 
			
		||||
import { auth } from "@/app/api/auth";
 | 
			
		||||
import { isModelAvailableInServer } from "@/app/utils/model";
 | 
			
		||||
 | 
			
		||||
// Server-side config resolved once at module load; supplies the
// CHATGLM_URL / CHATGLM_API_KEY / CUSTOM_MODELS values used below.
const serverConfig = getServerSideConfig();
export async function handle(
 | 
			
		||||
  req: NextRequest,
 | 
			
		||||
  { params }: { params: { path: string[] } },
 | 
			
		||||
) {
 | 
			
		||||
  console.log("[GLM Route] params ", params);
 | 
			
		||||
 | 
			
		||||
  if (req.method === "OPTIONS") {
 | 
			
		||||
    return NextResponse.json({ body: "OK" }, { status: 200 });
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  const authResult = auth(req, ModelProvider.GLM);
 | 
			
		||||
  if (authResult.error) {
 | 
			
		||||
    return NextResponse.json(authResult, {
 | 
			
		||||
      status: 401,
 | 
			
		||||
    });
 | 
			
		||||
  }
 | 
			
		||||
 | 
			
		||||
  try {
 | 
			
		||||
    const response = await request(req);
 | 
			
		||||
    return response;
 | 
			
		||||
  } catch (e) {
 | 
			
		||||
    console.error("[GLM] ", e);
 | 
			
		||||
    return NextResponse.json(prettyObject(e));
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
/**
 * Forwards an incoming ChatGLM request to the upstream API.
 *
 * Rewrites the proxy path ("/api/chatglm/...") to the upstream path,
 * resolves the base URL from server config (falling back to
 * CHATGLM_BASE_URL), optionally rejects models disabled via
 * CUSTOM_MODELS, and streams the upstream response back to the caller.
 */
async function request(req: NextRequest) {
  const controller = new AbortController();

  // Strip the local proxy prefix so only the upstream path remains.
  let path = `${req.nextUrl.pathname}`.replaceAll(ApiPath.ChatGLM, "");

  let baseUrl = serverConfig.chatglmUrl || CHATGLM_BASE_URL;

  // Normalize the configured URL: ensure a scheme, drop a trailing slash.
  if (!baseUrl.startsWith("http")) {
    baseUrl = `https://${baseUrl}`;
  }

  if (baseUrl.endsWith("/")) {
    baseUrl = baseUrl.slice(0, -1);
  }

  console.log("[Proxy] ", path);
  console.log("[Base Url]", baseUrl);

  // Hard cap on the whole upstream exchange: abort after 10 minutes.
  const timeoutId = setTimeout(
    () => {
      controller.abort();
    },
    10 * 60 * 1000,
  );

  const fetchUrl = `${baseUrl}${path}`;
  console.log("[Fetch Url] ", fetchUrl);
  const fetchOptions: RequestInit = {
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    method: req.method,
    // Pass the request body through as a stream by default; may be replaced
    // with the buffered text below when model filtering is enabled.
    body: req.body,
    redirect: "manual",
    // "duplex: half" is required by undici/Node fetch when sending a
    // streamed request body; not yet in the RequestInit type, hence the ignore.
    // @ts-ignore
    duplex: "half",
    signal: controller.signal,
  };

  // #1815 try to refuse some request to some models
  if (serverConfig.customModels && req.body) {
    try {
      // Buffer the body (consumes the stream), then substitute the buffered
      // text for the stream set above so the upstream fetch still has a body.
      const clonedBody = await req.text();
      fetchOptions.body = clonedBody;

      const jsonBody = JSON.parse(clonedBody) as { model?: string };

      // isModelAvailableInServer returns true when the model is explicitly
      // disabled for this provider ("not undefined and is false").
      if (
        isModelAvailableInServer(
          serverConfig.customModels,
          jsonBody?.model as string,
          ServiceProvider.ChatGLM as string,
        )
      ) {
        return NextResponse.json(
          {
            error: true,
            message: `you are not allowed to use ${jsonBody?.model} model`,
          },
          {
            status: 403,
          },
        );
      }
    } catch (e) {
      // Best-effort filter: a malformed body falls through to the upstream.
      console.error(`[GLM] filter`, e);
    }
  }
  try {
    const res = await fetch(fetchUrl, fetchOptions);

    // to prevent browser prompt for credentials
    const newHeaders = new Headers(res.headers);
    newHeaders.delete("www-authenticate");
    // to disable nginx buffering
    newHeaders.set("X-Accel-Buffering", "no");

    // Stream the upstream body straight through with adjusted headers.
    return new Response(res.body, {
      status: res.status,
      statusText: res.statusText,
      headers: newHeaders,
    });
  } finally {
    // Always cancel the abort timer, whether the fetch succeeded or threw.
    clearTimeout(timeoutId);
  }
}
 | 
			
		||||
@@ -21,6 +21,7 @@ import { HunyuanApi } from "./platforms/tencent";
 | 
			
		||||
import { MoonshotApi } from "./platforms/moonshot";
 | 
			
		||||
import { SparkApi } from "./platforms/iflytek";
 | 
			
		||||
import { XAIApi } from "./platforms/xai";
 | 
			
		||||
import { ChatGLMApi } from "./platforms/glm";
 | 
			
		||||
 | 
			
		||||
export const ROLES = ["system", "user", "assistant"] as const;
 | 
			
		||||
export type MessageRole = (typeof ROLES)[number];
 | 
			
		||||
@@ -156,6 +157,9 @@ export class ClientApi {
 | 
			
		||||
      case ModelProvider.XAI:
 | 
			
		||||
        this.llm = new XAIApi();
 | 
			
		||||
        break;
 | 
			
		||||
      case ModelProvider.ChatGLM:
 | 
			
		||||
        this.llm = new ChatGLMApi();
 | 
			
		||||
        break;
 | 
			
		||||
      default:
 | 
			
		||||
        this.llm = new ChatGPTApi();
 | 
			
		||||
    }
 | 
			
		||||
@@ -244,6 +248,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
 | 
			
		||||
    const isMoonshot = modelConfig.providerName === ServiceProvider.Moonshot;
 | 
			
		||||
    const isIflytek = modelConfig.providerName === ServiceProvider.Iflytek;
 | 
			
		||||
    const isXAI = modelConfig.providerName === ServiceProvider.XAI;
 | 
			
		||||
    const isChatGLM = modelConfig.providerName === ServiceProvider.ChatGLM;
 | 
			
		||||
    const isEnabledAccessControl = accessStore.enabledAccessControl();
 | 
			
		||||
    const apiKey = isGoogle
 | 
			
		||||
      ? accessStore.googleApiKey
 | 
			
		||||
@@ -259,6 +264,8 @@ export function getHeaders(ignoreHeaders: boolean = false) {
 | 
			
		||||
      ? accessStore.moonshotApiKey
 | 
			
		||||
      : isXAI
 | 
			
		||||
      ? accessStore.xaiApiKey
 | 
			
		||||
      : isChatGLM
 | 
			
		||||
      ? accessStore.chatglmApiKey
 | 
			
		||||
      : isIflytek
 | 
			
		||||
      ? accessStore.iflytekApiKey && accessStore.iflytekApiSecret
 | 
			
		||||
        ? accessStore.iflytekApiKey + ":" + accessStore.iflytekApiSecret
 | 
			
		||||
@@ -274,6 +281,7 @@ export function getHeaders(ignoreHeaders: boolean = false) {
 | 
			
		||||
      isMoonshot,
 | 
			
		||||
      isIflytek,
 | 
			
		||||
      isXAI,
 | 
			
		||||
      isChatGLM,
 | 
			
		||||
      apiKey,
 | 
			
		||||
      isEnabledAccessControl,
 | 
			
		||||
    };
 | 
			
		||||
@@ -338,6 +346,8 @@ export function getClientApi(provider: ServiceProvider): ClientApi {
 | 
			
		||||
      return new ClientApi(ModelProvider.Iflytek);
 | 
			
		||||
    case ServiceProvider.XAI:
 | 
			
		||||
      return new ClientApi(ModelProvider.XAI);
 | 
			
		||||
    case ServiceProvider.ChatGLM:
 | 
			
		||||
      return new ClientApi(ModelProvider.ChatGLM);
 | 
			
		||||
    default:
 | 
			
		||||
      return new ClientApi(ModelProvider.GPT);
 | 
			
		||||
  }
 | 
			
		||||
 
 | 
			
		||||
							
								
								
									
										197
									
								
								app/client/platforms/glm.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										197
									
								
								app/client/platforms/glm.ts
									
									
									
									
									
										Normal file
									
								
							@@ -0,0 +1,197 @@
 | 
			
		||||
"use client";
 | 
			
		||||
import {
 | 
			
		||||
  ApiPath,
 | 
			
		||||
  CHATGLM_BASE_URL,
 | 
			
		||||
  ChatGLM,
 | 
			
		||||
  REQUEST_TIMEOUT_MS,
 | 
			
		||||
} from "@/app/constant";
 | 
			
		||||
import {
 | 
			
		||||
  useAccessStore,
 | 
			
		||||
  useAppConfig,
 | 
			
		||||
  useChatStore,
 | 
			
		||||
  ChatMessageTool,
 | 
			
		||||
  usePluginStore,
 | 
			
		||||
} from "@/app/store";
 | 
			
		||||
import { stream } from "@/app/utils/chat";
 | 
			
		||||
import {
 | 
			
		||||
  ChatOptions,
 | 
			
		||||
  getHeaders,
 | 
			
		||||
  LLMApi,
 | 
			
		||||
  LLMModel,
 | 
			
		||||
  SpeechOptions,
 | 
			
		||||
} from "../api";
 | 
			
		||||
import { getClientConfig } from "@/app/config/client";
 | 
			
		||||
import { getMessageTextContent } from "@/app/utils";
 | 
			
		||||
import { RequestPayload } from "./openai";
 | 
			
		||||
import { fetch } from "@/app/utils/stream";
 | 
			
		||||
 | 
			
		||||
/**
 * Client-side LLM adapter for ChatGLM (Zhipu AI), implementing the shared
 * LLMApi contract. Chat requests go through the app's proxy route
 * (ApiPath.ChatGLM) unless a custom endpoint or the desktop app build
 * resolves a direct base URL.
 */
export class ChatGLMApi implements LLMApi {
  // Model listing is disabled; models() returns an empty list below.
  private disableListModels = true;

  /**
   * Resolves the full request URL for `path`.
   * Priority: user-configured custom URL → direct base URL in the app build →
   * the in-app proxy path. Trailing slash is trimmed and a scheme is added
   * for bare hosts.
   */
  path(path: string): string {
    const accessStore = useAccessStore.getState();

    let baseUrl = "";

    if (accessStore.useCustomConfig) {
      baseUrl = accessStore.chatglmUrl;
    }

    if (baseUrl.length === 0) {
      const isApp = !!getClientConfig()?.isApp;
      const apiPath = ApiPath.ChatGLM;
      baseUrl = isApp ? CHATGLM_BASE_URL : apiPath;
    }

    if (baseUrl.endsWith("/")) {
      baseUrl = baseUrl.slice(0, baseUrl.length - 1);
    }
    if (!baseUrl.startsWith("http") && !baseUrl.startsWith(ApiPath.ChatGLM)) {
      baseUrl = "https://" + baseUrl;
    }

    console.log("[Proxy Endpoint] ", baseUrl, path);

    // NOTE(review): ChatGLM.ChatPath appears to start with "/" in constants,
    // so join("/") would yield a "//" in the URL — confirm against the
    // constants file / upstream API.
    return [baseUrl, path].join("/");
  }

  // Pulls the assistant text out of a non-streaming completion response.
  extractMessage(res: any) {
    return res.choices?.at(0)?.message?.content ?? "";
  }

  // Text-to-speech is not supported by this provider adapter.
  speech(options: SpeechOptions): Promise<ArrayBuffer> {
    throw new Error("Method not implemented.");
  }

  /**
   * Sends a chat completion request.
   * Builds an OpenAI-style payload from the session/mask model config,
   * then either streams the response (with plugin tool-call support) or
   * performs a single fetch and reports the result via options.onFinish.
   * Errors are routed to options.onError rather than thrown.
   */
  async chat(options: ChatOptions) {
    // Flatten multimodal message content to plain text for this provider.
    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = getMessageTextContent(v);
      messages.push({ role: v.role, content });
    }

    // Effective config: app defaults overlaid by the session mask, then the
    // per-call model/provider choice.
    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
      ...{
        model: options.config.model,
        providerName: options.config.providerName,
      },
    };

    const requestPayload: RequestPayload = {
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,
      presence_penalty: modelConfig.presence_penalty,
      frequency_penalty: modelConfig.frequency_penalty,
      top_p: modelConfig.top_p,
    };

    console.log("[Request] glm payload: ", requestPayload);

    const shouldStream = !!options.config.stream;
    const controller = new AbortController();
    // Expose the controller so the UI can cancel an in-flight request.
    options.onController?.(controller);

    try {
      const chatPath = this.path(ChatGLM.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
        signal: controller.signal,
        headers: getHeaders(),
      };

      // make a fetch request
      const requestTimeoutId = setTimeout(
        () => controller.abort(),
        REQUEST_TIMEOUT_MS,
      );

      if (shouldStream) {
        // Collect plugin tool definitions and their runtime implementations
        // for the active session mask.
        const [tools, funcs] = usePluginStore
          .getState()
          .getAsTools(
            useChatStore.getState().currentSession().mask?.plugin || [],
          );
        return stream(
          chatPath,
          requestPayload,
          getHeaders(),
          tools as any,
          funcs,
          controller,
          // parseSSE: extract delta text and accumulate streamed tool calls.
          (text: string, runTools: ChatMessageTool[]) => {
            // console.log("parseSSE", text, runTools);
            const json = JSON.parse(text);
            const choices = json.choices as Array<{
              delta: {
                content: string;
                tool_calls: ChatMessageTool[];
              };
            }>;
            const tool_calls = choices[0]?.delta?.tool_calls;
            if (tool_calls?.length > 0) {
              const index = tool_calls[0]?.index;
              const id = tool_calls[0]?.id;
              const args = tool_calls[0]?.function?.arguments;
              if (id) {
                // First chunk of a tool call carries the id — start a record.
                runTools.push({
                  id,
                  type: tool_calls[0]?.type,
                  function: {
                    name: tool_calls[0]?.function?.name as string,
                    arguments: args,
                  },
                });
              } else {
                // Subsequent chunks only append to the arguments string.
                // @ts-ignore
                runTools[index]["function"]["arguments"] += args;
              }
            }
            return choices[0]?.delta?.content;
          },
          // processToolMessage, include tool_calls message and tool call results
          (
            requestPayload: RequestPayload,
            toolCallMessage: any,
            toolCallResult: any[],
          ) => {
            // Append the assistant tool-call message and its results so the
            // follow-up request carries the full tool exchange.
            // @ts-ignore
            requestPayload?.messages?.splice(
              // @ts-ignore
              requestPayload?.messages?.length,
              0,
              toolCallMessage,
              ...toolCallResult,
            );
          },
          options,
        );
      } else {
        const res = await fetch(chatPath, chatPayload);
        clearTimeout(requestTimeoutId);

        const resJson = await res.json();
        const message = this.extractMessage(resJson);
        options.onFinish(message);
      }
    } catch (e) {
      console.log("[Request] failed to make a chat request", e);
      options.onError?.(e as Error);
    }
  }
  // Usage reporting is not implemented for this provider.
  async usage() {
    return {
      used: 0,
      total: 0,
    };
  }

  // Model listing disabled (see disableListModels); the app uses the
  // static chatglmModels list from constants instead.
  async models(): Promise<LLMModel[]> {
    return [];
  }
}
 | 
			
		||||
@@ -72,6 +72,7 @@ import {
 | 
			
		||||
  Stability,
 | 
			
		||||
  Iflytek,
 | 
			
		||||
  SAAS_CHAT_URL,
 | 
			
		||||
  ChatGLM,
 | 
			
		||||
} from "../constant";
 | 
			
		||||
import { Prompt, SearchService, usePromptStore } from "../store/prompt";
 | 
			
		||||
import { ErrorBoundary } from "./error";
 | 
			
		||||
@@ -1234,6 +1235,47 @@ export function Settings() {
 | 
			
		||||
    </>
 | 
			
		||||
  );
 | 
			
		||||
 | 
			
		||||
  const chatglmConfigComponent = accessStore.provider ===
 | 
			
		||||
    ServiceProvider.ChatGLM && (
 | 
			
		||||
    <>
 | 
			
		||||
      <ListItem
 | 
			
		||||
        title={Locale.Settings.Access.ChatGLM.Endpoint.Title}
 | 
			
		||||
        subTitle={
 | 
			
		||||
          Locale.Settings.Access.ChatGLM.Endpoint.SubTitle +
 | 
			
		||||
          ChatGLM.ExampleEndpoint
 | 
			
		||||
        }
 | 
			
		||||
      >
 | 
			
		||||
        <input
 | 
			
		||||
          aria-label={Locale.Settings.Access.ChatGLM.Endpoint.Title}
 | 
			
		||||
          type="text"
 | 
			
		||||
          value={accessStore.chatglmUrl}
 | 
			
		||||
          placeholder={ChatGLM.ExampleEndpoint}
 | 
			
		||||
          onChange={(e) =>
 | 
			
		||||
            accessStore.update(
 | 
			
		||||
              (access) => (access.chatglmUrl = e.currentTarget.value),
 | 
			
		||||
            )
 | 
			
		||||
          }
 | 
			
		||||
        ></input>
 | 
			
		||||
      </ListItem>
 | 
			
		||||
      <ListItem
 | 
			
		||||
        title={Locale.Settings.Access.ChatGLM.ApiKey.Title}
 | 
			
		||||
        subTitle={Locale.Settings.Access.ChatGLM.ApiKey.SubTitle}
 | 
			
		||||
      >
 | 
			
		||||
        <PasswordInput
 | 
			
		||||
          aria-label={Locale.Settings.Access.ChatGLM.ApiKey.Title}
 | 
			
		||||
          value={accessStore.chatglmApiKey}
 | 
			
		||||
          type="text"
 | 
			
		||||
          placeholder={Locale.Settings.Access.ChatGLM.ApiKey.Placeholder}
 | 
			
		||||
          onChange={(e) => {
 | 
			
		||||
            accessStore.update(
 | 
			
		||||
              (access) => (access.chatglmApiKey = e.currentTarget.value),
 | 
			
		||||
            );
 | 
			
		||||
          }}
 | 
			
		||||
        />
 | 
			
		||||
      </ListItem>
 | 
			
		||||
    </>
 | 
			
		||||
  );
 | 
			
		||||
 | 
			
		||||
  const stabilityConfigComponent = accessStore.provider ===
 | 
			
		||||
    ServiceProvider.Stability && (
 | 
			
		||||
    <>
 | 
			
		||||
@@ -1693,6 +1735,7 @@ export function Settings() {
 | 
			
		||||
                  {stabilityConfigComponent}
 | 
			
		||||
                  {lflytekConfigComponent}
 | 
			
		||||
                  {XAIConfigComponent}
 | 
			
		||||
                  {chatglmConfigComponent}
 | 
			
		||||
                </>
 | 
			
		||||
              )}
 | 
			
		||||
            </>
 | 
			
		||||
 
 | 
			
		||||
@@ -75,6 +75,10 @@ declare global {
 | 
			
		||||
      XAI_URL?: string;
 | 
			
		||||
      XAI_API_KEY?: string;
 | 
			
		||||
 | 
			
		||||
      // chatglm only
 | 
			
		||||
      CHATGLM_URL?: string;
 | 
			
		||||
      CHATGLM_API_KEY?: string;
 | 
			
		||||
 | 
			
		||||
      // custom template for preprocessing user input
 | 
			
		||||
      DEFAULT_INPUT_TEMPLATE?: string;
 | 
			
		||||
    }
 | 
			
		||||
@@ -151,6 +155,7 @@ export const getServerSideConfig = () => {
 | 
			
		||||
  const isMoonshot = !!process.env.MOONSHOT_API_KEY;
 | 
			
		||||
  const isIflytek = !!process.env.IFLYTEK_API_KEY;
 | 
			
		||||
  const isXAI = !!process.env.XAI_API_KEY;
 | 
			
		||||
  const isChatGLM = !!process.env.CHATGLM_API_KEY;
 | 
			
		||||
  // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
 | 
			
		||||
  // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
 | 
			
		||||
  // const randomIndex = Math.floor(Math.random() * apiKeys.length);
 | 
			
		||||
@@ -217,6 +222,10 @@ export const getServerSideConfig = () => {
 | 
			
		||||
    xaiUrl: process.env.XAI_URL,
 | 
			
		||||
    xaiApiKey: getApiKey(process.env.XAI_API_KEY),
 | 
			
		||||
 | 
			
		||||
    isChatGLM,
 | 
			
		||||
    chatglmUrl: process.env.CHATGLM_URL,
 | 
			
		||||
    chatglmApiKey: getApiKey(process.env.CHATGLM_API_KEY),
 | 
			
		||||
 | 
			
		||||
    cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID,
 | 
			
		||||
    cloudflareKVNamespaceId: process.env.CLOUDFLARE_KV_NAMESPACE_ID,
 | 
			
		||||
    cloudflareKVApiKey: getApiKey(process.env.CLOUDFLARE_KV_API_KEY),
 | 
			
		||||
 
 | 
			
		||||
@@ -30,6 +30,8 @@ export const IFLYTEK_BASE_URL = "https://spark-api-open.xf-yun.com";
 | 
			
		||||
 | 
			
		||||
export const XAI_BASE_URL = "https://api.x.ai";
 | 
			
		||||
 | 
			
		||||
export const CHATGLM_BASE_URL = "https://open.bigmodel.cn";
 | 
			
		||||
 | 
			
		||||
export const CACHE_URL_PREFIX = "/api/cache";
 | 
			
		||||
export const UPLOAD_URL = `${CACHE_URL_PREFIX}/upload`;
 | 
			
		||||
 | 
			
		||||
@@ -62,6 +64,7 @@ export enum ApiPath {
 | 
			
		||||
  Stability = "/api/stability",
 | 
			
		||||
  Artifacts = "/api/artifacts",
 | 
			
		||||
  XAI = "/api/xai",
 | 
			
		||||
  ChatGLM = "/api/chatglm",
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export enum SlotID {
 | 
			
		||||
@@ -115,6 +118,7 @@ export enum ServiceProvider {
 | 
			
		||||
  Stability = "Stability",
 | 
			
		||||
  Iflytek = "Iflytek",
 | 
			
		||||
  XAI = "XAI",
 | 
			
		||||
  ChatGLM = "ChatGLM",
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
// Google API safety settings, see https://ai.google.dev/gemini-api/docs/safety-settings
 | 
			
		||||
@@ -138,6 +142,7 @@ export enum ModelProvider {
 | 
			
		||||
  Moonshot = "Moonshot",
 | 
			
		||||
  Iflytek = "Iflytek",
 | 
			
		||||
  XAI = "XAI",
 | 
			
		||||
  ChatGLM = "ChatGLM",
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
export const Stability = {
 | 
			
		||||
@@ -225,6 +230,11 @@ export const XAI = {
 | 
			
		||||
  ChatPath: "v1/chat/completions",
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
export const ChatGLM = {
 | 
			
		||||
  ExampleEndpoint: CHATGLM_BASE_URL,
 | 
			
		||||
  ChatPath: "/api/paas/v4/chat/completions",
 | 
			
		||||
};
 | 
			
		||||
 | 
			
		||||
export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
 | 
			
		||||
// export const DEFAULT_SYSTEM_TEMPLATE = `
 | 
			
		||||
// You are ChatGPT, a large language model trained by {{ServiceProvider}}.
 | 
			
		||||
@@ -379,6 +389,17 @@ const iflytekModels = [
 | 
			
		||||
 | 
			
		||||
const xAIModes = ["grok-beta"];
 | 
			
		||||
 | 
			
		||||
const chatglmModels = [
 | 
			
		||||
  "glm-4-plus",
 | 
			
		||||
  "glm-4-0520",
 | 
			
		||||
  "glm-4",
 | 
			
		||||
  "glm-4-air",
 | 
			
		||||
  "glm-4-airx",
 | 
			
		||||
  "glm-4-long",
 | 
			
		||||
  "glm-4-flashx",
 | 
			
		||||
  "glm-4-flash",
 | 
			
		||||
];
 | 
			
		||||
 | 
			
		||||
let seq = 1000; // 内置的模型序号生成器从1000开始
 | 
			
		||||
export const DEFAULT_MODELS = [
 | 
			
		||||
  ...openaiModels.map((name) => ({
 | 
			
		||||
@@ -502,6 +523,17 @@ export const DEFAULT_MODELS = [
 | 
			
		||||
      sorted: 11,
 | 
			
		||||
    },
 | 
			
		||||
  })),
 | 
			
		||||
  ...chatglmModels.map((name) => ({
 | 
			
		||||
    name,
 | 
			
		||||
    available: true,
 | 
			
		||||
    sorted: seq++,
 | 
			
		||||
    provider: {
 | 
			
		||||
      id: "chatglm",
 | 
			
		||||
      providerName: "ChatGLM",
 | 
			
		||||
      providerType: "chatglm",
 | 
			
		||||
      sorted: 12,
 | 
			
		||||
    },
 | 
			
		||||
  })),
 | 
			
		||||
] as const;
 | 
			
		||||
 | 
			
		||||
export const CHAT_PAGE_SIZE = 15;
 | 
			
		||||
 
 | 
			
		||||
@@ -473,6 +473,17 @@ const cn = {
 | 
			
		||||
          SubTitle: "样例:",
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      ChatGLM: {
 | 
			
		||||
        ApiKey: {
 | 
			
		||||
          Title: "接口密钥",
 | 
			
		||||
          SubTitle: "使用自定义 ChatGLM API Key",
 | 
			
		||||
          Placeholder: "ChatGLM API Key",
 | 
			
		||||
        },
 | 
			
		||||
        Endpoint: {
 | 
			
		||||
          Title: "接口地址",
 | 
			
		||||
          SubTitle: "样例:",
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      Stability: {
 | 
			
		||||
        ApiKey: {
 | 
			
		||||
          Title: "接口密钥",
 | 
			
		||||
 
 | 
			
		||||
@@ -457,6 +457,17 @@ const en: LocaleType = {
 | 
			
		||||
          SubTitle: "Example: ",
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      ChatGLM: {
 | 
			
		||||
        ApiKey: {
 | 
			
		||||
          Title: "ChatGLM API Key",
 | 
			
		||||
          SubTitle: "Use a custom ChatGLM API Key",
 | 
			
		||||
          Placeholder: "ChatGLM API Key",
 | 
			
		||||
        },
 | 
			
		||||
        Endpoint: {
 | 
			
		||||
          Title: "Endpoint Address",
 | 
			
		||||
          SubTitle: "Example: ",
 | 
			
		||||
        },
 | 
			
		||||
      },
 | 
			
		||||
      Stability: {
 | 
			
		||||
        ApiKey: {
 | 
			
		||||
          Title: "Stability API Key",
 | 
			
		||||
 
 | 
			
		||||
@@ -14,6 +14,7 @@ import {
 | 
			
		||||
  STABILITY_BASE_URL,
 | 
			
		||||
  IFLYTEK_BASE_URL,
 | 
			
		||||
  XAI_BASE_URL,
 | 
			
		||||
  CHATGLM_BASE_URL,
 | 
			
		||||
} from "../constant";
 | 
			
		||||
import { getHeaders } from "../client/api";
 | 
			
		||||
import { getClientConfig } from "../config/client";
 | 
			
		||||
@@ -47,6 +48,8 @@ const DEFAULT_IFLYTEK_URL = isApp ? IFLYTEK_BASE_URL : ApiPath.Iflytek;
 | 
			
		||||
 | 
			
		||||
const DEFAULT_XAI_URL = isApp ? XAI_BASE_URL : ApiPath.XAI;
 | 
			
		||||
 | 
			
		||||
const DEFAULT_CHATGLM_URL = isApp ? CHATGLM_BASE_URL : ApiPath.ChatGLM;
 | 
			
		||||
 | 
			
		||||
const DEFAULT_ACCESS_STATE = {
 | 
			
		||||
  accessCode: "",
 | 
			
		||||
  useCustomConfig: false,
 | 
			
		||||
@@ -108,6 +111,10 @@ const DEFAULT_ACCESS_STATE = {
 | 
			
		||||
  xaiUrl: DEFAULT_XAI_URL,
 | 
			
		||||
  xaiApiKey: "",
 | 
			
		||||
 | 
			
		||||
  // chatglm
 | 
			
		||||
  chatglmUrl: DEFAULT_CHATGLM_URL,
 | 
			
		||||
  chatglmApiKey: "",
 | 
			
		||||
 | 
			
		||||
  // server config
 | 
			
		||||
  needCode: true,
 | 
			
		||||
  hideUserApiKey: false,
 | 
			
		||||
@@ -180,6 +187,10 @@ export const useAccessStore = createPersistStore(
 | 
			
		||||
      return ensure(get(), ["xaiApiKey"]);
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    isValidChatGLM() {
 | 
			
		||||
      return ensure(get(), ["chatglmApiKey"]);
 | 
			
		||||
    },
 | 
			
		||||
 | 
			
		||||
    isAuthorized() {
 | 
			
		||||
      this.fetch();
 | 
			
		||||
 | 
			
		||||
@@ -196,6 +207,7 @@ export const useAccessStore = createPersistStore(
 | 
			
		||||
        this.isValidMoonshot() ||
 | 
			
		||||
        this.isValidIflytek() ||
 | 
			
		||||
        this.isValidXAI() ||
 | 
			
		||||
        this.isValidChatGLM() ||
 | 
			
		||||
        !this.enabledAccessControl() ||
 | 
			
		||||
        (this.enabledAccessControl() && ensure(get(), ["accessCode"]))
 | 
			
		||||
      );
 | 
			
		||||
 
 | 
			
		||||
@@ -278,7 +278,8 @@ export function showPlugins(provider: ServiceProvider, model: string) {
 | 
			
		||||
  if (
 | 
			
		||||
    provider == ServiceProvider.OpenAI ||
 | 
			
		||||
    provider == ServiceProvider.Azure ||
 | 
			
		||||
    provider == ServiceProvider.Moonshot
 | 
			
		||||
    provider == ServiceProvider.Moonshot ||
 | 
			
		||||
    provider == ServiceProvider.ChatGLM
 | 
			
		||||
  ) {
 | 
			
		||||
    return true;
 | 
			
		||||
  }
 | 
			
		||||
 
 | 
			
		||||
		Reference in New Issue
	
	Block a user