Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-11-04 16:23:41 +08:00)

	feat: #2330 disable /list/models

app/client/platforms/openai.ts

@@ -1,5 +1,6 @@
 import {
   DEFAULT_API_HOST,
+  DEFAULT_MODELS,
   OpenaiPath,
   REQUEST_TIMEOUT_MS,
 } from "@/app/constant";
@@ -23,6 +24,8 @@ export interface OpenAIListModelResponse {
 }
 
 export class ChatGPTApi implements LLMApi {
+  private disableListModels = true;
+
   path(path: string): string {
     let openaiUrl = useAccessStore.getState().openaiUrl;
     if (openaiUrl.length === 0) {
@@ -246,6 +249,10 @@ export class ChatGPTApi implements LLMApi {
   }
 
   async models(): Promise<LLMModel[]> {
+    if (this.disableListModels) {
+      return DEFAULT_MODELS.slice();
+    }
+
     const res = await fetch(this.path(OpenaiPath.ListModelPath), {
       method: "GET",
       headers: {
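
The three hunks above add a hard-wired disableListModels switch to the OpenAI client: when it is set, models() skips the request to OpenaiPath.ListModelPath entirely and returns a copy of the built-in DEFAULT_MODELS list. A minimal sketch of that short-circuit, using simplified stand-ins for the app's LLMModel type and model list:

// Sketch only: LLMModel and DEFAULT_MODELS are simplified stand-ins for the
// app's real definitions in @/app/constant.
interface LLMModel {
  name: string;
  available: boolean;
}

const DEFAULT_MODELS: LLMModel[] = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
];

class OpenAIClientSketch {
  // Hard-wired switch: when true, never query the remote model list.
  private disableListModels = true;

  async models(): Promise<LLMModel[]> {
    if (this.disableListModels) {
      // slice() hands back a shallow copy so callers cannot mutate the shared list.
      return DEFAULT_MODELS.slice();
    }
    // Otherwise the real client fetches OpenaiPath.ListModelPath (omitted here).
    return [];
  }
}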

app/components/home.tsx

@@ -28,6 +28,7 @@ import { useAppConfig } from "../store/config";
 import { AuthPage } from "./auth";
 import { getClientConfig } from "../config/client";
 import { api } from "../client/api";
+import { useAccessStore } from "../store";
 
 export function Loading(props: { noLogo?: boolean }) {
   return (
@@ -171,6 +172,7 @@ export function Home() {
 
   useEffect(() => {
     console.log("[Config] got config from build time", getClientConfig());
+    useAccessStore.getState().fetch();
   }, []);
 
   if (!useHasHydrated()) {
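
The change above makes the app fetch the server-side config once when the Home component mounts, so server-controlled flags (such as the disableGPT4 flag introduced in this commit) take effect on startup instead of waiting for another page to trigger the fetch. A rough sketch of that fetch-once-on-mount pattern, with a plain function standing in for useAccessStore.getState().fetch(); the endpoint and request shape below are assumptions, not the store's exact implementation:

import { useEffect } from "react";

// Stand-in for useAccessStore.getState().fetch(); the real store posts to its
// own config route and merges the response into persisted state.
async function fetchServerConfig(): Promise<void> {
  const res = await fetch("/api/config", { method: "POST" }); // assumed route
  console.log("[Config] got config from server", await res.json());
}

export function HomeSketch() {
  useEffect(() => {
    // Empty dependency array: runs exactly once, right after the first render.
    void fetchServerConfig();
  }, []);
  return null;
}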

app/constant.ts

@@ -108,24 +108,4 @@ export const DEFAULT_MODELS = [
     name: "gpt-3.5-turbo-16k-0613",
     available: true,
   },
-  {
-    name: "qwen-v1", // 通义千问
-    available: false,
-  },
-  {
-    name: "ernie", // 文心一言
-    available: false,
-  },
-  {
-    name: "spark", // 讯飞星火
-    available: false,
-  },
-  {
-    name: "llama", // llama
-    available: false,
-  },
-  {
-    name: "chatglm", // chatglm-6b
-    available: false,
-  },
 ] as const;

app/store/access.ts

@@ -1,6 +1,6 @@
 import { create } from "zustand";
 import { persist } from "zustand/middleware";
-import { DEFAULT_API_HOST, StoreKey } from "../constant";
+import { DEFAULT_API_HOST, DEFAULT_MODELS, StoreKey } from "../constant";
 import { getHeaders } from "../client/api";
 import { BOT_HELLO } from "./chat";
 import { getClientConfig } from "../config/client";
@@ -11,8 +11,10 @@ export interface AccessControlStore {
 
   needCode: boolean;
   hideUserApiKey: boolean;
-  openaiUrl: string;
   hideBalanceQuery: boolean;
+  disableGPT4: boolean;
+
+  openaiUrl: string;
 
   updateToken: (_: string) => void;
   updateCode: (_: string) => void;
@@ -35,8 +37,10 @@ export const useAccessStore = create<AccessControlStore>()(
       accessCode: "",
       needCode: true,
       hideUserApiKey: false,
-      openaiUrl: DEFAULT_OPENAI_URL,
      hideBalanceQuery: false,
+      disableGPT4: false,
+
+      openaiUrl: DEFAULT_OPENAI_URL,
 
      enabledAccessControl() {
        get().fetch();
@@ -75,8 +79,10 @@ export const useAccessStore = create<AccessControlStore>()(
            console.log("[Config] got config from server", res);
            set(() => ({ ...res }));
 
-            if ((res as any).botHello) {
-              BOT_HELLO.content = (res as any).botHello;
+            if (res.disableGPT4) {
+              DEFAULT_MODELS.forEach(
+                (m: any) => (m.available = !m.name.startsWith("gpt-4")),
+              );
            }
          })
          .catch(() => {
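
The access store now tracks a disableGPT4 flag alongside its other server-controlled options, and its fetch() handler uses the flag to mark every model whose name starts with "gpt-4" as unavailable by mutating the shared model list in place. A small self-contained sketch of that post-fetch handling, with an illustrative model list rather than the real DEFAULT_MODELS:

// Illustrative model list; the real app mutates DEFAULT_MODELS from app/constant.ts.
interface ModelEntry {
  name: string;
  available: boolean;
}

const models: ModelEntry[] = [
  { name: "gpt-3.5-turbo", available: true },
  { name: "gpt-4", available: true },
  { name: "gpt-4-32k", available: true },
];

// Mirrors the forEach in the diff: when the server config sets disableGPT4,
// every gpt-4 family entry becomes unavailable and everything else is marked available.
function applyServerConfig(config: { disableGPT4?: boolean }): void {
  if (config.disableGPT4) {
    models.forEach((m) => (m.available = !m.name.startsWith("gpt-4")));
  }
}

applyServerConfig({ disableGPT4: true });
console.log(models.filter((m) => m.available).map((m) => m.name)); // ["gpt-3.5-turbo"]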