Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-10-31 22:33:45 +08:00)
Compare commits: Leizhenpen ... Leizhenpen (1 commit)
| Author | SHA1 | Date |
|---|---|---|
| Leizhenpen | b95b1ac6f3 | |

Changed files: README.md (21 changed lines)
README.md
@@ -7,7 +7,7 @@



<h1 align="center">NextChat</h1>
<h1 align="center">NextChat (ChatGPT Next Web)</h1>

English / [简体中文](./README_CN.md)

@@ -22,6 +22,7 @@ English / [简体中文](./README_CN.md)
[![MacOS][MacOS-image]][download-url]
[![Linux][Linux-image]][download-url]

[NextChatAI](https://nextchat.dev/chat?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases)
[NextChatAI](https://nextchat.club?utm_source=readme) / [Web App Demo](https://app.nextchat.dev) / [Desktop App](https://github.com/Yidadaa/ChatGPT-Next-Web/releases) / [Discord](https://discord.gg/YCkeafCafC) / [Enterprise Edition](#enterprise-edition) / [Twitter](https://twitter.com/NextChatDev)


@@ -40,6 +41,24 @@ English / [简体中文](./README_CN.md)

</div>

## 👋 Hey, NextChat is going to develop a native app!

> This week we are going to start working on the iOS and Android apps, and we want to find some reliable friends to do it together!

✨ Several key points:

- Starting from 0, you are a veteran
- Completely open source, nothing hidden
- Native development, pursuing the ultimate experience

Will you come and do something together? 😎

https://github.com/ChatGPTNextWeb/NextChat/issues/6269

#Thirsty for talent #Short of hands


## 🥳 Cheer for DeepSeek, China's AI star!
> Purpose-Built UI for DeepSeek Reasoner Model

app/client/api.ts
@@ -40,11 +40,6 @@ export interface MultimodalContent {
  };
}

export interface MultimodalContentForAlibaba {
  text?: string;
  image?: string;
}

export interface RequestMessage {
  role: MessageRole;
  content: string | MultimodalContent[];

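For orientation: the interface removed here existed because Alibaba's DashScope multimodal endpoint expects message parts shaped as `{ text }` / `{ image }`, while the shared `MultimodalContent` type models the OpenAI-style `{ type: "image_url", image_url: { url } }` part. A minimal sketch of the mapping between the two shapes (the `toDashScope` helper is hypothetical, not code from this repo):

```ts
// OpenAI-style content part, as modeled by MultimodalContent.
type OpenAIPart =
  | { type: "text"; text: string }
  | { type: "image_url"; image_url: { url: string } };

// DashScope-style content part, as modeled by the removed
// MultimodalContentForAlibaba interface.
type DashScopePart = { text?: string; image?: string };

// Hypothetical converter illustrating the mapping between the two shapes.
function toDashScope(parts: OpenAIPart[]): DashScopePart[] {
  return parts.map((p) =>
    p.type === "text" ? { text: p.text } : { image: p.image_url.url },
  );
}
```
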
app/client/platforms/alibaba.ts
@@ -7,10 +7,7 @@ import {
  ChatMessageTool,
  usePluginStore,
} from "@/app/store";
import {
  preProcessImageContentForAlibabaDashScope,
  streamWithThink,
} from "@/app/utils/chat";
import { streamWithThink } from "@/app/utils/chat";
import {
  ChatOptions,
  getHeaders,
@@ -18,14 +15,12 @@ import {
  LLMModel,
  SpeechOptions,
  MultimodalContent,
  MultimodalContentForAlibaba,
} from "../api";
import { getClientConfig } from "@/app/config/client";
import {
  getMessageTextContent,
  getMessageTextContentWithoutThinking,
  getTimeoutMSByModel,
  isVisionModel,
} from "@/app/utils";
import { fetch } from "@/app/utils/stream";

@@ -94,6 +89,14 @@ export class QwenApi implements LLMApi {
  }

  async chat(options: ChatOptions) {
    const messages = options.messages.map((v) => ({
      role: v.role,
      content:
        v.role === "assistant"
          ? getMessageTextContentWithoutThinking(v)
          : getMessageTextContent(v),
    }));

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -102,21 +105,6 @@ export class QwenApi implements LLMApi {
      },
    };

    const visionModel = isVisionModel(options.config.model);

    const messages: ChatOptions["messages"] = [];
    for (const v of options.messages) {
      const content = (
        visionModel
          ? await preProcessImageContentForAlibabaDashScope(v.content)
          : v.role === "assistant"
          ? getMessageTextContentWithoutThinking(v)
          : getMessageTextContent(v)
      ) as any;

      messages.push({ role: v.role, content });
    }

    const shouldStream = !!options.config.stream;
    const requestPayload: RequestPayload = {
      model: modelConfig.model,
@@ -141,7 +129,7 @@ export class QwenApi implements LLMApi {
        "X-DashScope-SSE": shouldStream ? "enable" : "disable",
      };

      const chatPath = this.path(Alibaba.ChatPath(modelConfig.model));
      const chatPath = this.path(Alibaba.ChatPath);
      const chatPayload = {
        method: "POST",
        body: JSON.stringify(requestPayload),
@@ -174,7 +162,7 @@ export class QwenApi implements LLMApi {
            const json = JSON.parse(text);
            const choices = json.output.choices as Array<{
              message: {
                content: string | null | MultimodalContentForAlibaba[];
                content: string | null;
                tool_calls: ChatMessageTool[];
                reasoning_content: string | null;
              };
@@ -224,9 +212,7 @@ export class QwenApi implements LLMApi {
            } else if (content && content.length > 0) {
              return {
                isThinking: false,
                content: Array.isArray(content)
                  ? content.map((item) => item.text).join(",")
                  : content,
                content: content,
              };
            }

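Note that both sides of this diff run assistant history through `getMessageTextContentWithoutThinking`, so a reasoning model's chain-of-thought is not echoed back to the API on the next turn. A minimal sketch of what such a helper can do, assuming reasoning is delimited by `<think>` tags (the real helper lives in `app/utils` and may differ):

```ts
// Sketch: drop <think>...</think> reasoning blocks from assistant text
// before it is resent as conversation history. Assumes <think> delimiters.
function stripThinking(text: string): string {
  return text.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
}

// Example:
stripThinking("<think>Let me check...</think>The answer is 42.");
// => "The answer is 42."
```
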
app/client/platforms/deepseek.ts
@@ -75,25 +75,6 @@ export class DeepSeekApi implements LLMApi {
      }
    }

    // Detect and repair message order: ensure the first non-system message is a user message
    const filteredMessages: ChatOptions["messages"] = [];
    let hasFoundFirstUser = false;

    for (const msg of messages) {
      if (msg.role === "system") {
        // Keep all system messages
        filteredMessages.push(msg);
      } else if (msg.role === "user") {
        // User messages are added directly
        filteredMessages.push(msg);
        hasFoundFirstUser = true;
      } else if (hasFoundFirstUser) {
        // After the first user message, all subsequent non-system messages are kept
        filteredMessages.push(msg);
      }
      // If hasFoundFirstUser is false and the message is not a system message, skip it
    }

    const modelConfig = {
      ...useAppConfig.getState().modelConfig,
      ...useChatStore.getState().currentSession().mask.modelConfig,
@@ -104,7 +85,7 @@ export class DeepSeekApi implements LLMApi {
    };

    const requestPayload: RequestPayload = {
      messages: filteredMessages,
      messages,
      stream: options.config.stream,
      model: modelConfig.model,
      temperature: modelConfig.temperature,

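The block removed above enforces the ordering rule its comment describes: after any system messages, the first message sent must come from the user. The same idea as a standalone function (the name `ensureUserFirst` is hypothetical):

```ts
type Role = "system" | "user" | "assistant";
interface Msg {
  role: Role;
  content: string;
}

// Keep system messages; drop assistant messages that precede the first
// user turn; keep everything after the first user turn.
function ensureUserFirst(history: Msg[]): Msg[] {
  let seenUser = false;
  return history.filter((m) => {
    if (m.role === "system") return true;
    if (m.role === "user") {
      seenUser = true;
      return true;
    }
    return seenUser;
  });
}

// A UI-injected greeting before any user turn would be filtered out:
ensureUserFirst([
  { role: "assistant", content: "Hi! How can I help?" },
  { role: "user", content: "Summarize this file." },
]); // => only the user message (and any system messages) survive
```
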
app/components/emoji.tsx
@@ -66,11 +66,11 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
      LlmIcon = BotIconGemma;
    } else if (modelName.startsWith("claude")) {
      LlmIcon = BotIconClaude;
    } else if (modelName.includes("llama")) {
    } else if (modelName.toLowerCase().includes("llama")) {
      LlmIcon = BotIconMeta;
    } else if (modelName.startsWith("mixtral") || modelName.startsWith("codestral")) {
    } else if (modelName.startsWith("mixtral")) {
      LlmIcon = BotIconMistral;
    } else if (modelName.includes("deepseek")) {
    } else if (modelName.toLowerCase().includes("deepseek")) {
      LlmIcon = BotIconDeepseek;
    } else if (modelName.startsWith("moonshot")) {
      LlmIcon = BotIconMoonshot;
@@ -85,7 +85,7 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
    } else if (modelName.startsWith("doubao") || modelName.startsWith("ep-")) {
      LlmIcon = BotIconDoubao;
    } else if (
      modelName.includes("glm") ||
      modelName.toLowerCase().includes("glm") ||
      modelName.startsWith("cogview-") ||
      modelName.startsWith("cogvideox-")
    ) {

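The `toLowerCase()` calls on one side make the icon matching case-insensitive branch by branch; normalizing the name once up front achieves the same with less repetition. A small sketch (the helper and its return labels are hypothetical):

```ts
// Hypothetical variant: normalize the model name once, then match.
function iconFor(model: string): string {
  const name = model.toLowerCase();
  if (name.includes("llama")) return "meta";
  if (name.startsWith("mixtral") || name.startsWith("codestral")) return "mistral";
  if (name.includes("deepseek")) return "deepseek";
  if (name.includes("glm") || name.startsWith("cogview-")) return "chatglm";
  return "default";
}
```
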
app/constant.ts
@@ -221,12 +221,7 @@ export const ByteDance = {

export const Alibaba = {
  ExampleEndpoint: ALIBABA_BASE_URL,
  ChatPath: (modelName: string) => {
    if (modelName.includes("vl") || modelName.includes("omni")) {
      return "v1/services/aigc/multimodal-generation/generation";
    }
    return `v1/services/aigc/text-generation/generation`;
  },
  ChatPath: "v1/services/aigc/text-generation/generation",
};

export const Tencent = {
@@ -540,8 +535,6 @@ const anthropicModels = [
  "claude-3-5-sonnet-20240620",
  "claude-3-5-sonnet-20241022",
  "claude-3-5-sonnet-latest",
  "claude-3-7-sonnet-20250219",
  "claude-3-7-sonnet-latest",
];

const baiduModels = [
@@ -575,9 +568,6 @@ const alibabaModes = [
  "qwen-max-0403",
  "qwen-max-0107",
  "qwen-max-longcontext",
  "qwen-omni-turbo",
  "qwen-vl-plus",
  "qwen-vl-max",
];

const tencentModels = [

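In the function form of `ChatPath` shown above, the request path depends on the model family: names containing `vl` or `omni` are routed to DashScope's multimodal-generation endpoint, everything else to text-generation. Restated in isolation for illustration:

```ts
// Self-contained restatement of the function-form ChatPath from the diff.
const chatPath = (modelName: string) =>
  modelName.includes("vl") || modelName.includes("omni")
    ? "v1/services/aigc/multimodal-generation/generation"
    : "v1/services/aigc/text-generation/generation";

chatPath("qwen-vl-plus"); // multimodal-generation endpoint
chatPath("qwen-max-0403"); // text-generation endpoint
```
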
app/utils/chat.ts
@@ -3,7 +3,7 @@ import {
  UPLOAD_URL,
  REQUEST_TIMEOUT_MS,
} from "@/app/constant";
import { MultimodalContent, RequestMessage } from "@/app/client/api";
import { RequestMessage } from "@/app/client/api";
import Locale from "@/app/locales";
import {
  EventStreamContentType,
@@ -70,9 +70,8 @@ export function compressImage(file: Blob, maxSize: number): Promise<string> {
  });
}

export async function preProcessImageContentBase(
export async function preProcessImageContent(
  content: RequestMessage["content"],
  transformImageUrl: (url: string) => Promise<{ [key: string]: any }>,
) {
  if (typeof content === "string") {
    return content;
@@ -82,7 +81,7 @@ export async function preProcessImageContentBase(
    if (part?.type == "image_url" && part?.image_url?.url) {
      try {
        const url = await cacheImageToBase64Image(part?.image_url?.url);
        result.push(await transformImageUrl(url));
        result.push({ type: part.type, image_url: { url } });
      } catch (error) {
        console.error("Error processing image URL:", error);
      }
@@ -93,23 +92,6 @@ export async function preProcessImageContentBase(
  return result;
}

export async function preProcessImageContent(
  content: RequestMessage["content"],
) {
  return preProcessImageContentBase(content, async (url) => ({
    type: "image_url",
    image_url: { url },
  })) as Promise<MultimodalContent[] | string>;
}

export async function preProcessImageContentForAlibabaDashScope(
  content: RequestMessage["content"],
) {
  return preProcessImageContentBase(content, async (url) => ({
    image: url,
  }));
}

const imageCaches: Record<string, string> = {};
export function cacheImageToBase64Image(imageUrl: string) {
  if (imageUrl.includes(CACHE_URL_PREFIX)) {

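The `preProcessImageContentBase` form shown above injects a `transformImageUrl` callback, so the walk over message parts is shared while each provider supplies only its payload shape. The pattern in isolation (names and values illustrative, not the repo's actual helpers):

```ts
// Shared traversal, provider-specific shape via an injected callback.
async function mapImageParts<T>(
  urls: string[],
  transform: (url: string) => Promise<T>,
): Promise<T[]> {
  return Promise.all(urls.map(transform));
}

const urls = ["data:image/png;base64,AAAA"];

// OpenAI-style parts and DashScope-style parts from the same traversal:
const openaiParts = mapImageParts(urls, async (url) => ({
  type: "image_url" as const,
  image_url: { url },
}));
const dashscopeParts = mapImageParts(urls, async (url) => ({ image: url }));
```
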
jest.config.ts
@@ -15,8 +15,6 @@ const config: Config = {
  moduleNameMapper: {
    "^@/(.*)$": "<rootDir>/$1",
  },
  extensionsToTreatAsEsm: [".ts", ".tsx"],
  injectGlobals: true,
};

// createJestConfig is exported this way to ensure that next/jest can load the Next.js config which is async

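These two options pair with the test scripts in `package.json` further down: `extensionsToTreatAsEsm` tells Jest to load `.ts`/`.tsx` as ES modules, which is why the same side of the diff runs Jest under `node --experimental-vm-modules` and imports `jest` from `@jest/globals` explicitly in the test files. A minimal sketch of that ESM-leaning config, assuming the repo's `next/jest` wrapper otherwise stays as is:

```ts
import type { Config } from "jest";

const config: Config = {
  moduleNameMapper: {
    "^@/(.*)$": "<rootDir>/$1",
  },
  // Load TypeScript sources as native ES modules (requires running Jest
  // under node --experimental-vm-modules).
  extensionsToTreatAsEsm: [".ts", ".tsx"],
  // Keep describe/test/expect injected as globals.
  injectGlobals: true,
};

export default config;
```
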
jest.setup.ts
@@ -1,22 +1,24 @@
// Learn more: https://github.com/testing-library/jest-dom
import "@testing-library/jest-dom";
import { jest } from "@jest/globals";

global.fetch = jest.fn(() =>
  Promise.resolve({
    ok: true,
    status: 200,
    json: () => Promise.resolve([]),
    json: () => Promise.resolve({}),
    headers: new Headers(),
    redirected: false,
    statusText: "OK",
    type: "basic",
    url: "",
    clone: function () {
      return this;
    },
    body: null,
    bodyUsed: false,
    arrayBuffer: () => Promise.resolve(new ArrayBuffer(0)),
    blob: () => Promise.resolve(new Blob()),
    formData: () => Promise.resolve(new FormData()),
    text: () => Promise.resolve(""),
  } as Response),
  }),
);

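One side stubs only the fields the code under test touches and casts with `as Response`; the other spells out the full `Response` surface so the object satisfies the type structurally. Either way, test code that calls `fetch` receives the stub. A hypothetical test against the `{}` variant of `json()`:

```ts
// Hypothetical test exercising the global fetch stub above.
test("global fetch is stubbed", async () => {
  const res = await fetch("https://example.com/api");
  expect(res.ok).toBe(true);
  expect(await res.json()).toEqual({});
});
```
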
package.json
@@ -17,8 +17,8 @@
    "prompts": "node ./scripts/fetch-prompts.mjs",
    "prepare": "husky install",
    "proxy-dev": "sh ./scripts/init-proxy.sh && proxychains -f ./scripts/proxychains.conf yarn dev",
    "test": "node --no-warnings --experimental-vm-modules $(yarn bin jest) --watch",
    "test:ci": "node --no-warnings --experimental-vm-modules $(yarn bin jest) --ci"
    "test": "jest --watch",
    "test:ci": "jest --ci"
  },
  "dependencies": {
    "@fortaine/fetch-event-source": "^3.0.6",

test/vision-model-checker.test.ts
@@ -1,4 +1,3 @@
import { jest } from "@jest/globals";
import { isVisionModel } from "../app/utils";

describe("isVisionModel", () => {