From 32fc4d86a26a99dc04d39a162f158156a1e93b47 Mon Sep 17 00:00:00 2001 From: GeekMaster Date: Tue, 27 May 2025 15:48:07 +0800 Subject: [PATCH] =?UTF-8?q?SSE=20=E6=B6=88=E6=81=AF=E9=87=8D=E6=9E=84?= =?UTF-8?q?=E5=B7=B2=E5=AE=8C=E6=88=90?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 2 + api/core/types/locked_map.go | 2 +- api/handler/admin/chat_handler.go | 4 +- api/handler/admin/redeem_handler.go | 4 +- api/handler/chat_handler.go | 362 ++++++++++-------- api/handler/chat_item_handler.go | 18 +- api/handler/chat_openai_handler.go | 21 +- api/service/crawler/service.go | 94 ++--- api/service/types.go | 2 +- .../{chat_history.go => chat_message.go} | 0 api/store/vo/chat_history.go | 14 - api/store/vo/chat_message.go | 19 + web/src/components/ChatPrompt.vue | 20 +- web/src/components/ChatReply.vue | 30 +- web/src/views/ChatPlus.vue | 141 +++---- 15 files changed, 394 insertions(+), 339 deletions(-) rename api/store/model/{chat_history.go => chat_message.go} (100%) delete mode 100644 api/store/vo/chat_history.go create mode 100644 api/store/vo/chat_message.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 750a681a..1c950f63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ ## v4.2.4 +- 功能优化:更改前端构建技术选型,使用 Vite 构建,提升构建速度和兼容性 +- 功能优化:使用 SSE 发送消息,替换原来的 Websocket 消息方案 - 功能新增:管理后台支持设置默认昵称 - 功能优化:支持 Suno v4.5 模型支持 - 功能新增:用户注册和用户登录增加用户协议和隐私政策功能,需要用户同意协议才可注册和登录。 diff --git a/api/core/types/locked_map.go b/api/core/types/locked_map.go index 4382ceee..2cb9362c 100644 --- a/api/core/types/locked_map.go +++ b/api/core/types/locked_map.go @@ -16,7 +16,7 @@ type MKey interface { string | int | uint } type MValue interface { - *WsClient | *ChatSession | context.CancelFunc | []any + *WsClient | context.CancelFunc | []any } type LMap[K MKey, T MValue] struct { lock sync.RWMutex diff --git a/api/handler/admin/chat_handler.go b/api/handler/admin/chat_handler.go index a97eacc2..d3670eeb 100644 --- 
a/api/handler/admin/chat_handler.go +++ b/api/handler/admin/chat_handler.go @@ -209,14 +209,14 @@ func (h *ChatHandler) Messages(c *gin.Context) { func (h *ChatHandler) History(c *gin.Context) { chatId := c.Query("chat_id") // 会话 ID var items []model.ChatMessage - var messages = make([]vo.HistoryMessage, 0) + var messages = make([]vo.ChatMessage, 0) res := h.DB.Where("chat_id = ?", chatId).Find(&items) if res.Error != nil { resp.ERROR(c, "No history message") return } else { for _, item := range items { - var v vo.HistoryMessage + var v vo.ChatMessage err := utils.CopyObject(item, &v) v.CreatedAt = item.CreatedAt.Unix() v.UpdatedAt = item.UpdatedAt.Unix() diff --git a/api/handler/admin/redeem_handler.go b/api/handler/admin/redeem_handler.go index b2559a9c..6fae1e91 100644 --- a/api/handler/admin/redeem_handler.go +++ b/api/handler/admin/redeem_handler.go @@ -106,8 +106,8 @@ func (h *RedeemHandler) Export(c *gin.Context) { } // 设置响应头,告诉浏览器这是一个附件,需要下载 - c.Header("Content-Disposition", "attachment; filename=output.csv") - c.Header("Content-Type", "text/csv") + c.Header("Prompt-Disposition", "attachment; filename=output.csv") + c.Header("Prompt-Type", "text/csv") // 创建一个 CSV writer writer := csv.NewWriter(c.Writer) diff --git a/api/handler/chat_handler.go b/api/handler/chat_handler.go index b6317b24..cd2db827 100644 --- a/api/handler/chat_handler.go +++ b/api/handler/chat_handler.go @@ -21,11 +21,11 @@ import ( "geekai/store/vo" "geekai/utils" "geekai/utils/resp" - "html/template" "io" "net/http" "net/url" "os" + "path" "strings" "time" "unicode/utf8" @@ -45,14 +45,17 @@ const ( ) type ChatInput struct { - UserId uint `json:"user_id"` - RoleId int `json:"role_id"` - ModelId int `json:"model_id"` - ChatId string `json:"chat_id"` - Content string `json:"content"` - Tools []int `json:"tools"` - Stream bool `json:"stream"` - Files []vo.File `json:"files"` + UserId uint `json:"user_id"` + RoleId uint `json:"role_id"` + ModelId uint `json:"model_id"` + ChatId string 
`json:"chat_id"` + Prompt string `json:"prompt"` + Tools []uint `json:"tools"` + Stream bool `json:"stream"` + Files []vo.File `json:"files"` + ChatModel model.ChatModel `json:"chat_model,omitempty"` + ChatRole model.ChatRole `json:"chat_role,omitempty"` + LastMsgId uint `json:"last_msg_id,omitempty"` // 最后的消息ID,用于重新生成答案的时候过滤上下文 } type ChatHandler struct { @@ -79,14 +82,14 @@ func NewChatHandler(app *core.AppServer, db *gorm.DB, redis *redis.Client, manag // Chat 处理聊天请求 func (h *ChatHandler) Chat(c *gin.Context) { - var data ChatInput - if err := c.ShouldBindJSON(&data); err != nil { + var input ChatInput + if err := c.ShouldBindJSON(&input); err != nil { resp.ERROR(c, types.InvalidArgs) return } // 设置SSE响应头 - c.Header("Content-Type", "text/event-stream") + c.Header("Prompt-Type", "text/event-stream") c.Header("Cache-Control", "no-cache") c.Header("Connection", "keep-alive") c.Header("X-Accel-Buffering", "no") @@ -94,44 +97,34 @@ func (h *ChatHandler) Chat(c *gin.Context) { ctx, cancel := context.WithCancel(c.Request.Context()) defer cancel() + // 使用旧的聊天数据覆盖模型和角色ID + var chat model.ChatItem + h.DB.Where("chat_id", input.ChatId).First(&chat) + if chat.Id > 0 { + input.ModelId = chat.ModelId + input.RoleId = chat.RoleId + } + // 验证聊天角色 var chatRole model.ChatRole - err := h.DB.First(&chatRole, data.RoleId).Error + err := h.DB.First(&chatRole, input.RoleId).Error if err != nil || !chatRole.Enable { pushMessage(c, ChatEventError, "当前聊天角色不存在或者未启用,请更换角色之后再发起对话!") return } - - // 如果角色绑定了模型ID,使用角色的模型ID - if chatRole.ModelId > 0 { - data.ModelId = int(chatRole.ModelId) - } + input.ChatRole = chatRole // 获取模型信息 var chatModel model.ChatModel - err = h.DB.Where("id", data.ModelId).First(&chatModel).Error + err = h.DB.Where("id", input.ModelId).First(&chatModel).Error if err != nil || !chatModel.Enabled { pushMessage(c, ChatEventError, "当前AI模型暂未启用,请更换模型后再发起对话!") return } - - // 使用旧的聊天数据覆盖模型和角色ID - var chat model.ChatItem - h.DB.Where("chat_id", data.ChatId).First(&chat) - if 
chat.Id > 0 { - chatModel.Id = chat.ModelId - data.RoleId = int(chat.RoleId) - } - - // 复制模型数据 - err = utils.CopyObject(chatModel, &session.Model) - if err != nil { - logger.Error(err, chatModel) - } - session.Model.Id = chatModel.Id + input.ChatModel = chatModel // 发送消息 - err = h.sendMessage(ctx, session, chatRole, data.Content, c) + err = h.sendMessage(ctx, input, c) if err != nil { pushMessage(c, ChatEventError, err.Error()) return @@ -148,9 +141,9 @@ func pushMessage(c *gin.Context, msgType string, content interface{}) { c.Writer.Flush() } -func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSession, role model.ChatRole, prompt string, c *gin.Context) error { +func (h *ChatHandler) sendMessage(ctx context.Context, input ChatInput, c *gin.Context) error { var user model.User - res := h.DB.Model(&model.User{}).First(&user, session.UserId) + res := h.DB.Model(&model.User{}).First(&user, input.UserId) if res.Error != nil { return errors.New("未授权用户,您正在进行非法操作!") } @@ -165,8 +158,8 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio return errors.New("您的账号已经被禁用,如果疑问,请联系管理员!") } - if userVo.Power < session.Model.Power { - return fmt.Errorf("您当前剩余算力 %d 已不足以支付当前模型的单次对话需要消耗的算力 %d,[立即购买](/member)。", userVo.Power, session.Model.Power) + if userVo.Power < input.ChatModel.Power { + return fmt.Errorf("您当前剩余算力 %d 已不足以支付当前模型的单次对话需要消耗的算力 %d,[立即购买](/member)。", userVo.Power, input.ChatModel.Power) } if userVo.ExpiredTime > 0 && userVo.ExpiredTime <= time.Now().Unix() { @@ -174,30 +167,29 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio } // 检查 prompt 长度是否超过了当前模型允许的最大上下文长度 - promptTokens, _ := utils.CalcTokens(prompt, session.Model.Value) - if promptTokens > session.Model.MaxContext { + promptTokens, _ := utils.CalcTokens(input.Prompt, input.ChatModel.Value) + if promptTokens > input.ChatModel.MaxContext { return errors.New("对话内容超出了当前模型允许的最大上下文长度!") } var req = types.ApiRequest{ - Model: 
session.Model.Value, - Stream: session.Stream, - Temperature: session.Model.Temperature, + Model: input.ChatModel.Value, + Stream: input.Stream, + Temperature: input.ChatModel.Temperature, } // 兼容 OpenAI 模型 - if strings.HasPrefix(session.Model.Value, "o1-") || - strings.HasPrefix(session.Model.Value, "o3-") || - strings.HasPrefix(session.Model.Value, "gpt") { - req.MaxCompletionTokens = session.Model.MaxTokens - session.Start = time.Now().Unix() + if strings.HasPrefix(input.ChatModel.Value, "o1-") || + strings.HasPrefix(input.ChatModel.Value, "o3-") || + strings.HasPrefix(input.ChatModel.Value, "gpt") { + req.MaxCompletionTokens = input.ChatModel.MaxTokens } else { - req.MaxTokens = session.Model.MaxTokens + req.MaxTokens = input.ChatModel.MaxTokens } - if len(session.Tools) > 0 && !strings.HasPrefix(session.Model.Value, "o1-") { + if len(input.Tools) > 0 && !strings.HasPrefix(input.ChatModel.Value, "o1-") { var items []model.Function - res = h.DB.Where("enabled", true).Where("id IN ?", session.Tools).Find(&items) + res = h.DB.Where("enabled", true).Where("id IN ?", input.Tools).Find(&items) if res.Error == nil { var tools = make([]types.Tool, 0) for _, v := range items { @@ -231,14 +223,18 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio chatCtx := make([]interface{}, 0) messages := make([]interface{}, 0) if h.App.SysConfig.EnableContext { - if h.ChatContexts.Has(session.ChatId) { - messages = h.ChatContexts.Get(session.ChatId) + if h.ChatContexts.Has(input.ChatId) { + messages = h.ChatContexts.Get(input.ChatId) } else { - _ = utils.JsonDecode(role.Context, &messages) + _ = utils.JsonDecode(input.ChatRole.Context, &messages) if h.App.SysConfig.ContextDeep > 0 { var historyMessages []model.ChatMessage - res := h.DB.Where("chat_id = ? 
and use_context = 1", session.ChatId).Limit(h.App.SysConfig.ContextDeep).Order("id DESC").Find(&historyMessages) - if res.Error == nil { + dbSession := h.DB.Session(&gorm.Session{}).Where("chat_id", input.ChatId) + if input.LastMsgId > 0 { // 重新生成逻辑 + dbSession = dbSession.Where("id < ?", input.LastMsgId) + } + err = dbSession.Limit(h.App.SysConfig.ContextDeep).Order("id DESC").Find(&historyMessages).Error + if err == nil { for i := len(historyMessages) - 1; i >= 0; i-- { msg := historyMessages[i] ms := types.Message{Role: "user", Content: msg.Content} @@ -261,7 +257,7 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio v := messages[i] tks, _ = utils.CalcTokens(utils.JsonEncode(v), req.Model) // 上下文 token 超出了模型的最大上下文长度 - if tokens+tks >= session.Model.MaxContext { + if tokens+tks >= input.ChatModel.MaxContext { break } @@ -282,71 +278,101 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio reqMgs = append(reqMgs, chatCtx[i]) } - fullPrompt := prompt - text := prompt - - for _, file := range session.Files { - // extract files in prompt - files := utils.ExtractFileURLs(prompt) - logger.Debugf("detected FILES: %+v", files) - // 如果不是逆向模型,则提取文件内容 - if len(files) > 0 && !(session.Model.Value == "gpt-4-all" || - strings.HasPrefix(session.Model.Value, "gpt-4-gizmo") || - strings.HasPrefix(session.Model.Value, "claude-3")) { - contents := make([]string, 0) - var file model.File - for _, v := range files { - h.DB.Where("url = ?", v).First(&file) - content, err := utils.ReadFileContent(v, h.App.Config.TikaHost) - if err != nil { - logger.Error("error with read file: ", err) - } else { - contents = append(contents, fmt.Sprintf("%s 文件内容:%s", file.Name, content)) - } - text = strings.Replace(text, v, "", 1) - } - if len(contents) > 0 { - fullPrompt = fmt.Sprintf("请根据提供的文件内容信息回答问题(其中Excel 已转成 HTML):\n\n %s\n\n 问题:%s", strings.Join(contents, "\n"), text) - } - - tokens, _ := utils.CalcTokens(fullPrompt, req.Model) - 
if tokens > session.Model.MaxContext { - return fmt.Errorf("文件的长度超出模型允许的最大上下文长度,请减少文件内容数量或文件大小。") - } - } - logger.Debug("最终Prompt:", fullPrompt) - - // extract images from prompt - imgURLs := utils.ExtractImgURLs(prompt) - logger.Debugf("detected IMG: %+v", imgURLs) - var content interface{} - if len(imgURLs) > 0 { - data := make([]interface{}, 0) - for _, v := range imgURLs { - text = strings.Replace(text, v, "", 1) - data = append(data, gin.H{ + fileContents := make([]string, 0) // 文件内容 + var finalPrompt = input.Prompt + imgList := make([]any, 0) + for _, file := range input.Files { + logger.Debugf("detected file: %+v", file.URL) + // 处理图片 + if isImageURL(file.URL) { + imgList = append(imgList, gin.H{ "type": "image_url", "image_url": gin.H{ - "url": v, + "url": file.URL, }, }) + } else { + // 如果不是逆向模型,则提取文件内容 + modelValue := input.ChatModel.Value + if !(strings.Contains(modelValue, "-all") || strings.HasPrefix(modelValue, "gpt-4-gizmo") || strings.HasPrefix(modelValue, "claude")) { + content, err := utils.ReadFileContent(file.URL, h.App.Config.TikaHost) + if err != nil { + logger.Error("error with read file: ", err) + continue + } else { + fileContents = append(fileContents, fmt.Sprintf("%s 文件内容:%s", file.Name, content)) + } + } } - data = append(data, gin.H{ - "type": "text", - "text": strings.TrimSpace(text), - }) - content = data - } else { - content = fullPrompt } - req.Messages = append(reqMgs, map[string]interface{}{ - "role": "user", - "content": content, - }) - logger.Debugf("%+v", req.Messages) + if len(fileContents) > 0 { + finalPrompt = fmt.Sprintf("请根据提供的文件内容信息回答问题(其中Excel 已转成 HTML):\n\n %s\n\n 问题:%s", strings.Join(fileContents, "\n"), input.Prompt) + tokens, _ := utils.CalcTokens(finalPrompt, req.Model) + if tokens > input.ChatModel.MaxContext { + return fmt.Errorf("文件的长度超出模型允许的最大上下文长度,请减少文件内容数量或文件大小。") + } + } else { + finalPrompt = input.Prompt + } - return h.sendOpenAiMessage(req, userVo, ctx, session, role, prompt, c) + if len(imgList) > 0 { + 
imgList = append(imgList, map[string]interface{}{ + "type": "text", + "text": input.Prompt, + }) + req.Messages = append(reqMgs, map[string]interface{}{ + "role": "user", + "content": imgList, + }) + } else { + req.Messages = append(reqMgs, map[string]interface{}{ + "role": "user", + "content": finalPrompt, + }) + } + + logger.Debugf("请求消息: %+v", req.Messages) + + return h.sendOpenAiMessage(req, userVo, ctx, input, c) +} + +// 判断一个 URL 是否图片链接 +func isImageURL(url string) bool { + // 检查是否是有效的URL + if !strings.HasPrefix(url, "http://") && !strings.HasPrefix(url, "https://") { + return false + } + + // 检查文件扩展名 + ext := strings.ToLower(path.Ext(url)) + validImageExts := map[string]bool{ + ".jpg": true, + ".jpeg": true, + ".png": true, + ".gif": true, + ".bmp": true, + ".webp": true, + ".svg": true, + ".ico": true, + } + + if !validImageExts[ext] { + return false + } + + // 发送HEAD请求检查Content-Type + client := &http.Client{ + Timeout: 5 * time.Second, + } + resp, err := client.Head(url) + if err != nil { + return false + } + defer resp.Body.Close() + + contentType := resp.Header.Get("Content-Type") + return strings.HasPrefix(contentType, "image/") } // Tokens 统计 token 数量 @@ -415,10 +441,10 @@ func (h *ChatHandler) StopGenerate(c *gin.Context) { // 发送请求到 OpenAI 服务器 // useOwnApiKey: 是否使用了用户自己的 API KEY -func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, session *types.ChatSession, apiKey *model.ApiKey) (*http.Response, error) { +func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, input ChatInput, apiKey *model.ApiKey) (*http.Response, error) { // if the chat model bind a KEY, use it directly - if session.Model.KeyId > 0 { - h.DB.Where("id", session.Model.KeyId).Find(apiKey) + if input.ChatModel.KeyId > 0 { + h.DB.Where("id", input.ChatModel.KeyId).Find(apiKey) } else { // use the last unused key h.DB.Where("type", "chat").Where("enabled", true).Order("last_used_at ASC").First(apiKey) } @@ -472,16 +498,16 @@ func (h *ChatHandler) 
doRequest(ctx context.Context, req types.ApiRequest, sessi } // 扣减用户算力 -func (h *ChatHandler) subUserPower(userVo vo.User, session *types.ChatSession, promptTokens int, replyTokens int) { +func (h *ChatHandler) subUserPower(userVo vo.User, input ChatInput, promptTokens int, replyTokens int) { power := 1 - if session.Model.Power > 0 { - power = session.Model.Power + if input.ChatModel.Power > 0 { + power = input.ChatModel.Power } err := h.userService.DecreasePower(userVo.Id, power, model.PowerLog{ Type: types.PowerConsume, - Model: session.Model.Value, - Remark: fmt.Sprintf("模型名称:%s, 提问长度:%d,回复长度:%d", session.Model.Name, promptTokens, replyTokens), + Model: input.ChatModel.Value, + Remark: fmt.Sprintf("模型名称:%s, 提问长度:%d,回复长度:%d", input.ChatModel.Name, promptTokens, replyTokens), }) if err != nil { logger.Error(err) @@ -492,8 +518,7 @@ func (h *ChatHandler) saveChatHistory( req types.ApiRequest, usage Usage, message types.Message, - session *types.ChatSession, - role model.ChatRole, + input ChatInput, userVo vo.User, promptCreatedAt time.Time, replyCreatedAt time.Time) { @@ -502,7 +527,7 @@ func (h *ChatHandler) saveChatHistory( if h.App.SysConfig.EnableContext { chatCtx := req.Messages // 提问消息 chatCtx = append(chatCtx, message) // 回复消息 - h.ChatContexts.Put(session.ChatId, chatCtx) + h.ChatContexts.Put(input.ChatId, chatCtx) } // 追加聊天记录 @@ -515,12 +540,15 @@ func (h *ChatHandler) saveChatHistory( } historyUserMsg := model.ChatMessage{ - UserId: userVo.Id, - ChatId: session.ChatId, - RoleId: role.Id, - Type: types.PromptMsg, - Icon: userVo.Avatar, - Content: template.HTMLEscapeString(usage.Prompt), + UserId: userVo.Id, + ChatId: input.ChatId, + RoleId: input.RoleId, + Type: types.PromptMsg, + Icon: userVo.Avatar, + Content: utils.JsonEncode(vo.MsgContent{ + Text: usage.Prompt, + Files: input.Files, + }), Tokens: promptTokens, TotalTokens: promptTokens, UseContext: true, @@ -543,12 +571,15 @@ func (h *ChatHandler) saveChatHistory( totalTokens = replyTokens + 
getTotalTokens(req) } historyReplyMsg := model.ChatMessage{ - UserId: userVo.Id, - ChatId: session.ChatId, - RoleId: role.Id, - Type: types.ReplyMsg, - Icon: role.Icon, - Content: usage.Content, + UserId: userVo.Id, + ChatId: input.ChatId, + RoleId: input.RoleId, + Type: types.ReplyMsg, + Icon: input.ChatRole.Icon, + Content: utils.JsonEncode(vo.MsgContent{ + Text: message.Content, + Files: input.Files, + }), Tokens: replyTokens, TotalTokens: totalTokens, UseContext: true, @@ -562,17 +593,17 @@ func (h *ChatHandler) saveChatHistory( } // 更新用户算力 - if session.Model.Power > 0 { - h.subUserPower(userVo, session, promptTokens, replyTokens) + if input.ChatModel.Power > 0 { + h.subUserPower(userVo, input, promptTokens, replyTokens) } // 保存当前会话 var chatItem model.ChatItem - err = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem).Error + err = h.DB.Where("chat_id = ?", input.ChatId).First(&chatItem).Error if err != nil { - chatItem.ChatId = session.ChatId + chatItem.ChatId = input.ChatId chatItem.UserId = userVo.Id - chatItem.RoleId = role.Id - chatItem.ModelId = session.Model.Id + chatItem.RoleId = input.RoleId + chatItem.ModelId = input.ModelId if utf8.RuneCountInString(usage.Prompt) > 30 { chatItem.Title = string([]rune(usage.Prompt)[:30]) + "..." 
} else { @@ -586,7 +617,7 @@ func (h *ChatHandler) saveChatHistory( } } -// 文本生成语音 +// TextToSpeech 文本生成语音 func (h *ChatHandler) TextToSpeech(c *gin.Context) { var data struct { ModelId int `json:"model_id"` @@ -600,13 +631,19 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) { textHash := utils.Sha256(fmt.Sprintf("%d/%s", data.ModelId, data.Text)) audioFile := fmt.Sprintf("%s/audio", h.App.Config.StaticDir) if _, err := os.Stat(audioFile); err != nil { - os.MkdirAll(audioFile, 0755) + resp.ERROR(c, err.Error()) + return + } + + if err := os.MkdirAll(audioFile, 0755); err != nil { + resp.ERROR(c, err.Error()) + return } audioFile = fmt.Sprintf("%s/%s.mp3", audioFile, textHash) if _, err := os.Stat(audioFile); err == nil { // 设置响应头 - c.Header("Content-Type", "audio/mpeg") - c.Header("Content-Disposition", "attachment; filename=speech.mp3") + c.Header("Prompt-Type", "audio/mpeg") + c.Header("Prompt-Disposition", "attachment; filename=speech.mp3") c.File(audioFile) return } @@ -670,11 +707,14 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) { } // 设置响应头 - c.Header("Content-Type", "audio/mpeg") - c.Header("Content-Disposition", "attachment; filename=speech.mp3") + c.Header("Prompt-Type", "audio/mpeg") + c.Header("Prompt-Disposition", "attachment; filename=speech.mp3") // 直接写入完整的音频数据到响应 - c.Writer.Write(audioBytes) + _, err = c.Writer.Write(audioBytes) + if err != nil { + logger.Error("写入音频数据到响应失败:", err) + } } // // OPenAI 消息发送实现 @@ -707,7 +747,7 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) { // return fmt.Errorf("请求 OpenAI API 失败:%d, %v", response.StatusCode, string(body)) // } -// contentType := response.Header.Get("Content-Type") +// contentType := response.Header.Get("Prompt-Type") // if strings.Contains(contentType, "text/event-stream") { // replyCreatedAt := time.Now() // 记录回复时间 // // 循环读取 Chunk 消息 @@ -733,7 +773,7 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) { // if len(responseBody.Choices) == 0 { // Fixed: 兼容 Azure API 第一个输出空行 // 
continue // } -// if responseBody.Choices[0].Delta.Content == nil && +// if responseBody.Choices[0].Delta.Prompt == nil && // responseBody.Choices[0].Delta.ToolCalls == nil && // responseBody.Choices[0].Delta.ReasoningContent == "" { // continue @@ -799,10 +839,10 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) { // "content": reasoningContent, // }) // contents = append(contents, reasoningContent) -// } else if responseBody.Choices[0].Delta.Content != "" { -// finalContent := responseBody.Choices[0].Delta.Content +// } else if responseBody.Choices[0].Delta.Prompt != "" { +// finalContent := responseBody.Choices[0].Delta.Prompt // if reasoning { -// finalContent = fmt.Sprintf("%s", responseBody.Choices[0].Delta.Content) +// finalContent = fmt.Sprintf("%s", responseBody.Choices[0].Delta.Prompt) // reasoning = false // } // contents = append(contents, utils.InterfaceToString(finalContent)) @@ -861,12 +901,12 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) { // if len(contents) > 0 { // usage := Usage{ // Prompt: prompt, -// Content: strings.Join(contents, ""), +// Prompt: strings.Join(contents, ""), // PromptTokens: 0, // CompletionTokens: 0, // TotalTokens: 0, // } -// message.Content = usage.Content +// message.Prompt = usage.Prompt // h.saveChatHistory(req, usage, message, session, role, userVo, promptCreatedAt, replyCreatedAt) // } // } else { @@ -879,16 +919,16 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) { // if err != nil { // return fmt.Errorf("解析响应失败:%v", body) // } -// content := respVo.Choices[0].Message.Content +// content := respVo.Choices[0].Message.Prompt // if strings.HasPrefix(req.Model, "o1-") { -// content = fmt.Sprintf("AI思考结束,耗时:%d 秒。\n%s", time.Now().Unix()-session.Start, respVo.Choices[0].Message.Content) +// content = fmt.Sprintf("AI思考结束,耗时:%d 秒。\n%s", time.Now().Unix()-session.Start, respVo.Choices[0].Message.Prompt) // } // pushMessage(c, ChatEventMessageDelta, map[string]interface{}{ // "type": "text", // "content": 
content, // }) // respVo.Usage.Prompt = prompt -// respVo.Usage.Content = content +// respVo.Usage.Prompt = content // h.saveChatHistory(req, respVo.Usage, respVo.Choices[0].Message, session, role, userVo, promptCreatedAt, time.Now()) // } diff --git a/api/handler/chat_item_handler.go b/api/handler/chat_item_handler.go index f08be3fe..ce2971df 100644 --- a/api/handler/chat_item_handler.go +++ b/api/handler/chat_item_handler.go @@ -133,20 +133,28 @@ func (h *ChatHandler) Clear(c *gin.Context) { func (h *ChatHandler) History(c *gin.Context) { chatId := c.Query("chat_id") // 会话 ID var items []model.ChatMessage - var messages = make([]vo.HistoryMessage, 0) + var messages = make([]vo.ChatMessage, 0) res := h.DB.Where("chat_id = ?", chatId).Find(&items) if res.Error != nil { resp.ERROR(c, "No history message") return } else { for _, item := range items { - var v vo.HistoryMessage + var v vo.ChatMessage err := utils.CopyObject(item, &v) + if err != nil { + continue + } + // 解析内容 + var content vo.MsgContent + err = utils.JsonDecode(item.Content, &content) + if err != nil { + content.Text = item.Content + } + v.Content = content + messages = append(messages, v) v.CreatedAt = item.CreatedAt.Unix() v.UpdatedAt = item.UpdatedAt.Unix() - if err == nil { - messages = append(messages, v) - } } } diff --git a/api/handler/chat_openai_handler.go b/api/handler/chat_openai_handler.go index ef9a5722..6a71c405 100644 --- a/api/handler/chat_openai_handler.go +++ b/api/handler/chat_openai_handler.go @@ -56,18 +56,16 @@ func (h *ChatHandler) sendOpenAiMessage( req types.ApiRequest, userVo vo.User, ctx context.Context, - session *types.ChatSession, - role model.ChatRole, - prompt string, + input ChatInput, c *gin.Context) error { promptCreatedAt := time.Now() // 记录提问时间 start := time.Now() var apiKey = model.ApiKey{} - response, err := h.doRequest(ctx, req, session, &apiKey) + response, err := h.doRequest(ctx, req, input, &apiKey) logger.Info("HTTP请求完成,耗时:", time.Since(start)) if err != nil 
{ if strings.Contains(err.Error(), "context canceled") { - return fmt.Errorf("用户取消了请求:%s", prompt) + return fmt.Errorf("用户取消了请求:%s", input.Prompt) } else if strings.Contains(err.Error(), "no available key") { return errors.New("抱歉😔😔😔,系统已经没有可用的 API KEY,请联系管理员!") } @@ -180,7 +178,7 @@ func (h *ChatHandler) sendOpenAiMessage( if err := scanner.Err(); err != nil { if strings.Contains(err.Error(), "context canceled") { - logger.Info("用户取消了请求:", prompt) + logger.Info("用户取消了请求:", input.Prompt) } else { logger.Error("信息读取出错:", err) } @@ -221,14 +219,14 @@ func (h *ChatHandler) sendOpenAiMessage( // 消息发送成功 if len(contents) > 0 { usage := Usage{ - Prompt: prompt, + Prompt: input.Prompt, Content: strings.Join(contents, ""), PromptTokens: 0, CompletionTokens: 0, TotalTokens: 0, } message.Content = usage.Content - h.saveChatHistory(req, usage, message, session, role, userVo, promptCreatedAt, replyCreatedAt) + h.saveChatHistory(req, usage, message, input, userVo, promptCreatedAt, replyCreatedAt) } } else { // 非流式输出 var respVo OpenAIResVo @@ -241,13 +239,10 @@ func (h *ChatHandler) sendOpenAiMessage( return fmt.Errorf("解析响应失败:%v", body) } content := respVo.Choices[0].Message.Content - if strings.HasPrefix(req.Model, "o1-") { - content = fmt.Sprintf("AI思考结束,耗时:%d 秒。\n%s", time.Now().Unix()-session.Start, respVo.Choices[0].Message.Content) - } pushMessage(c, "text", content) - respVo.Usage.Prompt = prompt + respVo.Usage.Prompt = input.Prompt respVo.Usage.Content = content - h.saveChatHistory(req, respVo.Usage, respVo.Choices[0].Message, session, role, userVo, promptCreatedAt, time.Now()) + h.saveChatHistory(req, respVo.Usage, respVo.Choices[0].Message, input, userVo, promptCreatedAt, time.Now()) } return nil diff --git a/api/service/crawler/service.go b/api/service/crawler/service.go index 2899a8e2..39fb2fa7 100644 --- a/api/service/crawler/service.go +++ b/api/service/crawler/service.go @@ -24,15 +24,15 @@ func NewService() (*Service, error) { // 启动浏览器 path, _ := 
launcher.LookPath() u := launcher.New().Bin(path). - Headless(true). // 无头模式 - Set("disable-web-security", ""). // 禁用网络安全限制 - Set("disable-gpu", ""). // 禁用 GPU 加速 - Set("no-sandbox", ""). // 禁用沙箱模式 - Set("disable-setuid-sandbox", "").// 禁用 setuid 沙箱 + Headless(true). // 无头模式 + Set("disable-web-security", ""). // 禁用网络安全限制 + Set("disable-gpu", ""). // 禁用 GPU 加速 + Set("no-sandbox", ""). // 禁用沙箱模式 + Set("disable-setuid-sandbox", ""). // 禁用 setuid 沙箱 MustLaunch() browser := rod.New().ControlURL(u).MustConnect() - + return &Service{ browser: browser, }, nil @@ -50,7 +50,7 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error if keyword == "" { return nil, errors.New("搜索关键词不能为空") } - + if maxPages <= 0 { maxPages = 1 } @@ -59,18 +59,18 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error } results := make([]SearchResult, 0) - + // 使用百度搜索 searchURL := fmt.Sprintf("https://www.baidu.com/s?wd=%s", url.QueryEscape(keyword)) - + // 设置页面超时 ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() - + // 创建页面 page := s.browser.MustPage() defer page.MustClose() - + // 设置视口大小 err := page.SetViewport(&proto.EmulationSetDeviceMetricsOverride{ Width: 1280, @@ -79,19 +79,19 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error if err != nil { return nil, fmt.Errorf("设置视口失败: %v", err) } - + // 导航到搜索页面 err = page.Context(ctx).Navigate(searchURL) if err != nil { return nil, fmt.Errorf("导航到搜索页面失败: %v", err) } - + // 等待搜索结果加载完成 err = page.WaitLoad() if err != nil { return nil, fmt.Errorf("等待页面加载完成失败: %v", err) } - + // 分析当前页面的搜索结果 for i := 0; i < maxPages; i++ { if i > 0 { @@ -100,52 +100,52 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error if err != nil || nextPage == nil { break // 没有下一页 } - + err = nextPage.Click(proto.InputMouseButtonLeft, 1) if err != nil { break // 点击下一页失败 } - + // 等待新页面加载 err = page.WaitLoad() if err != nil { 
break } } - + // 提取搜索结果 resultElements, err := page.Elements(".result, .c-container") if err != nil || resultElements == nil { continue } - + for _, result := range resultElements { // 获取标题 titleElement, err := result.Element("h3, .t") if err != nil || titleElement == nil { continue } - + title, err := titleElement.Text() if err != nil { continue } - + // 获取 URL linkElement, err := titleElement.Element("a") if err != nil || linkElement == nil { continue } - + href, err := linkElement.Attribute("href") if err != nil || href == nil { continue } - + // 获取内容摘要 - 尝试多个可能的选择器 var contentElement *rod.Element var content string - + // 尝试多个可能的选择器来适应不同版本的百度搜索结果 selectors := []string{".content-right_8Zs40", ".c-abstract", ".content_LJ0WN", ".content"} for _, selector := range selectors { @@ -157,7 +157,7 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error } } } - + // 如果所有选择器都失败,尝试直接从结果块中提取文本 if content == "" { // 获取结果元素的所有文本 @@ -173,21 +173,21 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error } } } - + // 添加到结果集 results = append(results, SearchResult{ Title: title, URL: *href, Content: content, }) - + // 限制结果数量,每页最多 10 条 if len(results) >= 10*maxPages { break } } } - + // 获取真实 URL(百度搜索结果中的 URL 是短链接,需要跳转获取真实 URL) for i, result := range results { realURL, err := s.getRedirectURL(result.URL) @@ -195,7 +195,7 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error results[i].URL = realURL } } - + return results, nil } @@ -209,22 +209,22 @@ func (s *Service) getRedirectURL(shortURL string) (string, error) { defer func() { _ = page.Close() }() - + // 导航到短链接 err = page.Navigate(shortURL) if err != nil { return shortURL, err // 返回原始URL } - + // 等待重定向完成 time.Sleep(2 * time.Second) - + // 获取当前 URL info, err := page.Info() if err != nil { return shortURL, err // 返回原始URL } - + return info.URL, nil } @@ -247,21 +247,21 @@ func SearchWeb(keyword string, maxPages int) (string, error) { 
log.Errorf("爬虫服务崩溃: %v", r) } }() - + service, err := NewService() if err != nil { return "", fmt.Errorf("创建爬虫服务失败: %v", err) } defer service.Close() - + // 设置超时上下文 ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second) defer cancel() - + // 使用goroutine和通道来处理超时 resultChan := make(chan []SearchResult, 1) errChan := make(chan error, 1) - + go func() { results, err := service.WebSearch(keyword, maxPages) if err != nil { @@ -270,7 +270,7 @@ func SearchWeb(keyword string, maxPages int) (string, error) { } resultChan <- results }() - + // 等待结果或超时 select { case <-ctx.Done(): @@ -281,32 +281,32 @@ func SearchWeb(keyword string, maxPages int) (string, error) { if len(results) == 0 { return "未找到关于 \"" + keyword + "\" 的相关搜索结果", nil } - + // 格式化结果 var builder strings.Builder builder.WriteString(fmt.Sprintf("为您找到关于 \"%s\" 的 %d 条搜索结果:\n\n", keyword, len(results))) - + for i, result := range results { // // 尝试打开链接获取实际内容 // page := service.browser.MustPage() // defer page.MustClose() - + // // 设置页面超时 // pageCtx, pageCancel := context.WithTimeout(context.Background(), 10*time.Second) // defer pageCancel() - + // // 导航到目标页面 // err := page.Context(pageCtx).Navigate(result.URL) // if err == nil { // // 等待页面加载 // _ = page.WaitLoad() - + // // 获取页面标题 // title, err := page.Eval("() => document.title") // if err == nil && title.Value.String() != "" { // result.Title = title.Value.String() // } - + // // 获取页面主要内容 // if content, err := page.Element("body"); err == nil { // if text, err := content.Text(); err == nil { @@ -315,11 +315,11 @@ func SearchWeb(keyword string, maxPages int) (string, error) { // if len(text) > 200 { // text = text[:200] + "..." // } - // result.Content = text + // result.Prompt = text // } // } // } - + builder.WriteString(fmt.Sprintf("%d. 
**%s**\n", i+1, result.Title)) builder.WriteString(fmt.Sprintf(" 链接: %s\n", result.URL)) if result.Content != "" { @@ -327,7 +327,7 @@ func SearchWeb(keyword string, maxPages int) (string, error) { } builder.WriteString("\n") } - + return builder.String(), nil } -} \ No newline at end of file +} diff --git a/api/service/types.go b/api/service/types.go index 9c774cbf..f39bf73c 100644 --- a/api/service/types.go +++ b/api/service/types.go @@ -113,7 +113,7 @@ Please remember, the final output must be the same language with user’s input. - What kinds of examples may need to be included, how many, and whether they are complex enough to benefit from placeholders. - Clarity and Conciseness: Use clear, specific language. Avoid unnecessary instructions or bland statements. - Formatting: Use markdown features for readability. DO NOT USE CODE BLOCKS UNLESS SPECIFICALLY REQUESTED. -- Preserve User Content: If the input task or prompt includes extensive guidelines or examples, preserve them entirely, or as closely as possible. If they are vague, consider breaking down into sub-steps. Keep any details, guidelines, examples, variables, or placeholders provided by the user. +- Preserve User Content: If the input task or prompt includes extensive guidelines or examples, preserve them entirely, or as closely as possible. If they are vague, consider breaking down into sub-steps. Keep any details, guidelines, examples, variables, or placeholders provided by the user. - Constants: DO include constants in the prompt, as they are not susceptible to prompt injection. Such as guides, rubrics, and examples. - Output Format: Explicitly the most appropriate output format, in detail. This should include length and syntax (e.g. short sentence, paragraph, JSON, etc.) - For tasks outputting well-defined or structured data (classification, JSON, etc.) bias toward outputting a JSON. 
diff --git a/api/store/model/chat_history.go b/api/store/model/chat_message.go similarity index 100% rename from api/store/model/chat_history.go rename to api/store/model/chat_message.go diff --git a/api/store/vo/chat_history.go b/api/store/vo/chat_history.go deleted file mode 100644 index 3f534f39..00000000 --- a/api/store/vo/chat_history.go +++ /dev/null @@ -1,14 +0,0 @@ -package vo - -type HistoryMessage struct { - BaseVo - ChatId string `json:"chat_id"` - UserId uint `json:"user_id"` - RoleId uint `json:"role_id"` - Model string `json:"model"` - Type string `json:"type"` - Icon string `json:"icon"` - Tokens int `json:"tokens"` - Content string `json:"content"` - UseContext bool `json:"use_context"` -} diff --git a/api/store/vo/chat_message.go b/api/store/vo/chat_message.go new file mode 100644 index 00000000..30509bf4 --- /dev/null +++ b/api/store/vo/chat_message.go @@ -0,0 +1,19 @@ +package vo + +type MsgContent struct { + Text string `json:"text"` + Files []File `json:"files"` +} + +type ChatMessage struct { + BaseVo + ChatId string `json:"chat_id"` + UserId uint `json:"user_id"` + RoleId uint `json:"role_id"` + Model string `json:"model"` + Type string `json:"type"` + Icon string `json:"icon"` + Tokens int `json:"tokens"` + Content MsgContent `json:"content"` + UseContext bool `json:"use_context"` +} diff --git a/web/src/components/ChatPrompt.vue b/web/src/components/ChatPrompt.vue index 0841f467..65192b61 100644 --- a/web/src/components/ChatPrompt.vue +++ b/web/src/components/ChatPrompt.vue @@ -6,7 +6,7 @@
-
+
@@ -49,7 +49,7 @@
-
+
@@ -90,9 +90,8 @@