Mirror of https://github.com/yangjian102621/geekai.git (synced 2025-11-04 16:23:42 +08:00)
feat: adjust package structure, move chat code into a separate 'chatimpl' package; fix bug: the Baidu API requires the chat context message count to be an even number
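Background on the Baidu fix: the 文心一言 (ERNIE-Bot) chat API expects context messages to arrive in complete user/assistant pairs, so the total count must be even; this commit enforces that by validating the context_deep setting in the admin UI (see the diff below). The following standalone sketch only illustrates the same constraint and is not code from this repository; the Message type and trimToEven helper are hypothetical.

// Illustrative only (not from this repo): drop the oldest message when the
// context length is odd so that user/assistant pairs stay intact for APIs
// that require an even message count, such as Baidu's chat endpoint.
package main

import "fmt"

type Message struct {
	Role    string
	Content string
}

// trimToEven returns the context unchanged when its length is already even,
// otherwise it drops the oldest entry.
func trimToEven(ctx []Message) []Message {
	if len(ctx)%2 != 0 {
		return ctx[1:] // drop the oldest entry
	}
	return ctx
}

func main() {
	ctx := []Message{
		{Role: "assistant", Content: "opening line from the role preset"},
		{Role: "user", Content: "question 1"},
		{Role: "assistant", Content: "answer 1"},
	}
	fmt.Println(len(trimToEven(ctx))) // prints 2
}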
@@ -80,6 +80,7 @@ type ChatConfig struct {
	Azure   ModelAPIConfig `json:"azure"`
	ChatGML ModelAPIConfig `json:"chat_gml"`
	Baidu   ModelAPIConfig `json:"baidu"`
+	XunFei  ModelAPIConfig `json:"xun_fei"`

	EnableContext bool `json:"enable_context"` // 是否开启聊天上下文
	EnableHistory bool `json:"enable_history"` // 是否允许保存聊天记录
@@ -92,6 +93,7 @@ const OpenAI = Platform("OpenAI")
const Azure = Platform("Azure")
const ChatGLM = Platform("ChatGLM")
const Baidu = Platform("Baidu")
+const XunFei = Platform("XunFei")

// UserChatConfig 用户的聊天配置
type UserChatConfig struct {

@@ -1,4 +1,4 @@
-package handler
+package chatimpl

import (
	"bufio"
@@ -16,7 +16,8 @@ import (
	"unicode/utf8"
)

-// 将消息发送给 Azure API 并获取结果,通过 WebSocket 推送到客户端
+// 微软 Azure 模型消息发送实现
+
func (h *ChatHandler) sendAzureMessage(
	chatCtx []interface{},
	req types.ApiRequest,
@@ -1,4 +1,4 @@
-package handler
+package chatimpl

import (
	"bufio"
@@ -33,7 +33,8 @@ type baiduResp struct {
	} `json:"usage"`
}

-// 将消息发送给百度文心一言大模型 API 并获取结果,通过 WebSocket 推送到客户端
+// 百度文心一言消息发送实现
+
func (h *ChatHandler) sendBaiduMessage(
	chatCtx []interface{},
	req types.ApiRequest,
@@ -1,9 +1,11 @@
-package handler
+package chatimpl

import (
	"bytes"
	"chatplus/core"
	"chatplus/core/types"
+	"chatplus/handler"
	logger2 "chatplus/logger"
+	"chatplus/service/mj"
	"chatplus/store"
	"chatplus/store/model"
@@ -26,8 +28,10 @@ import (

const ErrorMsg = "抱歉,AI 助手开小差了,请稍后再试。"

+var logger = logger2.GetLogger()
+
type ChatHandler struct {
-	BaseHandler
+	handler.BaseHandler
	db        *gorm.DB
	leveldb   *store.LevelDB
	redis     *redis.Client
@@ -35,9 +39,9 @@ type ChatHandler struct {
}

func NewChatHandler(app *core.AppServer, db *gorm.DB, levelDB *store.LevelDB, redis *redis.Client, service *mj.Service) *ChatHandler {
-	handler := ChatHandler{db: db, leveldb: levelDB, redis: redis, mjService: service}
-	handler.App = app
-	return &handler
+	h := ChatHandler{db: db, leveldb: levelDB, redis: redis, mjService: service}
+	h.App = app
+	return &h
}

var chatConfig types.ChatConfig
@@ -249,9 +253,10 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
			// loading recent chat history as chat context
			if chatConfig.ContextDeep > 0 {
				var historyMessages []model.HistoryMessage
-				res := h.db.Where("chat_id = ? and use_context = 1", session.ChatId).Limit(chatConfig.ContextDeep).Order("created_at desc").Find(&historyMessages)
+				res := h.db.Debug().Where("chat_id = ? and use_context = 1", session.ChatId).Limit(chatConfig.ContextDeep).Order("id desc").Find(&historyMessages)
				if res.Error == nil {
-					for _, msg := range historyMessages {
+					for i := len(historyMessages) - 1; i >= 0; i-- {
+						msg := historyMessages[i]
						if tokens+msg.Tokens >= types.ModelToTokens[session.Model.Value] {
							break
						}
@@ -1,4 +1,4 @@
-package handler
+package chatimpl

import (
	"chatplus/core/types"
@@ -1,4 +1,4 @@
-package handler
+package chatimpl

import (
	"chatplus/core/types"
@@ -1,4 +1,4 @@
-package handler
+package chatimpl

import (
	"bufio"
@@ -17,7 +17,8 @@ import (
	"unicode/utf8"
)

-// 将消息发送给 ChatGLM API 并获取结果,通过 WebSocket 推送到客户端
+// 清华大学 ChatGML 消息发送实现
+
func (h *ChatHandler) sendChatGLMMessage(
	chatCtx []interface{},
	req types.ApiRequest,
@@ -1,4 +1,4 @@
-package handler
+package chatimpl

import (
	"bufio"
@@ -16,7 +16,7 @@ import (
	"unicode/utf8"
)

-// 将消息发送给 OpenAI API 并获取结果,通过 WebSocket 推送到客户端
+// OPenAI 消息发送实现
func (h *ChatHandler) sendOpenAiMessage(
	chatCtx []interface{},
	req types.ApiRequest,
api/handler/chatimpl/xunfei_handler.go (new file, 257 lines)
@@ -0,0 +1,257 @@
package chatimpl

import (
	"bufio"
	"chatplus/core/types"
	"chatplus/store/model"
	"chatplus/store/vo"
	"chatplus/utils"
	"context"
	"encoding/json"
	"fmt"
	"gorm.io/gorm"
	"io"
	"net/http"
	"strings"
	"time"
	"unicode/utf8"
)

// 科大讯飞消息发送实现

func (h *ChatHandler) sendXunFeiMessage(
	chatCtx []interface{},
	req types.ApiRequest,
	userVo vo.User,
	ctx context.Context,
	session *types.ChatSession,
	role model.ChatRole,
	prompt string,
	ws *types.WsClient) error {
	promptCreatedAt := time.Now() // 记录提问时间
	start := time.Now()
	var apiKey = userVo.ChatConfig.ApiKeys[session.Model.Platform]
	response, err := h.doRequest(ctx, req, session.Model.Platform, &apiKey)
	logger.Info("HTTP请求完成,耗时:", time.Now().Sub(start))
	if err != nil {
		if strings.Contains(err.Error(), "context canceled") {
			logger.Info("用户取消了请求:", prompt)
			return nil
		} else if strings.Contains(err.Error(), "no available key") {
			utils.ReplyMessage(ws, "抱歉😔😔😔,系统已经没有可用的 API KEY,请联系管理员!")
			return nil
		} else {
			logger.Error(err)
		}

		utils.ReplyMessage(ws, ErrorMsg)
		utils.ReplyMessage(ws, "")
		return err
	} else {
		defer response.Body.Close()
	}

	contentType := response.Header.Get("Content-Type")
	if strings.Contains(contentType, "text/event-stream") {
		replyCreatedAt := time.Now() // 记录回复时间
		// 循环读取 Chunk 消息
		var message = types.Message{}
		var contents = make([]string, 0)
		var content string
		scanner := bufio.NewScanner(response.Body)
		for scanner.Scan() {
			line := scanner.Text()
			if len(line) < 5 || strings.HasPrefix(line, "id:") {
				continue
			}

			if strings.HasPrefix(line, "data:") {
				content = line[5:]
			}

			var resp baiduResp
			err := utils.JsonDecode(content, &resp)
			if err != nil {
				logger.Error("error with parse data line: ", err)
				utils.ReplyMessage(ws, fmt.Sprintf("**解析数据行失败:%s**", err))
				break
			}

			if len(contents) == 0 {
				utils.ReplyChunkMessage(ws, types.WsMessage{Type: types.WsStart})
			}
			utils.ReplyChunkMessage(ws, types.WsMessage{
				Type:    types.WsMiddle,
				Content: utils.InterfaceToString(resp.Result),
			})
			contents = append(contents, resp.Result)

			if resp.IsTruncated {
				utils.ReplyMessage(ws, "AI 输出异常中断")
				break
			}

			if resp.IsEnd {
				break
			}

		} // end for

		if err := scanner.Err(); err != nil {
			if strings.Contains(err.Error(), "context canceled") {
				logger.Info("用户取消了请求:", prompt)
			} else {
				logger.Error("信息读取出错:", err)
			}
		}

		// 消息发送成功
		if len(contents) > 0 {
			// 更新用户的对话次数
			if userVo.ChatConfig.ApiKeys[session.Model.Platform] == "" {
				h.db.Model(&model.User{}).Where("id = ?", userVo.Id).UpdateColumn("calls", gorm.Expr("calls - ?", 1))
			}

			if message.Role == "" {
				message.Role = "assistant"
			}
			message.Content = strings.Join(contents, "")
			useMsg := types.Message{Role: "user", Content: prompt}

			// 更新上下文消息,如果是调用函数则不需要更新上下文
			if h.App.ChatConfig.EnableContext {
				chatCtx = append(chatCtx, useMsg)  // 提问消息
				chatCtx = append(chatCtx, message) // 回复消息
				h.App.ChatContexts.Put(session.ChatId, chatCtx)
			}

			// 追加聊天记录
			if h.App.ChatConfig.EnableHistory {
				// for prompt
				promptToken, err := utils.CalcTokens(prompt, req.Model)
				if err != nil {
					logger.Error(err)
				}
				historyUserMsg := model.HistoryMessage{
					UserId:     userVo.Id,
					ChatId:     session.ChatId,
					RoleId:     role.Id,
					Type:       types.PromptMsg,
					Icon:       userVo.Avatar,
					Content:    prompt,
					Tokens:     promptToken,
					UseContext: true,
				}
				historyUserMsg.CreatedAt = promptCreatedAt
				historyUserMsg.UpdatedAt = promptCreatedAt
				res := h.db.Save(&historyUserMsg)
				if res.Error != nil {
					logger.Error("failed to save prompt history message: ", res.Error)
				}

				// for reply
				// 计算本次对话消耗的总 token 数量
				replyToken, _ := utils.CalcTokens(message.Content, req.Model)
				totalTokens := replyToken + getTotalTokens(req)
				historyReplyMsg := model.HistoryMessage{
					UserId:     userVo.Id,
					ChatId:     session.ChatId,
					RoleId:     role.Id,
					Type:       types.ReplyMsg,
					Icon:       role.Icon,
					Content:    message.Content,
					Tokens:     totalTokens,
					UseContext: true,
				}
				historyReplyMsg.CreatedAt = replyCreatedAt
				historyReplyMsg.UpdatedAt = replyCreatedAt
				res = h.db.Create(&historyReplyMsg)
				if res.Error != nil {
					logger.Error("failed to save reply history message: ", res.Error)
				}
				// 更新用户信息
				h.db.Model(&model.User{}).Where("id = ?", userVo.Id).
					UpdateColumn("total_tokens", gorm.Expr("total_tokens + ?", totalTokens))
			}

			// 保存当前会话
			var chatItem model.ChatItem
			res := h.db.Where("chat_id = ?", session.ChatId).First(&chatItem)
			if res.Error != nil {
				chatItem.ChatId = session.ChatId
				chatItem.UserId = session.UserId
				chatItem.RoleId = role.Id
				chatItem.ModelId = session.Model.Id
				if utf8.RuneCountInString(prompt) > 30 {
					chatItem.Title = string([]rune(prompt)[:30]) + "..."
				} else {
					chatItem.Title = prompt
				}
				h.db.Create(&chatItem)
			}
		}
	} else {
		body, err := io.ReadAll(response.Body)
		if err != nil {
			return fmt.Errorf("error with reading response: %v", err)
		}

		var res struct {
			Code int    `json:"error_code"`
			Msg  string `json:"error_msg"`
		}
		err = json.Unmarshal(body, &res)
		if err != nil {
			return fmt.Errorf("error with decode response: %v", err)
		}
		utils.ReplyMessage(ws, "请求百度文心大模型 API 失败:"+res.Msg)
	}

	return nil
}

func (h *ChatHandler) getXunFeiToken(apiKey string) (string, error) {
	ctx := context.Background()
	tokenString, err := h.redis.Get(ctx, apiKey).Result()
	if err == nil {
		return tokenString, nil
	}

	expr := time.Hour * 24 * 20 // access_token 有效期
	key := strings.Split(apiKey, "|")
	if len(key) != 2 {
		return "", fmt.Errorf("invalid api key: %s", apiKey)
	}
	url := fmt.Sprintf("https://aip.baidubce.com/oauth/2.0/token?client_id=%s&client_secret=%s&grant_type=client_credentials", key[0], key[1])
	client := &http.Client{}
	req, err := http.NewRequest("POST", url, nil)
	if err != nil {
		return "", err
	}
	req.Header.Add("Content-Type", "application/json")
	req.Header.Add("Accept", "application/json")

	res, err := client.Do(req)
	if err != nil {
		return "", fmt.Errorf("error with send request: %w", err)
	}
	defer res.Body.Close()

	body, err := io.ReadAll(res.Body)
	if err != nil {
		return "", fmt.Errorf("error with read response: %w", err)
	}
	var r map[string]interface{}
	err = json.Unmarshal(body, &r)
	if err != nil {
		return "", fmt.Errorf("error with parse response: %w", err)
	}

	if r["error"] != nil {
		return "", fmt.Errorf("error with api response: %s", r["error_description"])
	}

	tokenString = fmt.Sprintf("%s", r["access_token"])
	h.redis.Set(ctx, apiKey, tokenString, expr)
	return tokenString, nil
}

@@ -5,6 +5,7 @@ import (
	"chatplus/core/types"
	"chatplus/handler"
	"chatplus/handler/admin"
+	"chatplus/handler/chatimpl"
	logger2 "chatplus/logger"
	"chatplus/service"
	"chatplus/service/fun"
@@ -115,7 +116,7 @@ func main() {
		// 创建控制器
		fx.Provide(handler.NewChatRoleHandler),
		fx.Provide(handler.NewUserHandler),
-		fx.Provide(handler.NewChatHandler),
+		fx.Provide(chatimpl.NewChatHandler),
		fx.Provide(handler.NewUploadHandler),
		fx.Provide(handler.NewSmsHandler),
		fx.Provide(handler.NewRewardHandler),
@@ -196,7 +197,7 @@ func main() {
			group.POST("password", h.Password)
			group.POST("bind/mobile", h.BindMobile)
		}),
-		fx.Invoke(func(s *core.AppServer, h *handler.ChatHandler) {
+		fx.Invoke(func(s *core.AppServer, h *chatimpl.ChatHandler) {
			group := s.Engine.Group("/api/chat/")
			group.Any("new", h.ChatHandle)
			group.GET("list", h.List)

@@ -2,115 +2,137 @@ html,
body,
#app,
.wrapper {
-  width: 100%;
-  height: 100%;
-  overflow: hidden;
+    width: 100%;
+    height: 100%;
+    overflow: hidden;
}

body {
-  font-family: Helvetica Neue, Helvetica, PingFang SC, Hiragino Sans GB, Microsoft YaHei, Arial, sans-serif;
-  -webkit-font-smoothing: antialiased;
-  text-rendering: optimizeLegibility;
+    font-family: Helvetica Neue, Helvetica, PingFang SC, Hiragino Sans GB, Microsoft YaHei, Arial, sans-serif;
+    -webkit-font-smoothing: antialiased;
+    text-rendering: optimizeLegibility;
}

.admin-home a {
-  text-decoration: none;
+    text-decoration: none;
}

.admin-home .content-box {
-  position: absolute;
-  left: 250px;
-  right: 0;
-  top: 0;
-  bottom: 0;
-  padding-bottom: 30px;
-  -webkit-transition: left 0.3s ease-in-out;
-  transition: left 0.3s ease-in-out;
-  background: #f0f0f0;
+    position: absolute;
+    left: 250px;
+    right: 0;
+    top: 0;
+    bottom: 0;
+    /*padding-bottom: 30px;*/
+    -webkit-transition: left 0.3s ease-in-out;
+    transition: left 0.3s ease-in-out;
+    background: #f0f0f0;
+    overflow-y: scroll;
}

.admin-home .content-box .content {
-  width: auto;
-  height: 100%;
-  padding: 10px;
-  overflow-y: scroll;
-  box-sizing: border-box;
-/*BaseForm*/
+    width: auto;
+    padding: 10px;
+    box-sizing: border-box;
+    /*BaseForm*/
}

.admin-home .content-box .content .container {
-  padding: 30px;
-  background: #fff;
-  border: 1px solid #ddd;
-  border-radius: 5px;
+    padding: 30px;
+    background: #fff;
+    border: 1px solid #ddd;
+    border-radius: 5px;
}

.admin-home .content-box .content .container .handle-box {
-  margin-bottom: 20px;
+    margin-bottom: 20px;
}

.admin-home .content-box .content .crumbs {
-  margin: 10px 0;
+    margin: 10px 0;
}

.admin-home .content-box .content .el-table th {
-  background-color: #f5f7fa !important;
+    background-color: #f5f7fa !important;
}

.admin-home .content-box .content .pagination {
-  margin: 20px 0;
-  display: flex;
-  justify-content: center;
-  width: 100%;
+    margin: 20px 0;
+    display: flex;
+    justify-content: center;
+    width: 100%;
}

.admin-home .content-box .content .plugins-tips {
-  padding: 20px 10px;
-  margin-bottom: 20px;
+    padding: 20px 10px;
+    margin-bottom: 20px;
}

.admin-home .content-box .content .el-button + .el-tooltip {
-  margin-left: 10px;
+    margin-left: 10px;
}

.admin-home .content-box .content .el-table tr:hover {
-  background: #f6faff;
+    background: #f6faff;
}

.admin-home .content-box .content .mgb20 {
-  margin-bottom: 20px;
+    margin-bottom: 20px;
}

.admin-home .content-box .content .move-enter-active,
.admin-home .content-box .content .move-leave-active {
-  transition: opacity 0.1s ease;
+    transition: opacity 0.1s ease;
}

.admin-home .content-box .content .move-enter-from,
.admin-home .content-box .content .move-leave-to {
-  opacity: 0;
+    opacity: 0;
}

.admin-home .content-box .content .form-box {
-  width: 600px;
+    width: 600px;
}

.admin-home .content-box .content .form-box .line {
-  text-align: center;
+    text-align: center;
}

.admin-home .content-box .content .el-time-panel__content::after,
.admin-home .content-box .content .el-time-panel__content::before {
-  margin-top: -7px;
+    margin-top: -7px;
}

.admin-home .content-box .content .el-time-spinner__wrapper .el-scrollbar__wrap:not(.el-scrollbar__wrap--hidden-default) {
-  padding-bottom: 0;
+    padding-bottom: 0;
}

.admin-home .content-box .content [class*=" el-icon-"],
.admin-home .content-box .content [class^=el-icon-] {
-  speak: none;
-  font-style: normal;
-  font-weight: 400;
-  font-variant: normal;
-  text-transform: none;
-  line-height: 1;
-  vertical-align: baseline;
-  display: inline-block;
-  -webkit-font-smoothing: antialiased;
-  -moz-osx-font-smoothing: grayscale;
+    speak: none;
+    font-style: normal;
+    font-weight: 400;
+    font-variant: normal;
+    text-transform: none;
+    line-height: 1;
+    vertical-align: baseline;
+    display: inline-block;
+    -webkit-font-smoothing: antialiased;
+    -moz-osx-font-smoothing: grayscale;
}

.admin-home .content-box .content .el-sub-menu [class^=el-icon-] {
-  vertical-align: middle;
-  margin-right: 5px;
-  width: 24px;
-  text-align: center;
-  font-size: 18px;
+    vertical-align: middle;
+    margin-right: 5px;
+    width: 24px;
+    text-align: center;
+    font-size: 18px;
}

.admin-home .content-box .content [hidden] {
-  display: none !important;
+    display: none !important;
}

.admin-home .content-collapse {
-  left: 65px;
+    left: 65px;
}

@@ -40,11 +40,14 @@
        v-model="showDialog"
        :title="title"
    >
-      <el-alert title="注意:如果是百度文心一言平台,需要用竖线(|)将 API Key 和 Secret Key 串接起来填入!"
-                type="warning"
-                :closable="false"
-                show-icon
-                style="margin-bottom: 10px; font-size:14px;"/>
+      <el-alert
+          type="warning"
+          :closable="false"
+          show-icon
+          style="margin-bottom: 10px; font-size:14px;">
+        <p><b>注意:</b>如果是百度文心一言平台,需要用竖线(|)将 API Key 和 Secret Key 串接起来填入!</p>
+        <p><b>注意:</b>如果是讯飞星火大模型,需要用竖线(|)将 APPID, APIKey 和 APISecret 按照顺序串接起来填入!</p>
+      </el-alert>
      <el-form :model="item" label-width="120px" ref="formRef" :rules="rules">
        <el-form-item label="所属平台:" prop="platform">
          <el-select v-model="item.platform" placeholder="请选择平台">
@@ -87,10 +90,11 @@ const loading = ref(true)
const formRef = ref(null)
const title = ref("")
const platforms = ref([
-  {name: "【OpenAI】ChatGPT", value: "OpenAI"},
+  {name: "【讯飞】星火大模型", value: "XunFei"},
  {name: "【清华智普】ChatGLM", value: "ChatGLM"},
  {name: "【百度】文心一言", value: "Baidu"},
  {name: "【微软】Azure", value: "Azure"},
+  {name: "【OpenAI】ChatGPT", value: "OpenAI"},
])

// 获取数据

@@ -95,10 +95,11 @@ const rules = reactive({
const loading = ref(true)
const formRef = ref(null)
const platforms = ref([
-  {name: "【OpenAI】ChatGPT", value: "OpenAI"},
+  {name: "【讯飞】星火大模型", value: "XunFei"},
  {name: "【清华智普】ChatGLM", value: "ChatGLM"},
  {name: "【百度】文心一言", value: "Baidu"},
  {name: "【微软】Azure", value: "Azure"},
+  {name: "【OpenAI】ChatGPT", value: "OpenAI"},
])

// 获取数据

@@ -91,7 +91,7 @@
          <el-input-number v-model="chat['context_deep']" :min="0" :max="10"/>
          <div class="tip" style="margin-top: 10px;">会话上下文深度:在老会话中继续会话,默认加载多少条聊天记录作为上下文。如果设置为
            0
-            则不加载聊天记录,仅仅使用当前角色的上下文。该配置参数最好设置为 2 的整数倍。
+            则不加载聊天记录,仅仅使用当前角色的上下文。该配置参数最好设置需要为偶数,否则将无法兼容百度的 API。
          </div>
        </el-form-item>

@@ -143,6 +143,18 @@
          <el-input v-model.number="chat['baidu']['max_tokens']" placeholder="回复的最大字数,最大4096"/>
        </el-form-item>

+        <el-divider content-position="center">讯飞星火</el-divider>
+        <el-form-item label="API 地址" prop="xun_fei.api_url">
+          <el-input v-model="chat['xun_fei']['api_url']" placeholder="支持变量,{model} => 模型名称"/>
+        </el-form-item>
+        <el-form-item label="模型创意度">
+          <el-slider v-model="chat['xun_fei']['temperature']" :max="1" :step="0.1"/>
+          <div class="tip">值越大 AI 回答越发散,值越小回答越保守,建议保持默认值</div>
+        </el-form-item>
+        <el-form-item label="最大响应长度">
+          <el-input v-model.number="chat['xun_fei']['max_tokens']" placeholder="回复的最大字数,最大4096"/>
+        </el-form-item>
+
        <el-form-item style="text-align: right">
          <el-button type="primary" @click="save('chat')">保存</el-button>
        </el-form-item>
@@ -164,6 +176,7 @@ const chat = ref({
  azure: {api_url: "", temperature: 1, max_tokens: 1024},
  chat_gml: {api_url: "", temperature: 0.95, max_tokens: 1024},
  baidu: {api_url: "", temperature: 0.95, max_tokens: 1024},
+  xun_fei: {api_url: "", temperature: 0.5, max_tokens: 1024},
  context_deep: 0,
  enable_context: true,
  enable_history: true,
@@ -195,6 +208,9 @@ onMounted(() => {
    if (res.data.baidu) {
      chat.value.baidu = res.data.baidu
    }
+    if (res.data.xun_fei) {
+      chat.value.xun_fei = res.data.xun_fei
+    }
    chat.value.context_deep = res.data.context_deep
    chat.value.enable_context = res.data.enable_context
    chat.value.enable_history = res.data.enable_history
@@ -210,9 +226,6 @@ const rules = reactive({
  admin_title: [{required: true, message: '请输入控制台标题', trigger: 'blur',}],
  user_init_calls: [{required: true, message: '请输入赠送对话次数', trigger: 'blur'}],
  user_img_calls: [{required: true, message: '请输入赠送绘图次数', trigger: 'blur'}],
-  open_ai: {api_url: [{required: true, message: '请输入 API URL', trigger: 'blur'}]},
-  azure: {api_url: [{required: true, message: '请输入 API URL', trigger: 'blur'}]},
-  chat_gml: {api_url: [{required: true, message: '请输入 API URL', trigger: 'blur'}]},
})
const save = function (key) {
  if (key === 'system') {
@@ -226,6 +239,9 @@ const save = function (key) {
      }
    })
  } else if (key === 'chat') {
+    if (chat.value.context_deep % 2 !== 0) {
+      return ElMessage.error("会话上下文深度必须为偶数!")
+    }
    chatFormRef.value.validate((valid) => {
      if (valid) {
        httpPost('/api/admin/config/update', {key: key, config: chat.value}).then(() => {