extract code for saving chat history

This commit is contained in:
RockYang 2024-05-22 15:32:44 +08:00
parent 627396dbf7
commit 962de0183c
18 changed files with 261 additions and 717 deletions

View File

@ -61,15 +61,15 @@ type ChatSession struct {
} }
type ChatModel struct { type ChatModel struct {
Id uint `json:"id"` Id uint `json:"id"`
Platform Platform `json:"platform"` Platform string `json:"platform"`
Name string `json:"name"` Name string `json:"name"`
Value string `json:"value"` Value string `json:"value"`
Power int `json:"power"` Power int `json:"power"`
MaxTokens int `json:"max_tokens"` // 最大响应长度 MaxTokens int `json:"max_tokens"` // 最大响应长度
MaxContext int `json:"max_context"` // 最大上下文长度 MaxContext int `json:"max_context"` // 最大上下文长度
Temperature float32 `json:"temperature"` // 模型温度 Temperature float32 `json:"temperature"` // 模型温度
KeyId int `json:"key_id"` // 绑定 API KEY KeyId int `json:"key_id"` // 绑定 API KEY
} }
type ApiError struct { type ApiError struct {

View File

@ -137,14 +137,44 @@ func (c RedisConfig) Url() string {
return fmt.Sprintf("%s:%d", c.Host, c.Port) return fmt.Sprintf("%s:%d", c.Host, c.Port)
} }
type Platform string type Platform struct {
Name string `json:"name"`
Value string `json:"value"`
ChatURL string `json:"chat_url"`
ImgURL string `json:"img_url"`
}
const OpenAI = Platform("OpenAI") var OpenAI = Platform{
const Azure = Platform("Azure") Name: "OpenAI - GPT",
const ChatGLM = Platform("ChatGLM") Value: "OpenAI",
const Baidu = Platform("Baidu") ChatURL: "https://api.chat-plus.net/v1/chat/completions",
const XunFei = Platform("XunFei") ImgURL: "https://api.chat-plus.net/v1/images/generations",
const QWen = Platform("QWen") }
// Azure is the built-in platform preset for Microsoft Azure OpenAI.
// The {model} placeholder in ChatURL is substituted with the deployment
// name when the request URL is built.
var Azure = Platform{
Name: "微软 - Azure",
Value: "Azure",
ChatURL: "https://chat-bot-api.openai.azure.com/openai/deployments/{model}/chat/completions?api-version=2023-05-15",
}
// ChatGLM is the built-in platform preset for Zhipu AI's ChatGLM
// SSE invoke endpoint; {model} is replaced with the model name.
var ChatGLM = Platform{
Name: "智谱 - ChatGLM",
Value: "ChatGLM",
ChatURL: "https://open.bigmodel.cn/api/paas/v3/model-api/{model}/sse-invoke",
}
// Baidu is the built-in platform preset for Baidu's ERNIE (Wenxin)
// chat API; {model} is replaced with the model name.
var Baidu = Platform{
Name: "百度 - 文心大模型",
Value: "Baidu",
ChatURL: "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/{model}",
}
// XunFei is the built-in platform preset for iFlytek's Spark model.
// Note the endpoint is a websocket URL; {version} selects the API version.
var XunFei = Platform{
Name: "讯飞 - 星火大模型",
Value: "XunFei",
ChatURL: "wss://spark-api.xf-yun.com/{version}/chat",
}
// QWen is the built-in platform preset for Alibaba's Tongyi Qianwen
// (DashScope) text-generation endpoint.
var QWen = Platform{
Name: "阿里 - 通义千问",
Value: "QWen",
ChatURL: "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation",
}
type SystemConfig struct { type SystemConfig struct {
Title string `json:"title,omitempty"` Title string `json:"title,omitempty"`

View File

@ -28,8 +28,8 @@ type ConfigHandler struct {
handler.BaseHandler handler.BaseHandler
levelDB *store.LevelDB levelDB *store.LevelDB
licenseService *service.LicenseService licenseService *service.LicenseService
mjServicePool *mj.ServicePool mjServicePool *mj.ServicePool
sdServicePool *sd.ServicePool sdServicePool *sd.ServicePool
} }
func NewConfigHandler(app *core.AppServer, db *gorm.DB, levelDB *store.LevelDB, licenseService *service.LicenseService, mjPool *mj.ServicePool, sdPool *sd.ServicePool) *ConfigHandler { func NewConfigHandler(app *core.AppServer, db *gorm.DB, levelDB *store.LevelDB, licenseService *service.LicenseService, mjPool *mj.ServicePool, sdPool *sd.ServicePool) *ConfigHandler {
@ -140,12 +140,13 @@ func (h *ConfigHandler) GetLicense(c *gin.Context) {
resp.SUCCESS(c, license) resp.SUCCESS(c, license)
} }
// GetDrawingConfig 获取AI绘画配置 // GetAppConfig 获取内置配置
func (h *ConfigHandler) GetDrawingConfig(c *gin.Context) { func (h *ConfigHandler) GetAppConfig(c *gin.Context) {
resp.SUCCESS(c, gin.H{ resp.SUCCESS(c, gin.H{
"mj_plus": h.App.Config.MjPlusConfigs, "mj_plus": h.App.Config.MjPlusConfigs,
"mj_proxy": h.App.Config.MjProxyConfigs, "mj_proxy": h.App.Config.MjProxyConfigs,
"sd": h.App.Config.SdConfigs, "sd": h.App.Config.SdConfigs,
"platforms": Platforms,
}) })
} }

View File

@ -0,0 +1,12 @@
package admin
import "geekai/core/types"
// Platforms is the fixed list of built-in AI platform presets exposed
// to the admin UI (returned by the app-config endpoint).
var Platforms = []types.Platform{
types.OpenAI,
types.QWen,
types.XunFei,
types.ChatGLM,
types.Baidu,
types.Azure,
}

View File

@ -17,11 +17,9 @@ import (
"geekai/store/model" "geekai/store/model"
"geekai/store/vo" "geekai/store/vo"
"geekai/utils" "geekai/utils"
"html/template"
"io" "io"
"strings" "strings"
"time" "time"
"unicode/utf8"
) )
// 微软 Azure 模型消息发送实现 // 微软 Azure 模型消息发送实现
@ -101,104 +99,12 @@ func (h *ChatHandler) sendAzureMessage(
// 消息发送成功 // 消息发送成功
if len(contents) > 0 { if len(contents) > 0 {
h.saveChatHistory(req, prompt, contents, message, chatCtx, session, role, userVo, promptCreatedAt, replyCreatedAt)
if message.Role == "" {
message.Role = "assistant"
}
message.Content = strings.Join(contents, "")
useMsg := types.Message{Role: "user", Content: prompt}
// 更新上下文消息,如果是调用函数则不需要更新上下文
if h.App.SysConfig.EnableContext {
chatCtx = append(chatCtx, useMsg) // 提问消息
chatCtx = append(chatCtx, message) // 回复消息
h.App.ChatContexts.Put(session.ChatId, chatCtx)
}
// 追加聊天记录
// for prompt
promptToken, err := utils.CalcTokens(prompt, req.Model)
if err != nil {
logger.Error(err)
}
historyUserMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.PromptMsg,
Icon: userVo.Avatar,
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
res := h.DB.Save(&historyUserMsg)
if res.Error != nil {
logger.Error("failed to save prompt history message: ", res.Error)
}
// 计算本次对话消耗的总 token 数量
replyTokens, _ := utils.CalcTokens(message.Content, req.Model)
replyTokens += getTotalTokens(req)
historyReplyMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.ReplyMsg,
Icon: role.Icon,
Content: message.Content,
Tokens: replyTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
res = h.DB.Create(&historyReplyMsg)
if res.Error != nil {
logger.Error("failed to save reply history message: ", res.Error)
}
// 更新用户算力
h.subUserPower(userVo, session, promptToken, replyTokens)
// 保存当前会话
var chatItem model.ChatItem
res = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem)
if res.Error != nil {
chatItem.ChatId = session.ChatId
chatItem.UserId = session.UserId
chatItem.RoleId = role.Id
chatItem.ModelId = session.Model.Id
if utf8.RuneCountInString(prompt) > 30 {
chatItem.Title = string([]rune(prompt)[:30]) + "..."
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.DB.Create(&chatItem)
}
} }
} else { } else {
body, err := io.ReadAll(response.Body) body, _ := io.ReadAll(response.Body)
if err != nil { return fmt.Errorf("请求大模型 API 失败:%s", body)
return fmt.Errorf("error with reading response: %v", err)
}
var res types.ApiError
err = json.Unmarshal(body, &res)
if err != nil {
return fmt.Errorf("error with decode response: %v", err)
}
if strings.Contains(res.Error.Message, "maximum context length") {
logger.Error(res.Error.Message)
h.App.ChatContexts.Delete(session.ChatId)
return h.sendMessage(ctx, session, role, prompt, ws)
} else {
return fmt.Errorf("请求 Azure API 失败:%v", res.Error)
}
} }
return nil return nil

View File

@ -17,12 +17,10 @@ import (
"geekai/store/model" "geekai/store/model"
"geekai/store/vo" "geekai/store/vo"
"geekai/utils" "geekai/utils"
"html/template"
"io" "io"
"net/http" "net/http"
"strings" "strings"
"time" "time"
"unicode/utf8"
) )
type baiduResp struct { type baiduResp struct {
@ -130,99 +128,11 @@ func (h *ChatHandler) sendBaiduMessage(
// 消息发送成功 // 消息发送成功
if len(contents) > 0 { if len(contents) > 0 {
if message.Role == "" { h.saveChatHistory(req, prompt, contents, message, chatCtx, session, role, userVo, promptCreatedAt, replyCreatedAt)
message.Role = "assistant"
}
message.Content = strings.Join(contents, "")
useMsg := types.Message{Role: "user", Content: prompt}
// 更新上下文消息,如果是调用函数则不需要更新上下文
if h.App.SysConfig.EnableContext {
chatCtx = append(chatCtx, useMsg) // 提问消息
chatCtx = append(chatCtx, message) // 回复消息
h.App.ChatContexts.Put(session.ChatId, chatCtx)
}
// 追加聊天记录
// for prompt
promptToken, err := utils.CalcTokens(prompt, req.Model)
if err != nil {
logger.Error(err)
}
historyUserMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.PromptMsg,
Icon: userVo.Avatar,
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
res := h.DB.Save(&historyUserMsg)
if res.Error != nil {
logger.Error("failed to save prompt history message: ", res.Error)
}
// for reply
// 计算本次对话消耗的总 token 数量
replyTokens, _ := utils.CalcTokens(message.Content, req.Model)
totalTokens := replyTokens + getTotalTokens(req)
historyReplyMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.ReplyMsg,
Icon: role.Icon,
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
res = h.DB.Create(&historyReplyMsg)
if res.Error != nil {
logger.Error("failed to save reply history message: ", res.Error)
}
// 更新用户算力
h.subUserPower(userVo, session, promptToken, replyTokens)
// 保存当前会话
var chatItem model.ChatItem
res = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem)
if res.Error != nil {
chatItem.ChatId = session.ChatId
chatItem.UserId = session.UserId
chatItem.RoleId = role.Id
chatItem.ModelId = session.Model.Id
if utf8.RuneCountInString(prompt) > 30 {
chatItem.Title = string([]rune(prompt)[:30]) + "..."
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.DB.Create(&chatItem)
}
} }
} else { } else {
body, err := io.ReadAll(response.Body) body, _ := io.ReadAll(response.Body)
if err != nil { return fmt.Errorf("请求大模型 API 失败:%s", body)
return fmt.Errorf("error with reading response: %v", err)
}
var res struct {
Code int `json:"error_code"`
Msg string `json:"error_msg"`
}
err = json.Unmarshal(body, &res)
if err != nil {
return fmt.Errorf("error with decode response: %v", err)
}
utils.ReplyMessage(ws, "请求百度文心大模型 API 失败:"+res.Msg)
} }
return nil return nil

View File

@ -23,11 +23,13 @@ import (
"geekai/store/vo" "geekai/store/vo"
"geekai/utils" "geekai/utils"
"geekai/utils/resp" "geekai/utils/resp"
"html/template"
"net/http" "net/http"
"net/url" "net/url"
"regexp" "regexp"
"strings" "strings"
"time" "time"
"unicode/utf8"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/go-redis/redis/v8" "github.com/go-redis/redis/v8"
@ -122,7 +124,7 @@ func (h *ChatHandler) ChatHandle(c *gin.Context) {
MaxContext: chatModel.MaxContext, MaxContext: chatModel.MaxContext,
Temperature: chatModel.Temperature, Temperature: chatModel.Temperature,
KeyId: chatModel.KeyId, KeyId: chatModel.KeyId,
Platform: types.Platform(chatModel.Platform)} Platform: chatModel.Platform}
logger.Infof("New websocket connected, IP: %s, Username: %s", c.ClientIP(), session.Username) logger.Infof("New websocket connected, IP: %s, Username: %s", c.ClientIP(), session.Username)
// 保存会话连接 // 保存会话连接
@ -218,11 +220,11 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
Stream: true, Stream: true,
} }
switch session.Model.Platform { switch session.Model.Platform {
case types.Azure, types.ChatGLM, types.Baidu, types.XunFei: case types.Azure.Value, types.ChatGLM.Value, types.Baidu.Value, types.XunFei.Value:
req.Temperature = session.Model.Temperature req.Temperature = session.Model.Temperature
req.MaxTokens = session.Model.MaxTokens req.MaxTokens = session.Model.MaxTokens
break break
case types.OpenAI: case types.OpenAI.Value:
req.Temperature = session.Model.Temperature req.Temperature = session.Model.Temperature
req.MaxTokens = session.Model.MaxTokens req.MaxTokens = session.Model.MaxTokens
// OpenAI 支持函数功能 // OpenAI 支持函数功能
@ -261,7 +263,7 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
req.Tools = tools req.Tools = tools
req.ToolChoice = "auto" req.ToolChoice = "auto"
} }
case types.QWen: case types.QWen.Value:
req.Parameters = map[string]interface{}{ req.Parameters = map[string]interface{}{
"max_tokens": session.Model.MaxTokens, "max_tokens": session.Model.MaxTokens,
"temperature": session.Model.Temperature, "temperature": session.Model.Temperature,
@ -325,14 +327,14 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
reqMgs = append(reqMgs, m) reqMgs = append(reqMgs, m)
} }
if session.Model.Platform == types.QWen { if session.Model.Platform == types.QWen.Value {
req.Input = make(map[string]interface{}) req.Input = make(map[string]interface{})
reqMgs = append(reqMgs, types.Message{ reqMgs = append(reqMgs, types.Message{
Role: "user", Role: "user",
Content: prompt, Content: prompt,
}) })
req.Input["messages"] = reqMgs req.Input["messages"] = reqMgs
} else if session.Model.Platform == types.OpenAI { // extract image for gpt-vision model } else if session.Model.Platform == types.OpenAI.Value { // extract image for gpt-vision model
imgURLs := utils.ExtractImgURL(prompt) imgURLs := utils.ExtractImgURL(prompt)
logger.Debugf("detected IMG: %+v", imgURLs) logger.Debugf("detected IMG: %+v", imgURLs)
var content interface{} var content interface{}
@ -370,17 +372,17 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
logger.Debugf("%+v", req.Messages) logger.Debugf("%+v", req.Messages)
switch session.Model.Platform { switch session.Model.Platform {
case types.Azure: case types.Azure.Value:
return h.sendAzureMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws) return h.sendAzureMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws)
case types.OpenAI: case types.OpenAI.Value:
return h.sendOpenAiMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws) return h.sendOpenAiMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws)
case types.ChatGLM: case types.ChatGLM.Value:
return h.sendChatGLMMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws) return h.sendChatGLMMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws)
case types.Baidu: case types.Baidu.Value:
return h.sendBaiduMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws) return h.sendBaiduMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws)
case types.XunFei: case types.XunFei.Value:
return h.sendXunFeiMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws) return h.sendXunFeiMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws)
case types.QWen: case types.QWen.Value:
return h.sendQWenMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws) return h.sendQWenMessage(chatCtx, req, userVo, ctx, session, role, prompt, ws)
} }
@ -467,7 +469,7 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
} }
// ONLY allow apiURL in blank list // ONLY allow apiURL in blank list
if session.Model.Platform == types.OpenAI { if session.Model.Platform == types.OpenAI.Value {
err := h.licenseService.IsValidApiURL(apiKey.ApiURL) err := h.licenseService.IsValidApiURL(apiKey.ApiURL)
if err != nil { if err != nil {
return nil, err return nil, err
@ -476,19 +478,19 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
var apiURL string var apiURL string
switch session.Model.Platform { switch session.Model.Platform {
case types.Azure: case types.Azure.Value:
md := strings.Replace(req.Model, ".", "", 1) md := strings.Replace(req.Model, ".", "", 1)
apiURL = strings.Replace(apiKey.ApiURL, "{model}", md, 1) apiURL = strings.Replace(apiKey.ApiURL, "{model}", md, 1)
break break
case types.ChatGLM: case types.ChatGLM.Value:
apiURL = strings.Replace(apiKey.ApiURL, "{model}", req.Model, 1) apiURL = strings.Replace(apiKey.ApiURL, "{model}", req.Model, 1)
req.Prompt = req.Messages // 使用 prompt 字段替代 message 字段 req.Prompt = req.Messages // 使用 prompt 字段替代 message 字段
req.Messages = nil req.Messages = nil
break break
case types.Baidu: case types.Baidu.Value:
apiURL = strings.Replace(apiKey.ApiURL, "{model}", req.Model, 1) apiURL = strings.Replace(apiKey.ApiURL, "{model}", req.Model, 1)
break break
case types.QWen: case types.QWen.Value:
apiURL = apiKey.ApiURL apiURL = apiKey.ApiURL
req.Messages = nil req.Messages = nil
break break
@ -498,7 +500,7 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
// 更新 API KEY 的最后使用时间 // 更新 API KEY 的最后使用时间
h.DB.Model(apiKey).UpdateColumn("last_used_at", time.Now().Unix()) h.DB.Model(apiKey).UpdateColumn("last_used_at", time.Now().Unix())
// 百度文心,需要串接 access_token // 百度文心,需要串接 access_token
if session.Model.Platform == types.Baidu { if session.Model.Platform == types.Baidu.Value {
token, err := h.getBaiduToken(apiKey.Value) token, err := h.getBaiduToken(apiKey.Value)
if err != nil { if err != nil {
return nil, err return nil, err
@ -534,22 +536,22 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
} }
logger.Debugf("Sending %s request, ApiURL:%s, API KEY:%s, PROXY: %s, Model: %s", session.Model.Platform, apiURL, apiKey.Value, apiKey.ProxyURL, req.Model) logger.Debugf("Sending %s request, ApiURL:%s, API KEY:%s, PROXY: %s, Model: %s", session.Model.Platform, apiURL, apiKey.Value, apiKey.ProxyURL, req.Model)
switch session.Model.Platform { switch session.Model.Platform {
case types.Azure: case types.Azure.Value:
request.Header.Set("api-key", apiKey.Value) request.Header.Set("api-key", apiKey.Value)
break break
case types.ChatGLM: case types.ChatGLM.Value:
token, err := h.getChatGLMToken(apiKey.Value) token, err := h.getChatGLMToken(apiKey.Value)
if err != nil { if err != nil {
return nil, err return nil, err
} }
request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token)) request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token))
break break
case types.Baidu: case types.Baidu.Value:
request.RequestURI = "" request.RequestURI = ""
case types.OpenAI: case types.OpenAI.Value:
request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey.Value)) request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey.Value))
break break
case types.QWen: case types.QWen.Value:
request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey.Value)) request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey.Value))
request.Header.Set("X-DashScope-SSE", "enable") request.Header.Set("X-DashScope-SSE", "enable")
break break
@ -583,6 +585,97 @@ func (h *ChatHandler) subUserPower(userVo vo.User, session *types.ChatSession, p
} }
// saveChatHistory persists one completed chat exchange. It:
//  1. finalizes the assistant reply message from the streamed content chunks,
//  2. appends the prompt/reply pair to the in-memory chat context (when enabled),
//  3. stores both the prompt and the reply as ChatMessage records,
//  4. deducts the consumed power (credits) from the user, and
//  5. creates the ChatItem session record on first use of this chat id.
// It is shared by all platform-specific send*Message implementations.
func (h *ChatHandler) saveChatHistory(
req types.ApiRequest,
prompt string,
contents []string,
message types.Message,
chatCtx []types.Message,
session *types.ChatSession,
role model.ChatRole,
userVo vo.User,
promptCreatedAt time.Time,
replyCreatedAt time.Time) {
// Streamed responses may omit the role; default to "assistant".
if message.Role == "" {
message.Role = "assistant"
}
message.Content = strings.Join(contents, "")
useMsg := types.Message{Role: "user", Content: prompt}
// Update the in-memory conversation context; function-call turns
// are expected not to update the context.
if h.App.SysConfig.EnableContext {
chatCtx = append(chatCtx, useMsg) // prompt message
chatCtx = append(chatCtx, message) // reply message
h.App.ChatContexts.Put(session.ChatId, chatCtx)
}
// Append chat history records.
// for prompt
promptToken, err := utils.CalcTokens(prompt, req.Model)
if err != nil {
// Token counting failure is non-fatal; record with whatever was computed.
logger.Error(err)
}
historyUserMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.PromptMsg,
Icon: userVo.Avatar,
// Escape the raw prompt so it is safe to render as HTML later.
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
res := h.DB.Save(&historyUserMsg)
if res.Error != nil {
logger.Error("failed to save prompt history message: ", res.Error)
}
// for reply
// Total tokens for this turn: the reply itself plus the request
// overhead reported by getTotalTokens (context/function tokens).
replyTokens, _ := utils.CalcTokens(message.Content, req.Model)
totalTokens := replyTokens + getTotalTokens(req)
historyReplyMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.ReplyMsg,
Icon: role.Icon,
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
res = h.DB.Create(&historyReplyMsg)
if res.Error != nil {
logger.Error("failed to save reply history message: ", res.Error)
}
// Deduct the user's power (credits) for this exchange.
// NOTE(review): deduction uses replyTokens, while the stored reply
// record uses totalTokens — confirm this asymmetry is intentional.
h.subUserPower(userVo, session, promptToken, replyTokens)
// Save the current chat session item if it does not exist yet
// (a query error here is treated as "record not found").
var chatItem model.ChatItem
res = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem)
if res.Error != nil {
chatItem.ChatId = session.ChatId
chatItem.UserId = session.UserId
chatItem.RoleId = role.Id
chatItem.ModelId = session.Model.Id
// Title is the prompt truncated to 30 runes (not bytes) so
// multi-byte CJK text is not cut mid-character.
if utf8.RuneCountInString(prompt) > 30 {
chatItem.Title = string([]rune(prompt)[:30]) + "..."
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.DB.Create(&chatItem)
}
}
// 将AI回复消息中生成的图片链接下载到本地 // 将AI回复消息中生成的图片链接下载到本地
func (h *ChatHandler) extractImgUrl(text string) string { func (h *ChatHandler) extractImgUrl(text string) string {
pattern := `!\[([^\]]*)]\(([^)]+)\)` pattern := `!\[([^\]]*)]\(([^)]+)\)`

View File

@ -10,7 +10,6 @@ package chatimpl
import ( import (
"bufio" "bufio"
"context" "context"
"encoding/json"
"errors" "errors"
"fmt" "fmt"
"geekai/core/types" "geekai/core/types"
@ -18,11 +17,9 @@ import (
"geekai/store/vo" "geekai/store/vo"
"geekai/utils" "geekai/utils"
"github.com/golang-jwt/jwt/v5" "github.com/golang-jwt/jwt/v5"
"html/template"
"io" "io"
"strings" "strings"
"time" "time"
"unicode/utf8"
) )
// 清华大学 ChatGML 消息发送实现 // 清华大学 ChatGML 消息发送实现
@ -108,103 +105,11 @@ func (h *ChatHandler) sendChatGLMMessage(
// 消息发送成功 // 消息发送成功
if len(contents) > 0 { if len(contents) > 0 {
if message.Role == "" { h.saveChatHistory(req, prompt, contents, message, chatCtx, session, role, userVo, promptCreatedAt, replyCreatedAt)
message.Role = "assistant"
}
message.Content = strings.Join(contents, "")
useMsg := types.Message{Role: "user", Content: prompt}
// 更新上下文消息,如果是调用函数则不需要更新上下文
if h.App.SysConfig.EnableContext {
chatCtx = append(chatCtx, useMsg) // 提问消息
chatCtx = append(chatCtx, message) // 回复消息
h.App.ChatContexts.Put(session.ChatId, chatCtx)
}
// 追加聊天记录
// for prompt
promptToken, err := utils.CalcTokens(prompt, req.Model)
if err != nil {
logger.Error(err)
}
historyUserMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.PromptMsg,
Icon: userVo.Avatar,
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
res := h.DB.Save(&historyUserMsg)
if res.Error != nil {
logger.Error("failed to save prompt history message: ", res.Error)
}
// for reply
// 计算本次对话消耗的总 token 数量
replyTokens, _ := utils.CalcTokens(message.Content, req.Model)
totalTokens := replyTokens + getTotalTokens(req)
historyReplyMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.ReplyMsg,
Icon: role.Icon,
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
res = h.DB.Create(&historyReplyMsg)
if res.Error != nil {
logger.Error("failed to save reply history message: ", res.Error)
}
// 更新用户算力
h.subUserPower(userVo, session, promptToken, replyTokens)
// 保存当前会话
var chatItem model.ChatItem
res = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem)
if res.Error != nil {
chatItem.ChatId = session.ChatId
chatItem.UserId = session.UserId
chatItem.RoleId = role.Id
chatItem.ModelId = session.Model.Id
if utf8.RuneCountInString(prompt) > 30 {
chatItem.Title = string([]rune(prompt)[:30]) + "..."
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.DB.Create(&chatItem)
}
} }
} else { } else {
body, err := io.ReadAll(response.Body) body, _ := io.ReadAll(response.Body)
if err != nil { return fmt.Errorf("请求大模型 API 失败:%s", body)
return fmt.Errorf("error with reading response: %v", err)
}
var res struct {
Code int `json:"code"`
Success bool `json:"success"`
Msg string `json:"msg"`
}
err = json.Unmarshal(body, &res)
if err != nil {
return fmt.Errorf("error with decode response: %v", err)
}
if !res.Success {
utils.ReplyMessage(ws, "请求 ChatGLM 失败:"+res.Msg)
}
} }
return nil return nil

View File

@ -17,13 +17,10 @@ import (
"geekai/store/model" "geekai/store/model"
"geekai/store/vo" "geekai/store/vo"
"geekai/utils" "geekai/utils"
"html/template" req2 "github.com/imroc/req/v3"
"io" "io"
"strings" "strings"
"time" "time"
"unicode/utf8"
req2 "github.com/imroc/req/v3"
) )
// OPenAI 消息发送实现 // OPenAI 消息发送实现
@ -178,126 +175,11 @@ func (h *ChatHandler) sendOpenAiMessage(
// 消息发送成功 // 消息发送成功
if len(contents) > 0 { if len(contents) > 0 {
if message.Role == "" { h.saveChatHistory(req, prompt, contents, message, chatCtx, session, role, userVo, promptCreatedAt, replyCreatedAt)
message.Role = "assistant"
}
message.Content = strings.Join(contents, "")
useMsg := types.Message{Role: "user", Content: prompt}
// 更新上下文消息,如果是调用函数则不需要更新上下文
if h.App.SysConfig.EnableContext && toolCall == false {
chatCtx = append(chatCtx, useMsg) // 提问消息
chatCtx = append(chatCtx, message) // 回复消息
h.App.ChatContexts.Put(session.ChatId, chatCtx)
}
// 追加聊天记录
useContext := true
if toolCall {
useContext = false
}
// for prompt
promptToken, err := utils.CalcTokens(prompt, req.Model)
if err != nil {
logger.Error(err)
}
historyUserMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.PromptMsg,
Icon: userVo.Avatar,
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: useContext,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
res := h.DB.Save(&historyUserMsg)
if res.Error != nil {
logger.Error("failed to save prompt history message: ", res.Error)
}
// 计算本次对话消耗的总 token 数量
var replyTokens = 0
if toolCall { // prompt + 函数名 + 参数 token
tokens, _ := utils.CalcTokens(function.Name, req.Model)
replyTokens += tokens
tokens, _ = utils.CalcTokens(utils.InterfaceToString(arguments), req.Model)
replyTokens += tokens
} else {
replyTokens, _ = utils.CalcTokens(message.Content, req.Model)
}
replyTokens += getTotalTokens(req)
historyReplyMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.ReplyMsg,
Icon: role.Icon,
Content: h.extractImgUrl(message.Content),
Tokens: replyTokens,
UseContext: useContext,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
res = h.DB.Create(&historyReplyMsg)
if res.Error != nil {
logger.Error("failed to save reply history message: ", res.Error)
}
// 更新用户算力
h.subUserPower(userVo, session, promptToken, replyTokens)
// 保存当前会话
var chatItem model.ChatItem
res = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem)
if res.Error != nil {
chatItem.ChatId = session.ChatId
chatItem.UserId = session.UserId
chatItem.RoleId = role.Id
chatItem.ModelId = session.Model.Id
if utf8.RuneCountInString(prompt) > 30 {
chatItem.Title = string([]rune(prompt)[:30]) + "..."
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.DB.Create(&chatItem)
}
} }
} else { } else {
body, err := io.ReadAll(response.Body) body, _ := io.ReadAll(response.Body)
if err != nil { return fmt.Errorf("请求 OpenAI API 失败:%s", body)
utils.ReplyMessage(ws, "请求 OpenAI API 失败:"+err.Error())
return fmt.Errorf("error with reading response: %v", err)
}
var res types.ApiError
err = json.Unmarshal(body, &res)
if err != nil {
utils.ReplyMessage(ws, "请求 OpenAI API 失败:\n"+"```\n"+string(body)+"```")
return fmt.Errorf("error with decode response: %v", err)
}
// OpenAI API 调用异常处理
if strings.Contains(res.Error.Message, "This key is associated with a deactivated account") {
utils.ReplyMessage(ws, "请求 OpenAI API 失败API KEY 所关联的账户被禁用。")
// 移除当前 API key
h.DB.Where("value = ?", apiKey).Delete(&model.ApiKey{})
} else if strings.Contains(res.Error.Message, "You exceeded your current quota") {
utils.ReplyMessage(ws, "请求 OpenAI API 失败API KEY 触发并发限制,请稍后再试。")
} else if strings.Contains(res.Error.Message, "This model's maximum context length") {
logger.Error(res.Error.Message)
utils.ReplyMessage(ws, "当前会话上下文长度超出限制,已为您清空会话上下文!")
h.App.ChatContexts.Delete(session.ChatId)
return h.sendMessage(ctx, session, role, prompt, ws)
} else {
utils.ReplyMessage(ws, "请求 OpenAI API 失败:"+res.Error.Message)
}
} }
return nil return nil

View File

@ -10,18 +10,15 @@ package chatimpl
import ( import (
"bufio" "bufio"
"context" "context"
"encoding/json"
"fmt" "fmt"
"geekai/core/types" "geekai/core/types"
"geekai/store/model" "geekai/store/model"
"geekai/store/vo" "geekai/store/vo"
"geekai/utils" "geekai/utils"
"github.com/syndtr/goleveldb/leveldb/errors" "github.com/syndtr/goleveldb/leveldb/errors"
"html/template"
"io" "io"
"strings" "strings"
"time" "time"
"unicode/utf8"
) )
type qWenResp struct { type qWenResp struct {
@ -142,100 +139,11 @@ func (h *ChatHandler) sendQWenMessage(
// 消息发送成功 // 消息发送成功
if len(contents) > 0 { if len(contents) > 0 {
if message.Role == "" { h.saveChatHistory(req, prompt, contents, message, chatCtx, session, role, userVo, promptCreatedAt, replyCreatedAt)
message.Role = "assistant"
}
message.Content = strings.Join(contents, "")
useMsg := types.Message{Role: "user", Content: prompt}
// 更新上下文消息,如果是调用函数则不需要更新上下文
if h.App.SysConfig.EnableContext {
chatCtx = append(chatCtx, useMsg) // 提问消息
chatCtx = append(chatCtx, message) // 回复消息
h.App.ChatContexts.Put(session.ChatId, chatCtx)
}
// 追加聊天记录
// for prompt
promptToken, err := utils.CalcTokens(prompt, req.Model)
if err != nil {
logger.Error(err)
}
historyUserMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.PromptMsg,
Icon: userVo.Avatar,
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
res := h.DB.Save(&historyUserMsg)
if res.Error != nil {
logger.Error("failed to save prompt history message: ", res.Error)
}
// for reply
// 计算本次对话消耗的总 token 数量
replyTokens, _ := utils.CalcTokens(message.Content, req.Model)
totalTokens := replyTokens + getTotalTokens(req)
historyReplyMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.ReplyMsg,
Icon: role.Icon,
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
res = h.DB.Create(&historyReplyMsg)
if res.Error != nil {
logger.Error("failed to save reply history message: ", res.Error)
}
// 更新用户算力
h.subUserPower(userVo, session, promptToken, replyTokens)
// 保存当前会话
var chatItem model.ChatItem
res = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem)
if res.Error != nil {
chatItem.ChatId = session.ChatId
chatItem.UserId = session.UserId
chatItem.RoleId = role.Id
chatItem.ModelId = session.Model.Id
if utf8.RuneCountInString(prompt) > 30 {
chatItem.Title = string([]rune(prompt)[:30]) + "..."
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.DB.Create(&chatItem)
}
} }
} else { } else {
body, err := io.ReadAll(response.Body) body, _ := io.ReadAll(response.Body)
if err != nil { return fmt.Errorf("请求大模型 API 失败:%s", body)
return fmt.Errorf("error with reading response: %v", err)
}
var res struct {
Code int `json:"error_code"`
Msg string `json:"error_msg"`
}
err = json.Unmarshal(body, &res)
if err != nil {
return fmt.Errorf("error with decode response: %v", err)
}
utils.ReplyMessage(ws, "请求通义千问大模型 API 失败:"+res.Msg)
} }
return nil return nil

View File

@ -21,13 +21,11 @@ import (
"geekai/utils" "geekai/utils"
"github.com/gorilla/websocket" "github.com/gorilla/websocket"
"gorm.io/gorm" "gorm.io/gorm"
"html/template"
"io" "io"
"net/http" "net/http"
"net/url" "net/url"
"strings" "strings"
"time" "time"
"unicode/utf8"
) )
type xunFeiResp struct { type xunFeiResp struct {
@ -181,89 +179,10 @@ func (h *ChatHandler) sendXunFeiMessage(
} }
} }
// 消息发送成功 // 消息发送成功
if len(contents) > 0 { if len(contents) > 0 {
if message.Role == "" { h.saveChatHistory(req, prompt, contents, message, chatCtx, session, role, userVo, promptCreatedAt, replyCreatedAt)
message.Role = "assistant"
}
message.Content = strings.Join(contents, "")
useMsg := types.Message{Role: "user", Content: prompt}
// 更新上下文消息,如果是调用函数则不需要更新上下文
if h.App.SysConfig.EnableContext {
chatCtx = append(chatCtx, useMsg) // 提问消息
chatCtx = append(chatCtx, message) // 回复消息
h.App.ChatContexts.Put(session.ChatId, chatCtx)
}
// 追加聊天记录
// for prompt
promptToken, err := utils.CalcTokens(prompt, req.Model)
if err != nil {
logger.Error(err)
}
historyUserMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.PromptMsg,
Icon: userVo.Avatar,
Content: template.HTMLEscapeString(prompt),
Tokens: promptToken,
UseContext: true,
Model: req.Model,
}
historyUserMsg.CreatedAt = promptCreatedAt
historyUserMsg.UpdatedAt = promptCreatedAt
res := h.DB.Save(&historyUserMsg)
if res.Error != nil {
logger.Error("failed to save prompt history message: ", res.Error)
}
// for reply
// 计算本次对话消耗的总 token 数量
replyTokens, _ := utils.CalcTokens(message.Content, req.Model)
totalTokens := replyTokens + getTotalTokens(req)
historyReplyMsg := model.ChatMessage{
UserId: userVo.Id,
ChatId: session.ChatId,
RoleId: role.Id,
Type: types.ReplyMsg,
Icon: role.Icon,
Content: message.Content,
Tokens: totalTokens,
UseContext: true,
Model: req.Model,
}
historyReplyMsg.CreatedAt = replyCreatedAt
historyReplyMsg.UpdatedAt = replyCreatedAt
res = h.DB.Create(&historyReplyMsg)
if res.Error != nil {
logger.Error("failed to save reply history message: ", res.Error)
}
// 更新用户算力
h.subUserPower(userVo, session, promptToken, replyTokens)
// 保存当前会话
var chatItem model.ChatItem
res = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem)
if res.Error != nil {
chatItem.ChatId = session.ChatId
chatItem.UserId = session.UserId
chatItem.RoleId = role.Id
chatItem.ModelId = session.Model.Id
if utf8.RuneCountInString(prompt) > 30 {
chatItem.Title = string([]rune(prompt)[:30]) + "..."
} else {
chatItem.Title = prompt
}
chatItem.Model = req.Model
h.DB.Create(&chatItem)
}
} }
return nil return nil
} }

View File

@ -304,7 +304,7 @@ func main() {
group.GET("config/get", h.Get) group.GET("config/get", h.Get)
group.POST("active", h.Active) group.POST("active", h.Active)
group.GET("config/get/license", h.GetLicense) group.GET("config/get/license", h.GetLicense)
group.GET("config/get/draw", h.GetDrawingConfig) group.GET("config/get/app", h.GetAppConfig)
group.POST("config/update/draw", h.SaveDrawingConfig) group.POST("config/update/draw", h.SaveDrawingConfig)
}), }),
fx.Invoke(func(s *core.AppServer, h *admin.ManagerHandler) { fx.Invoke(func(s *core.AppServer, h *admin.ManagerHandler) {

View File

@ -92,9 +92,9 @@ const items = [
}, },
{ {
icon: 'role', icon: 'menu',
index: '/admin/role', index: '/admin/app',
title: '角色管理', title: '应用管理',
}, },
{ {
icon: 'api-key', icon: 'api-key',

View File

@ -46,7 +46,7 @@ const routes = [
component: () => import('@/views/Member.vue'), component: () => import('@/views/Member.vue'),
}, },
{ {
name: 'chat-role', name: 'chat-app',
path: '/apps', path: '/apps',
meta: {title: '应用中心'}, meta: {title: '应用中心'},
component: () => import('@/views/ChatApps.vue'), component: () => import('@/views/ChatApps.vue'),
@ -139,10 +139,10 @@ const routes = [
component: () => import('@/views/admin/Users.vue'), component: () => import('@/views/admin/Users.vue'),
}, },
{ {
path: '/admin/role', path: '/admin/app',
name: 'admin-role', name: 'admin-app',
meta: {title: '角色管理'}, meta: {title: '应用管理'},
component: () => import('@/views/admin/Roles.vue'), component: () => import('@/views/admin/Apps.vue'),
}, },
{ {
path: '/admin/apikey', path: '/admin/apikey',

View File

@ -119,12 +119,12 @@ const mjModels = ref([
{name: "急速Turbo", value: "turbo"}, {name: "急速Turbo", value: "turbo"},
]) ])
httpGet("/api/admin/config/get/draw").then(res => { httpGet("/api/admin/config/get/app").then(res => {
sdConfigs.value = res.data.sd sdConfigs.value = res.data.sd
mjPlusConfigs.value = res.data.mj_plus mjPlusConfigs.value = res.data.mj_plus
mjProxyConfigs.value = res.data.mj_proxy mjProxyConfigs.value = res.data.mj_proxy
}).catch(e =>{ }).catch(e =>{
ElMessage.error("获取AI绘画配置失败:"+e.message) ElMessage.error("获取配置失败:"+e.message)
}) })
const addConfig = (configs) => { const addConfig = (configs) => {

View File

@ -87,7 +87,7 @@
<el-input v-model="item.name" autocomplete="off"/> <el-input v-model="item.name" autocomplete="off"/>
</el-form-item> </el-form-item>
<el-form-item label="用途:" prop="type"> <el-form-item label="用途:" prop="type">
<el-select v-model="item.type" placeholder="请选择用途" @change="changePlatform"> <el-select v-model="item.type" placeholder="请选择用途" @change="changeType">
<el-option v-for="item in types" :value="item.value" :label="item.name" :key="item.value">{{ <el-option v-for="item in types" :value="item.value" :label="item.name" :key="item.value">{{
item.name item.name
}} }}
@ -99,7 +99,8 @@
</el-form-item> </el-form-item>
<el-form-item label="API URL" prop="api_url"> <el-form-item label="API URL" prop="api_url">
<el-input v-model="item.api_url" autocomplete="off" <el-input v-model="item.api_url" autocomplete="off"
placeholder="如果你用了第三方的 API 中转,这里填写中转地址"/> placeholder="必须填土完整的 Chat API URLhttps://api.openai.com/v1/chat/completions"/>
<div class="info">如果你使用了第三方中转这里就填写中转地址</div>
</el-form-item> </el-form-item>
<el-form-item label="代理地址:" prop="proxy_url"> <el-form-item label="代理地址:" prop="proxy_url">
@ -126,7 +127,7 @@ import {onMounted, onUnmounted, reactive, ref} from "vue";
import {httpGet, httpPost} from "@/utils/http"; import {httpGet, httpPost} from "@/utils/http";
import {ElMessage} from "element-plus"; import {ElMessage} from "element-plus";
import {dateFormat, removeArrayItem, substr} from "@/utils/libs"; import {dateFormat, removeArrayItem, substr} from "@/utils/libs";
import {DocumentCopy, Plus, ShoppingCart} from "@element-plus/icons-vue"; import {DocumentCopy, Plus, ShoppingCart, InfoFilled} from "@element-plus/icons-vue";
import ClipboardJS from "clipboard"; import ClipboardJS from "clipboard";
// //
@ -142,39 +143,7 @@ const rules = reactive({
const loading = ref(true) const loading = ref(true)
const formRef = ref(null) const formRef = ref(null)
const title = ref("") const title = ref("")
const platforms = ref([ const platforms = ref([])
{
name: "【OpenAI/中转】ChatGPT",
value: "OpenAI",
api_url: "https://api.chat-plus.net/v1/chat/completions",
img_url: "https://api.chat-plus.net/v1/images/generations"
},
{
name: "【讯飞】星火大模型",
value: "XunFei",
api_url: "wss://spark-api.xf-yun.com/{version}/chat"
},
{
name: "【清华智普】ChatGLM",
value: "ChatGLM",
api_url: "https://open.bigmodel.cn/api/paas/v3/model-api/{model}/sse-invoke"
},
{
name: "【百度】文心一言",
value: "Baidu",
api_url: "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/{model}"
},
{
name: "【微软】Azure",
value: "Azure",
api_url: "https://chat-bot-api.openai.azure.com/openai/deployments/{model}/chat/completions?api-version=2023-05-15"
},
{
name: "【阿里】千义通问",
value: "QWen",
api_url: "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation"
},
])
const types = ref([ const types = ref([
{name: "聊天", value: "chat"}, {name: "聊天", value: "chat"},
{name: "绘画", value: "img"}, {name: "绘画", value: "img"},
@ -191,6 +160,12 @@ onMounted(() => {
clipboard.value.on('error', () => { clipboard.value.on('error', () => {
ElMessage.error('复制失败!'); ElMessage.error('复制失败!');
}) })
httpGet("/api/admin/config/get/app").then(res => {
platforms.value = res.data.platforms
}).catch(e =>{
ElMessage.error("获取配置失败:"+e.message)
})
}) })
onUnmounted(() => { onUnmounted(() => {
@ -263,21 +238,24 @@ const set = (filed, row) => {
}) })
} }
const changePlatform = () => { const selectedPlatform = ref(null)
let platform = null const changePlatform = (value) => {
console.log(value)
for (let v of platforms.value) { for (let v of platforms.value) {
if (v.value === item.value.platform) { if (v.value === value) {
platform = v selectedPlatform.value = v
break item.value.api_url = v.chat_url
} }
} }
if (platform !== null) { }
if (item.value.type === "img" && platform.img_url) {
item.value.api_url = platform.img_url
} else {
item.value.api_url = platform.api_url
}
const changeType = (value) => {
if (selectedPlatform.value) {
if(value === 'img') {
item.value.api_url = selectedPlatform.value.img_url
} else {
item.value.api_url = selectedPlatform.value.chat_url
}
} }
} }
</script> </script>
@ -306,7 +284,9 @@ const changePlatform = () => {
.el-form { .el-form {
.el-form-item__content { .el-form-item__content {
.info {
color #999999
}
.el-icon { .el-icon {
padding-left: 10px; padding-left: 10px;
} }

View File

@ -9,25 +9,25 @@
<template #default="props"> <template #default="props">
<div> <div>
<el-table :data="props.row.context" :border="childBorder"> <el-table :data="props.row.context" :border="childBorder">
<el-table-column label="对话角色" prop="role" width="120"/> <el-table-column label="对话应用" prop="role" width="120"/>
<el-table-column label="对话内容" prop="content"/> <el-table-column label="对话内容" prop="content"/>
</el-table> </el-table>
</div> </div>
</template> </template>
</el-table-column> </el-table-column>
<el-table-column label="角色名称" prop="name"> <el-table-column label="应用名称" prop="name">
<template #default="scope"> <template #default="scope">
<span class="sort" :data-id="scope.row.id">{{ scope.row.name }}</span> <span class="sort" :data-id="scope.row.id">{{ scope.row.name }}</span>
</template> </template>
</el-table-column> </el-table-column>
<el-table-column label="角色标识" prop="key"/> <el-table-column label="应用标识" prop="key"/>
<el-table-column label="绑定模型" prop="model_name"/> <el-table-column label="绑定模型" prop="model_name"/>
<el-table-column label="启用状态"> <el-table-column label="启用状态">
<template #default="scope"> <template #default="scope">
<el-switch v-model="scope.row['enable']" @change="roleSet('enable',scope.row)"/> <el-switch v-model="scope.row['enable']" @change="roleSet('enable',scope.row)"/>
</template> </template>
</el-table-column> </el-table-column>
<el-table-column label="角色图标" prop="icon"> <el-table-column label="应用图标" prop="icon">
<template #default="scope"> <template #default="scope">
<el-image :src="scope.row.icon" style="width: 45px; height: 45px; border-radius: 50%"/> <el-image :src="scope.row.icon" style="width: 45px; height: 45px; border-radius: 50%"/>
</template> </template>
@ -36,7 +36,7 @@
<el-table-column label="操作" width="150" align="right"> <el-table-column label="操作" width="150" align="right">
<template #default="scope"> <template #default="scope">
<el-button size="small" type="primary" @click="rowEdit(scope.$index, scope.row)">编辑</el-button> <el-button size="small" type="primary" @click="rowEdit(scope.$index, scope.row)">编辑</el-button>
<el-popconfirm title="确定要删除当前角色吗?" @confirm="removeRole(scope.row)" :width="200"> <el-popconfirm title="确定要删除当前应用吗?" @confirm="removeRole(scope.row)" :width="200">
<template #reference> <template #reference>
<el-button size="small" type="danger">删除</el-button> <el-button size="small" type="danger">删除</el-button>
</template> </template>
@ -53,21 +53,21 @@
width="50%" width="50%"
> >
<el-form :model="role" label-width="120px" ref="formRef" label-position="left" :rules="rules"> <el-form :model="role" label-width="120px" ref="formRef" label-position="left" :rules="rules">
<el-form-item label="角色名称:" prop="name"> <el-form-item label="应用名称:" prop="name">
<el-input <el-input
v-model="role.name" v-model="role.name"
autocomplete="off" autocomplete="off"
/> />
</el-form-item> </el-form-item>
<el-form-item label="角色标志:" prop="key"> <el-form-item label="应用标志:" prop="key">
<el-input <el-input
v-model="role.key" v-model="role.key"
autocomplete="off" autocomplete="off"
/> />
</el-form-item> </el-form-item>
<el-form-item label="角色图标:" prop="icon"> <el-form-item label="应用图标:" prop="icon">
<el-input v-model="role.icon"> <el-input v-model="role.icon">
<template #append> <template #append>
<el-upload <el-upload
@ -107,7 +107,7 @@
<el-form-item label="上下文信息:" prop="context"> <el-form-item label="上下文信息:" prop="context">
<template #default> <template #default>
<el-table :data="role.context" :border="childBorder" size="small"> <el-table :data="role.context" :border="childBorder" size="small">
<el-table-column label="对话角色" width="120"> <el-table-column label="对话应用" width="120">
<template #default="scope"> <template #default="scope">
<el-input <el-input
v-model="scope.row.role" v-model="scope.row.role"
@ -181,8 +181,8 @@ const loading = ref(true)
const rules = reactive({ const rules = reactive({
name: [{required: true, message: '请输入用户名', trigger: 'blur',}], name: [{required: true, message: '请输入用户名', trigger: 'blur',}],
key: [{required: true, message: '请输入角色标识', trigger: 'blur',}], key: [{required: true, message: '请输入应用标识', trigger: 'blur',}],
icon: [{required: true, message: '请输入角色图标', trigger: 'blur',}], icon: [{required: true, message: '请输入应用图标', trigger: 'blur',}],
sort: [ sort: [
{required: true, message: '请输入排序数字', trigger: 'blur'}, {required: true, message: '请输入排序数字', trigger: 'blur'},
{type: 'number', message: '请输入有效数字'}, {type: 'number', message: '请输入有效数字'},
@ -204,13 +204,13 @@ onMounted(() => {
}) })
const fetchData = () => { const fetchData = () => {
// //
httpGet('/api/admin/role/list').then((res) => { httpGet('/api/admin/role/list').then((res) => {
tableData.value = res.data tableData.value = res.data
sortedTableData.value = copyObj(tableData.value) sortedTableData.value = copyObj(tableData.value)
loading.value = false loading.value = false
}).catch(() => { }).catch(() => {
ElMessage.error("获取聊天角色失败"); ElMessage.error("获取聊天应用失败");
}) })
const drawBodyWrapper = document.querySelector('.el-table__body tbody') const drawBodyWrapper = document.querySelector('.el-table__body tbody')
@ -250,14 +250,14 @@ const roleSet = (filed, row) => {
// //
const curIndex = ref(0) const curIndex = ref(0)
const rowEdit = function (index, row) { const rowEdit = function (index, row) {
optTitle.value = "修改角色" optTitle.value = "修改应用"
curIndex.value = index curIndex.value = index
role.value = copyObj(row) role.value = copyObj(row)
showDialog.value = true showDialog.value = true
} }
const addRole = function () { const addRole = function () {
optTitle.value = "添加新角色" optTitle.value = "添加新应用"
role.value = {context: []} role.value = {context: []}
showDialog.value = true showDialog.value = true
} }

View File

@ -214,15 +214,7 @@ const rules = reactive({
}) })
const loading = ref(true) const loading = ref(true)
const formRef = ref(null) const formRef = ref(null)
const platforms = ref([ const platforms = ref([])
{name: "【OpenAI】ChatGPT", value: "OpenAI"},
{name: "【讯飞】星火大模型", value: "XunFei"},
{name: "【清华智普】ChatGLM", value: "ChatGLM"},
{name: "【百度】文心一言", value: "Baidu"},
{name: "【微软】Azure", value: "Azure"},
{name: "【阿里】通义千问", value: "QWen"},
])
// API KEY // API KEY
const apiKeys = ref([]) const apiKeys = ref([])
@ -287,6 +279,12 @@ onMounted(() => {
clipboard.value.on('error', () => { clipboard.value.on('error', () => {
ElMessage.error('复制失败!'); ElMessage.error('复制失败!');
}) })
httpGet("/api/admin/config/get/app").then(res => {
platforms.value = res.data.platforms
}).catch(e =>{
ElMessage.error("获取配置失败:"+e.message)
})
}) })
onUnmounted(() => { onUnmounted(() => {