diff --git a/CHANGELOG.md b/CHANGELOG.md
index fc7ccddd..76329cb0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,17 @@
# 更新日志
+## v4.2.4
+
+- 功能优化:更改前端构建技术选型,使用 Vite 构建,提升构建速度和兼容性
+- 功能优化:使用 SSE 发送消息,替换原来的 Websocket 消息方案
+- 功能新增:管理后台支持设置默认昵称
- 功能优化:支持 Suno v4.5 模型
+- 功能新增:用户注册和用户登录增加用户协议和隐私政策功能,需要用户同意协议才可注册和登录。
- 功能优化:修改重新回答功能,将前面的问答内容撤回为可编辑状态,撤回的内容不会增加额外的上下文
+- 功能优化:优化聊天记录的存储结构,增加模型名称字段,支持存储更长的模型名称
- Bug 修复:聊天应用绑定模型后未生效,仍会轮询 API KEY,导致请求时而成功、时而失败
+- 功能优化:如果管理后台没有启用会员充值菜单,移动端也不显示充值套餐功能
+
## v4.2.3
- 功能优化:增加模型分组与模型描述,采用卡片展示模式改进模型选择功能体验
diff --git a/api/core/app_server.go b/api/core/app_server.go
index d45a14d1..34850e61 100644
--- a/api/core/app_server.go
+++ b/api/core/app_server.go
@@ -100,6 +100,26 @@ func (s *AppServer) Run(db *gorm.DB) error {
&model.UserLoginLog{},
&model.DallJob{},
)
+ // 手动删除字段
+ if db.Migrator().HasColumn(&model.Order{}, "deleted_at") {
+ db.Migrator().DropColumn(&model.Order{}, "deleted_at")
+ }
+ if db.Migrator().HasColumn(&model.ChatItem{}, "deleted_at") {
+ db.Migrator().DropColumn(&model.ChatItem{}, "deleted_at")
+ }
+ if db.Migrator().HasColumn(&model.ChatMessage{}, "deleted_at") {
+ db.Migrator().DropColumn(&model.ChatMessage{}, "deleted_at")
+ }
+ if db.Migrator().HasColumn(&model.User{}, "chat_config") {
+ db.Migrator().DropColumn(&model.User{}, "chat_config")
+ }
+ if db.Migrator().HasColumn(&model.ChatModel{}, "category") {
+ db.Migrator().DropColumn(&model.ChatModel{}, "category")
+ }
+ if db.Migrator().HasColumn(&model.ChatModel{}, "description") {
+ db.Migrator().DropColumn(&model.ChatModel{}, "description")
+ }
+
logger.Info("Database tables migrated successfully")
// 统计安装信息
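The six HasColumn/DropColumn pairs above all follow the same pattern. A minimal sketch, not part of the patch, of a helper that could factor them out (the name dropColumnIfExists is illustrative):

```go
package core

import "gorm.io/gorm"

// dropColumnIfExists mirrors the manual cleanup in AppServer.Run above:
// it drops a column only when the migrator still sees it on the table.
func dropColumnIfExists(db *gorm.DB, dst interface{}, column string) error {
	if db.Migrator().HasColumn(dst, column) {
		return db.Migrator().DropColumn(dst, column)
	}
	return nil
}
```

Each pair would then collapse to a single call such as `dropColumnIfExists(db, &model.Order{}, "deleted_at")`, with the returned error logged instead of silently discarded.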
diff --git a/api/core/types/chat.go b/api/core/types/chat.go
index aa8ac01f..20805378 100644
--- a/api/core/types/chat.go
+++ b/api/core/types/chat.go
@@ -52,17 +52,6 @@ type Delta struct {
} `json:"function_call,omitempty"`
}
-// ChatSession 聊天会话对象
-type ChatSession struct {
- UserId uint `json:"user_id"`
- ClientIP string `json:"client_ip"` // 客户端 IP
- ChatId string `json:"chat_id"` // 客户端聊天会话 ID, 多会话模式专用字段
- Model ChatModel `json:"model"` // GPT 模型
- Start int64 `json:"start"` // 开始请求时间戳
- Tools []int `json:"tools"` // 工具函数列表
- Stream bool `json:"stream"` // 是否采用流式输出
-}
-
type ChatModel struct {
Id uint `json:"id"`
Name string `json:"name"`
diff --git a/api/core/types/config.go b/api/core/types/config.go
index 29169539..01b6bc02 100644
--- a/api/core/types/config.go
+++ b/api/core/types/config.go
@@ -162,10 +162,11 @@ type SystemConfig struct {
SdNegPrompt string `json:"sd_neg_prompt"` // SD 默认反向提示词
MjMode string `json:"mj_mode"` // midjourney 默认的API模式,relax, fast, turbo
- IndexNavs []int `json:"index_navs"` // 首页显示的导航菜单
- Copyright string `json:"copyright"` // 版权信息
- ICP string `json:"icp"` // ICP 备案号
- MarkMapText string `json:"mark_map_text"` // 思维导入的默认文本
+ IndexNavs []int `json:"index_navs"` // 首页显示的导航菜单
+ Copyright string `json:"copyright"` // 版权信息
+ DefaultNickname string `json:"default_nickname"` // 默认昵称
+ ICP string `json:"icp"` // ICP 备案号
+ MarkMapText string `json:"mark_map_text"` // 思维导图的默认文本
EnabledVerify bool `json:"enabled_verify"` // 是否启用验证码
EmailWhiteList []string `json:"email_white_list"` // 邮箱白名单列表
diff --git a/api/core/types/locked_map.go b/api/core/types/locked_map.go
index 4382ceee..2cb9362c 100644
--- a/api/core/types/locked_map.go
+++ b/api/core/types/locked_map.go
@@ -16,7 +16,7 @@ type MKey interface {
string | int | uint
}
type MValue interface {
- *WsClient | *ChatSession | context.CancelFunc | []any
+ *WsClient | context.CancelFunc | []any
}
type LMap[K MKey, T MValue] struct {
lock sync.RWMutex
diff --git a/api/handler/admin/chat_handler.go b/api/handler/admin/chat_handler.go
index a97eacc2..f99defa6 100644
--- a/api/handler/admin/chat_handler.go
+++ b/api/handler/admin/chat_handler.go
@@ -209,20 +209,28 @@ func (h *ChatHandler) Messages(c *gin.Context) {
func (h *ChatHandler) History(c *gin.Context) {
chatId := c.Query("chat_id") // 会话 ID
var items []model.ChatMessage
- var messages = make([]vo.HistoryMessage, 0)
+ var messages = make([]vo.ChatMessage, 0)
res := h.DB.Where("chat_id = ?", chatId).Find(&items)
if res.Error != nil {
resp.ERROR(c, "No history message")
return
} else {
for _, item := range items {
- var v vo.HistoryMessage
+ var v vo.ChatMessage
err := utils.CopyObject(item, &v)
+ if err != nil {
+ continue
+ }
+ // 解析内容
+ var content vo.MsgContent
+ err = utils.JsonDecode(item.Content, &content)
+ if err != nil {
+ content.Text = item.Content
+ }
+ v.Content = content
v.CreatedAt = item.CreatedAt.Unix()
v.UpdatedAt = item.UpdatedAt.Unix()
- if err == nil {
- messages = append(messages, v)
- }
+ messages = append(messages, v)
}
}
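The decode step added above (and repeated in api/handler/chat_item_handler.go further down) tolerates legacy rows whose content column still holds plain text instead of a JSON-encoded vo.MsgContent. A small sketch of that fallback as a standalone helper, assuming the existing vo and utils packages; the function name is illustrative and not part of the patch:

```go
package admin

import (
	"geekai/store/vo"
	"geekai/utils"
)

// decodeMsgContent parses the stored message content and falls back to
// treating the raw string as plain text for records written before this change.
func decodeMsgContent(raw string) vo.MsgContent {
	var content vo.MsgContent
	if err := utils.JsonDecode(raw, &content); err != nil {
		content.Text = raw // legacy plain-text message
	}
	return content
}
```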
diff --git a/api/handler/admin/chat_model_handler.go b/api/handler/admin/chat_model_handler.go
index 5ad9f182..471e8f08 100644
--- a/api/handler/admin/chat_model_handler.go
+++ b/api/handler/admin/chat_model_handler.go
@@ -40,8 +40,8 @@ func (h *ChatModelHandler) Save(c *gin.Context) {
Power int `json:"power"`
MaxTokens int `json:"max_tokens"` // 最大响应长度
MaxContext int `json:"max_context"` // 最大上下文长度
- Description string `json:"description"` //模型描述
- Category string `json:"category"` //模型类别
+ Desc string `json:"desc"` //模型描述
+ Tag string `json:"tag"` //模型标签
Temperature float32 `json:"temperature"` // 模型温度
KeyId int `json:"key_id,omitempty"`
CreatedAt int64 `json:"created_at"`
@@ -66,8 +66,8 @@ func (h *ChatModelHandler) Save(c *gin.Context) {
item.Power = data.Power
item.MaxTokens = data.MaxTokens
item.MaxContext = data.MaxContext
- item.Description = data.Description
- item.Category = data.Category
+ item.Desc = data.Desc
+ item.Tag = data.Tag
item.Temperature = data.Temperature
item.KeyId = uint(data.KeyId)
item.Type = data.Type
@@ -100,12 +100,16 @@ func (h *ChatModelHandler) List(c *gin.Context) {
session := h.DB.Session(&gorm.Session{})
enable := h.GetBool(c, "enable")
name := h.GetTrim(c, "name")
+ modelType := h.GetTrim(c, "type")
if enable {
session = session.Where("enabled", enable)
}
if name != "" {
session = session.Where("name LIKE ?", name+"%")
}
+ if modelType != "" {
+ session = session.Where("type", modelType)
+ }
var items []model.ChatModel
var cms = make([]vo.ChatModel, 0)
res := session.Order("sort_num ASC").Find(&items)
diff --git a/api/handler/admin/redeem_handler.go b/api/handler/admin/redeem_handler.go
index b2559a9c..6fae1e91 100644
--- a/api/handler/admin/redeem_handler.go
+++ b/api/handler/admin/redeem_handler.go
@@ -106,8 +106,8 @@ func (h *RedeemHandler) Export(c *gin.Context) {
}
// 设置响应头,告诉浏览器这是一个附件,需要下载
- c.Header("Content-Disposition", "attachment; filename=output.csv")
- c.Header("Content-Type", "text/csv")
+ c.Header("Prompt-Disposition", "attachment; filename=output.csv")
+ c.Header("Prompt-Type", "text/csv")
// 创建一个 CSV writer
writer := csv.NewWriter(c.Writer)
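For context, the streaming pattern this handler relies on, condensed into a hedged sketch (the helper name and row data are illustrative, not part of the patch); the two headers are what make the browser treat the response as a downloadable CSV attachment:

```go
package admin

import (
	"encoding/csv"

	"github.com/gin-gonic/gin"
)

// writeCSV streams rows straight into the HTTP response as an attachment.
func writeCSV(c *gin.Context, filename string, rows [][]string) error {
	c.Header("Content-Disposition", "attachment; filename="+filename)
	c.Header("Content-Type", "text/csv")
	writer := csv.NewWriter(c.Writer)
	defer writer.Flush() // flush any buffered rows to the client
	return writer.WriteAll(rows)
}
```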
diff --git a/api/handler/admin/user_handler.go b/api/handler/admin/user_handler.go
index 6f8a59fa..90ff4b20 100644
--- a/api/handler/admin/user_handler.go
+++ b/api/handler/admin/user_handler.go
@@ -178,6 +178,7 @@ func (h *UserHandler) Save(c *gin.Context) {
Power: data.Power,
Status: true,
ChatRoles: utils.JsonEncode(data.ChatRoles),
+ ChatConfig: "{}",
ChatModels: utils.JsonEncode(data.ChatModels),
ExpiredTime: utils.Str2stamp(data.ExpiredTime),
}
@@ -353,4 +354,4 @@ func (h *UserHandler) GenLoginLink(c *gin.Context) {
}
resp.SUCCESS(c, tokenString)
-}
\ No newline at end of file
+}
diff --git a/api/handler/chat_handler.go b/api/handler/chat_handler.go
index fc7172c7..caa6bd33 100644
--- a/api/handler/chat_handler.go
+++ b/api/handler/chat_handler.go
@@ -21,11 +21,11 @@ import (
"geekai/store/vo"
"geekai/utils"
"geekai/utils/resp"
- "html/template"
"io"
"net/http"
"net/url"
"os"
+ "path"
"strings"
"time"
"unicode/utf8"
@@ -36,13 +36,34 @@ import (
"gorm.io/gorm"
)
+const (
+ ChatEventStart = "start"
+ ChatEventEnd = "end"
+ ChatEventError = "error"
+ ChatEventMessageDelta = "message_delta"
+ ChatEventTitle = "title"
+)
+
+type ChatInput struct {
+ UserId uint `json:"user_id"`
+ RoleId uint `json:"role_id"`
+ ModelId uint `json:"model_id"`
+ ChatId string `json:"chat_id"`
+ Prompt string `json:"prompt"`
+ Tools []uint `json:"tools"`
+ Stream bool `json:"stream"`
+ Files []vo.File `json:"files"`
+ ChatModel model.ChatModel `json:"chat_model,omitempty"`
+ ChatRole model.ChatRole `json:"chat_role,omitempty"`
+ LastMsgId uint `json:"last_msg_id,omitempty"` // 最后的消息ID,用于重新生成答案的时候过滤上下文
+}
+
type ChatHandler struct {
BaseHandler
redis *redis.Client
uploadManager *oss.UploaderManager
licenseService *service.LicenseService
ReqCancelFunc *types.LMap[string, context.CancelFunc] // HttpClient 请求取消 handle function
- ChatContexts *types.LMap[string, []any] // 聊天上下文 Map [chatId] => []Message
userService *service.UserService
}
@@ -53,14 +74,74 @@ func NewChatHandler(app *core.AppServer, db *gorm.DB, redis *redis.Client, manag
uploadManager: manager,
licenseService: licenseService,
ReqCancelFunc: types.NewLMap[string, context.CancelFunc](),
- ChatContexts: types.NewLMap[string, []any](),
userService: userService,
}
}
-func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSession, role model.ChatRole, prompt string, ws *types.WsClient) error {
+// Chat 处理聊天请求
+func (h *ChatHandler) Chat(c *gin.Context) {
+ var input ChatInput
+ if err := c.ShouldBindJSON(&input); err != nil {
+ resp.ERROR(c, types.InvalidArgs)
+ return
+ }
+
+ // 设置SSE响应头
+ c.Header("Prompt-Type", "text/event-stream")
+ c.Header("Cache-Control", "no-cache")
+ c.Header("Connection", "keep-alive")
+ c.Header("X-Accel-Buffering", "no")
+
+ ctx, cancel := context.WithCancel(c.Request.Context())
+ defer cancel()
+
+ // 使用旧的聊天数据覆盖模型和角色ID
+ var chat model.ChatItem
+ h.DB.Where("chat_id", input.ChatId).First(&chat)
+ if chat.Id > 0 {
+ input.ModelId = chat.ModelId
+ input.RoleId = chat.RoleId
+ }
+
+ // 验证聊天角色
+ var chatRole model.ChatRole
+ err := h.DB.First(&chatRole, input.RoleId).Error
+ if err != nil || !chatRole.Enable {
+ pushMessage(c, ChatEventError, "当前聊天角色不存在或者未启用,请更换角色之后再发起对话!")
+ return
+ }
+ input.ChatRole = chatRole
+
+ // 获取模型信息
+ var chatModel model.ChatModel
+ err = h.DB.Where("id", input.ModelId).First(&chatModel).Error
+ if err != nil || !chatModel.Enabled {
+ pushMessage(c, ChatEventError, "当前AI模型暂未启用,请更换模型后再发起对话!")
+ return
+ }
+ input.ChatModel = chatModel
+
+ // 发送消息
+ err = h.sendMessage(ctx, input, c)
+ if err != nil {
+ pushMessage(c, ChatEventError, err.Error())
+ return
+ }
+
+ pushMessage(c, ChatEventEnd, "对话完成")
+}
+
+func pushMessage(c *gin.Context, msgType string, content interface{}) {
+ c.SSEvent("message", map[string]interface{}{
+ "type": msgType,
+ "body": content,
+ })
+ c.Writer.Flush()
+}
+
+func (h *ChatHandler) sendMessage(ctx context.Context, input ChatInput, c *gin.Context) error {
var user model.User
- res := h.DB.Model(&model.User{}).First(&user, session.UserId)
+ res := h.DB.Model(&model.User{}).First(&user, input.UserId)
if res.Error != nil {
return errors.New("未授权用户,您正在进行非法操作!")
}
@@ -71,12 +152,12 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
return errors.New("User 对象转换失败," + err.Error())
}
- if userVo.Status == false {
+ if !userVo.Status {
return errors.New("您的账号已经被禁用,如果疑问,请联系管理员!")
}
- if userVo.Power < session.Model.Power {
- return fmt.Errorf("您当前剩余算力 %d 已不足以支付当前模型的单次对话需要消耗的算力 %d,[立即购买](/member)。", userVo.Power, session.Model.Power)
+ if userVo.Power < input.ChatModel.Power {
+ return fmt.Errorf("您当前剩余算力 %d 已不足以支付当前模型的单次对话需要消耗的算力 %d,[立即购买](/member)。", userVo.Power, input.ChatModel.Power)
}
if userVo.ExpiredTime > 0 && userVo.ExpiredTime <= time.Now().Unix() {
@@ -84,30 +165,29 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
}
// 检查 prompt 长度是否超过了当前模型允许的最大上下文长度
- promptTokens, _ := utils.CalcTokens(prompt, session.Model.Value)
- if promptTokens > session.Model.MaxContext {
+ promptTokens, _ := utils.CalcTokens(input.Prompt, input.ChatModel.Value)
+ if promptTokens > input.ChatModel.MaxContext {
return errors.New("对话内容超出了当前模型允许的最大上下文长度!")
}
var req = types.ApiRequest{
- Model: session.Model.Value,
- Stream: session.Stream,
- Temperature: session.Model.Temperature,
+ Model: input.ChatModel.Value,
+ Stream: input.Stream,
+ Temperature: input.ChatModel.Temperature,
}
// 兼容 OpenAI 模型
- if strings.HasPrefix(session.Model.Value, "o1-") ||
- strings.HasPrefix(session.Model.Value, "o3-") ||
- strings.HasPrefix(session.Model.Value, "gpt") {
- req.MaxCompletionTokens = session.Model.MaxTokens
- session.Start = time.Now().Unix()
+ if strings.HasPrefix(input.ChatModel.Value, "o1-") ||
+ strings.HasPrefix(input.ChatModel.Value, "o3-") ||
+ strings.HasPrefix(input.ChatModel.Value, "gpt") {
+ req.MaxCompletionTokens = input.ChatModel.MaxTokens
} else {
- req.MaxTokens = session.Model.MaxTokens
+ req.MaxTokens = input.ChatModel.MaxTokens
}
- if len(session.Tools) > 0 && !strings.HasPrefix(session.Model.Value, "o1-") {
+ if len(input.Tools) > 0 && !strings.HasPrefix(input.ChatModel.Value, "o1-") {
var items []model.Function
- res = h.DB.Where("enabled", true).Where("id IN ?", session.Tools).Find(&items)
+ res = h.DB.Where("enabled", true).Where("id IN ?", input.Tools).Find(&items)
if res.Error == nil {
var tools = make([]types.Tool, 0)
for _, v := range items {
@@ -138,25 +218,27 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
}
// 加载聊天上下文
- chatCtx := make([]interface{}, 0)
- messages := make([]interface{}, 0)
+ chatCtx := make([]any, 0)
+ messages := make([]any, 0)
if h.App.SysConfig.EnableContext {
- if h.ChatContexts.Has(session.ChatId) {
- messages = h.ChatContexts.Get(session.ChatId)
- } else {
- _ = utils.JsonDecode(role.Context, &messages)
- if h.App.SysConfig.ContextDeep > 0 {
- var historyMessages []model.ChatMessage
- res := h.DB.Where("chat_id = ? and use_context = 1", session.ChatId).Limit(h.App.SysConfig.ContextDeep).Order("id DESC").Find(&historyMessages)
- if res.Error == nil {
- for i := len(historyMessages) - 1; i >= 0; i-- {
- msg := historyMessages[i]
- ms := types.Message{Role: "user", Content: msg.Content}
- if msg.Type == types.ReplyMsg {
- ms.Role = "assistant"
- }
- chatCtx = append(chatCtx, ms)
+ _ = utils.JsonDecode(input.ChatRole.Context, &messages)
+ if h.App.SysConfig.ContextDeep > 0 {
+ var historyMessages []model.ChatMessage
+ dbSession := h.DB.Session(&gorm.Session{}).Where("chat_id", input.ChatId)
+ if input.LastMsgId > 0 { // 重新生成逻辑
+ dbSession = dbSession.Where("id < ?", input.LastMsgId)
+ // 删除对应的聊天记录
+ h.DB.Debug().Where("chat_id", input.ChatId).Where("id >= ?", input.LastMsgId).Delete(&model.ChatMessage{})
+ }
+ err = dbSession.Limit(h.App.SysConfig.ContextDeep).Order("id DESC").Find(&historyMessages).Error
+ if err == nil {
+ for i := len(historyMessages) - 1; i >= 0; i-- {
+ msg := historyMessages[i]
+ ms := types.Message{Role: "user", Content: msg.Content}
+ if msg.Type == types.ReplyMsg {
+ ms.Role = "assistant"
}
+ chatCtx = append(chatCtx, ms)
}
}
}
@@ -171,7 +253,7 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
v := messages[i]
tks, _ = utils.CalcTokens(utils.JsonEncode(v), req.Model)
// 上下文 token 超出了模型的最大上下文长度
- if tokens+tks >= session.Model.MaxContext {
+ if tokens+tks >= input.ChatModel.MaxContext {
break
}
@@ -183,78 +265,106 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session *types.ChatSessio
tokens += tks
chatCtx = append(chatCtx, v)
}
-
- logger.Debugf("聊天上下文:%+v", chatCtx)
}
- reqMgs := make([]interface{}, 0)
+ reqMgs := make([]any, 0)
for i := len(chatCtx) - 1; i >= 0; i-- {
reqMgs = append(reqMgs, chatCtx[i])
}
- fullPrompt := prompt
- text := prompt
- // extract files in prompt
- files := utils.ExtractFileURLs(prompt)
- logger.Debugf("detected FILES: %+v", files)
- // 如果不是逆向模型,则提取文件内容
- if len(files) > 0 && !(session.Model.Value == "gpt-4-all" ||
- strings.HasPrefix(session.Model.Value, "gpt-4-gizmo") ||
- strings.HasSuffix(session.Model.Value, "claude-3")) {
- contents := make([]string, 0)
- var file model.File
- for _, v := range files {
- h.DB.Where("url = ?", v).First(&file)
- content, err := utils.ReadFileContent(v, h.App.Config.TikaHost)
- if err != nil {
- logger.Error("error with read file: ", err)
- } else {
- contents = append(contents, fmt.Sprintf("%s 文件内容:%s", file.Name, content))
- }
- text = strings.Replace(text, v, "", 1)
- }
- if len(contents) > 0 {
- fullPrompt = fmt.Sprintf("请根据提供的文件内容信息回答问题(其中Excel 已转成 HTML):\n\n %s\n\n 问题:%s", strings.Join(contents, "\n"), text)
- }
-
- tokens, _ := utils.CalcTokens(fullPrompt, req.Model)
- if tokens > session.Model.MaxContext {
- return fmt.Errorf("文件的长度超出模型允许的最大上下文长度,请减少文件内容数量或文件大小。")
- }
- }
- logger.Debug("最终Prompt:", fullPrompt)
-
- // extract images from prompt
- imgURLs := utils.ExtractImgURLs(prompt)
- logger.Debugf("detected IMG: %+v", imgURLs)
- var content interface{}
- if len(imgURLs) > 0 {
- data := make([]interface{}, 0)
- for _, v := range imgURLs {
- text = strings.Replace(text, v, "", 1)
- data = append(data, gin.H{
+ fileContents := make([]string, 0) // 文件内容
+ var finalPrompt = input.Prompt
+ imgList := make([]any, 0)
+ for _, file := range input.Files {
+ logger.Debugf("detected file: %+v", file.URL)
+ // 处理图片
+ if isImageURL(file.URL) {
+ imgList = append(imgList, gin.H{
"type": "image_url",
"image_url": gin.H{
- "url": v,
+ "url": file.URL,
},
})
+ } else {
+ // 如果不是逆向模型,则提取文件内容
+ modelValue := input.ChatModel.Value
+ if !(strings.Contains(modelValue, "-all") || strings.HasPrefix(modelValue, "gpt-4-gizmo") || strings.HasPrefix(modelValue, "claude")) {
+ content, err := utils.ReadFileContent(file.URL, h.App.Config.TikaHost)
+ if err != nil {
+ logger.Error("error with read file: ", err)
+ continue
+ } else {
+ fileContents = append(fileContents, fmt.Sprintf("%s 文件内容:%s", file.Name, content))
+ }
+ }
}
- data = append(data, gin.H{
- "type": "text",
- "text": strings.TrimSpace(text),
- })
- content = data
- } else {
- content = fullPrompt
}
- req.Messages = append(reqMgs, map[string]interface{}{
- "role": "user",
- "content": content,
- })
- logger.Debugf("%+v", req.Messages)
+ if len(fileContents) > 0 {
+ finalPrompt = fmt.Sprintf("请根据提供的文件内容信息回答问题(其中Excel 已转成 HTML):\n\n %s\n\n 问题:%s", strings.Join(fileContents, "\n"), input.Prompt)
+ tokens, _ := utils.CalcTokens(finalPrompt, req.Model)
+ if tokens > input.ChatModel.MaxContext {
+ return fmt.Errorf("文件的长度超出模型允许的最大上下文长度,请减少文件内容数量或文件大小。")
+ }
+ } else {
+ finalPrompt = input.Prompt
+ }
- return h.sendOpenAiMessage(req, userVo, ctx, session, role, prompt, ws)
+ if len(imgList) > 0 {
+ imgList = append(imgList, map[string]interface{}{
+ "type": "text",
+ "text": input.Prompt,
+ })
+ req.Messages = append(reqMgs, map[string]interface{}{
+ "role": "user",
+ "content": imgList,
+ })
+ } else {
+ req.Messages = append(reqMgs, map[string]interface{}{
+ "role": "user",
+ "content": finalPrompt,
+ })
+ }
+
+ return h.sendOpenAiMessage(req, userVo, ctx, input, c)
+}
+
+// 判断一个 URL 是否图片链接
+func isImageURL(url string) bool {
+ // 检查是否是有效的URL
+ if !strings.HasPrefix(url, "http://") && !strings.HasPrefix(url, "https://") {
+ return false
+ }
+
+ // 检查文件扩展名
+ ext := strings.ToLower(path.Ext(url))
+ validImageExts := map[string]bool{
+ ".jpg": true,
+ ".jpeg": true,
+ ".png": true,
+ ".gif": true,
+ ".bmp": true,
+ ".webp": true,
+ ".svg": true,
+ ".ico": true,
+ }
+
+ if !validImageExts[ext] {
+ return false
+ }
+
+ // 发送HEAD请求检查Content-Type
+ client := &http.Client{
+ Timeout: 5 * time.Second,
+ }
+ resp, err := client.Head(url)
+ if err != nil {
+ return false
+ }
+ defer resp.Body.Close()
+
+ contentType := resp.Header.Get("Content-Type")
+ return strings.HasPrefix(contentType, "image/")
}
// Tokens 统计 token 数量
@@ -323,15 +433,14 @@ func (h *ChatHandler) StopGenerate(c *gin.Context) {
// 发送请求到 OpenAI 服务器
// useOwnApiKey: 是否使用了用户自己的 API KEY
-func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, session *types.ChatSession, apiKey *model.ApiKey) (*http.Response, error) {
+func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, input ChatInput, apiKey *model.ApiKey) (*http.Response, error) {
// if the chat model bind a KEY, use it directly
- if session.Model.KeyId > 0 {
- h.DB.Where("id", session.Model.KeyId).Find(apiKey)
- }
- // use the last unused key
- if apiKey.Id == 0 {
+ if input.ChatModel.KeyId > 0 {
+ h.DB.Where("id", input.ChatModel.KeyId).Find(apiKey)
+ } else { // use the last unused key
h.DB.Where("type", "chat").Where("enabled", true).Order("last_used_at ASC").First(apiKey)
}
+
if apiKey.Id == 0 {
return nil, errors.New("no available key, please import key")
}
@@ -381,16 +490,16 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
}
// 扣减用户算力
-func (h *ChatHandler) subUserPower(userVo vo.User, session *types.ChatSession, promptTokens int, replyTokens int) {
+func (h *ChatHandler) subUserPower(userVo vo.User, input ChatInput, promptTokens int, replyTokens int) {
power := 1
- if session.Model.Power > 0 {
- power = session.Model.Power
+ if input.ChatModel.Power > 0 {
+ power = input.ChatModel.Power
}
err := h.userService.DecreasePower(userVo.Id, power, model.PowerLog{
Type: types.PowerConsume,
- Model: session.Model.Value,
- Remark: fmt.Sprintf("模型名称:%s, 提问长度:%d,回复长度:%d", session.Model.Name, promptTokens, replyTokens),
+ Model: input.ChatModel.Value,
+ Remark: fmt.Sprintf("模型名称:%s, 提问长度:%d,回复长度:%d", input.ChatModel.Name, promptTokens, replyTokens),
})
if err != nil {
logger.Error(err)
@@ -401,19 +510,11 @@ func (h *ChatHandler) saveChatHistory(
req types.ApiRequest,
usage Usage,
message types.Message,
- session *types.ChatSession,
- role model.ChatRole,
+ input ChatInput,
userVo vo.User,
promptCreatedAt time.Time,
replyCreatedAt time.Time) {
- // 更新上下文消息
- if h.App.SysConfig.EnableContext {
- chatCtx := req.Messages // 提问消息
- chatCtx = append(chatCtx, message) // 回复消息
- h.ChatContexts.Put(session.ChatId, chatCtx)
- }
-
// 追加聊天记录
// for prompt
var promptTokens, replyTokens, totalTokens int
@@ -424,12 +525,15 @@ func (h *ChatHandler) saveChatHistory(
}
historyUserMsg := model.ChatMessage{
- UserId: userVo.Id,
- ChatId: session.ChatId,
- RoleId: role.Id,
- Type: types.PromptMsg,
- Icon: userVo.Avatar,
- Content: template.HTMLEscapeString(usage.Prompt),
+ UserId: userVo.Id,
+ ChatId: input.ChatId,
+ RoleId: input.RoleId,
+ Type: types.PromptMsg,
+ Icon: userVo.Avatar,
+ Content: utils.JsonEncode(vo.MsgContent{
+ Text: usage.Prompt,
+ Files: input.Files,
+ }),
Tokens: promptTokens,
TotalTokens: promptTokens,
UseContext: true,
@@ -452,12 +556,15 @@ func (h *ChatHandler) saveChatHistory(
totalTokens = replyTokens + getTotalTokens(req)
}
historyReplyMsg := model.ChatMessage{
- UserId: userVo.Id,
- ChatId: session.ChatId,
- RoleId: role.Id,
- Type: types.ReplyMsg,
- Icon: role.Icon,
- Content: usage.Content,
+ UserId: userVo.Id,
+ ChatId: input.ChatId,
+ RoleId: input.RoleId,
+ Type: types.ReplyMsg,
+ Icon: input.ChatRole.Icon,
+ Content: utils.JsonEncode(vo.MsgContent{
+ Text: message.Content,
+ Files: input.Files,
+ }),
Tokens: replyTokens,
TotalTokens: totalTokens,
UseContext: true,
@@ -471,17 +578,17 @@ func (h *ChatHandler) saveChatHistory(
}
// 更新用户算力
- if session.Model.Power > 0 {
- h.subUserPower(userVo, session, promptTokens, replyTokens)
+ if input.ChatModel.Power > 0 {
+ h.subUserPower(userVo, input, promptTokens, replyTokens)
}
// 保存当前会话
var chatItem model.ChatItem
- err = h.DB.Where("chat_id = ?", session.ChatId).First(&chatItem).Error
+ err = h.DB.Where("chat_id = ?", input.ChatId).First(&chatItem).Error
if err != nil {
- chatItem.ChatId = session.ChatId
+ chatItem.ChatId = input.ChatId
chatItem.UserId = userVo.Id
- chatItem.RoleId = role.Id
- chatItem.ModelId = session.Model.Id
+ chatItem.RoleId = input.RoleId
+ chatItem.ModelId = input.ModelId
if utf8.RuneCountInString(usage.Prompt) > 30 {
chatItem.Title = string([]rune(usage.Prompt)[:30]) + "..."
} else {
@@ -495,7 +602,7 @@ func (h *ChatHandler) saveChatHistory(
}
}
-// 文本生成语音
+// TextToSpeech 文本生成语音
func (h *ChatHandler) TextToSpeech(c *gin.Context) {
var data struct {
ModelId int `json:"model_id"`
@@ -509,13 +616,19 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) {
textHash := utils.Sha256(fmt.Sprintf("%d/%s", data.ModelId, data.Text))
audioFile := fmt.Sprintf("%s/audio", h.App.Config.StaticDir)
if _, err := os.Stat(audioFile); err != nil {
- os.MkdirAll(audioFile, 0755)
+ if err := os.MkdirAll(audioFile, 0755); err != nil {
+ resp.ERROR(c, err.Error())
+ return
+ }
}
audioFile = fmt.Sprintf("%s/%s.mp3", audioFile, textHash)
if _, err := os.Stat(audioFile); err == nil {
// 设置响应头
- c.Header("Content-Type", "audio/mpeg")
- c.Header("Content-Disposition", "attachment; filename=speech.mp3")
+ c.Header("Prompt-Type", "audio/mpeg")
+ c.Header("Prompt-Disposition", "attachment; filename=speech.mp3")
c.File(audioFile)
return
}
@@ -579,9 +692,230 @@ func (h *ChatHandler) TextToSpeech(c *gin.Context) {
}
// 设置响应头
- c.Header("Content-Type", "audio/mpeg")
- c.Header("Content-Disposition", "attachment; filename=speech.mp3")
+ c.Header("Prompt-Type", "audio/mpeg")
+ c.Header("Prompt-Disposition", "attachment; filename=speech.mp3")
// 直接写入完整的音频数据到响应
- c.Writer.Write(audioBytes)
+ _, err = c.Writer.Write(audioBytes)
+ if err != nil {
+ logger.Error("写入音频数据到响应失败:", err)
+ }
}
+
+// // OPenAI 消息发送实现
+// func (h *ChatHandler) sendOpenAiMessage(
+// req types.ApiRequest,
+// userVo vo.User,
+// ctx context.Context,
+// session *types.ChatSession,
+// role model.ChatRole,
+// prompt string,
+// c *gin.Context) error {
+// promptCreatedAt := time.Now() // 记录提问时间
+// start := time.Now()
+// var apiKey = model.ApiKey{}
+// response, err := h.doRequest(ctx, req, session, &apiKey)
+// logger.Info("HTTP请求完成,耗时:", time.Since(start))
+// if err != nil {
+// if strings.Contains(err.Error(), "context canceled") {
+// return fmt.Errorf("用户取消了请求:%s", prompt)
+// } else if strings.Contains(err.Error(), "no available key") {
+// return errors.New("抱歉😔😔😔,系统已经没有可用的 API KEY,请联系管理员!")
+// }
+// return err
+// } else {
+// defer response.Body.Close()
+// }
+
+// if response.StatusCode != 200 {
+// body, _ := io.ReadAll(response.Body)
+// return fmt.Errorf("请求 OpenAI API 失败:%d, %v", response.StatusCode, string(body))
+// }
+
+// contentType := response.Header.Get("Content-Type")
+// if strings.Contains(contentType, "text/event-stream") {
+// replyCreatedAt := time.Now() // 记录回复时间
+// // 循环读取 Chunk 消息
+// var message = types.Message{Role: "assistant"}
+// var contents = make([]string, 0)
+// var function model.Function
+// var toolCall = false
+// var arguments = make([]string, 0)
+// var reasoning = false
+
+// pushMessage(c, ChatEventStart, "开始响应")
+// scanner := bufio.NewScanner(response.Body)
+// for scanner.Scan() {
+// line := scanner.Text()
+// if !strings.Contains(line, "data:") || len(line) < 30 {
+// continue
+// }
+// var responseBody = types.ApiResponse{}
+// err = json.Unmarshal([]byte(line[6:]), &responseBody)
+// if err != nil { // 数据解析出错
+// return errors.New(line)
+// }
+// if len(responseBody.Choices) == 0 { // Fixed: 兼容 Azure API 第一个输出空行
+// continue
+// }
+// if responseBody.Choices[0].Delta.Content == nil &&
+// responseBody.Choices[0].Delta.ToolCalls == nil &&
+// responseBody.Choices[0].Delta.ReasoningContent == "" {
+// continue
+// }
+
+// if responseBody.Choices[0].FinishReason == "stop" && len(contents) == 0 {
+// pushMessage(c, ChatEventError, "抱歉😔😔😔,AI助手由于未知原因已经停止输出内容。")
+// break
+// }
+
+// var tool types.ToolCall
+// if len(responseBody.Choices[0].Delta.ToolCalls) > 0 {
+// tool = responseBody.Choices[0].Delta.ToolCalls[0]
+// if toolCall && tool.Function.Name == "" {
+// arguments = append(arguments, tool.Function.Arguments)
+// continue
+// }
+// }
+
+// // 兼容 Function Call
+// fun := responseBody.Choices[0].Delta.FunctionCall
+// if fun.Name != "" {
+// tool = *new(types.ToolCall)
+// tool.Function.Name = fun.Name
+// } else if toolCall {
+// arguments = append(arguments, fun.Arguments)
+// continue
+// }
+
+// if !utils.IsEmptyValue(tool) {
+// res := h.DB.Where("name = ?", tool.Function.Name).First(&function)
+// if res.Error == nil {
+// toolCall = true
+// callMsg := fmt.Sprintf("正在调用工具 `%s` 作答 ...\n\n", function.Label)
+// pushMessage(c, ChatEventMessageDelta, map[string]interface{}{
+// "type": "text",
+// "content": callMsg,
+// })
+// contents = append(contents, callMsg)
+// }
+// continue
+// }
+
+// if responseBody.Choices[0].FinishReason == "tool_calls" ||
+// responseBody.Choices[0].FinishReason == "function_call" { // 函数调用完毕
+// break
+// }
+
+// // output stopped
+// if responseBody.Choices[0].FinishReason != "" {
+// break // 输出完成或者输出中断了
+// } else { // 正常输出结果
+// // 兼容思考过程
+// if responseBody.Choices[0].Delta.ReasoningContent != "" {
+// reasoningContent := responseBody.Choices[0].Delta.ReasoningContent
+// if !reasoning {
+// reasoningContent = fmt.Sprintf("%s", reasoningContent)
+// reasoning = true
+// }
+
+// pushMessage(c, ChatEventMessageDelta, map[string]interface{}{
+// "type": "text",
+// "content": reasoningContent,
+// })
+// contents = append(contents, reasoningContent)
+// } else if responseBody.Choices[0].Delta.Content != "" {
+// finalContent := responseBody.Choices[0].Delta.Content
+// if reasoning {
+// finalContent = fmt.Sprintf("%s", responseBody.Choices[0].Delta.Content)
+// reasoning = false
+// }
+// contents = append(contents, utils.InterfaceToString(finalContent))
+// pushMessage(c, ChatEventMessageDelta, map[string]interface{}{
+// "type": "text",
+// "content": finalContent,
+// })
+// }
+// }
+// } // end for
+
+// if err := scanner.Err(); err != nil {
+// if strings.Contains(err.Error(), "context canceled") {
+// logger.Info("用户取消了请求:", prompt)
+// } else {
+// logger.Error("信息读取出错:", err)
+// }
+// }
+
+// if toolCall { // 调用函数完成任务
+// params := make(map[string]any)
+// _ = utils.JsonDecode(strings.Join(arguments, ""), ¶ms)
+// logger.Debugf("函数名称: %s, 函数参数:%s", function.Name, params)
+// params["user_id"] = userVo.Id
+// var apiRes types.BizVo
+// r, err := req2.C().R().SetHeader("Content-Type", "application/json").
+// SetHeader("Authorization", function.Token).
+// SetBody(params).Post(function.Action)
+// errMsg := ""
+// if err != nil {
+// errMsg = err.Error()
+// } else {
+// all, _ := io.ReadAll(r.Body)
+// err = json.Unmarshal(all, &apiRes)
+// if err != nil {
+// errMsg = err.Error()
+// } else if apiRes.Code != types.Success {
+// errMsg = apiRes.Message
+// }
+// }
+
+// if errMsg != "" {
+// errMsg = "调用函数工具出错:" + errMsg
+// contents = append(contents, errMsg)
+// } else {
+// errMsg = utils.InterfaceToString(apiRes.Data)
+// contents = append(contents, errMsg)
+// }
+// pushMessage(c, ChatEventMessageDelta, map[string]interface{}{
+// "type": "text",
+// "content": errMsg,
+// })
+// }
+
+// // 消息发送成功
+// if len(contents) > 0 {
+// usage := Usage{
+// Prompt: prompt,
+// Content: strings.Join(contents, ""),
+// PromptTokens: 0,
+// CompletionTokens: 0,
+// TotalTokens: 0,
+// }
+// message.Content = usage.Content
+// h.saveChatHistory(req, usage, message, session, role, userVo, promptCreatedAt, replyCreatedAt)
+// }
+// } else {
+// var respVo OpenAIResVo
+// body, err := io.ReadAll(response.Body)
+// if err != nil {
+// return fmt.Errorf("读取响应失败:%v", body)
+// }
+// err = json.Unmarshal(body, &respVo)
+// if err != nil {
+// return fmt.Errorf("解析响应失败:%v", body)
+// }
+// content := respVo.Choices[0].Message.Content
+// if strings.HasPrefix(req.Model, "o1-") {
+// content = fmt.Sprintf("AI思考结束,耗时:%d 秒。\n%s", time.Now().Unix()-session.Start, respVo.Choices[0].Message.Content)
+// }
+// pushMessage(c, ChatEventMessageDelta, map[string]interface{}{
+// "type": "text",
+// "content": content,
+// })
+// respVo.Usage.Prompt = prompt
+// respVo.Usage.Content = content
+// h.saveChatHistory(req, respVo.Usage, respVo.Choices[0].Message, session, role, userVo, promptCreatedAt, time.Now())
+// }
+
+// return nil
+// }
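With the WebSocket channel removed, clients consume the chat stream over plain HTTP. A minimal Go sketch, not part of the patch, of posting a ChatInput payload to the new /api/chat/message route and reading the SSE frames written by pushMessage; the base URL, the IDs, and the absence of auth headers are assumptions for illustration:

```go
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// Field names mirror ChatInput; the IDs and host are placeholders.
	payload := []byte(`{"user_id":1,"role_id":1,"model_id":1,"chat_id":"demo","prompt":"hello","stream":true}`)
	req, err := http.NewRequest(http.MethodPost, "http://localhost:5678/api/chat/message", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// gin's SSEvent emits an "event:message" line followed by one "data:{...}" line per push.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "data:") {
			fmt.Println(strings.TrimSpace(strings.TrimPrefix(line, "data:")))
		}
	}
}
```

Each data payload is the `{"type": ..., "body": ...}` object built in pushMessage, with type being one of the ChatEvent* constants declared at the top of this file.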
diff --git a/api/handler/chat_item_handler.go b/api/handler/chat_item_handler.go
index f08be3fe..53d95b64 100644
--- a/api/handler/chat_item_handler.go
+++ b/api/handler/chat_item_handler.go
@@ -104,8 +104,6 @@ func (h *ChatHandler) Clear(c *gin.Context) {
var chatIds = make([]string, 0)
for _, chat := range chats {
chatIds = append(chatIds, chat.ChatId)
- // 清空会话上下文
- h.ChatContexts.Delete(chat.ChatId)
}
err = h.DB.Transaction(func(tx *gorm.DB) error {
res := h.DB.Where("user_id =?", user.Id).Delete(&model.ChatItem{})
@@ -133,20 +131,28 @@ func (h *ChatHandler) Clear(c *gin.Context) {
func (h *ChatHandler) History(c *gin.Context) {
chatId := c.Query("chat_id") // 会话 ID
var items []model.ChatMessage
- var messages = make([]vo.HistoryMessage, 0)
+ var messages = make([]vo.ChatMessage, 0)
res := h.DB.Where("chat_id = ?", chatId).Find(&items)
if res.Error != nil {
resp.ERROR(c, "No history message")
return
} else {
for _, item := range items {
- var v vo.HistoryMessage
+ var v vo.ChatMessage
err := utils.CopyObject(item, &v)
+ if err != nil {
+ continue
+ }
+ // 解析内容
+ var content vo.MsgContent
+ err = utils.JsonDecode(item.Content, &content)
+ if err != nil {
+ content.Text = item.Content
+ }
+ v.Content = content
v.CreatedAt = item.CreatedAt.Unix()
v.UpdatedAt = item.UpdatedAt.Unix()
- if err == nil {
- messages = append(messages, v)
- }
+ messages = append(messages, v)
}
}
@@ -179,10 +185,6 @@ func (h *ChatHandler) Remove(c *gin.Context) {
return
}
- // TODO: 是否要删除 MidJourney 绘画记录和图片文件?
-
- // 清空会话上下文
- h.ChatContexts.Delete(chatId)
resp.SUCCESS(c, types.OkMsg)
}
diff --git a/api/handler/chat_openai_handler.go b/api/handler/chat_openai_handler.go
index fea1a1e3..6a71c405 100644
--- a/api/handler/chat_openai_handler.go
+++ b/api/handler/chat_openai_handler.go
@@ -21,6 +21,7 @@ import (
"strings"
"time"
+ "github.com/gin-gonic/gin"
req2 "github.com/imroc/req/v3"
)
@@ -55,18 +56,16 @@ func (h *ChatHandler) sendOpenAiMessage(
req types.ApiRequest,
userVo vo.User,
ctx context.Context,
- session *types.ChatSession,
- role model.ChatRole,
- prompt string,
- ws *types.WsClient) error {
+ input ChatInput,
+ c *gin.Context) error {
promptCreatedAt := time.Now() // 记录提问时间
start := time.Now()
var apiKey = model.ApiKey{}
- response, err := h.doRequest(ctx, req, session, &apiKey)
+ response, err := h.doRequest(ctx, req, input, &apiKey)
logger.Info("HTTP请求完成,耗时:", time.Since(start))
if err != nil {
if strings.Contains(err.Error(), "context canceled") {
- return fmt.Errorf("用户取消了请求:%s", prompt)
+ return fmt.Errorf("用户取消了请求:%s", input.Prompt)
} else if strings.Contains(err.Error(), "no available key") {
return errors.New("抱歉😔😔😔,系统已经没有可用的 API KEY,请联系管理员!")
}
@@ -112,7 +111,7 @@ func (h *ChatHandler) sendOpenAiMessage(
}
if responseBody.Choices[0].FinishReason == "stop" && len(contents) == 0 {
- utils.SendChunkMsg(ws, "抱歉😔😔😔,AI助手由于未知原因已经停止输出内容。")
+ pushMessage(c, "text", "抱歉😔😔😔,AI助手由于未知原因已经停止输出内容。")
break
}
@@ -140,7 +139,7 @@ func (h *ChatHandler) sendOpenAiMessage(
if res.Error == nil {
toolCall = true
callMsg := fmt.Sprintf("正在调用工具 `%s` 作答 ...\n\n", function.Label)
- utils.SendChunkMsg(ws, callMsg)
+ pushMessage(c, "text", callMsg)
contents = append(contents, callMsg)
}
continue
@@ -163,7 +162,7 @@ func (h *ChatHandler) sendOpenAiMessage(
reasoning = true
}
- utils.SendChunkMsg(ws, reasoningContent)
+ pushMessage(c, "text", reasoningContent)
contents = append(contents, reasoningContent)
} else if responseBody.Choices[0].Delta.Content != "" {
finalContent := responseBody.Choices[0].Delta.Content
@@ -172,14 +171,14 @@ func (h *ChatHandler) sendOpenAiMessage(
reasoning = false
}
contents = append(contents, utils.InterfaceToString(finalContent))
- utils.SendChunkMsg(ws, finalContent)
+ pushMessage(c, "text", finalContent)
}
}
} // end for
if err := scanner.Err(); err != nil {
if strings.Contains(err.Error(), "context canceled") {
- logger.Info("用户取消了请求:", prompt)
+ logger.Info("用户取消了请求:", input.Prompt)
} else {
logger.Error("信息读取出错:", err)
}
@@ -214,20 +213,20 @@ func (h *ChatHandler) sendOpenAiMessage(
errMsg = utils.InterfaceToString(apiRes.Data)
contents = append(contents, errMsg)
}
- utils.SendChunkMsg(ws, errMsg)
+ pushMessage(c, "text", errMsg)
}
// 消息发送成功
if len(contents) > 0 {
usage := Usage{
- Prompt: prompt,
+ Prompt: input.Prompt,
Content: strings.Join(contents, ""),
PromptTokens: 0,
CompletionTokens: 0,
TotalTokens: 0,
}
message.Content = usage.Content
- h.saveChatHistory(req, usage, message, session, role, userVo, promptCreatedAt, replyCreatedAt)
+ h.saveChatHistory(req, usage, message, input, userVo, promptCreatedAt, replyCreatedAt)
}
} else { // 非流式输出
var respVo OpenAIResVo
@@ -240,13 +239,10 @@ func (h *ChatHandler) sendOpenAiMessage(
return fmt.Errorf("解析响应失败:%v", body)
}
content := respVo.Choices[0].Message.Content
- if strings.HasPrefix(req.Model, "o1-") {
- content = fmt.Sprintf("AI思考结束,耗时:%d 秒。\n%s", time.Now().Unix()-session.Start, respVo.Choices[0].Message.Content)
- }
- utils.SendChunkMsg(ws, content)
- respVo.Usage.Prompt = prompt
+ pushMessage(c, "text", content)
+ respVo.Usage.Prompt = input.Prompt
respVo.Usage.Content = content
- h.saveChatHistory(req, respVo.Usage, respVo.Choices[0].Message, session, role, userVo, promptCreatedAt, time.Now())
+ h.saveChatHistory(req, respVo.Usage, respVo.Choices[0].Message, input, userVo, promptCreatedAt, time.Now())
}
return nil
diff --git a/api/handler/user_handler.go b/api/handler/user_handler.go
index dee60047..0e159759 100644
--- a/api/handler/user_handler.go
+++ b/api/handler/user_handler.go
@@ -137,13 +137,15 @@ func (h *UserHandler) Register(c *gin.Context) {
salt := utils.RandString(8)
user := model.User{
- Username: data.Username,
- Password: utils.GenPassword(data.Password, salt),
- Avatar: "/images/avatar/user.png",
- Salt: salt,
- Status: true,
- ChatRoles: utils.JsonEncode([]string{"gpt"}), // 默认只订阅通用助手角色
- Power: h.App.SysConfig.InitPower,
+ Username: data.Username,
+ Password: utils.GenPassword(data.Password, salt),
+ Avatar: "/images/avatar/user.png",
+ Salt: salt,
+ Status: true,
+ ChatRoles: utils.JsonEncode([]string{"gpt"}), // 默认只订阅通用助手角色
+ ChatConfig: "{}",
+ ChatModels: "{}",
+ Power: h.App.SysConfig.InitPower,
}
// check if the username is existing
@@ -170,10 +172,15 @@ func (h *UserHandler) Register(c *gin.Context) {
if data.InviteCode != "" {
user.Power += h.App.SysConfig.InvitePower
}
+
if h.licenseService.GetLicense().Configs.DeCopy {
user.Nickname = fmt.Sprintf("用户@%d", utils.RandomNumber(6))
} else {
- user.Nickname = fmt.Sprintf("极客学长@%d", utils.RandomNumber(6))
+ defaultNickname := h.App.SysConfig.DefaultNickname
+ if defaultNickname == "" {
+ defaultNickname = "极客学长"
+ }
+ user.Nickname = fmt.Sprintf("%s@%d", defaultNickname, utils.RandomNumber(6))
}
tx := h.DB.Begin()
diff --git a/api/handler/ws_handler.go b/api/handler/ws_handler.go
deleted file mode 100644
index 77e9636e..00000000
--- a/api/handler/ws_handler.go
+++ /dev/null
@@ -1,152 +0,0 @@
-package handler
-
-// * +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-// * Copyright 2023 The Geek-AI Authors. All rights reserved.
-// * Use of this source code is governed by a Apache-2.0 license
-// * that can be found in the LICENSE file.
-// * @Author yangjian102621@163.com
-// * +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-
-import (
- "context"
- "geekai/core"
- "geekai/core/types"
- "geekai/service"
- "geekai/store/model"
- "geekai/utils"
- "net/http"
- "strings"
-
- "github.com/gin-gonic/gin"
- "github.com/gorilla/websocket"
- "gorm.io/gorm"
-)
-
-// Websocket 连接处理 handler
-
-type WebsocketHandler struct {
- BaseHandler
- wsService *service.WebsocketService
- chatHandler *ChatHandler
-}
-
-func NewWebsocketHandler(app *core.AppServer, s *service.WebsocketService, db *gorm.DB, chatHandler *ChatHandler) *WebsocketHandler {
- return &WebsocketHandler{
- BaseHandler: BaseHandler{App: app, DB: db},
- chatHandler: chatHandler,
- wsService: s,
- }
-}
-
-func (h *WebsocketHandler) Client(c *gin.Context) {
- clientProtocols := c.GetHeader("Sec-WebSocket-Protocol")
- ws, err := (&websocket.Upgrader{
- CheckOrigin: func(r *http.Request) bool { return true },
- Subprotocols: strings.Split(clientProtocols, ","),
- }).Upgrade(c.Writer, c.Request, nil)
- if err != nil {
- logger.Error(err)
- c.Abort()
- return
- }
-
- clientId := c.Query("client_id")
- client := types.NewWsClient(ws, clientId)
- userId := h.GetLoginUserId(c)
- if userId == 0 {
- _ = client.Send([]byte("Invalid user_id"))
- c.Abort()
- return
- }
- var user model.User
- if err := h.DB.Where("id", userId).First(&user).Error; err != nil {
- _ = client.Send([]byte("Invalid user_id"))
- c.Abort()
- return
- }
-
- h.wsService.Clients.Put(clientId, client)
- logger.Infof("New websocket connected, IP: %s", c.RemoteIP())
- go func() {
- for {
- _, msg, err := client.Receive()
- if err != nil {
- logger.Debugf("close connection: %s", client.Conn.RemoteAddr())
- client.Close()
- h.wsService.Clients.Delete(clientId)
- break
- }
-
- var message types.InputMessage
- err = utils.JsonDecode(string(msg), &message)
- if err != nil {
- continue
- }
-
- logger.Debugf("Receive a message:%+v", message)
- if message.Type == types.MsgTypePing {
- utils.SendChannelMsg(client, types.ChPing, "pong")
- continue
- }
-
- // 当前只处理聊天消息,其他消息全部丢弃
- var chatMessage types.ChatMessage
- err = utils.JsonDecode(utils.JsonEncode(message.Body), &chatMessage)
- if err != nil || message.Channel != types.ChChat {
- logger.Warnf("invalid message body:%+v", message.Body)
- continue
- }
- var chatRole model.ChatRole
- err = h.DB.First(&chatRole, chatMessage.RoleId).Error
- if err != nil || !chatRole.Enable {
- utils.SendAndFlush(client, "当前聊天角色不存在或者未启用,请更换角色之后再发起对话!!!")
- continue
- }
- // if the role bind a model_id, use role's bind model_id
- if chatRole.ModelId > 0 {
- chatMessage.RoleId = int(chatRole.ModelId)
- }
- // get model info
- var chatModel model.ChatModel
- err = h.DB.Where("id", chatMessage.ModelId).First(&chatModel).Error
- if err != nil || chatModel.Enabled == false {
- utils.SendAndFlush(client, "当前AI模型暂未启用,请更换模型后再发起对话!!!")
- continue
- }
-
- session := &types.ChatSession{
- ClientIP: c.ClientIP(),
- UserId: userId,
- }
-
- // use old chat data override the chat model and role ID
- var chat model.ChatItem
- h.DB.Where("chat_id", chatMessage.ChatId).First(&chat)
- if chat.Id > 0 {
- chatModel.Id = chat.ModelId
- chatMessage.RoleId = int(chat.RoleId)
- }
-
- session.ChatId = chatMessage.ChatId
- session.Tools = chatMessage.Tools
- session.Stream = chatMessage.Stream
- // 复制模型数据
- err = utils.CopyObject(chatModel, &session.Model)
- if err != nil {
- logger.Error(err, chatModel)
- }
- session.Model.Id = chatModel.Id
- ctx, cancel := context.WithCancel(context.Background())
- h.chatHandler.ReqCancelFunc.Put(clientId, cancel)
- err = h.chatHandler.sendMessage(ctx, session, chatRole, chatMessage.Content, client)
- if err != nil {
- logger.Error(err)
- utils.SendAndFlush(client, err.Error())
- } else {
- utils.SendMsg(client, types.ReplyMessage{Channel: types.ChChat, Type: types.MsgTypeEnd})
- logger.Infof("回答完毕: %v", message.Body)
- }
-
- }
- }()
-}
diff --git a/api/main.go b/api/main.go
index 15dba11d..eebab9e6 100644
--- a/api/main.go
+++ b/api/main.go
@@ -248,6 +248,7 @@ func main() {
}),
fx.Invoke(func(s *core.AppServer, h *handler.ChatHandler) {
group := s.Engine.Group("/api/chat/")
+ group.Any("message", h.Chat)
group.GET("list", h.List)
group.GET("detail", h.Detail)
group.POST("update", h.Update)
@@ -519,11 +520,6 @@ func main() {
group := s.Engine.Group("/api/test")
group.Any("sse", h.PostTest, h.SseTest)
}),
- fx.Provide(service.NewWebsocketService),
- fx.Provide(handler.NewWebsocketHandler),
- fx.Invoke(func(s *core.AppServer, h *handler.WebsocketHandler) {
- s.Engine.Any("/api/ws", h.Client)
- }),
fx.Provide(handler.NewPromptHandler),
fx.Invoke(func(s *core.AppServer, h *handler.PromptHandler) {
group := s.Engine.Group("/api/prompt")
diff --git a/api/service/crawler/service.go b/api/service/crawler/service.go
index 2899a8e2..39fb2fa7 100644
--- a/api/service/crawler/service.go
+++ b/api/service/crawler/service.go
@@ -24,15 +24,15 @@ func NewService() (*Service, error) {
// 启动浏览器
path, _ := launcher.LookPath()
u := launcher.New().Bin(path).
- Headless(true). // 无头模式
- Set("disable-web-security", ""). // 禁用网络安全限制
- Set("disable-gpu", ""). // 禁用 GPU 加速
- Set("no-sandbox", ""). // 禁用沙箱模式
- Set("disable-setuid-sandbox", "").// 禁用 setuid 沙箱
+ Headless(true). // 无头模式
+ Set("disable-web-security", ""). // 禁用网络安全限制
+ Set("disable-gpu", ""). // 禁用 GPU 加速
+ Set("no-sandbox", ""). // 禁用沙箱模式
+ Set("disable-setuid-sandbox", ""). // 禁用 setuid 沙箱
MustLaunch()
browser := rod.New().ControlURL(u).MustConnect()
-
+
return &Service{
browser: browser,
}, nil
@@ -50,7 +50,7 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error
if keyword == "" {
return nil, errors.New("搜索关键词不能为空")
}
-
+
if maxPages <= 0 {
maxPages = 1
}
@@ -59,18 +59,18 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error
}
results := make([]SearchResult, 0)
-
+
// 使用百度搜索
searchURL := fmt.Sprintf("https://www.baidu.com/s?wd=%s", url.QueryEscape(keyword))
-
+
// 设置页面超时
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
-
+
// 创建页面
page := s.browser.MustPage()
defer page.MustClose()
-
+
// 设置视口大小
err := page.SetViewport(&proto.EmulationSetDeviceMetricsOverride{
Width: 1280,
@@ -79,19 +79,19 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error
if err != nil {
return nil, fmt.Errorf("设置视口失败: %v", err)
}
-
+
// 导航到搜索页面
err = page.Context(ctx).Navigate(searchURL)
if err != nil {
return nil, fmt.Errorf("导航到搜索页面失败: %v", err)
}
-
+
// 等待搜索结果加载完成
err = page.WaitLoad()
if err != nil {
return nil, fmt.Errorf("等待页面加载完成失败: %v", err)
}
-
+
// 分析当前页面的搜索结果
for i := 0; i < maxPages; i++ {
if i > 0 {
@@ -100,52 +100,52 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error
if err != nil || nextPage == nil {
break // 没有下一页
}
-
+
err = nextPage.Click(proto.InputMouseButtonLeft, 1)
if err != nil {
break // 点击下一页失败
}
-
+
// 等待新页面加载
err = page.WaitLoad()
if err != nil {
break
}
}
-
+
// 提取搜索结果
resultElements, err := page.Elements(".result, .c-container")
if err != nil || resultElements == nil {
continue
}
-
+
for _, result := range resultElements {
// 获取标题
titleElement, err := result.Element("h3, .t")
if err != nil || titleElement == nil {
continue
}
-
+
title, err := titleElement.Text()
if err != nil {
continue
}
-
+
// 获取 URL
linkElement, err := titleElement.Element("a")
if err != nil || linkElement == nil {
continue
}
-
+
href, err := linkElement.Attribute("href")
if err != nil || href == nil {
continue
}
-
+
// 获取内容摘要 - 尝试多个可能的选择器
var contentElement *rod.Element
var content string
-
+
// 尝试多个可能的选择器来适应不同版本的百度搜索结果
selectors := []string{".content-right_8Zs40", ".c-abstract", ".content_LJ0WN", ".content"}
for _, selector := range selectors {
@@ -157,7 +157,7 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error
}
}
}
-
+
// 如果所有选择器都失败,尝试直接从结果块中提取文本
if content == "" {
// 获取结果元素的所有文本
@@ -173,21 +173,21 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error
}
}
}
-
+
// 添加到结果集
results = append(results, SearchResult{
Title: title,
URL: *href,
Content: content,
})
-
+
// 限制结果数量,每页最多 10 条
if len(results) >= 10*maxPages {
break
}
}
}
-
+
// 获取真实 URL(百度搜索结果中的 URL 是短链接,需要跳转获取真实 URL)
for i, result := range results {
realURL, err := s.getRedirectURL(result.URL)
@@ -195,7 +195,7 @@ func (s *Service) WebSearch(keyword string, maxPages int) ([]SearchResult, error
results[i].URL = realURL
}
}
-
+
return results, nil
}
@@ -209,22 +209,22 @@ func (s *Service) getRedirectURL(shortURL string) (string, error) {
defer func() {
_ = page.Close()
}()
-
+
// 导航到短链接
err = page.Navigate(shortURL)
if err != nil {
return shortURL, err // 返回原始URL
}
-
+
// 等待重定向完成
time.Sleep(2 * time.Second)
-
+
// 获取当前 URL
info, err := page.Info()
if err != nil {
return shortURL, err // 返回原始URL
}
-
+
return info.URL, nil
}
@@ -247,21 +247,21 @@ func SearchWeb(keyword string, maxPages int) (string, error) {
log.Errorf("爬虫服务崩溃: %v", r)
}
}()
-
+
service, err := NewService()
if err != nil {
return "", fmt.Errorf("创建爬虫服务失败: %v", err)
}
defer service.Close()
-
+
// 设置超时上下文
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
defer cancel()
-
+
// 使用goroutine和通道来处理超时
resultChan := make(chan []SearchResult, 1)
errChan := make(chan error, 1)
-
+
go func() {
results, err := service.WebSearch(keyword, maxPages)
if err != nil {
@@ -270,7 +270,7 @@ func SearchWeb(keyword string, maxPages int) (string, error) {
}
resultChan <- results
}()
-
+
// 等待结果或超时
select {
case <-ctx.Done():
@@ -281,32 +281,32 @@ func SearchWeb(keyword string, maxPages int) (string, error) {
if len(results) == 0 {
return "未找到关于 \"" + keyword + "\" 的相关搜索结果", nil
}
-
+
// 格式化结果
var builder strings.Builder
builder.WriteString(fmt.Sprintf("为您找到关于 \"%s\" 的 %d 条搜索结果:\n\n", keyword, len(results)))
-
+
for i, result := range results {
// // 尝试打开链接获取实际内容
// page := service.browser.MustPage()
// defer page.MustClose()
-
+
// // 设置页面超时
// pageCtx, pageCancel := context.WithTimeout(context.Background(), 10*time.Second)
// defer pageCancel()
-
+
// // 导航到目标页面
// err := page.Context(pageCtx).Navigate(result.URL)
// if err == nil {
// // 等待页面加载
// _ = page.WaitLoad()
-
+
// // 获取页面标题
// title, err := page.Eval("() => document.title")
// if err == nil && title.Value.String() != "" {
// result.Title = title.Value.String()
// }
-
+
// // 获取页面主要内容
// if content, err := page.Element("body"); err == nil {
// if text, err := content.Text(); err == nil {
@@ -315,11 +315,11 @@ func SearchWeb(keyword string, maxPages int) (string, error) {
// if len(text) > 200 {
// text = text[:200] + "..."
// }
- // result.Content = text
+ // result.Content = text
// }
// }
// }
-
+
builder.WriteString(fmt.Sprintf("%d. **%s**\n", i+1, result.Title))
builder.WriteString(fmt.Sprintf(" 链接: %s\n", result.URL))
if result.Content != "" {
@@ -327,7 +327,7 @@ func SearchWeb(keyword string, maxPages int) (string, error) {
}
builder.WriteString("\n")
}
-
+
return builder.String(), nil
}
-}
\ No newline at end of file
+}
diff --git a/api/service/dalle/service.go b/api/service/dalle/service.go
index a38cde36..29cf4491 100644
--- a/api/service/dalle/service.go
+++ b/api/service/dalle/service.go
@@ -16,7 +16,6 @@ import (
"geekai/store"
"geekai/store/model"
"geekai/utils"
- "io"
"time"
"github.com/go-redis/redis/v8"
@@ -182,9 +181,6 @@ func (s *Service) Image(task types.DallTask, sync bool) (string, error) {
return "", fmt.Errorf("error with send request, status: %s, %+v", r.Status, errRes.Error)
}
- all, _ := io.ReadAll(r.Body)
- logger.Debugf("response: %+v", string(all))
-
// update the api key last use time
s.db.Model(&apiKey).UpdateColumn("last_used_at", time.Now().Unix())
var imgURL string
diff --git a/api/service/suno/service.go b/api/service/suno/service.go
index 14e36e88..59e2aecc 100644
--- a/api/service/suno/service.go
+++ b/api/service/suno/service.go
@@ -94,6 +94,8 @@ func (s *Service) Run() {
continue
}
+ logger.Infof("任务提交成功: %+v", r)
+
// 更新任务信息
s.db.Model(&model.SunoJob{Id: task.Id}).UpdateColumns(map[string]interface{}{
"task_id": r.Data,
@@ -127,6 +129,7 @@ func (s *Service) Create(task types.SunoTask) (RespVo, error) {
"continue_clip_id": task.RefSongId,
"continue_at": task.ExtendSecs,
"make_instrumental": task.Instrumental,
+ "mv": task.Model,
}
// 灵感模式
if task.Type == 1 {
@@ -134,7 +137,6 @@ func (s *Service) Create(task types.SunoTask) (RespVo, error) {
} else { // 自定义模式
reqBody["prompt"] = task.Lyrics
reqBody["tags"] = task.Tags
- reqBody["mv"] = task.Model
reqBody["title"] = task.Title
}
diff --git a/api/service/types.go b/api/service/types.go
index 9c774cbf..f39bf73c 100644
--- a/api/service/types.go
+++ b/api/service/types.go
@@ -113,7 +113,7 @@ Please remember, the final output must be the same language with user’s input.
- What kinds of examples may need to be included, how many, and whether they are complex enough to benefit from placeholders.
- Clarity and Conciseness: Use clear, specific language. Avoid unnecessary instructions or bland statements.
- Formatting: Use markdown features for readability. DO NOT USE CODE BLOCKS UNLESS SPECIFICALLY REQUESTED.
-- Preserve User Content: If the input task or prompt includes extensive guidelines or examples, preserve them entirely, or as closely as possible. If they are vague, consider breaking down into sub-steps. Keep any details, guidelines, examples, variables, or placeholders provided by the user.
+- Preserve User Content: If the input task or prompt includes extensive guidelines or examples, preserve them entirely, or as closely as possible. If they are vague, consider breaking down into sub-steps. Keep any details, guidelines, examples, variables, or placeholders provided by the user.
- Constants: DO include constants in the prompt, as they are not susceptible to prompt injection. Such as guides, rubrics, and examples.
- Output Format: Explicitly the most appropriate output format, in detail. This should include length and syntax (e.g. short sentence, paragraph, JSON, etc.)
- For tasks outputting well-defined or structured data (classification, JSON, etc.) bias toward outputting a JSON.
diff --git a/api/service/ws_service.go b/api/service/ws_service.go
deleted file mode 100644
index d049f6bd..00000000
--- a/api/service/ws_service.go
+++ /dev/null
@@ -1,13 +0,0 @@
-package service
-
-import "geekai/core/types"
-
-type WebsocketService struct {
- Clients *types.LMap[string, *types.WsClient] // clientId => Client
-}
-
-func NewWebsocketService() *WebsocketService {
- return &WebsocketService{
- Clients: types.NewLMap[string, *types.WsClient](),
- }
-}
diff --git a/api/store/model/chat_history.go b/api/store/model/chat_message.go
similarity index 66%
rename from api/store/model/chat_history.go
rename to api/store/model/chat_message.go
index 393b99c9..b6567a65 100644
--- a/api/store/model/chat_history.go
+++ b/api/store/model/chat_message.go
@@ -5,17 +5,17 @@ import (
)
type ChatMessage struct {
- Id int64 `gorm:"column:id;primaryKey;autoIncrement" json:"id"`
- UserId uint `gorm:"column:user_id;type:int;not null;comment:用户 ID" json:"user_id"`
+ Id uint `gorm:"column:id;primaryKey;autoIncrement" json:"id"`
+ UserId uint `gorm:"column:user_id;type:int;not null;comment:用户 ID" json:"user_id"`
ChatId string `gorm:"column:chat_id;type:char(40);not null;index;comment:会话 ID" json:"chat_id"`
Type string `gorm:"column:type;type:varchar(10);not null;comment:类型:prompt|reply" json:"type"`
Icon string `gorm:"column:icon;type:varchar(255);not null;comment:角色图标" json:"icon"`
- RoleId uint `gorm:"column:role_id;type:int;not null;comment:角色 ID" json:"role_id"`
- Model string `gorm:"column:model;type:varchar(30);comment:模型名称" json:"model"`
+ RoleId uint `gorm:"column:role_id;type:int;not null;comment:角色 ID" json:"role_id"`
+ Model string `gorm:"column:model;type:varchar(255);comment:模型名称" json:"model"`
Content string `gorm:"column:content;type:text;not null;comment:聊天内容" json:"content"`
Tokens int `gorm:"column:tokens;type:smallint;not null;comment:耗费 token 数量" json:"tokens"`
TotalTokens int `gorm:"column:total_tokens;type:int;not null;comment:消耗总Token长度" json:"total_tokens"`
- UseContext bool `gorm:"column:use_context;type:tinyint(1);not null;comment:是否允许作为上下文语料" json:"use_context"`
+ UseContext bool `gorm:"column:use_context;type:tinyint(1);not null;comment:是否允许作为上下文语料" json:"use_context"`
CreatedAt time.Time `gorm:"column:created_at;type:datetime;not null" json:"created_at"`
UpdatedAt time.Time `gorm:"column:updated_at;type:datetime;not null" json:"updated_at"`
}
diff --git a/api/store/model/chat_model.go b/api/store/model/chat_model.go
index b753c347..cf42e154 100644
--- a/api/store/model/chat_model.go
+++ b/api/store/model/chat_model.go
@@ -5,20 +5,20 @@ import (
)
type ChatModel struct {
- Id uint `gorm:"column:id;primaryKey;autoIncrement" json:"id"`
- Description string `gorm:"column:description;type:varchar(1024);not null;default:'';comment:模型类型描述" json:"description"`
- Category string `gorm:"column:category;type:varchar(1024);not null;default:'';comment:模型类别" json:"category"`
+ Id uint `gorm:"column:id;primaryKey;autoIncrement" json:"id"`
+ Desc string `gorm:"column:desc;type:varchar(1024);not null;default:'';comment:模型类型描述" json:"desc"`
+ Tag string `gorm:"column:tag;type:varchar(1024);not null;default:'';comment:模型标签" json:"tag"`
Type string `gorm:"column:type;type:varchar(10);not null;default:chat;comment:模型类型(chat,img)" json:"type"`
Name string `gorm:"column:name;type:varchar(255);not null;comment:模型名称" json:"name"`
Value string `gorm:"column:value;type:varchar(255);not null;comment:模型值" json:"value"`
SortNum int `gorm:"column:sort_num;type:tinyint(1);not null;comment:排序数字" json:"sort_num"`
- Enabled bool `gorm:"column:enabled;type:tinyint(1);not null;default:0;comment:是否启用模型" json:"enabled"`
+ Enabled bool `gorm:"column:enabled;type:tinyint(1);not null;default:0;comment:是否启用模型" json:"enabled"`
Power int `gorm:"column:power;type:smallint;not null;comment:消耗算力点数" json:"power"`
Temperature float32 `gorm:"column:temperature;type:float(3,1);not null;default:1.0;comment:模型创意度" json:"temperature"`
MaxTokens int `gorm:"column:max_tokens;type:int;not null;default:1024;comment:最大响应长度" json:"max_tokens"`
MaxContext int `gorm:"column:max_context;type:int;not null;default:4096;comment:最大上下文长度" json:"max_context"`
- Open bool `gorm:"column:open;type:tinyint(1);not null;comment:是否开放模型" json:"open"`
- KeyId uint `gorm:"column:key_id;type:int;not null;comment:绑定API KEY ID" json:"key_id"`
+ Open bool `gorm:"column:open;type:tinyint(1);not null;comment:是否开放模型" json:"open"`
+ KeyId uint `gorm:"column:key_id;type:int;not null;comment:绑定API KEY ID" json:"key_id"`
Options string `gorm:"column:options;type:text;not null;comment:模型自定义选项" json:"options"`
CreatedAt time.Time `gorm:"column:created_at;type:datetime" json:"created_at"`
UpdatedAt time.Time `gorm:"column:updated_at;type:datetime" json:"updated_at"`
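
The `description`/`category` columns are renamed to `desc`/`tag`, and the JSON keys follow suit. A small illustrative sketch (field values are made up) of how the renamed fields serialize:

```go
// Illustrative only: the renamed fields now appear under "desc" and "tag"
// in the JSON payload consumed by the front end.
package example

import (
	"encoding/json"
	"fmt"

	"geekai/store/model"
)

func printModel() {
	m := model.ChatModel{
		Name:  "GPT-4o",                  // hypothetical values
		Value: "gpt-4o",
		Desc:  "flagship multimodal model", // formerly Description / "description"
		Tag:   "chat",                      // formerly Category / "category"
	}
	data, _ := json.Marshal(m)
	fmt.Println(string(data)) // ... "desc":"flagship multimodal model","tag":"chat" ...
}
```
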
diff --git a/api/store/model/user.go b/api/store/model/user.go
index 3a6fda61..a1866ffb 100644
--- a/api/store/model/user.go
+++ b/api/store/model/user.go
@@ -5,27 +5,27 @@ import (
)
type User struct {
- Id uint `gorm:"column:id;primaryKey;autoIncrement" json:"id"`
- Username string `gorm:"column:username;type:varchar(30);uniqueIndex;not null;comment:用户名" json:"username"`
- Mobile string `gorm:"column:mobile;type:char(11);comment:手机号" json:"mobile"`
- Email string `gorm:"column:email;type:varchar(50);comment:邮箱地址" json:"email"`
- Nickname string `gorm:"column:nickname;type:varchar(30);not null;comment:昵称" json:"nickname"`
- Password string `gorm:"column:password;type:char(64);not null;comment:密码" json:"password"`
- Avatar string `gorm:"column:avatar;type:varchar(255);not null;comment:头像" json:"avatar"`
- Salt string `gorm:"column:salt;type:char(12);not null;comment:密码盐" json:"salt"`
- Power int `gorm:"column:power;type:int;not null;default:0;comment:剩余算力" json:"power"`
- ExpiredTime int64 `gorm:"column:expired_time;type:int;not null;comment:用户过期时间" json:"expired_time"`
- Status bool `gorm:"column:status;type:tinyint(1);not null;comment:当前状态" json:"status"`
- ChatConfig string `gorm:"column:chat_config;type:text;not null;comment:聊天配置json" json:"chat_config"`
- ChatRoles string `gorm:"column:chat_roles_json;type:text;not null;comment:聊天角色 json" json:"chat_roles_json"`
- ChatModels string `gorm:"column:chat_models_json;type:text;not null;comment:AI模型 json" json:"chat_models_json"`
- LastLoginAt int64 `gorm:"column:last_login_at;type:int;not null;comment:最后登录时间" json:"last_login_at"`
- Vip bool `gorm:"column:vip;type:tinyint(1);not null;default:0;comment:是否会员" json:"vip"`
- LastLoginIp string `gorm:"column:last_login_ip;type:char(16);not null;comment:最后登录 IP" json:"last_login_ip"`
- OpenId string `gorm:"column:openid;type:varchar(100);comment:第三方登录账号ID" json:"openid"`
- Platform string `gorm:"column:platform;type:varchar(30);comment:登录平台" json:"platform"`
- CreatedAt time.Time `gorm:"column:created_at;type:datetime;not null" json:"created_at"`
- UpdatedAt time.Time `gorm:"column:updated_at;type:datetime;not null" json:"updated_at"`
+ Id uint `gorm:"column:id;primaryKey;autoIncrement" json:"id"`
+ Username string `gorm:"column:username;type:varchar(30);uniqueIndex;not null;comment:用户名" json:"username"`
+ Mobile string `gorm:"column:mobile;type:char(11);comment:手机号" json:"mobile"`
+ Email string `gorm:"column:email;type:varchar(50);comment:邮箱地址" json:"email"`
+ Nickname string `gorm:"column:nickname;type:varchar(30);not null;comment:昵称" json:"nickname"`
+ Password string `gorm:"column:password;type:char(64);not null;comment:密码" json:"password"`
+ Avatar string `gorm:"column:avatar;type:varchar(255);not null;comment:头像" json:"avatar"`
+ Salt string `gorm:"column:salt;type:char(12);not null;comment:密码盐" json:"salt"`
+ Power int `gorm:"column:power;type:int;default:0;comment:剩余算力" json:"power"`
+ ExpiredTime int64 `gorm:"column:expired_time;type:int;not null;comment:用户过期时间" json:"expired_time"`
+ Status bool `gorm:"column:status;type:tinyint(1);not null;comment:当前状态" json:"status"`
+ ChatConfig string `gorm:"column:chat_config_json;type:text;default:null;comment:聊天配置json" json:"chat_config"`
+ ChatRoles string `gorm:"column:chat_roles_json;type:text;default:null;comment:聊天角色 json" json:"chat_roles"`
+ ChatModels string `gorm:"column:chat_models_json;type:text;default:null;comment:AI模型 json" json:"chat_models"`
+ LastLoginAt int64 `gorm:"column:last_login_at;type:int;not null;comment:最后登录时间" json:"last_login_at"`
+ Vip bool `gorm:"column:vip;type:tinyint(1);not null;default:0;comment:是否会员" json:"vip"`
+ LastLoginIp string `gorm:"column:last_login_ip;type:char(16);not null;comment:最后登录 IP" json:"last_login_ip"`
+ OpenId string `gorm:"column:openid;type:varchar(100);comment:第三方登录账号ID" json:"openid"`
+ Platform string `gorm:"column:platform;type:varchar(30);comment:登录平台" json:"platform"`
+ CreatedAt time.Time `gorm:"column:created_at;type:datetime;not null" json:"created_at"`
+ UpdatedAt time.Time `gorm:"column:updated_at;type:datetime;not null" json:"updated_at"`
}
func (m *User) TableName() string {
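
The User struct shortens the JSON tags for `chat_roles_json`/`chat_models_json` to `chat_roles`/`chat_models`, while the text columns now default to NULL. A hedged sketch of decoding those raw JSON columns; the assumed shapes (a string array for roles, an ID array for models) are illustrative and not confirmed by this diff:

```go
// Hedged sketch: decode the raw JSON text columns on User. The element types
// used here are assumptions made for the example.
package example

import (
	"encoding/json"

	"geekai/store/model"
)

func decodeUserLists(u model.User) (roles []string, models []uint, err error) {
	if u.ChatRoles != "" {
		if err = json.Unmarshal([]byte(u.ChatRoles), &roles); err != nil {
			return
		}
	}
	if u.ChatModels != "" {
		err = json.Unmarshal([]byte(u.ChatModels), &models)
	}
	return
}
```
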
diff --git a/api/store/mysql.go b/api/store/mysql.go
index 70aba960..8233c695 100644
--- a/api/store/mysql.go
+++ b/api/store/mysql.go
@@ -9,16 +9,17 @@ package store
import (
"geekai/core/types"
+ "time"
+
"gorm.io/driver/mysql"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"gorm.io/gorm/schema"
- "time"
)
func NewGormConfig() *gorm.Config {
return &gorm.Config{
- Logger: logger.Default.LogMode(logger.Silent),
+ Logger: logger.Default.LogMode(logger.Warn),
NamingStrategy: schema.NamingStrategy{
TablePrefix: "chatgpt_", // 设置表前缀
SingularTable: false, // 使用单数表名形式
@@ -36,9 +37,9 @@ func NewMysql(config *gorm.Config, appConfig *types.AppConfig) (*gorm.DB, error)
if err != nil {
return nil, err
}
+
sqlDB.SetMaxIdleConns(32)
sqlDB.SetMaxOpenConns(512)
sqlDB.SetConnMaxLifetime(time.Hour)
-
return db, nil
}
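
With the log level raised from Silent to Warn, slow queries and recoverable GORM errors now surface in the API logs. A minimal wiring sketch using the two functions from this file, assuming the AppConfig is loaded elsewhere by the application's config loader:

```go
// Minimal wiring sketch, assuming appConfig is loaded elsewhere.
package example

import (
	"geekai/core/types"
	"geekai/store"

	"gorm.io/gorm"
)

func openDB(appConfig *types.AppConfig) (*gorm.DB, error) {
	cfg := store.NewGormConfig() // table prefix "chatgpt_", logger level Warn
	return store.NewMysql(cfg, appConfig)
}
```
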
diff --git a/api/store/vo/chat_history.go b/api/store/vo/chat_history.go
deleted file mode 100644
index 3f534f39..00000000
--- a/api/store/vo/chat_history.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package vo
-
-type HistoryMessage struct {
- BaseVo
- ChatId string `json:"chat_id"`
- UserId uint `json:"user_id"`
- RoleId uint `json:"role_id"`
- Model string `json:"model"`
- Type string `json:"type"`
- Icon string `json:"icon"`
- Tokens int `json:"tokens"`
- Content string `json:"content"`
- UseContext bool `json:"use_context"`
-}
diff --git a/api/store/vo/chat_message.go b/api/store/vo/chat_message.go
new file mode 100644
index 00000000..f12279be
--- /dev/null
+++ b/api/store/vo/chat_message.go
@@ -0,0 +1,21 @@
+package vo
+
+type MsgContent struct {
+ Text string `json:"text"`
+ Files []File `json:"files"`
+}
+
+type ChatMessage struct {
+ Id uint `json:"id"`
+ CreatedAt int64 `json:"created_at"`
+ UpdatedAt int64 `json:"updated_at"`
+ ChatId string `json:"chat_id"`
+ UserId uint `json:"user_id"`
+ RoleId uint `json:"role_id"`
+ Model string `json:"model"`
+ Type string `json:"type"`
+ Icon string `json:"icon"`
+ Tokens int `json:"tokens"`
+ Content MsgContent `json:"content"`
+ UseContext bool `json:"use_context"`
+}
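
The new VO carries structured content (text plus attached files) instead of a bare string. A short sketch of how such a message marshals; the values are hypothetical and vo.File's fields are not shown in this diff:

```go
// Hypothetical sketch: the reply content marshals as a nested JSON object.
package example

import (
	"encoding/json"
	"fmt"

	"geekai/store/vo"
)

func printMessage() {
	msg := vo.ChatMessage{
		ChatId:  "example-chat-id", // hypothetical session ID
		Type:    "reply",
		Model:   "gpt-4o",
		Content: vo.MsgContent{Text: "你好!", Files: nil},
	}
	data, _ := json.Marshal(msg)
	fmt.Println(string(data)) // ..."content":{"text":"你好!","files":null}...
}
```
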
diff --git a/api/store/vo/chat_model.go b/api/store/vo/chat_model.go
index 427fa4e8..b4076e19 100644
--- a/api/store/vo/chat_model.go
+++ b/api/store/vo/chat_model.go
@@ -10,8 +10,8 @@ type ChatModel struct {
Open bool `json:"open"`
MaxTokens int `json:"max_tokens"` // 最大响应长度
MaxContext int `json:"max_context"` // 最大上下文长度
- Description string `json:"description"` // 模型描述
- Category string `json:"category"` //模型类别
+ Desc string `json:"desc"` // 模型描述
+ Tag string `json:"tag"` //模型标签
Temperature float32 `json:"temperature"` // 模型温度
KeyId uint `json:"key_id,omitempty"`
KeyName string `json:"key_name"`
diff --git a/build/geekai-install.sh b/build/geekai-install.sh
new file mode 100755
index 00000000..5c6fdcde
--- /dev/null
+++ b/build/geekai-install.sh
@@ -0,0 +1,196 @@
+#!/usr/bin/env bash
+# geekai-install.sh — 自动识别 Linux 发行版并安装 Docker (Compose v2)
+# 支持国内镜像自动/手动切换,内置错误处理与友好提示。
+# ---------------------------------------------------------------
+set -Eeuo pipefail
+
+# ────────────────────────── 彩色输出 ────────────────────────── #
+info() { printf "\e[32m[INFO]\e[0m %s\n" "$*"; }
+warn() { printf "\e[33m[WARN]\e[0m %s\n" "$*" >&2; }
+error() { printf "\e[31m[ERROR]\e[0m %s\n" "$*" >&2; exit 1; }
+trap 'error "脚本失败,命令:\"${BASH_COMMAND}\", 退出码 $?"' ERR
+
+need_cmd() { command -v "$1" &>/dev/null; }
+as_root() { if ((EUID==0)); then "$@"; else sudo "$@"; fi; }
+
+# ─────────── 部署 GeekAI-Plus 函数 ─────────── #
+deploy_geekai_plus(){
+ local repo=https://gitee.com/blackfox/geekai-plus-open.git
+ local dir=${GEEKAI_DIR:-geekai-plus}
+ info "部署 GeekAI-Plus 到目录 \"$dir\""
+ need_cmd git || error "未找到 git,请检查安装步骤。"
+ if [[ -d $dir ]]; then
+ warn "目录 $dir 已存在,跳过克隆。"
+ else
+ git clone --depth 1 "$repo" "$dir"
+ fi
+ pushd "$dir" >/dev/null
+ info "启动 docker compose…"
+ if docker compose up -d; then
+ info "GeekAI-Plus 部署完成!请访问 http://ip:8080。"
+ else
+ error "docker compose 启动失败。"
+ fi
+ popd >/dev/null
+}
+
+# ─────────────────── 检测 Docker 是否已安装 ─────────────────── #
+if need_cmd docker && (docker compose version &>/dev/null || need_cmd docker-compose); then
+ info "Docker 与 Compose 已安装,无需重复操作。"
+ deploy_geekai_plus
+ exit 0
+fi
+
+# ────────────────────────── 解析发行版 ───────────────────────── #
+[[ -r /etc/os-release ]] || error "无法识别系统:缺少 /etc/os-release"
+. /etc/os-release
+OS_ID=${ID,,}
+OS_VER=${VERSION_ID:-unknown}
+ARCH=$(uname -m)
+
+info "检测到系统:$PRETTY_NAME ($OS_ID $OS_VER, $ARCH)"
+
+# ──────────────────── 镜像域名与自动回退逻辑 ──────────────────── #
+# ❶ 用户可通过 DOCKER_MIRROR 指定:
+# - aliyun → https://mirrors.aliyun.com/docker-ce
+# - tuna → https://mirrors.tuna.tsinghua.edu.cn/docker-ce
+# - official (默认) → https://download.docker.com
+#
+# ❷ 若未指定,则先探测官方域名能否连通;失败则自动切换到 aliyun。
+#
+choose_mirror() {
+ local sel=${DOCKER_MIRROR:-auto}
+
+ case "$sel" in
+ aliyun) MIRROR="https://mirrors.aliyun.com/docker-ce" ;;
+ tuna) MIRROR="https://mirrors.tuna.tsinghua.edu.cn/docker-ce" ;;
+ official) MIRROR="https://download.docker.com" ;;
+ auto)
+ MIRROR="https://download.docker.com"
+ info "检测官方源连通性…"
+ if ! curl -m 3 -sfL "${MIRROR}/linux/${OS_ID}/gpg" -o /dev/null; then
+ warn "官方源不可达,回退至阿里云镜像。"
+ MIRROR="https://mirrors.aliyun.com/docker-ce"
+ fi ;;
+ *)
+ error "未知镜像标识:$sel(可选 aliyun|tuna|official)" ;;
+ esac
+ info "使用镜像源:$MIRROR"
+}
+choose_mirror
+
+# ────────────────────────── 安装函数 ────────────────────────── #
+install_docker_debian_like() {
+ info "使用 APT 安装 Docker"
+ as_root apt-get update -y
+ as_root apt-get install -y ca-certificates curl git gnupg lsb-release
+
+ as_root install -m 0755 -d /etc/apt/keyrings
+ curl -fsSL "${MIRROR}/linux/${OS_ID}/gpg" \
+ | as_root gpg --dearmor -o /etc/apt/keyrings/docker.gpg
+  as_root chmod a+r /etc/apt/keyrings/docker.gpg
+
+ echo \
+ "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] \
+ ${MIRROR}/linux/${OS_ID} $(lsb_release -cs) stable" \
+ | as_root tee /etc/apt/sources.list.d/docker.list >/dev/null
+
+ as_root apt-get update -y
+ as_root apt-get install -y \
+ docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
+}
+
+install_docker_centos_like() {
+ info "使用 YUM/DNF 安装 Docker"
+  local pkgcmd
+  if need_cmd dnf; then pkgcmd=dnf; else pkgcmd=yum; fi
+
+  if [[ $pkgcmd == dnf ]]; then
+    as_root dnf -y install dnf-plugins-core git
+    as_root dnf config-manager --add-repo "${MIRROR}/linux/centos/docker-ce.repo"
+  else
+    as_root yum -y install yum-utils git
+    as_root yum-config-manager --add-repo "${MIRROR}/linux/centos/docker-ce.repo"
+  fi
+ as_root $pkgcmd -y install \
+ docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
+ as_root systemctl enable --now docker
+}
+
+install_docker_fedora() {
+ info "使用 DNF 安装 Docker (Fedora)"
+ as_root dnf -y install dnf-plugins-core
+ as_root dnf config-manager --add-repo \
+ "${MIRROR}/linux/fedora/docker-ce.repo"
+ as_root dnf -y install \
+ docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin git
+ as_root systemctl enable --now docker
+}
+
+install_docker_arch() {
+ info "使用 pacman 安装 Docker"
+ as_root pacman -Sy --noconfirm docker docker-compose git
+ as_root systemctl enable --now docker
+}
+
+install_docker_opensuse() {
+ info "使用 zypper 安装 Docker"
+ as_root zypper -n in docker docker-compose git
+ as_root systemctl enable --now docker
+}
+
+install_docker_alpine() {
+ info "使用 apk 安装 Docker"
+ as_root apk add --no-cache docker docker-cli-compose git
+  as_root rc-update add docker boot
+ as_root service docker start
+}
+
+install_docker_fallback() {
+ warn "发行版 \"$OS_ID\" 未做专门适配,执行官方一键脚本…"
+ curl -fsSL get.docker.com | as_root sh
+}
+
+# ────────────────────────── 分发安装 ────────────────────────── #
+case "$OS_ID" in
+ debian|ubuntu|linuxmint) install_docker_debian_like ;;
+ centos|rocky|almalinux|rhel) install_docker_centos_like ;;
+ fedora) install_docker_fedora ;;
+ arch|manjaro) install_docker_arch ;;
+ opensuse*|suse|sles) install_docker_opensuse ;;
+ alpine) install_docker_alpine ;;
+ *) install_docker_fallback ;;
+esac
+
+# ──────────────────── 安装后检查 & docker 组 ─────────────────── #
+need_cmd docker || error "Docker 安装后仍不可用,请检查日志。"
+as_root usermod -aG docker "${SUDO_USER:-$USER}" || true
+
+# ──────────────────── (可选) 镜像加速器配置 ─────────────────── #
+if [[ "${ENABLE_REGISTRYMIRROR:-1}" == "1" ]]; then
+ as_root mkdir -p /etc/docker
+ cat <<-JSON | as_root tee /etc/docker/daemon.json >/dev/null
+ {
+ "registry-mirrors": [
+ "https://registry.docker-cn.com", "https://mirror.ccs.tencentyun.com","https://hub-mirror.c.163.com"
+ ]
+ }
+JSON
+ as_root systemctl restart docker
+ info "已为 Docker 配置国内镜像加速器。"
+fi
+
+# ────────────────────────── 最终信息 ────────────────────────── #
+info "Docker 版本:$(docker --version | cut -d',' -f1)"
+if docker compose version &>/dev/null; then
+ info "Compose 版本:$(docker compose version --short)"
+elif need_cmd docker-compose; then
+ info "Compose 版本:$(docker-compose --version | awk '{print $3}')"
+fi
+
+cat <<'EOF'
+╭─────────────────────────────────────────────────────────╮
+│ 安装完成! │
+│ · 请重新登录或执行 `newgrp docker` 以使用 docker 免 sudo │
+│ · 如需跳过镜像加速,可执行:ENABLE_REGISTRYMIRROR=0 ... │
+╰─────────────────────────────────────────────────────────╯
+EOF
+
+deploy_geekai_plus
+
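
For clarity, the mirror auto-fallback that choose_mirror performs (probe the official host with a 3-second timeout, fall back to the Aliyun mirror on failure) restated as a standalone Go sketch. This is illustrative only and not part of the project code:

```go
// Illustrative restatement of choose_mirror's auto mode; not project code.
package example

import (
	"net/http"
	"time"
)

func chooseMirror(osID string) string {
	official := "https://download.docker.com"
	aliyun := "https://mirrors.aliyun.com/docker-ce"

	client := &http.Client{Timeout: 3 * time.Second} // mirrors `curl -m 3`
	resp, err := client.Get(official + "/linux/" + osID + "/gpg")
	if err != nil {
		return aliyun // official source unreachable, fall back
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 { // mirrors curl -f behaviour
		return aliyun
	}
	return official
}
```
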
diff --git a/database/geekai_plus-v4.2.3.sql b/database/geekai_plus-v4.2.3.sql
index 80017315..b499a98f 100644
--- a/database/geekai_plus-v4.2.3.sql
+++ b/database/geekai_plus-v4.2.3.sql
@@ -110,7 +110,7 @@ CREATE TABLE `chatgpt_chat_history` (
`type` varchar(10) NOT NULL COMMENT '类型:prompt|reply',
`icon` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '角色图标',
`role_id` bigint NOT NULL COMMENT '角色 ID',
- `model` varchar(30) DEFAULT NULL COMMENT '模型名称',
+ `model` varchar(255) DEFAULT NULL COMMENT '模型名称',
`content` text NOT NULL COMMENT '聊天内容',
`tokens` smallint NOT NULL COMMENT '耗费 token 数量',
`total_tokens` bigint NOT NULL COMMENT '消耗总Token长度',
@@ -134,7 +134,7 @@ CREATE TABLE `chatgpt_chat_items` (
`role_id` bigint NOT NULL COMMENT '角色 ID',
`title` varchar(100) NOT NULL COMMENT '会话标题',
`model_id` bigint NOT NULL DEFAULT '0' COMMENT '模型 ID',
- `model` varchar(30) DEFAULT NULL COMMENT '模型名称',
+ `model` varchar(255) DEFAULT NULL COMMENT '模型名称',
`created_at` datetime NOT NULL COMMENT '创建时间',
`updated_at` datetime NOT NULL COMMENT '更新时间',
`deleted_at` datetime DEFAULT NULL
@@ -465,7 +465,7 @@ CREATE TABLE `chatgpt_power_logs` (
`type` tinyint(1) NOT NULL COMMENT '类型(1:充值,2:消费,3:退费)',
`amount` smallint NOT NULL COMMENT '算力数值',
`balance` bigint NOT NULL COMMENT '余额',
- `model` varchar(30) NOT NULL COMMENT '模型',
+ `model` varchar(255) NOT NULL COMMENT '模型',
`remark` varchar(512) CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci NOT NULL COMMENT '备注',
`mark` tinyint(1) NOT NULL COMMENT '资金类型(0:支出,1:收入)',
`created_at` datetime NOT NULL COMMENT '创建时间'
diff --git a/database/update-v4.2.3.1.sql b/database/update-v4.2.3.1.sql
new file mode 100644
index 00000000..5029b160
--- /dev/null
+++ b/database/update-v4.2.3.1.sql
@@ -0,0 +1,3 @@
+INSERT INTO `chatgpt_configs` (`id`, `marker`, `config_json`) VALUES
+(4, 'privacy', '{\"sd_neg_prompt\":\"\",\"mj_mode\":\"\",\"index_navs\":null,\"copyright\":\"\",\"default_nickname\":\"\",\"icp\":\"\",\"mark_map_text\":\"\",\"enabled_verify\":false,\"email_white_list\":null,\"translate_model_id\":0,\"max_file_size\":0,\"content\":\"# 隐私政策\\n\\n我们非常重视用户的隐私和个人信息保护。您在使用我们的产品与服务时,我们可能会收集和使用您的相关信息。我们希望通过本《隐私政策》向您说明我们在收集和使用您相关信息时对应的处理规则。\",\"updated\":true}'),
+(5, 'agreement', '{\"sd_neg_prompt\":\"\",\"mj_mode\":\"\",\"index_navs\":null,\"copyright\":\"\",\"default_nickname\":\"\",\"icp\":\"\",\"mark_map_text\":\"\",\"enabled_verify\":false,\"email_white_list\":null,\"translate_model_id\":0,\"max_file_size\":0,\"content\":\"# 用户协议\\n\\n用户在使用本服务前应当阅读并同意本协议。本协议内容包括协议正文及所有本平台已经发布的或将来可能发布的各类规则。所有规则为本协议不可分割的组成部分,与协议正文具有同等法律效力。\",\"updated\":true}');
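
These seed rows store the agreement/privacy markdown inside `config_json`. A hedged sketch of reading the `content` field back out; the local struct is defined only for this example and may differ from the project's own config model:

```go
// Hedged sketch: read the seeded markdown back out of chatgpt_configs.
// The configRow struct exists only for this example.
package example

import (
	"encoding/json"

	"gorm.io/gorm"
)

type configRow struct {
	Id         uint   `gorm:"column:id"`
	Marker     string `gorm:"column:marker"`
	ConfigJson string `gorm:"column:config_json"`
}

func (configRow) TableName() string { return "chatgpt_configs" }

// loadContent returns the markdown `content` stored for a marker such as
// "privacy" or "agreement".
func loadContent(db *gorm.DB, marker string) (string, error) {
	var row configRow
	if err := db.Where("marker = ?", marker).First(&row).Error; err != nil {
		return "", err
	}
	var cfg struct {
		Content string `json:"content"`
	}
	if err := json.Unmarshal([]byte(row.ConfigJson), &cfg); err != nil {
		return "", err
	}
	return cfg.Content, nil
}
```
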
diff --git a/database/update-v4.2.3.sql b/database/update-v4.2.3.sql
index 0cdf9f14..98ac1d2f 100644
--- a/database/update-v4.2.3.sql
+++ b/database/update-v4.2.3.sql
@@ -2,4 +2,4 @@ ALTER TABLE `chatgpt_chat_models` ADD `category` VARCHAR(1024) NOT NULL DEFAULT
ALTER TABLE `chatgpt_chat_models` ADD `description` VARCHAR(1024) NOT NULL DEFAULT '' COMMENT '模型类型描述' AFTER `id`;
ALTER TABLE `chatgpt_orders` DROP `deleted_at`;
ALTER TABLE `chatgpt_chat_history` DROP `deleted_at`;
-ALTER TABLE `chatgpt_chat_items` DROP `deleted_at`;
+ALTER TABLE `chatgpt_chat_items` DROP `deleted_at`;
\ No newline at end of file
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 7de5ad82..2e65d179 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -46,7 +46,7 @@ services:
# 后端 API 程序
geekai-api:
- image: registry.cn-shenzhen.aliyuncs.com/geekmaster/geekai-api:v4.2.3-amd64
+ image: registry.cn-shenzhen.aliyuncs.com/geekmaster/geekai-api:v4.2.4-amd64
container_name: geekai-api
restart: always
depends_on:
@@ -69,7 +69,7 @@ services:
# 前端应用
geekai-web:
- image: registry.cn-shenzhen.aliyuncs.com/geekmaster/geekai-web:v4.2.3-amd64
+ image: registry.cn-shenzhen.aliyuncs.com/geekmaster/geekai-web:v4.2.4-amd64
container_name: geekai-web
restart: always
depends_on:
diff --git a/web/.env.development b/web/.env.development
index a680a658..058389fa 100644
--- a/web/.env.development
+++ b/web/.env.development
@@ -1,14 +1,14 @@
-VUE_APP_API_HOST=http://localhost:5678
-VUE_APP_WS_HOST=ws://localhost:5678
-VUE_APP_USER=18888888888
-VUE_APP_PASS=12345678
-VUE_APP_ADMIN_USER=admin
-VUE_APP_ADMIN_PASS=admin123
-VUE_APP_KEY_PREFIX=GeekAI_DEV_
-VUE_APP_TITLE="Geek-AI 创作系统"
-VUE_APP_VERSION=v4.2.2
-VUE_APP_DOCS_URL=https://docs.geekai.me
-VUE_APP_GITHUB_URL=https://github.com/yangjian102621/geekai
-VUE_APP_GITEE_URL=https://gitee.com/blackfox/geekai
-VUE_APP_GITCODE_URL=https://gitcode.com/yangjian102621/geekai
+VITE_API_HOST=http://localhost:5678
+VITE_WS_HOST=ws://localhost:5678
+VITE_USER=18888888888
+VITE_PASS=12345678
+VITE_ADMIN_USER=admin
+VITE_ADMIN_PASS=admin123
+VITE_KEY_PREFIX=GeekAI_DEV_
+VITE_TITLE="Geek-AI 创作系统"
+VITE_VERSION=v4.2.4
+VITE_DOCS_URL=https://docs.geekai.me
+VITE_GITHUB_URL=https://github.com/yangjian102621/geekai
+VITE_GITEE_URL=https://gitee.com/blackfox/geekai
+VITE_GITCODE_URL=https://gitcode.com/yangjian102621/geekai
diff --git a/web/.env.production b/web/.env.production
index 07f5bc61..0d0bce67 100644
--- a/web/.env.production
+++ b/web/.env.production
@@ -1,9 +1,9 @@
-VUE_APP_API_HOST=
-VUE_APP_WS_HOST=
-VUE_APP_KEY_PREFIX=GeekAI_
-VUE_APP_VERSION=v4.2.2
+VITE_API_HOST=
+VITE_WS_HOST=
+VITE_KEY_PREFIX=GeekAI_
+VITE_VERSION=v4.2.4
-VUE_APP_TITLE="Geek-AI 创作系统"
+VITE_TITLE="Geek-AI 创作系统"
-VUE_APP_DOCS_URL=https://docs.geekai.me
-VUE_APP_GITHUB_URL=https://github.com/yangjian102621/geekai
-VUE_APP_GITEE_URL=https://gitee.com/blackfox/geekai
-VUE_APP_GITCODE_URL=https://gitcode.com/yangjian102621/geekai
+VITE_DOCS_URL=https://docs.geekai.me
+VITE_GITHUB_URL=https://github.com/yangjian102621/geekai
+VITE_GITEE_URL=https://gitee.com/blackfox/geekai
+VITE_GITCODE_URL=https://gitcode.com/yangjian102621/geekai
diff --git a/web/auto-imports.d.ts b/web/auto-imports.d.ts
new file mode 100644
index 00000000..4db789c2
--- /dev/null
+++ b/web/auto-imports.d.ts
@@ -0,0 +1,78 @@
+/* eslint-disable */
+/* prettier-ignore */
+// @ts-nocheck
+// noinspection JSUnusedGlobalSymbols
+// Generated by unplugin-auto-import
+// biome-ignore lint: disable
+export {}
+declare global {
+ const EffectScope: typeof import('vue')['EffectScope']
+ const computed: typeof import('vue')['computed']
+ const createApp: typeof import('vue')['createApp']
+ const customRef: typeof import('vue')['customRef']
+ const defineAsyncComponent: typeof import('vue')['defineAsyncComponent']
+ const defineComponent: typeof import('vue')['defineComponent']
+ const effectScope: typeof import('vue')['effectScope']
+ const getCurrentInstance: typeof import('vue')['getCurrentInstance']
+ const getCurrentScope: typeof import('vue')['getCurrentScope']
+ const h: typeof import('vue')['h']
+ const inject: typeof import('vue')['inject']
+ const isProxy: typeof import('vue')['isProxy']
+ const isReactive: typeof import('vue')['isReactive']
+ const isReadonly: typeof import('vue')['isReadonly']
+ const isRef: typeof import('vue')['isRef']
+ const markRaw: typeof import('vue')['markRaw']
+ const nextTick: typeof import('vue')['nextTick']
+ const onActivated: typeof import('vue')['onActivated']
+ const onBeforeMount: typeof import('vue')['onBeforeMount']
+ const onBeforeRouteLeave: typeof import('vue-router')['onBeforeRouteLeave']
+ const onBeforeRouteUpdate: typeof import('vue-router')['onBeforeRouteUpdate']
+ const onBeforeUnmount: typeof import('vue')['onBeforeUnmount']
+ const onBeforeUpdate: typeof import('vue')['onBeforeUpdate']
+ const onDeactivated: typeof import('vue')['onDeactivated']
+ const onErrorCaptured: typeof import('vue')['onErrorCaptured']
+ const onMounted: typeof import('vue')['onMounted']
+ const onRenderTracked: typeof import('vue')['onRenderTracked']
+ const onRenderTriggered: typeof import('vue')['onRenderTriggered']
+ const onScopeDispose: typeof import('vue')['onScopeDispose']
+ const onServerPrefetch: typeof import('vue')['onServerPrefetch']
+ const onUnmounted: typeof import('vue')['onUnmounted']
+ const onUpdated: typeof import('vue')['onUpdated']
+ const onWatcherCleanup: typeof import('vue')['onWatcherCleanup']
+ const provide: typeof import('vue')['provide']
+ const reactive: typeof import('vue')['reactive']
+ const readonly: typeof import('vue')['readonly']
+ const ref: typeof import('vue')['ref']
+ const resolveComponent: typeof import('vue')['resolveComponent']
+ const shallowReactive: typeof import('vue')['shallowReactive']
+ const shallowReadonly: typeof import('vue')['shallowReadonly']
+ const shallowRef: typeof import('vue')['shallowRef']
+ const toRaw: typeof import('vue')['toRaw']
+ const toRef: typeof import('vue')['toRef']
+ const toRefs: typeof import('vue')['toRefs']
+ const toValue: typeof import('vue')['toValue']
+ const triggerRef: typeof import('vue')['triggerRef']
+ const unref: typeof import('vue')['unref']
+ const useAttrs: typeof import('vue')['useAttrs']
+ const useCssModule: typeof import('vue')['useCssModule']
+ const useCssVars: typeof import('vue')['useCssVars']
+ const useFetch: typeof import('@vueuse/core')['useFetch']
+ const useId: typeof import('vue')['useId']
+ const useLink: typeof import('vue-router')['useLink']
+ const useModel: typeof import('vue')['useModel']
+ const useMouse: typeof import('@vueuse/core')['useMouse']
+ const useRoute: typeof import('vue-router')['useRoute']
+ const useRouter: typeof import('vue-router')['useRouter']
+ const useSlots: typeof import('vue')['useSlots']
+ const useTemplateRef: typeof import('vue')['useTemplateRef']
+ const watch: typeof import('vue')['watch']
+ const watchEffect: typeof import('vue')['watchEffect']
+ const watchPostEffect: typeof import('vue')['watchPostEffect']
+ const watchSyncEffect: typeof import('vue')['watchSyncEffect']
+}
+// for type re-export
+declare global {
+ // @ts-ignore
+ export type { Component, ComponentPublicInstance, ComputedRef, DirectiveBinding, ExtractDefaultPropTypes, ExtractPropTypes, ExtractPublicPropTypes, InjectionKey, PropType, Ref, MaybeRef, MaybeRefOrGetter, VNode, WritableComputedRef } from 'vue'
+ import('vue')
+}
diff --git a/web/babel.config.js b/web/babel.config.js
deleted file mode 100644
index e9558405..00000000
--- a/web/babel.config.js
+++ /dev/null
@@ -1,5 +0,0 @@
-module.exports = {
- presets: [
- '@vue/cli-plugin-babel/preset'
- ]
-}
diff --git a/web/public/index.html b/web/index.html
similarity index 64%
rename from web/public/index.html
rename to web/index.html
index dae024cd..5bec046a 100644
--- a/web/public/index.html
+++ b/web/index.html
@@ -8,6 +8,8 @@
content="width=device-width,initial-scale=1.0,user-scalable=no"
/>
    <title>Geek-AI 创作助手</title>
+
+
@@ -15,5 +17,6 @@
    <noscript>请开启JavaScript支持</noscript>
+