feat: mind map (markmap) feature is ready

RockYang 2024-04-15 17:23:59 +08:00
parent 8a9f386d8f
commit 7ad41927aa
12 changed files with 418 additions and 142 deletions

View File

@@ -5,6 +5,10 @@
* Bug fix: fixed the issue where, after a MidJourney task timed out, later tasks would overwrite the results of earlier tasks
* New feature: support uploading images and using vision models
* Improvement: fixed the broken styling of the copy-code button on the chat page
+* New feature: added a mind map (markmap) feature; different chat models can be selected to generate the mind map
+* New feature: a chat model can now be bound to a role, e.g. a role can be restricted to GPT-3.5 or GPT-4 only
+* New feature: an API KEY can now be bound to a model, e.g. bind a free API KEY to the GPT-3.5 model so users can try it for free without consuming your paid KEY
+* New feature: the admin console Logo can now be changed
## 4.0.2
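The mind-map feature listed above is driven by a small WebSocket protocol, implemented further down in this diff (the `MarkMapHandler` and the xmind Vue page). As a rough, non-authoritative sketch of the frames involved, assuming the JSON field names `type`/`content` that the Vue page reads (the model ID and prompt are placeholders):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// frame mirrors the JSON messages exchanged over /api/markMap/client.
// Field names are inferred from the frontend code in this commit.
type frame struct {
	Type    string      `json:"type"`
	Content interface{} `json:"content,omitempty"`
}

func main() {
	// client -> server: optionally switch the model, then send the prompt
	toServer := []frame{
		{Type: "model_id", Content: 1}, // placeholder model ID
		{Type: "message", Content: "Plan a study path for learning Go"},
	}
	// server -> client: "start", streamed Markdown chunks, then "end" (or "error")
	fromServer := []frame{
		{Type: "start"},
		{Type: "middle", Content: "# Study Path\n* Basics\n"},
		{Type: "end"},
	}
	for _, f := range append(toServer, fromServer...) {
		b, _ := json.Marshal(f)
		fmt.Println(string(b))
	}
}
```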

View File

@@ -218,7 +218,7 @@ func needLogin(c *gin.Context) bool {
c.Request.URL.Path == "/api/config/get" ||
c.Request.URL.Path == "/api/product/list" ||
c.Request.URL.Path == "/api/menu/list" ||
-c.Request.URL.Path == "/api/markMap/model" ||
+c.Request.URL.Path == "/api/markMap/client" ||
strings.HasPrefix(c.Request.URL.Path, "/api/test") ||
strings.HasPrefix(c.Request.URL.Path, "/api/function/") ||
strings.HasPrefix(c.Request.URL.Path, "/api/sms/") ||

View File

@@ -21,7 +21,7 @@ const (
WsStart = WsMsgType("start")
WsMiddle = WsMsgType("middle")
WsEnd = WsMsgType("end")
-WsMjImg = WsMsgType("mj")
+WsErr = WsMsgType("error")
)
type BizCode int

View File

@@ -525,7 +525,6 @@ func (h *ChatHandler) doRequest(ctx context.Context, req types.ApiRequest, sessi
request = request.WithContext(ctx)
request.Header.Set("Content-Type", "application/json")
-var proxyURL string
if len(apiKey.ProxyURL) > 5 { // use proxy
proxy, _ := url.Parse(apiKey.ProxyURL)
client = &http.Client{
@@ -536,7 +535,7 @@
} else {
client = http.DefaultClient
}
-logger.Debugf("Sending %s request, ApiURL:%s, API KEY:%s, PROXY: %s, Model: %s", session.Model.Platform, apiURL, apiKey.Value, proxyURL, req.Model)
+logger.Debugf("Sending %s request, ApiURL:%s, API KEY:%s, PROXY: %s, Model: %s", session.Model.Platform, apiURL, apiKey.Value, apiKey.ProxyURL, req.Model)
switch session.Model.Platform {
case types.Azure:
request.Header.Set("api-key", apiKey.Value)

View File

@@ -1,26 +1,35 @@
package handler
import (
+"bufio"
+"bytes"
"chatplus/core"
"chatplus/core/types"
+"chatplus/store/model"
"chatplus/utils"
+"encoding/json"
+"errors"
+"fmt"
"github.com/gin-gonic/gin"
"github.com/gorilla/websocket"
"gorm.io/gorm"
+"io"
"net/http"
+"net/url"
+"strings"
+"time"
)
// MarkMapHandler generates mind maps
type MarkMapHandler struct {
BaseHandler
-clients *types.LMap[uint, *types.WsClient]
+clients *types.LMap[int, *types.WsClient]
}
func NewMarkMapHandler(app *core.AppServer, db *gorm.DB) *MarkMapHandler {
return &MarkMapHandler{
BaseHandler: BaseHandler{App: app, DB: db},
-clients: types.NewLMap[uint, *types.WsClient](),
+clients: types.NewLMap[int, *types.WsClient](),
}
}
@@ -32,9 +41,13 @@ func (h *MarkMapHandler) Client(c *gin.Context) {
}
modelId := h.GetInt(c, "model_id", 0)
-userId := h.GetLoginUserId(c)
+userId := h.GetInt(c, "user_id", 0)
logger.Info(modelId)
client := types.NewWsClient(ws)
+if cli := h.clients.Get(userId); cli != nil {
+cli.Close()
+}
// save the session connection
h.clients.Put(userId, client)
@@ -55,12 +68,165 @@ func (h *MarkMapHandler) Client(c *gin.Context) {
// heartbeat message
if message.Type == "heartbeat" {
-logger.Debug("收到 Chat 心跳消息:", message.Content)
+logger.Debug("收到 MarkMap 心跳消息:", message.Content)
+continue
+}
+// change model
+if message.Type == "model_id" {
+modelId = utils.IntValue(utils.InterfaceToString(message.Content), 0)
continue
}
logger.Info("Receive a message: ", message.Content)
+err = h.sendMessage(client, utils.InterfaceToString(message.Content), modelId, userId)
+if err != nil {
+utils.ReplyChunkMessage(client, types.WsMessage{Type: types.WsErr, Content: err.Error()})
+}
}
}()
}
func (h *MarkMapHandler) sendMessage(client *types.WsClient, prompt string, modelId int, userId int) error {
var user model.User
res := h.DB.Model(&model.User{}).First(&user, userId)
if res.Error != nil {
return fmt.Errorf("error with query user info: %v", res.Error)
}
var chatModel model.ChatModel
res = h.DB.Where("id", modelId).First(&chatModel)
if res.Error != nil {
return fmt.Errorf("error with query chat model: %v", res.Error)
}
if !user.Status {
return errors.New("当前用户被禁用")
}
if user.Power < chatModel.Power {
return fmt.Errorf("您当前剩余算力(%d已不足以支付当前模型算力%d", user.Power, chatModel.Power)
}
messages := make([]interface{}, 0)
messages = append(messages, types.Message{Role: "system", Content: "你是一位非常优秀的思维导图助手,你会把用户的所有提问都总结成思维导图,然后以 Markdown 格式输出。只输出 Markdown 内容,不要输出任何解释性的语句。"})
messages = append(messages, types.Message{Role: "user", Content: prompt})
var req = types.ApiRequest{
Model: chatModel.Value,
Stream: true,
Messages: messages,
}
var apiKey model.ApiKey
response, err := h.doRequest(req, chatModel, &apiKey)
if err != nil {
return fmt.Errorf("请求 OpenAI API 失败: %s", err)
}
defer response.Body.Close()
contentType := response.Header.Get("Content-Type")
if strings.Contains(contentType, "text/event-stream") {
// read the response stream chunk by chunk
var message = types.Message{}
scanner := bufio.NewScanner(response.Body)
var isNew = true
for scanner.Scan() {
line := scanner.Text()
if !strings.Contains(line, "data:") || len(line) < 30 {
continue
}
var responseBody = types.ApiResponse{}
err = json.Unmarshal([]byte(line[6:]), &responseBody)
if err != nil || len(responseBody.Choices) == 0 { // failed to parse the response data
return fmt.Errorf("error with decode data: %v", err)
}
// initialize the message role
if responseBody.Choices[0].Delta.Role != "" && message.Role == "" {
message.Role = responseBody.Choices[0].Delta.Role
continue
} else if responseBody.Choices[0].FinishReason != "" {
break // generation finished or was interrupted
} else {
if isNew {
utils.ReplyChunkMessage(client, types.WsMessage{Type: types.WsStart})
isNew = false
}
utils.ReplyChunkMessage(client, types.WsMessage{
Type: types.WsMiddle,
Content: utils.InterfaceToString(responseBody.Choices[0].Delta.Content),
})
}
} // end for
utils.ReplyChunkMessage(client, types.WsMessage{Type: types.WsEnd})
} else {
body, err := io.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("读取响应失败: %v", err)
}
var res types.ApiError
err = json.Unmarshal(body, &res)
if err != nil {
return fmt.Errorf("解析响应失败: %v", err)
}
// handle OpenAI API errors
if strings.Contains(res.Error.Message, "This key is associated with a deactivated account") {
// remove the deactivated key
h.DB.Where("value = ?", apiKey.Value).Delete(&model.ApiKey{})
return errors.New("请求 OpenAI API 失败API KEY 所关联的账户被禁用。")
} else if strings.Contains(res.Error.Message, "You exceeded your current quota") {
return errors.New("请求 OpenAI API 失败API KEY 触发并发限制,请稍后再试。")
} else {
return fmt.Errorf("请求 OpenAI API 失败:%v", res.Error.Message)
}
}
return nil
}
func (h *MarkMapHandler) doRequest(req types.ApiRequest, chatModel model.ChatModel, apiKey *model.ApiKey) (*http.Response, error) {
// if the chat model is bound to an API KEY, use it directly
var res *gorm.DB
if chatModel.KeyId > 0 {
res = h.DB.Where("id", chatModel.KeyId).Find(apiKey)
}
// otherwise fall back to the least recently used available key
if res == nil || res.Error != nil {
res = h.DB.Where("platform = ?", types.OpenAI).Where("type = ?", "chat").Where("enabled = ?", true).Order("last_used_at ASC").First(apiKey)
}
if res.Error != nil {
return nil, errors.New("no available key, please import key")
}
apiURL := apiKey.ApiURL
// update the API KEY's last-used timestamp
h.DB.Model(apiKey).UpdateColumn("last_used_at", time.Now().Unix())
// build the HTTP client and request
var client *http.Client
requestBody, err := json.Marshal(req)
if err != nil {
return nil, err
}
request, err := http.NewRequest(http.MethodPost, apiURL, bytes.NewBuffer(requestBody))
if err != nil {
return nil, err
}
request.Header.Set("Content-Type", "application/json")
if len(apiKey.ProxyURL) > 5 { // use proxy
proxy, _ := url.Parse(apiKey.ProxyURL)
client = &http.Client{
Transport: &http.Transport{
Proxy: http.ProxyURL(proxy),
},
}
} else {
client = http.DefaultClient
}
request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey.Value))
return client.Do(request)
}
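For context, here is a minimal, hypothetical client for the endpoint above, using gorilla/websocket. The host/port, user_id and model_id are placeholders, the frame field names are inferred from the Vue page later in this diff, and it skips the heartbeat, so treat it as a sketch rather than a production client:

```go
package main

import (
	"fmt"
	"log"

	"github.com/gorilla/websocket"
)

// frame mirrors the JSON messages exchanged with /api/markMap/client
// (field names inferred from the frontend code in this commit).
type frame struct {
	Type    string      `json:"type"`
	Content interface{} `json:"content"`
}

func main() {
	// placeholder address and IDs; the route is whitelisted in needLogin above
	wsURL := "ws://localhost:5678/api/markMap/client?user_id=1&model_id=1"
	conn, _, err := websocket.DefaultDialer.Dial(wsURL, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// ask the server to generate a mind map for a prompt
	if err := conn.WriteJSON(frame{Type: "message", Content: "Summarize the main Go standard library packages"}); err != nil {
		log.Fatal(err)
	}

	// print streamed Markdown chunks until the server signals "end" or "error"
	for {
		var msg frame
		if err := conn.ReadJSON(&msg); err != nil {
			return
		}
		switch msg.Type {
		case "middle":
			fmt.Print(msg.Content)
		case "end":
			fmt.Println()
			return
		case "error":
			log.Fatal(msg.Content)
		}
	}
}
```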

View File

@@ -439,7 +439,7 @@ func main() {
fx.Provide(handler.NewMarkMapHandler),
fx.Invoke(func(s *core.AppServer, h *handler.MarkMapHandler) {
group := s.Engine.Group("/api/markMap/")
-group.GET("model", h.GetModel)
+group.Any("client", h.Client)
}),
fx.Invoke(func(s *core.AppServer, db *gorm.DB) {
go func() {

View File

@@ -73,6 +73,7 @@ func (c *PlusClient) Imagine(task types.MjTask) (ImageRes, error) {
// Blend blends images
func (c *PlusClient) Blend(task types.MjTask) (ImageRes, error) {
apiURL := fmt.Sprintf("%s/mj-%s/mj/submit/blend", c.apiURL, c.Config.Mode)
+logger.Info("API URL: ", apiURL)
body := ImageReq{
BotType: "MID_JOURNEY",
Dimensions: "SQUARE",
@@ -163,7 +164,8 @@ func (c *PlusClient) Upscale(task types.MjTask) (ImageRes, error) {
"customId": fmt.Sprintf("MJ::JOB::upsample::%d::%s", task.Index, task.MessageHash),
"taskId": task.MessageId,
}
-apiURL := fmt.Sprintf("%s/mj-%s/mj/submit/action", c.Config.Mode, c.apiURL)
+apiURL := fmt.Sprintf("%s/mj-%s/mj/submit/action", c.apiURL, c.Config.Mode)
+logger.Info("API URL: ", apiURL)
var res ImageRes
var errRes ErrRes
r, err := c.client.R().
@@ -189,7 +191,8 @@ func (c *PlusClient) Variation(task types.MjTask) (ImageRes, error) {
"customId": fmt.Sprintf("MJ::JOB::variation::%d::%s", task.Index, task.MessageHash),
"taskId": task.MessageId,
}
-apiURL := fmt.Sprintf("%s/mj-%s/mj/submit/action", c.Config.Mode, c.apiURL)
+apiURL := fmt.Sprintf("%s/mj-%s/mj/submit/action", c.apiURL, c.Config.Mode)
+logger.Info("API URL: ", apiURL)
var res ImageRes
var errRes ErrRes
r, err := req.C().R().

View File

@@ -1,2 +1,3 @@
ALTER TABLE `chatgpt_chat_roles` ADD `model_id` INT NOT NULL DEFAULT '0' COMMENT '绑定模型ID' AFTER `sort_num`;
ALTER TABLE `chatgpt_chat_models` ADD `key_id` INT(11) NOT NULL COMMENT '绑定API KEY ID' AFTER `open`;
INSERT INTO `chatgpt_plus`.`chatgpt_menus`(`id`, `name`, `icon`, `url`, `sort_num`, `enabled`) VALUES (12, '思维导图', '/images/menu/xmind.png', '/xmind', 3, 1);

View File

@@ -66,10 +66,43 @@
.right-box {
width 100%
h2 {
color #ffffff
}
.markdown {
color #ffffff
display flex
justify-content center
align-items center
h1 {
color: #47fff1;
}
h2 {
color: #ffcc00;
}
ul {
list-style-type: disc;
margin-left: 20px;
li {
line-height 1.5
}
}
strong {
font-weight: bold;
}
em {
font-style: italic;
}
}
.body {
display flex
justify-content center

View File

@@ -525,42 +525,10 @@
<div class="opt" v-if="scope.item['can_opt']">
<div class="opt-line">
<ul>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="放大第一张"
-placement="top">
-<a @click="upscale(1, scope.item)">U1</a>
-</el-tooltip>
-</li>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="放大第二张"
-placement="top">
-<a @click="upscale(2, scope.item)">U2</a>
-</el-tooltip>
-</li>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="放大第三张"
-placement="top">
-<a @click="upscale(3, scope.item)">U3</a>
-</el-tooltip>
-</li>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="放大第四张"
-placement="top">
-<a @click="upscale(4, scope.item)">U4</a>
-</el-tooltip>
-</li>
+<li><a @click="upscale(1, scope.item)">U1</a></li>
+<li><a @click="upscale(2, scope.item)">U2</a></li>
+<li><a @click="upscale(3, scope.item)">U3</a></li>
+<li><a @click="upscale(4, scope.item)">U4</a></li>
<li class="show-prompt">
<el-popover placement="left" title="提示词" :width="240" trigger="hover">
@@ -586,42 +554,10 @@
<div class="opt-line">
<ul>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="变化第一张"
-placement="top">
-<a @click="variation(1, scope.item)">V1</a>
-</el-tooltip>
-</li>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="变化第二张"
-placement="top">
-<a @click="variation(2, scope.item)">V2</a>
-</el-tooltip>
-</li>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="变化第三张"
-placement="top">
-<a @click="variation(3, scope.item)">V3</a>
-</el-tooltip>
-</li>
-<li>
-<el-tooltip
-class="box-item"
-effect="light"
-content="变化第四张"
-placement="top">
-<a @click="variation(4, scope.item)">V4</a>
-</el-tooltip>
-</li>
+<li><a @click="variation(1, scope.item)">V1</a></li>
+<li><a @click="variation(2, scope.item)">V2</a></li>
+<li><a @click="variation(3, scope.item)">V3</a></li>
+<li><a @click="variation(4, scope.item)">V4</a></li>
</ul>
</div>
</div>
@@ -797,23 +733,25 @@ const connect = () => {
});
_socket.addEventListener('close', () => {
+if (socket.value !== null) {
ElMessageBox.confirm(
'检测到您已经在其他客户端创建了新的连接,当前连接将被关闭!',
'提示',
{
dangerouslyUseHTMLString: true,
confirmButtonText: '重新连接',
cancelButtonText: '关闭',
type: 'warning',
}
).then(() => {
connect()
}).catch(() => {
ElMessage({
type: 'info',
message: '连接已关闭',
})
})
+}
});
}

View File

@@ -576,24 +576,26 @@ const connect = () => {
});
_socket.addEventListener('close', () => {
+if (socket.value !== null) {
ElMessageBox.confirm(
'检测到您已经在其他客户端创建了新的连接,当前连接将被关闭!',
'提示',
{
dangerouslyUseHTMLString: true,
confirmButtonText: '重新连接',
cancelButtonText: '关闭',
type: 'warning',
}
).then(() => {
connect()
}).catch(() => {
ElMessage({
type: 'info',
message: '连接已关闭',
})
})
+}
});
}
const clipboard = ref(null)

View File

@@ -23,7 +23,7 @@
请选择生成思维导图的AI模型
</div>
<div class="param-line">
-<el-select v-model="modelID" placeholder="请选择模型">
+<el-select v-model="modelID" placeholder="请选择模型" @change="changeModel">
<el-option
v-for="item in models"
:key="item.id"
@@ -40,11 +40,13 @@
</div>
<div class="text-info">
-<el-tag type="success">当前可用算力{{ power }}</el-tag>
+<el-tag type="success">当前可用算力{{ loginUser.power }}</el-tag>
</div>
<div class="param-line">
-<el-button color="#47fff1" :dark="false" round @click="generateAI">智能生成思维导图</el-button>
+<el-button color="#47fff1" :dark="false" round @click="generateAI" :loading="loading">
+智能生成思维导图
+</el-button>
</div>
<div class="param-line">
@@ -52,7 +54,7 @@
</div>
<div class="param-line">
<el-input
-v-model="text"
+v-model="content"
:autosize="{ minRows: 4, maxRows: 6 }"
type="textarea"
placeholder="请用markdown语法输入您想要生成思维导图的内容"
@@ -60,7 +62,7 @@
</div>
<div class="param-line">
-<el-button color="#47fff1" :dark="false" round @click="generate">直接生成免费</el-button>
+<el-button color="#C5F9AE" :dark="false" round @click="generate">直接生成免费</el-button>
</div>
</el-form>
@@ -69,7 +71,10 @@
<div class="right-box">
<h2>思维导图</h2>
-<div class="body">
+<div class="markdown" v-if="loading">
+<div v-html="html"></div>
+</div>
+<div class="body" v-show="!loading">
<svg ref="svgRef" :style="{ height: rightBoxHeight + 'px' }"/>
</div>
</div><!-- end task list box -->
@@ -83,13 +88,13 @@
<script setup>
import LoginDialog from "@/components/LoginDialog.vue";
-import {onMounted, onUpdated, ref} from 'vue';
+import {nextTick, onMounted, onUnmounted, ref} from 'vue';
import {Markmap} from 'markmap-view';
import {loadCSS, loadJS} from 'markmap-common';
import {Transformer} from 'markmap-lib';
import {checkSession} from "@/action/session";
import {httpGet} from "@/utils/http";
-import {ElMessage} from "element-plus";
+import {ElMessage, ElMessageBox} from "element-plus";
const leftBoxHeight = ref(window.innerHeight - 105)
const rightBoxHeight = ref(window.innerHeight - 85)
@@ -106,9 +111,13 @@ const text = ref(`# Geek-AI 助手
* 已集成支付宝支付功能微信支付支持多种会员套餐和点卡购买功能
* 集成插件 API 功能可结合大语言模型的 function 功能开发各种强大的插件
`)
+const md = require('markdown-it')({breaks: true});
+const content = ref(text.value)
+const html = ref("")
const showLoginDialog = ref(false)
const isLogin = ref(false)
-const power = ref(0)
+const loginUser = ref({power: 0})
const transformer = new Transformer();
const {scripts, styles} = transformer.getAssets()
loadCSS(styles);
@@ -119,6 +128,7 @@ const svgRef = ref(null)
const markMap = ref(null)
const models = ref([])
const modelID = ref(0)
+const loading = ref(false)
onMounted(() => {
initData()
@@ -128,45 +138,165 @@ onMounted(() => {
const initData = () => {
checkSession().then(user => {
-power.value = user['power']
+loginUser.value = user
isLogin.value = true
+httpGet("/api/model/list").then(res => {
+for (let v of res.data) {
+if (v.platform === "OpenAI") {
+models.value.push(v)
+}
+}
+modelID.value = models.value[0].id
+connect(user.id)
+}).catch(e => {
+ElMessage.error("获取模型失败:" + e.message)
+})
}).catch(() => {
});
-httpGet("/api/model/list").then(res => {
-for (let v of res.data) {
-if (v.platform === "OpenAI") {
-models.value.push(v)
-}
-}
-modelID.value = models.value[0].id
-}).catch(e => {
-ElMessage.error("获取模型失败:" + e.message)
-})
}
const update = () => {
-const {root} = transformer.transform(text.value)
+const {root} = transformer.transform(processContent(text.value))
markMap.value.setData(root)
markMap.value.fit()
}
-onUpdated(update)
const processContent = (text) => {
const arr = []
const lines = text.split("\n")
for (let line of lines) {
if (line.indexOf("```") !== -1) {
continue
}
arr.push(line)
}
return arr.join("\n")
}
onUnmounted(() => {
if (socket.value !== null) {
socket.value.close()
}
socket.value = null
})
window.onresize = () => {
leftBoxHeight.value = window.innerHeight - 145
rightBoxHeight.value = window.innerHeight - 85
}
const socket = ref(null)
const heartbeatHandle = ref(null)
const connect = (userId) => {
if (socket.value !== null) {
socket.value.close()
}
let host = process.env.VUE_APP_WS_HOST
if (host === '') {
if (location.protocol === 'https:') {
host = 'wss://' + location.host;
} else {
host = 'ws://' + location.host;
}
}
// send a heartbeat periodically to keep the connection alive
const sendHeartbeat = () => {
clearTimeout(heartbeatHandle.value)
new Promise((resolve, reject) => {
if (socket.value !== null) {
socket.value.send(JSON.stringify({type: "heartbeat", content: "ping"}))
}
resolve("success")
}).then(() => {
heartbeatHandle.value = setTimeout(() => sendHeartbeat(), 5000)
});
}
const _socket = new WebSocket(host + `/api/markMap/client?user_id=${userId}&model_id=${modelID.value}`);
_socket.addEventListener('open', () => {
socket.value = _socket;
// start the heartbeat loop
sendHeartbeat()
});
_socket.addEventListener('message', event => {
if (event.data instanceof Blob) {
const reader = new FileReader();
reader.readAsText(event.data, "UTF-8")
reader.onload = () => {
const data = JSON.parse(String(reader.result))
switch (data.type) {
case "start":
text.value = ""
break
case "middle":
text.value += data.content
html.value = md.render(processContent(text.value))
break
case "end":
loading.value = false
nextTick(() => update())
break
case "error":
loading.value = false
ElMessage.error(data.content)
break
}
}
}
})
_socket.addEventListener('close', () => {
loading.value = false
if (socket.value !== null) {
ElMessageBox.confirm(
'检测到您已经在其他客户端创建了新的连接,当前连接将被关闭!',
'提示',
{
dangerouslyUseHTMLString: true,
confirmButtonText: '重新连接',
cancelButtonText: '关闭',
type: 'warning',
}
).then(() => {
connect(userId)
}).catch(() => {
ElMessage({
type: 'info',
message: '连接已关闭',
})
})
}
});
}
const generate = () => {
+text.value = content.value
update()
}
// use AI to generate the mind map
const generateAI = () => {
if (prompt.value === '') {
return ElMessage.error("请输入你的需求")
}
if (!isLogin.value) {
showLoginDialog.value = true
return
}
loading.value = true
socket.value.send(JSON.stringify({type: "message", content: prompt.value}))
}
const changeModel = () => {
if (socket.value !== null) {
socket.value.send(JSON.stringify({type: "model_id", content: modelID.value}))
}
} }
</script>