Implement blinking chat cursor; implement chat session context

RockYang 2023-03-19 00:31:35 +08:00
parent 59782e9e57
commit e2c3f50c8a
8 changed files with 146 additions and 92 deletions

README.md Normal file

@@ -0,0 +1,10 @@
+# Wechat-GPT
+A chat application based on ChatGPT
+## TODO LIST
+* [ ] Persist each user's chat context with LevelDB (see the sketch after this list)
+* [ ] Store users' chat history in MySQL
+* [ ] Authentication for user chats
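
The LevelDB item above is still an open TODO; nothing in this commit implements it. Below is a minimal sketch of how per-user context could be persisted, assuming the github.com/syndtr/goleveldb/leveldb package; the Message mirror, key prefix, and helper names are illustrative only and not part of the repository.

```go
package main

import (
	"encoding/json"
	"log"

	"github.com/syndtr/goleveldb/leveldb"
)

// Message mirrors the role/content pair the chat handler keeps as context.
type Message struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// saveContext stores one user's context as a JSON blob keyed by user id.
func saveContext(db *leveldb.DB, userId string, history []Message) error {
	data, err := json.Marshal(history)
	if err != nil {
		return err
	}
	return db.Put([]byte("chat:context:"+userId), data, nil)
}

// loadContext restores the context; a missing key simply means no history yet.
func loadContext(db *leveldb.DB, userId string) ([]Message, error) {
	data, err := db.Get([]byte("chat:context:"+userId), nil)
	if err == leveldb.ErrNotFound {
		return []Message{}, nil
	}
	if err != nil {
		return nil, err
	}
	var history []Message
	err = json.Unmarshal(data, &history)
	return history, err
}

func main() {
	db, err := leveldb.OpenFile("data/context.db", nil)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	_ = saveContext(db, "user-1", []Message{{Role: "user", Content: "hello"}})
	history, _ := loadContext(db, "user-1")
	log.Printf("restored context: %+v", history)
}
```

Serializing the whole slice keeps the read/write path trivial; a real implementation would probably truncate the history to a fixed window before writing.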


@@ -12,6 +12,7 @@ import (
    "net/http"
    "net/url"
    "openai/types"
+   "strings"
    "time"
)

@@ -52,8 +53,9 @@ func (s *Server) sendMessage(userId string, text string, ws Client) error {
        Stream: true,
    }
    var history []types.Message
-   if v, ok := s.History[userId]; ok {
+   if v, ok := s.History[userId]; ok && s.Config.Chat.EnableContext {
        history = v
+       //logger.Infof("上下文历史消息:%+v", history)
    } else {
        history = make([]types.Message, 0)
    }

@@ -62,22 +64,22 @@ func (s *Server) sendMessage(userId string, text string, ws Client) error {
        Content: text,
    })
-   logger.Info("上下文历史消息:%+v", s.History[userId])
    requestBody, err := json.Marshal(r)
    if err != nil {
        return err
    }
-   request, err := http.NewRequest(http.MethodPost, s.Config.OpenAi.ApiURL, bytes.NewBuffer(requestBody))
+   request, err := http.NewRequest(http.MethodPost, s.Config.Chat.ApiURL, bytes.NewBuffer(requestBody))
    if err != nil {
        return err
    }
    // TODO: API KEY load balancing
    rand.Seed(time.Now().UnixNano())
-   index := rand.Intn(len(s.Config.OpenAi.ApiKeys))
+   index := rand.Intn(len(s.Config.Chat.ApiKeys))
+   logger.Infof("Use API KEY: %s", s.Config.Chat.ApiKeys[index])
    request.Header.Add("Content-Type", "application/json")
-   request.Header.Add("Authorization", fmt.Sprintf("Bearer %s", s.Config.OpenAi.ApiKeys[index]))
+   request.Header.Add("Authorization", fmt.Sprintf("Bearer %s", s.Config.Chat.ApiKeys[index]))
    uri := url.URL{}
    proxy, _ := uri.Parse(s.Config.ProxyURL)

@@ -104,11 +106,12 @@ func (s *Server) sendMessage(userId string, text string, ws Client) error {
    for {
        line, err := reader.ReadString('\n')
        if err != nil && err != io.EOF {
-           fmt.Println(err)
+           logger.Error(err)
            break
        }
        if line == "" {
+           replyMessage(types.WsMessage{Type: types.WsEnd}, ws)
            break
        } else if len(line) < 20 {
            continue

@@ -116,29 +119,47 @@ func (s *Server) sendMessage(userId string, text string, ws Client) error {
        err = json.Unmarshal([]byte(line[6:]), &responseBody)
        if err != nil {
-           fmt.Println(err)
+           logger.Error(err)
            continue
        }
        // initialize the role
        if responseBody.Choices[0].Delta.Role != "" && message.Role == "" {
            message.Role = responseBody.Choices[0].Delta.Role
+           replyMessage(types.WsMessage{Type: types.WsStart}, ws)
            continue
-       } else {
-           contents = append(contents, responseBody.Choices[0].Delta.Content)
-       }
-       // push the message to the client
-       err = ws.(*WsClient).Send([]byte(responseBody.Choices[0].Delta.Content))
-       if err != nil {
-           logger.Error(err)
-       }
-       fmt.Print(responseBody.Choices[0].Delta.Content)
-       if responseBody.Choices[0].FinishReason != "" {
+       } else if responseBody.Choices[0].FinishReason != "" { // output finished or was interrupted
+           replyMessage(types.WsMessage{Type: types.WsEnd}, ws)
            break
+       } else {
+           content := responseBody.Choices[0].Delta.Content
+           contents = append(contents, content)
+           replyMessage(types.WsMessage{
+               Type:    types.WsMiddle,
+               Content: responseBody.Choices[0].Delta.Content,
+           }, ws)
        }
    }

    // append the history messages
+   history = append(history, types.Message{
+       Role:    "user",
+       Content: text,
+   })
+   message.Content = strings.Join(contents, "")
    history = append(history, message)
    s.History[userId] = history
    return nil
}

+// reply a message to the client
+func replyMessage(message types.WsMessage, client Client) {
+   msg, err := json.Marshal(message)
+   if err != nil {
+       logger.Errorf("Error for decoding json data: %v", err.Error())
+       return
+   }
+   err = client.(*WsClient).Send(msg)
+   if err != nil {
+       logger.Errorf("Error for reply message: %v", err.Error())
+   }
+}
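
For reference only (not part of the diff): these are the three frame shapes that sendMessage and replyMessage put on the WebSocket. The sketch marshals a local mirror of the types.WsMessage struct added in this commit so the wire format is visible; the mirror exists purely for this example.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of types.WsMsgType / types.WsMessage from this commit,
// reproduced here only to show the JSON that reaches the browser.
type WsMsgType string

type WsMessage struct {
	Type    WsMsgType `json:"type"`
	Content string    `json:"content"`
}

func main() {
	frames := []WsMessage{
		{Type: "start"},                    // reply begins: client appends an empty bubble with a cursor
		{Type: "middle", Content: "Hello"}, // one streamed fragment of the reply
		{Type: "end"},                      // reply finished: client hides the cursor
	}
	for _, f := range frames {
		b, _ := json.Marshal(f)
		fmt.Println(string(b))
	}
	// Output:
	// {"type":"start","content":""}
	// {"type":"middle","content":"Hello"}
	// {"type":"end","content":""}
}
```

The Vue client further down in this commit branches on exactly these type values: start pushes an empty reply with cursor: true, middle appends content, and end clears the cursor and re-enables sending.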


@@ -19,7 +19,7 @@ func (s *Server) ConfigSetHandle(c *gin.Context) {
    }
    // API key
    if key, ok := data["api_key"]; ok && len(key) > 20 {
-       s.Config.OpenAi.ApiKeys = append(s.Config.OpenAi.ApiKeys, key)
+       s.Config.Chat.ApiKeys = append(s.Config.Chat.ApiKeys, key)
    }
    // proxy URL

@@ -29,7 +29,7 @@ func (s *Server) ConfigSetHandle(c *gin.Context) {
    // Model
    if model, ok := data["model"]; ok {
-       s.Config.OpenAi.Model = model
+       s.Config.Chat.Model = model
    }
    // Temperature

@@ -42,7 +42,7 @@ func (s *Server) ConfigSetHandle(c *gin.Context) {
            })
            return
        }
-       s.Config.OpenAi.Temperature = float32(v)
+       s.Config.Chat.Temperature = float32(v)
    }
    // max_tokens

@@ -55,8 +55,20 @@ func (s *Server) ConfigSetHandle(c *gin.Context) {
            })
            return
        }
-       s.Config.OpenAi.MaxTokens = v
+       s.Config.Chat.MaxTokens = v
+   }
+   // enable Context
+   if enableContext, ok := data["enable_context"]; ok {
+       v, err := strconv.ParseBool(enableContext)
+       if err != nil {
+           c.JSON(http.StatusOK, types.BizVo{
+               Code:    types.InvalidParams,
+               Message: "enable_context must be a bool parameter",
+           })
+           return
+       }
+       s.Config.Chat.EnableContext = v
    }
    // save the config file


@@ -2,9 +2,9 @@ package types
import (
    "bytes"
-   "fmt"
    "github.com/BurntSushi/toml"
    "net/http"
+   logger2 "openai/logger"
    "openai/utils"
    "os"
)

@@ -13,16 +13,17 @@ type Config struct {
    Listen   string
    Session  Session
    ProxyURL string
-   OpenAi   OpenAi
+   Chat     Chat
}
-// OpenAi configs struct
-type OpenAi struct {
+// Chat configs struct
+type Chat struct {
    ApiURL        string
    ApiKeys       []string
    Model         string
    Temperature   float32
    MaxTokens     int
+   EnableContext bool // whether to keep the chat context
}
// Session configs struct

@@ -51,21 +52,24 @@ func NewDefaultConfig() *Config {
            HttpOnly: false,
            SameSite: http.SameSiteLaxMode,
        },
-       OpenAi: OpenAi{
+       Chat: Chat{
            ApiURL:        "https://api.openai.com/v1/chat/completions",
            ApiKeys:       []string{""},
            Model:         "gpt-3.5-turbo",
            MaxTokens:     1024,
            Temperature:   1.0,
+           EnableContext: true,
        },
    }
}

+var logger = logger2.GetLogger()
+
func LoadConfig(configFile string) (*Config, error) {
    var config *Config
    _, err := os.Stat(configFile)
    if err != nil {
-       fmt.Errorf("Error: %s", err.Error())
+       logger.Errorf("Error open config file: %s", err.Error())
        config = NewDefaultConfig()
        // save config
        err := SaveConfig(config, configFile)

@@ -76,7 +80,6 @@ func LoadConfig(configFile string) (*Config, error) {
        return config, nil
    }
    _, err = toml.DecodeFile(configFile, &config)
-   fmt.Println(config)
    if err != nil {
        return nil, err
    }


@@ -10,11 +10,18 @@ type BizVo struct {
    Data interface{} `json:"data,omitempty"`
}
-// WsVo Websocket message VO
-type WsVo struct {
-   Stop    bool
-   Content string
+// WsMessage Websocket message
+type WsMessage struct {
+   Type    WsMsgType `json:"type"` // message type: start, end
+   Content string    `json:"content"`
}

+type WsMsgType string
+
+const (
+   WsStart  = WsMsgType("start")
+   WsMiddle = WsMsgType("middle")
+   WsEnd    = WsMsgType("end")
+)
+
type BizCode int


@ -1,21 +0,0 @@
# qark-webssh
## Project setup
```
npm install
```
### Compiles and hot-reloads for development
```
npm run serve
```
### Compiles and minifies for production
```
npm run build
```
### Lints and fixes files
```
npm run lint
```


@@ -6,7 +6,10 @@
        <div class="chat-item">
            <div class="triangle"></div>
-           <div class="content">{{ content }}</div>
+           <div class="content">
+               <span v-html="content"></span>
+               <span class="cursor" v-show="cursor"></span>
+           </div>
        </div>
    </div>
</template>

@@ -24,6 +27,10 @@ export default defineComponent({
        icon: {
            type: String,
            default: 'images/gpt-icon.png',
+       },
+       cursor: {
+           type: Boolean,
+           default: true
        }
    },
    data() {

@@ -68,6 +75,21 @@ export default defineComponent({
        background-color: #fff;
        font-size: var(--content-font-size);
        border-radius: 5px;

+       .cursor {
+           height 24px;
+           border-left 1px solid black;
+           animation: cursorImg 1s infinite steps(1, start);
+
+           @keyframes cursorImg {
+               0%, 100% {
+                   opacity: 0;
+               }
+               50% {
+                   opacity: 1;
+               }
+           }
+       }
    }
}
}


@@ -9,6 +9,7 @@
                :content="chat.content"/>
            <chat-reply v-else-if="chat.type==='reply'"
                        :icon="chat.icon"
+                       :cursor="chat.cursor"
                        :content="chat.content"/>
        </div>

@@ -42,6 +43,7 @@ import {defineComponent, nextTick} from 'vue'
import ChatPrompt from "@/components/ChatPrompt.vue";
import ChatReply from "@/components/ChatReply.vue";
import {randString} from "@/utils/libs";
+import {ElMessage} from 'element-plus'

export default defineComponent({
    name: "XChat",

@@ -49,20 +51,7 @@ export default defineComponent({
    data() {
        return {
            title: "ChatGPT 控制台",
-           chatData: [
-               {
-                   id: "1",
-                   type: 'prompt',
-                   icon: 'images/user-icon.png',
-                   content: '请问棒球棒可以放进人的耳朵里面吗'
-               },
-               {
-                   id: "2",
-                   type: 'reply',
-                   icon: 'images/gpt-icon.png',
-                   content: '不可以。棒球棒的直径通常都比人的耳道大得多,而且人的耳朵是非常敏感和易受伤的,如果硬塞棒球棒可能会导致耳道损伤、出血和疼痛等问题。此外,塞入耳道的物体还可能引起耳屎的囤积和感染等问题,因此强烈建议不要将任何非耳朵医学用品的物品插入耳朵。如果您有耳道不适或者其他耳朵健康问题,应该咨询专业医生的建议。'
-               }
-           ],
+           chatData: [],
            inputBoxHeight: 63,
            inputBoxWidth: 0,
            inputValue: '',

@@ -99,24 +88,34 @@ export default defineComponent({
            window.addEventListener('resize', this.windowResize);

            // WebSocket
-           const socket = new WebSocket(process.env.VUE_APP_WS_HOST+'/api/chat');
+           const socket = new WebSocket(process.env.VUE_APP_WS_HOST + '/api/chat');
            socket.addEventListener('open', () => {
-               console.log('WebSocket 连接已打开');
+               ElMessage.success('创建会话成功!');
            });

            socket.addEventListener('message', event => {
                if (event.data instanceof Blob) {
                    const reader = new FileReader();
                    reader.readAsText(event.data, "UTF-8");
                    reader.onload = () => {
-                       // this.chatData.push({
-                       //     type: "reply",
-                       //     id: randString(32),
-                       //     icon: 'images/gpt-icon.png',
-                       //     content: reader.result
-                       // });
-                       this.chatData[this.chatData.length - 1]["content"] += reader.result
-                       this.sending = false;
+                       const data = JSON.parse(String(reader.result));
+                       if (data.type === 'start') {
+                           this.chatData.push({
+                               type: "reply",
+                               id: randString(32),
+                               icon: 'images/gpt-icon.png',
+                               content: "",
+                               cursor: true
+                           });
+                       } else if (data.type === 'end') {
+                           this.sending = false;
+                           this.chatData[this.chatData.length - 1]["cursor"] = false;
+                       } else {
+                           let content = data.content;
+                           if (content.indexOf("\n\n") >= 0) {
+                               content = content.replace("\n\n", "<br />");
+                           }
+                           this.chatData[this.chatData.length - 1]["content"] += content;
+                       }
                        //
                        nextTick(() => {
                            document.getElementById('container').scrollTo(0, document.getElementById('container').scrollHeight)

@@ -125,11 +124,11 @@ export default defineComponent({
                }
            });

-           socket.addEventListener('close', event => {
-               console.log('WebSocket 连接已关闭', event.reason);
+           socket.addEventListener('close', () => {
+               ElMessage.error('会话发生异常,请刷新页面后重试');
            });
            socket.addEventListener('error', event => {
-               console.error('WebSocket 连接发生错误', event);
+               ElMessage.error('WebSocket 连接发生错误: ' + event.message);
            });

            this.socket = socket;

@@ -240,6 +239,7 @@ export default defineComponent({
#container {
    overflow auto;
+   width 100%;

    .chat-box {
        //