Mirror of https://github.com/yangjian102621/geekai.git (synced 2025-09-17 16:56:38 +08:00)

fixed: go-api => add a global error-handling handler; fix the bug where an exception in business logic caused the service to exit

This commit is contained in: parent eadb9a733f, commit d20cc367b8
@@ -162,6 +162,10 @@ docker-compose up -d
 * Admin dashboard: http://localhost:8080/admin
 * Mobile client: http://localhost:8080/mobile
 
+> Note: you need to open the admin dashboard at http://localhost:8080/admin,
+> log in with the admin username and password you set earlier in the configuration docs,
+> and then add an OpenAI API KEY under the `API KEY 管理` menu before AI chat will work properly.
+
 ## Manual Installation and Deployment
 
 Since this project is developed with the front end and back end separated, the two parts also need to be deployed separately. Here I use a Linux system as an example to walk through the deployment process:
@@ -254,9 +258,7 @@ server {
 }
 ```
 
-After the configuration is done, restart Nginx and open the admin dashboard at [http://www.chatgpt.com/admin](http://www.chatgpt.com/admin),
-log in with the admin username and password you set earlier in the configuration docs,
-and then add an OpenAI API KEY under the `API KEY 管理` menu.
+After the configuration is done, restart Nginx, then []
 
 
@@ -72,7 +72,7 @@ func (s *AppServer) Run(db *gorm.DB) error {
 func errorHandler(c *gin.Context) {
 	defer func() {
 		if r := recover(); r != nil {
-			logger.Error("panic: %v\n", r)
+			logger.Error("Handler Panic: %v\n", r)
 			debug.PrintStack()
 			c.JSON(http.StatusOK, types.BizVo{Code: types.Failed, Message: types.ErrorMsg})
 			c.Abort()
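To see how this kind of global recovery middleware keeps the process alive, here is a minimal self-contained sketch in Gin. The `errorHandler` name, the `Handler Panic` log line, and the generic JSON error body follow the hunk above; the `bizVo` struct, the `failed` code, and the registration via `Use` are simplified assumptions rather than the project's exact types and wiring.

```go
package main

import (
	"log"
	"net/http"
	"runtime/debug"

	"github.com/gin-gonic/gin"
)

// bizVo is a simplified stand-in for the project's types.BizVo response wrapper.
type bizVo struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
}

// failed is an assumed generic error code (the project uses types.Failed).
const failed = 1

// errorHandler recovers from any panic raised later in the handler chain,
// logs the stack, and replies with a generic error body instead of letting
// the panic kill the process.
func errorHandler(c *gin.Context) {
	defer func() {
		if r := recover(); r != nil {
			log.Printf("Handler Panic: %v", r)
			debug.PrintStack()
			c.JSON(http.StatusOK, bizVo{Code: failed, Message: "sorry, something went wrong, please try again later"})
			c.Abort()
		}
	}()
	c.Next() // run the remaining handlers under the deferred recover
}

func main() {
	r := gin.New()
	r.Use(errorHandler) // register globally so every route is covered
	r.GET("/boom", func(c *gin.Context) {
		panic("business logic blew up") // recovered by errorHandler
	})
	_ = r.Run(":8080")
}
```

Because the deferred `recover` runs on the same goroutine that executes the route handlers, a panic in business code becomes a JSON error response and the server keeps serving other requests.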
@@ -25,7 +25,7 @@ import (
 	"gorm.io/gorm"
 )
 
-const ErrorMsg = "抱歉,AI 助手开小差了,请马上联系管理员去盘它。"
+const ErrorMsg = "抱歉,AI 助手开小差了,请稍后再试。"
 
 type ChatHandler struct {
 	BaseHandler
@@ -218,7 +218,7 @@ func (h *ChatHandler) sendMessage(ctx context.Context, session types.ChatSession
 }
 
 	err = json.Unmarshal([]byte(line[6:]), &responseBody)
-	if err != nil { // failed to parse the response data
+	if err != nil || len(responseBody.Choices) == 0 { // failed to parse the response data
 		logger.Error(err, line)
-		replyMessage(ws, ErrorMsg)
+		replyMessage(ws, "")
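The extra `len(responseBody.Choices) == 0` check matters because the OpenAI streaming API can emit `data:` lines whose `choices` array is empty; indexing `Choices[0]` on such a payload panics and, without recovery, can take the whole service down. Below is a minimal sketch of the pattern under simplified types; the project's real `responseBody` struct and `replyMessage` helper are not reproduced here.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// chunk is a simplified stand-in for a streamed completion payload.
type chunk struct {
	Choices []struct {
		Delta struct {
			Content string `json:"content"`
		} `json:"delta"`
	} `json:"choices"`
}

// extractDelta parses one SSE line ("data: {...}") and returns the text
// delta, or ok=false when the line is unparsable or carries no choices.
func extractDelta(line string) (text string, ok bool) {
	if len(line) < 6 { // shorter than the "data: " prefix
		return "", false
	}
	var body chunk
	err := json.Unmarshal([]byte(line[6:]), &body)
	if err != nil || len(body.Choices) == 0 { // parse error or empty choices: skip
		return "", false
	}
	return body.Choices[0].Delta.Content, true
}

func main() {
	lines := []string{
		`data: {"choices":[{"delta":{"content":"Hello"}}]}`,
		`data: {"choices":[]}`, // would panic on Choices[0] without the guard
		`data: not-json`,
	}
	for _, l := range lines {
		if text, ok := extractDelta(l); ok {
			fmt.Println("delta:", text)
		} else {
			fmt.Println("skipped:", l)
		}
	}
}
```

Treating both a parse error and an empty `choices` array as "nothing to forward" keeps the streaming loop alive instead of crashing on an unexpected chunk.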
@@ -48,6 +48,12 @@ func (l *AppLifecycle) OnStop(context.Context) error {
 
 func main() {
 	logger.Info("Loading config file: ", configFile)
+	defer func() {
+		if err := recover(); err != nil {
+			logger.Error("Panic Error:", err)
+		}
+	}()
+
 	app := fx.New(
 		// initialize the application config
 		fx.Provide(func() *types.AppConfig {
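The `defer`/`recover` added at the top of `main` gives startup code (config loading, dependency wiring) a last-resort guard, so a startup panic is logged before the process exits instead of dumping a bare stack trace. A small illustrative sketch, independent of the project's fx wiring; `mustLoadConfig` is a hypothetical stand-in:

```go
package main

import "log"

// mustLoadConfig is a hypothetical stand-in for config loading that may panic
// on a missing or malformed file.
func mustLoadConfig() string {
	panic("config file not found")
}

func main() {
	// last-resort guard: log the panic instead of exiting with a bare stack trace
	defer func() {
		if err := recover(); err != nil {
			log.Printf("Panic Error: %v", err)
		}
	}()

	cfg := mustLoadConfig() // a panic here is caught by the deferred recover above
	log.Println("loaded config:", cfg)
}
```

Note that `recover` only catches panics raised on the same goroutine, so this guard covers `main` itself; panics during request handling still rely on the `errorHandler` middleware above.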
@@ -12,21 +12,19 @@ npm run build
 cd ../docker
 
 # remove docker image if exists
-docker rmi registry.cn-hangzhou.aliyuncs.com/geekmaster/chatgpt-plus-go:$version
-docker rmi chatgpt-plus-go:$version
+docker rmi -f registry.cn-hangzhou.aliyuncs.com/geekmaster/chatgpt-plus-go:$version
+docker rmi -f chatgpt-plus-go:$version
 # build docker image for chatgpt-plus-go
 docker build -t chatgpt-plus-go:$version -f dockerfile-api-go ../
 
 # build docker image for chatgpt-plus-vue
-docker rmi registry.cn-hangzhou.aliyuncs.com/geekmaster/chatgpt-plus-vue:$version
-docker rmi chatgpt-plus-vue:$version
+docker rmi -f registry.cn-hangzhou.aliyuncs.com/geekmaster/chatgpt-plus-vue:$version
+docker rmi -f chatgpt-plus-vue:$version
 docker build -t chatgpt-plus-vue:$version -f dockerfile-vue ../
 
 # add tag for aliyun docker registry
 goImageId=`docker images |grep chatgpt-plus-go |grep $version |awk '{print $3}'`
 docker tag $goImageId registry.cn-hangzhou.aliyuncs.com/geekmaster/chatgpt-plus-go:$version
 echo $goImageId
 vueImageId=`docker images |grep chatgpt-plus-vue |grep $version |awk '{print $3}'`
 echo $vueImageId
 docker tag $vueImageId registry.cn-hangzhou.aliyuncs.com/geekmaster/chatgpt-plus-vue:$version