feat: auto-translate image creation prompt

RockYang
2023-12-19 18:54:19 +08:00
parent 3d37087916
commit bf19120c27
11 changed files with 446 additions and 99 deletions

View File

@@ -132,11 +132,13 @@ func (h *ChatHandler) sendOpenAiMessage(
		utils.ReplyMessage(ws, ErrImg)
	} else {
		f := h.App.Functions[functionName]
		// translate prompt
		if functionName == types.FuncImage {
			params["user_id"] = userVo.Id
			params["role_id"] = role.Id
			params["chat_id"] = session.ChatId
			params["session_id"] = session.SessionId
			const translatePromptTemplate = "Translate the following painting prompt words into English keyword phrases. Without any explanation, directly output the keyword phrases separated by commas. The content to be translated is: [%s]"
			r, err := utils.OpenAIRequest(fmt.Sprintf(translatePromptTemplate, params["prompt"]), apiKey, h.App.Config.ProxyURL, chatConfig.OpenAI.ApiURL)
			if err == nil {
				params["prompt"] = r
			}
		}
		data, err := f.Invoke(params)
		if err != nil {
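
Both changes in this commit converge on a shared utils.OpenAIRequest helper. Its implementation is not shown in this diff; judging from the arguments used at the call site above and from the hand-rolled request that PromptHandler drops further down (gpt-3.5-turbo, temperature 0.9, max_tokens 1024, non-streaming), it presumably looks roughly like the sketch below. The signature, package layout, and field names are assumptions, not the actual chatplus source.

// Sketch of the assumed utils.OpenAIRequest helper: send one non-streaming
// chat completion and return the first choice's content. Names, fields and
// error handling here are assumptions, not the actual chatplus code.
package utils

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func OpenAIRequest(prompt, apiKey, proxyURL, apiURL string) (string, error) {
	body, err := json.Marshal(map[string]interface{}{
		"model":       "gpt-3.5-turbo",
		"temperature": 0.9,
		"max_tokens":  1024,
		"stream":      false,
		"messages":    []map[string]string{{"role": "user", "content": prompt}},
	})
	if err != nil {
		return "", err
	}

	// Route the request through the configured proxy, if any.
	client := &http.Client{}
	if proxyURL != "" {
		if u, perr := url.Parse(proxyURL); perr == nil {
			client.Transport = &http.Transport{Proxy: http.ProxyURL(u)}
		}
	}

	req, err := http.NewRequest("POST", apiURL, bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	res, err := client.Do(req)
	if err != nil {
		return "", fmt.Errorf("error with http request: %v", err)
	}
	defer res.Body.Close()

	// Decode only the fields needed to extract the generated text.
	var data struct {
		Choices []struct {
			Message struct {
				Content string `json:"content"`
			} `json:"message"`
		} `json:"choices"`
	}
	if err := json.NewDecoder(res.Body).Decode(&data); err != nil {
		return "", err
	}
	if len(data.Choices) == 0 {
		return "", fmt.Errorf("no choices returned by the OpenAI API")
	}
	return data.Choices[0].Message.Content, nil
}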

View File

@@ -4,11 +4,10 @@ import (
"chatplus/core"
"chatplus/core/types"
"chatplus/store/model"
"chatplus/utils"
"chatplus/utils/resp"
"fmt"
"github.com/imroc/req/v3"
"github.com/gin-gonic/gin"
"gorm.io/gorm"
)
@@ -27,27 +26,6 @@ func NewPromptHandler(app *core.AppServer, db *gorm.DB) *PromptHandler {
	return h
}
type apiRes struct {
	Model   string `json:"model"`
	Choices []struct {
		Index   int `json:"index"`
		Message struct {
			Role    string `json:"role"`
			Content string `json:"content"`
		} `json:"message"`
		FinishReason string `json:"finish_reason"`
	} `json:"choices"`
}
type apiErrRes struct {
	Error struct {
		Code    interface{} `json:"code"`
		Message string      `json:"message"`
		Param   interface{} `json:"param"`
		Type    string      `json:"type"`
	} `json:"error"`
}
// Rewrite translates and rewrites the prompt with ChatGPT
func (h *PromptHandler) Rewrite(c *gin.Context) {
	var data struct {
@@ -93,28 +71,5 @@ func (h *PromptHandler) request(prompt string, promptTemplate string) (string, error) {
		return "", fmt.Errorf("error with fetch OpenAI API KEY: %v", res.Error)
	}
	messages := make([]interface{}, 1)
	messages[0] = types.Message{
		Role:    "user",
		Content: fmt.Sprintf(promptTemplate, prompt),
	}
	var response apiRes
	var errRes apiErrRes
	r, err := req.C().SetProxyURL(h.App.Config.ProxyURL).R().SetHeader("Content-Type", "application/json").
		SetHeader("Authorization", "Bearer "+apiKey.Value).
		SetBody(types.ApiRequest{
			Model:       "gpt-3.5-turbo",
			Temperature: 0.9,
			MaxTokens:   1024,
			Stream:      false,
			Messages:    messages,
		}).
		SetErrorResult(&errRes).
		SetSuccessResult(&response).Post(h.App.ChatConfig.OpenAI.ApiURL)
	if err != nil || r.IsErrorState() {
		return "", fmt.Errorf("error with http request: %v%v%s", err, r.Err, errRes.Error.Message)
	}
	return response.Choices[0].Message.Content, nil
	return utils.OpenAIRequest(fmt.Sprintf(promptTemplate, prompt), apiKey.Value, h.App.Config.ProxyURL, h.App.ChatConfig.OpenAI.ApiURL)
}
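
Worth noting about the chat handler change: the translation is best-effort, because the translated text only replaces params["prompt"] when the helper call returns without error. A minimal sketch of that fallback flow, reusing the assumed OpenAIRequest sketch above (translatePrompt is a hypothetical wrapper, not part of this commit):

package utils

import "fmt"

// translatePromptTemplate mirrors the template introduced in the chat handler.
const translatePromptTemplate = "Translate the following painting prompt words into English keyword phrases. " +
	"Without any explanation, directly output the keyword phrases separated by commas. " +
	"The content to be translated is: [%s]"

// translatePrompt is a hypothetical wrapper illustrating the best-effort flow:
// if the OpenAI call fails, the original (untranslated) prompt is kept.
func translatePrompt(prompt, apiKey, proxyURL, apiURL string) string {
	r, err := OpenAIRequest(fmt.Sprintf(translatePromptTemplate, prompt), apiKey, proxyURL, apiURL)
	if err != nil {
		return prompt
	}
	return r
}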