Commit 77ea6bec46: Merge remote-tracking branch 'upstream/main'
Mirror of https://github.com/linux-do/new-api.git
@@ -77,6 +77,7 @@ var LogConsumeEnabled = true
 
 var SMTPServer = ""
 var SMTPPort = 587
+var SMTPSSLEnabled = false
 var SMTPAccount = ""
 var SMTPFrom = ""
 var SMTPToken = ""
@@ -24,7 +24,7 @@ func SendEmail(subject string, receiver string, content string) error {
     addr := fmt.Sprintf("%s:%d", SMTPServer, SMTPPort)
     to := strings.Split(receiver, ";")
     var err error
-    if SMTPPort == 465 {
+    if SMTPPort == 465 || SMTPSSLEnabled {
         tlsConfig := &tls.Config{
             InsecureSkipVerify: true,
             ServerName:         SMTPServer,
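For context on the branch above: when SMTPSSLEnabled is set (or the port is 465), the sender opens an implicit-TLS connection before speaking SMTP. A minimal standalone sketch of that path — not the project's SendEmail itself; the server, account, and token values are placeholders:

package main

import (
    "crypto/tls"
    "fmt"
    "net/smtp"
)

// demoSendOverTLS mirrors the branch taken when SMTPPort == 465 || SMTPSSLEnabled:
// dial TLS first, then run the SMTP protocol over the encrypted connection.
func demoSendOverTLS(server string, port int, account, token string) error {
    addr := fmt.Sprintf("%s:%d", server, port)
    tlsConfig := &tls.Config{
        InsecureSkipVerify: true, // matches the diff; a hardened setup would verify the certificate
        ServerName:         server,
    }
    conn, err := tls.Dial("tcp", addr, tlsConfig)
    if err != nil {
        return err
    }
    client, err := smtp.NewClient(conn, server)
    if err != nil {
        return err
    }
    defer client.Close()
    return client.Auth(smtp.PlainAuth("", account, token, server))
}

func main() {
    // Placeholder values; point at a real server to actually test.
    if err := demoSendOverTLS("smtp.example.com", 465, "user@example.com", "app-token"); err != nil {
        fmt.Println("send failed:", err)
    }
}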
@@ -4,7 +4,8 @@ import "strings"
 
 var CheckSensitiveEnabled = true
 var CheckSensitiveOnPromptEnabled = true
-var CheckSensitiveOnCompletionEnabled = true
+
+//var CheckSensitiveOnCompletionEnabled = true
 
 // StopOnSensitiveEnabled: if a sensitive word is detected, stop generation immediately; otherwise replace the word
 var StopOnSensitiveEnabled = true
@@ -37,6 +38,6 @@ func ShouldCheckPromptSensitive() bool {
     return CheckSensitiveEnabled && CheckSensitiveOnPromptEnabled
 }
 
-func ShouldCheckCompletionSensitive() bool {
-    return CheckSensitiveEnabled && CheckSensitiveOnCompletionEnabled
-}
+//func ShouldCheckCompletionSensitive() bool {
+//    return CheckSensitiveEnabled && CheckSensitiveOnCompletionEnabled
+//}
@@ -87,7 +87,7 @@ func testChannel(channel *model.Channel, testModel string) (err error, openaiErr
         err := relaycommon.RelayErrorHandler(resp)
         return fmt.Errorf("status code %d: %s", resp.StatusCode, err.Error.Message), &err.Error
     }
-    usage, respErr, _ := adaptor.DoResponse(c, resp, meta)
+    usage, respErr := adaptor.DoResponse(c, resp, meta)
     if respErr != nil {
         return fmt.Errorf("%s", respErr.Error.Message), &respErr.Error
     }
@@ -147,7 +147,7 @@ func UpdateMidjourneyTaskBulk() {
                 task.Buttons = string(buttonStr)
             }
 
-            if task.Progress != "100%" && responseItem.FailReason != "" {
+            if (task.Progress != "100%" && responseItem.FailReason != "") || (task.Progress == "100%" && task.Status == "FAILURE") {
                 common.LogInfo(ctx, task.MjId+" 构建失败,"+task.FailReason)
                 task.Progress = "100%"
                 err = model.CacheUpdateUserQuota(task.UserId)
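The widened condition also finalizes tasks that report 100% progress but a FAILURE status upstream. A minimal sketch of the predicate, extracted as a standalone function purely for illustration (not part of the patch):

package main

import "fmt"

// taskFailed reproduces the widened check from the diff: a task is treated as
// failed either mid-run (progress short of 100% with a fail reason) or after
// completion (100% progress but FAILURE status reported upstream).
func taskFailed(progress, failReason, status string) bool {
    return (progress != "100%" && failReason != "") ||
        (progress == "100%" && status == "FAILURE")
}

func main() {
    fmt.Println(taskFailed("50%", "banned prompt", "IN_PROGRESS")) // true: old branch
    fmt.Println(taskFailed("100%", "", "FAILURE"))                 // true: new branch
    fmt.Println(taskFailed("100%", "", "SUCCESS"))                 // false
}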
@@ -11,6 +11,12 @@ type TextResponseWithError struct {
     Error OpenAIError `json:"error"`
 }
 
+type SimpleResponse struct {
+    Usage   `json:"usage"`
+    Error   OpenAIError                `json:"error"`
+    Choices []OpenAITextResponseChoice `json:"choices"`
+}
+
 type TextResponse struct {
     Id     string `json:"id"`
     Object string `json:"object"`
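For reference, SimpleResponse keeps only the fields the rewritten OpenaiHandler reads: usage, error, and choices. A self-contained sketch with reduced stand-ins for the dto types (the real OpenAITextResponseChoice differs — message content is stored as a byte slice, for instance):

package main

import (
    "encoding/json"
    "fmt"
)

// Reduced stand-ins for the dto types named in the diff.
type Usage struct {
    PromptTokens     int `json:"prompt_tokens"`
    CompletionTokens int `json:"completion_tokens"`
    TotalTokens      int `json:"total_tokens"`
}

type OpenAIError struct {
    Type    string `json:"type"`
    Message string `json:"message"`
}

type Choice struct {
    Message struct {
        Content string `json:"content"`
    } `json:"message"`
}

// Mirrors the added dto.SimpleResponse: embedded usage plus error and choices.
type SimpleResponse struct {
    Usage   `json:"usage"`
    Error   OpenAIError `json:"error"`
    Choices []Choice    `json:"choices"`
}

func main() {
    body := `{"usage":{"prompt_tokens":9,"completion_tokens":12,"total_tokens":21},"choices":[{"message":{"content":"hello"}}]}`
    var r SimpleResponse
    if err := json.Unmarshal([]byte(body), &r); err != nil {
        panic(err)
    }
    fmt.Println(r.TotalTokens, r.Choices[0].Message.Content) // 21 hello
}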
@@ -44,7 +44,7 @@ func Distribute() func(c *gin.Context) {
         // Select a channel for the user
         var modelRequest ModelRequest
         var err error
-        if strings.HasPrefix(c.Request.URL.Path, "/mj") {
+        if strings.Contains(c.Request.URL.Path, "/mj/") {
             relayMode := relayconstant.Path2RelayModeMidjourney(c.Request.URL.Path)
             if relayMode == relayconstant.RelayModeMidjourneyTaskFetch ||
                 relayMode == relayconstant.RelayModeMidjourneyTaskFetchByCondition ||
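Why Contains rather than HasPrefix: the mode-prefixed Midjourney routes registered later in this diff (/:mode/mj) do not start with /mj, so a prefix test would bypass the Midjourney branch for them. A small demo; the /fast segment is a hypothetical mode value:

package main

import (
    "fmt"
    "strings"
)

func main() {
    paths := []string{"/mj/submit/imagine", "/fast/mj/submit/imagine", "/v1/chat/completions"}
    for _, p := range paths {
        fmt.Printf("%-26s HasPrefix(/mj)=%-5v Contains(/mj/)=%v\n",
            p, strings.HasPrefix(p, "/mj"), strings.Contains(p, "/mj/"))
    }
    // /mj/submit/imagine         HasPrefix(/mj)=true  Contains(/mj/)=true
    // /fast/mj/submit/imagine    HasPrefix(/mj)=false Contains(/mj/)=true
    // /v1/chat/completions       HasPrefix(/mj)=false Contains(/mj/)=false
}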
@@ -52,6 +52,7 @@ func InitOptionMap() {
     common.OptionMap["SMTPPort"] = strconv.Itoa(common.SMTPPort)
     common.OptionMap["SMTPAccount"] = ""
     common.OptionMap["SMTPToken"] = ""
+    common.OptionMap["SMTPSSLEnabled"] = strconv.FormatBool(common.SMTPSSLEnabled)
     common.OptionMap["Notice"] = ""
     common.OptionMap["About"] = ""
     common.OptionMap["HomePageContent"] = ""
@@ -97,7 +98,7 @@ func InitOptionMap() {
     common.OptionMap["MjNotifyEnabled"] = strconv.FormatBool(constant.MjNotifyEnabled)
     common.OptionMap["CheckSensitiveEnabled"] = strconv.FormatBool(constant.CheckSensitiveEnabled)
     common.OptionMap["CheckSensitiveOnPromptEnabled"] = strconv.FormatBool(constant.CheckSensitiveOnPromptEnabled)
-    common.OptionMap["CheckSensitiveOnCompletionEnabled"] = strconv.FormatBool(constant.CheckSensitiveOnCompletionEnabled)
+    //common.OptionMap["CheckSensitiveOnCompletionEnabled"] = strconv.FormatBool(constant.CheckSensitiveOnCompletionEnabled)
     common.OptionMap["StopOnSensitiveEnabled"] = strconv.FormatBool(constant.StopOnSensitiveEnabled)
     common.OptionMap["SensitiveWords"] = constant.SensitiveWordsToString()
     common.OptionMap["StreamCacheQueueLength"] = strconv.Itoa(constant.StreamCacheQueueLength)
@@ -204,10 +205,12 @@ func updateOptionMap(key string, value string) (err error) {
         constant.CheckSensitiveEnabled = boolValue
     case "CheckSensitiveOnPromptEnabled":
         constant.CheckSensitiveOnPromptEnabled = boolValue
-    case "CheckSensitiveOnCompletionEnabled":
-        constant.CheckSensitiveOnCompletionEnabled = boolValue
+    //case "CheckSensitiveOnCompletionEnabled":
+    //    constant.CheckSensitiveOnCompletionEnabled = boolValue
     case "StopOnSensitiveEnabled":
         constant.StopOnSensitiveEnabled = boolValue
+    case "SMTPSSLEnabled":
+        common.SMTPSSLEnabled = boolValue
     }
 }
 switch key {
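The new SMTPSSLEnabled option follows the repo's usual round-trip: serialized as a "true"/"false" string into OptionMap at startup, then parsed back into the bool on update. A reduced sketch of the pattern (the real updateOptionMap also persists the option and handles errors):

package main

import (
    "fmt"
    "strconv"
)

var smtpSSLEnabled = false // stand-in for common.SMTPSSLEnabled
var optionMap = map[string]string{}

func initOptions() {
    // Booleans are stored as "true"/"false" strings, as in InitOptionMap.
    optionMap["SMTPSSLEnabled"] = strconv.FormatBool(smtpSSLEnabled)
}

func updateOption(key, value string) {
    boolValue := value == "true"
    switch key {
    case "SMTPSSLEnabled":
        smtpSSLEnabled = boolValue
    }
    optionMap[key] = value
}

func main() {
    initOptions()
    updateOption("SMTPSSLEnabled", "true")
    fmt.Println(smtpSSLEnabled, optionMap["SMTPSSLEnabled"]) // true true
}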
@@ -4,6 +4,7 @@ import (
     "errors"
     "fmt"
     "one-api/common"
+    "strconv"
     "strings"
     "time"
 
@@ -75,8 +76,26 @@ func GetAllUsers(startIdx int, num int) (users []*User, err error) {
     return users, err
 }
 
-func SearchUsers(keyword string) (users []*User, err error) {
-    err = DB.Omit("password").Where("id = ? or username LIKE ? or email LIKE ? or display_name LIKE ?", keyword, keyword+"%", keyword+"%", keyword+"%").Find(&users).Error
+func SearchUsers(keyword string) ([]*User, error) {
+    var users []*User
+    var err error
+
+    // Try to convert the keyword to an integer ID
+    keywordInt, err := strconv.Atoi(keyword)
+    if err == nil {
+        // If the conversion succeeds, search users by ID
+        err = DB.Unscoped().Omit("password").Where("id = ?", keywordInt).Find(&users).Error
+        if err != nil || len(users) > 0 {
+            // If a user was found by ID or an error occurred, return the result or the error
+            return users, err
+        }
+    }
+
+    // If the ID conversion failed or no user was found, fall back to fuzzy matching on the other fields
+    err = DB.Unscoped().Omit("password").
+        Where("username LIKE ? OR email LIKE ? OR display_name LIKE ?", keyword+"%", keyword+"%", keyword+"%").
+        Find(&users).Error
+
     return users, err
 }
 
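Behavior note on the new SearchUsers: a numeric keyword first attempts an exact, Unscoped ID lookup (so soft-deleted rows are included), and only falls back to prefix matching when that finds nothing. A database-free sketch of the control flow; searchPlan is an illustrative helper, not part of the patch:

package main

import (
    "fmt"
    "strconv"
)

// searchPlan mirrors the SearchUsers decision order without touching GORM.
func searchPlan(keyword string) []string {
    var plan []string
    if id, err := strconv.Atoi(keyword); err == nil {
        plan = append(plan, fmt.Sprintf("exact: id = %d (Unscoped, password omitted)", id))
    }
    plan = append(plan, fmt.Sprintf("fuzzy: username/email/display_name LIKE %q", keyword+"%"))
    return plan
}

func main() {
    fmt.Println(searchPlan("1024"))  // ID lookup first, fuzzy as fallback
    fmt.Println(searchPlan("alice")) // fuzzy only
}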
@@ -15,7 +15,7 @@ type Adaptor interface {
     SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error
     ConvertRequest(c *gin.Context, relayMode int, request *dto.GeneralOpenAIRequest) (any, error)
     DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error)
-    DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse)
+    DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode)
     GetModelList() []string
     GetChannelName() string
 }
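Every adaptor below changes to match: DoResponse now returns two values instead of three. A compile-level sketch of the trimmed shape, using local stand-ins for the dto types and omitting the gin.Context/RelayInfo parameters:

package main

import (
    "fmt"
    "net/http"
)

type Usage struct{ TotalTokens int }

type OpenAIErrorWithStatusCode struct {
    StatusCode int
    Message    string
}

// The two-value DoResponse shape from the diff, reduced for illustration.
type responder interface {
    DoResponse(resp *http.Response) (*Usage, *OpenAIErrorWithStatusCode)
}

type stubAdaptor struct{}

func (stubAdaptor) DoResponse(resp *http.Response) (*Usage, *OpenAIErrorWithStatusCode) {
    if resp.StatusCode != http.StatusOK {
        return nil, &OpenAIErrorWithStatusCode{StatusCode: resp.StatusCode, Message: "upstream error"}
    }
    return &Usage{TotalTokens: 42}, nil
}

func main() {
    var r responder = stubAdaptor{}
    usage, errResp := r.DoResponse(&http.Response{StatusCode: http.StatusOK})
    fmt.Println(usage.TotalTokens, errResp == nil) // 42 true
}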
@@ -57,7 +57,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         err, usage = aliStreamHandler(c, resp)
     } else {
@@ -69,7 +69,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         err, usage = baiduStreamHandler(c, resp)
     } else {
@@ -63,7 +63,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         err, usage = claudeStreamHandler(a.RequestMode, info.UpstreamModelName, info.PromptTokens, c, resp)
     } else {
@@ -8,7 +8,6 @@ import (
     "io"
     "net/http"
     "one-api/common"
-    "one-api/constant"
     "one-api/dto"
     "one-api/service"
     "strings"
@@ -317,7 +316,7 @@ func claudeHandler(requestMode int, c *gin.Context, resp *http.Response, promptT
         }, nil
     }
     fullTextResponse := responseClaude2OpenAI(requestMode, &claudeResponse)
-    completionTokens, err, _ := service.CountTokenText(claudeResponse.Completion, model, constant.ShouldCheckCompletionSensitive())
+    completionTokens, err, _ := service.CountTokenText(claudeResponse.Completion, model, false)
     if err != nil {
         return service.OpenAIErrorWrapper(err, "count_token_text_failed", http.StatusInternalServerError), nil
     }
@@ -47,7 +47,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         var responseText string
         err, responseText = geminiChatStreamHandler(c, resp)
@@ -7,7 +7,6 @@ import (
     "io"
     "net/http"
     "one-api/common"
-    "one-api/constant"
     "one-api/dto"
     relaycommon "one-api/relay/common"
     "one-api/service"
@@ -257,7 +256,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo
         }, nil
     }
     fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
-    completionTokens, _, _ := service.CountTokenText(geminiResponse.GetResponseText(), model, constant.ShouldCheckCompletionSensitive())
+    completionTokens, _, _ := service.CountTokenText(geminiResponse.GetResponseText(), model, false)
     usage := dto.Usage{
         PromptTokens:     promptTokens,
         CompletionTokens: completionTokens,
@@ -49,16 +49,16 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         var responseText string
         err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
         if info.RelayMode == relayconstant.RelayModeEmbeddings {
-            err, usage, sensitiveResp = ollamaEmbeddingHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
+            err, usage = ollamaEmbeddingHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
         } else {
-            err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
+            err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
         }
     }
     return
@@ -45,19 +45,19 @@ func requestOpenAI2Embeddings(request dto.GeneralOpenAIRequest) *OllamaEmbedding
     }
 }
 
-func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens int, model string, relayMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage, *dto.SensitiveResponse) {
+func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens int, model string, relayMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
     var ollamaEmbeddingResponse OllamaEmbeddingResponse
     responseBody, err := io.ReadAll(resp.Body)
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
     }
     err = resp.Body.Close()
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
     }
     err = json.Unmarshal(responseBody, &ollamaEmbeddingResponse)
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
     }
     data := make([]dto.OpenAIEmbeddingResponseItem, 0, 1)
     data = append(data, dto.OpenAIEmbeddingResponseItem{
@@ -77,7 +77,7 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
     }
     doResponseBody, err := json.Marshal(embeddingResponse)
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
     }
     resp.Body = io.NopCloser(bytes.NewBuffer(doResponseBody))
     // We shouldn't set the header before we parse the response body, because the parse part may fail.
@@ -98,11 +98,11 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
     c.Writer.WriteHeader(resp.StatusCode)
     _, err = io.Copy(c.Writer, resp.Body)
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
     }
     err = resp.Body.Close()
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
     }
-    return nil, usage, nil
+    return nil, usage
 }
@@ -69,13 +69,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         var responseText string
         err, responseText = OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
-        err, usage, sensitiveResp = OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
+        err, usage = OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
     }
     return
 }
@@ -4,14 +4,10 @@ import (
     "bufio"
     "bytes"
     "encoding/json"
-    "errors"
-    "fmt"
     "github.com/gin-gonic/gin"
     "io"
-    "log"
     "net/http"
     "one-api/common"
-    "one-api/constant"
     "one-api/dto"
     relayconstant "one-api/relay/constant"
     "one-api/service"
@@ -21,7 +17,7 @@ import (
 )
 
 func OpenaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*dto.OpenAIErrorWithStatusCode, string) {
-    checkSensitive := constant.ShouldCheckCompletionSensitive()
+    //checkSensitive := constant.ShouldCheckCompletionSensitive()
     var responseTextBuilder strings.Builder
     scanner := bufio.NewScanner(resp.Body)
     scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
@@ -53,20 +49,11 @@ func OpenaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*d
         if data[:6] != "data: " && data[:6] != "[DONE]" {
             continue
         }
-        sensitive := false
-        if checkSensitive {
-            // check sensitive
-            sensitive, _, data = service.SensitiveWordReplace(data, false)
-        }
         dataChan <- data
         data = data[6:]
         if !strings.HasPrefix(data, "[DONE]") {
             streamItems = append(streamItems, data)
         }
-        if sensitive && constant.StopOnSensitiveEnabled {
-            dataChan <- "data: [DONE]"
-            break
-        }
     }
     streamResp := "[" + strings.Join(streamItems, ",") + "]"
     switch relayMode {
@@ -142,118 +129,56 @@ func OpenaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*d
     return nil, responseTextBuilder.String()
 }
 
-func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model string, relayMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage, *dto.SensitiveResponse) {
-    var responseWithError dto.TextResponseWithError
+func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
+    var simpleResponse dto.SimpleResponse
     responseBody, err := io.ReadAll(resp.Body)
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
     }
     err = resp.Body.Close()
     if err != nil {
-        return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
     }
-    err = json.Unmarshal(responseBody, &responseWithError)
+    err = json.Unmarshal(responseBody, &simpleResponse)
     if err != nil {
-        log.Printf("unmarshal_response_body_failed: body: %s, err: %v", string(responseBody), err)
-        return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil, nil
+        return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
     }
-    if responseWithError.Error.Type != "" {
+    if simpleResponse.Error.Type != "" {
         return &dto.OpenAIErrorWithStatusCode{
-            Error:      responseWithError.Error,
+            Error:      simpleResponse.Error,
             StatusCode: resp.StatusCode,
-        }, nil, nil
+        }, nil
+    }
+    // Reset response body
+    resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
+    // We shouldn't set the header before we parse the response body, because the parse part may fail.
+    // And then we will have to send an error response, but in this case, the header has already been set.
+    // So the httpClient will be confused by the response.
+    // For example, Postman will report error, and we cannot check the response at all.
+    for k, v := range resp.Header {
+        c.Writer.Header().Set(k, v[0])
+    }
+    c.Writer.WriteHeader(resp.StatusCode)
+    _, err = io.Copy(c.Writer, resp.Body)
+    if err != nil {
+        return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
+    }
+    err = resp.Body.Close()
+    if err != nil {
+        return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
     }
 
-    checkSensitive := constant.ShouldCheckCompletionSensitive()
-    sensitiveWords := make([]string, 0)
-    triggerSensitive := false
-
-    usage := &responseWithError.Usage
-
-    //textResponse := &dto.TextResponse{
-    //    Choices: responseWithError.Choices,
-    //    Usage:   responseWithError.Usage,
-    //}
-    var doResponseBody []byte
-
-    switch relayMode {
-    case relayconstant.RelayModeEmbeddings:
-        embeddingResponse := &dto.OpenAIEmbeddingResponse{
-            Object: responseWithError.Object,
-            Data:   responseWithError.Data,
-            Model:  responseWithError.Model,
-            Usage:  *usage,
+    if simpleResponse.Usage.TotalTokens == 0 {
+        completionTokens := 0
+        for _, choice := range simpleResponse.Choices {
+            ctkm, _, _ := service.CountTokenText(string(choice.Message.Content), model, false)
+            completionTokens += ctkm
         }
-        doResponseBody, err = json.Marshal(embeddingResponse)
-    default:
-        if responseWithError.Usage.TotalTokens == 0 || checkSensitive {
-            completionTokens := 0
-            for i, choice := range responseWithError.Choices {
-                stringContent := string(choice.Message.Content)
-                ctkm, _, _ := service.CountTokenText(stringContent, model, false)
-                completionTokens += ctkm
-                if checkSensitive {
-                    sensitive, words, stringContent := service.SensitiveWordReplace(stringContent, false)
-                    if sensitive {
-                        triggerSensitive = true
-                        msg := choice.Message
-                        msg.Content = common.StringToByteSlice(stringContent)
-                        responseWithError.Choices[i].Message = msg
-                        sensitiveWords = append(sensitiveWords, words...)
-                    }
-                }
-            }
-            responseWithError.Usage = dto.Usage{
-                PromptTokens:     promptTokens,
-                CompletionTokens: completionTokens,
-                TotalTokens:      promptTokens + completionTokens,
-            }
-        }
-        textResponse := &dto.TextResponse{
-            Id:      responseWithError.Id,
-            Created: responseWithError.Created,
-            Object:  responseWithError.Object,
-            Choices: responseWithError.Choices,
-            Model:   responseWithError.Model,
-            Usage:   *usage,
-        }
-        doResponseBody, err = json.Marshal(textResponse)
-    }
-
-    if checkSensitive && triggerSensitive && constant.StopOnSensitiveEnabled {
-        sensitiveWords = common.RemoveDuplicate(sensitiveWords)
-        return service.OpenAIErrorWrapper(errors.New(fmt.Sprintf("sensitive words detected on response: %s",
-            strings.Join(sensitiveWords, ", "))), "sensitive_words_detected", http.StatusBadRequest),
-            usage, &dto.SensitiveResponse{
-                SensitiveWords: sensitiveWords,
-            }
-    } else {
-        // Reset response body
-        resp.Body = io.NopCloser(bytes.NewBuffer(doResponseBody))
-        // We shouldn't set the header before we parse the response body, because the parse part may fail.
-        // And then we will have to send an error response, but in this case, the header has already been set.
-        // So the httpClient will be confused by the response.
-        // For example, Postman will report error, and we cannot check the response at all.
-        // Copy headers
-        for k, v := range resp.Header {
-            // Remove any existing identical header first, to avoid duplicates
-            c.Writer.Header().Del(k)
-            for _, vv := range v {
-                c.Writer.Header().Add(k, vv)
-            }
-        }
-        // reset content length
-        c.Writer.Header().Del("Content-Length")
-        c.Writer.Header().Set("Content-Length", fmt.Sprintf("%d", len(doResponseBody)))
-        c.Writer.WriteHeader(resp.StatusCode)
-        _, err = io.Copy(c.Writer, resp.Body)
-        if err != nil {
-            return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil, nil
-        }
-        err = resp.Body.Close()
-        if err != nil {
-            return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
+        simpleResponse.Usage = dto.Usage{
+            PromptTokens:     promptTokens,
+            CompletionTokens: completionTokens,
+            TotalTokens:      promptTokens + completionTokens,
         }
     }
-    return nil, usage, nil
+    return nil, &simpleResponse.Usage
 }
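Net effect of the OpenaiHandler rewrite: the upstream body is relayed unchanged, and usage is recomputed only when the upstream omitted it. A sketch of that fallback with a trivial word-count stand-in for service.CountTokenText:

package main

import (
    "fmt"
    "strings"
)

type Usage struct{ PromptTokens, CompletionTokens, TotalTokens int }

// countTokenText stands in for service.CountTokenText(content, model, false);
// the real implementation uses a tokenizer, this one just counts words.
func countTokenText(s string) int { return len(strings.Fields(s)) }

// fillUsage mirrors the new fallback: trust upstream usage when present,
// otherwise rebuild it from the returned choice contents.
func fillUsage(u Usage, promptTokens int, contents []string) Usage {
    if u.TotalTokens != 0 {
        return u
    }
    completion := 0
    for _, c := range contents {
        completion += countTokenText(c)
    }
    return Usage{
        PromptTokens:     promptTokens,
        CompletionTokens: completion,
        TotalTokens:      promptTokens + completion,
    }
}

func main() {
    fmt.Println(fillUsage(Usage{}, 7, []string{"hello there", "general kenobi"})) // {7 4 11}
}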
@@ -39,7 +39,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         var responseText string
         err, responseText = palmStreamHandler(c, resp)
@@ -7,7 +7,6 @@ import (
     "io"
     "net/http"
     "one-api/common"
-    "one-api/constant"
     "one-api/dto"
     relaycommon "one-api/relay/common"
     "one-api/service"
@@ -157,7 +156,7 @@ func palmHandler(c *gin.Context, resp *http.Response, promptTokens int, model st
         }, nil
     }
     fullTextResponse := responsePaLM2OpenAI(&palmResponse)
-    completionTokens, _, _ := service.CountTokenText(palmResponse.Candidates[0].Content, model, constant.ShouldCheckCompletionSensitive())
+    completionTokens, _, _ := service.CountTokenText(palmResponse.Candidates[0].Content, model, false)
     usage := dto.Usage{
         PromptTokens:     promptTokens,
         CompletionTokens: completionTokens,
@@ -43,13 +43,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         var responseText string
         err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
-        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
+        err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
     }
     return
 }
@@ -53,7 +53,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         var responseText string
         err, responseText = tencentStreamHandler(c, resp)
@@ -43,13 +43,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return dummyResp, nil
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     splits := strings.Split(info.ApiKey, "|")
     if len(splits) != 3 {
-        return nil, service.OpenAIErrorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest), nil
+        return nil, service.OpenAIErrorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest)
     }
     if a.request == nil {
-        return nil, service.OpenAIErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest), nil
+        return nil, service.OpenAIErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
     }
     if info.IsStream {
         err, usage = xunfeiStreamHandler(c, *a.request, splits[0], splits[1], splits[2])
@@ -46,7 +46,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         err, usage = zhipuStreamHandler(c, resp)
     } else {
@@ -44,13 +44,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
     return channel.DoApiRequest(a, c, info, requestBody)
 }
 
-func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
+func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
     if info.IsStream {
         var responseText string
         err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
-        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
+        err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
     }
     return
 }
@@ -56,29 +56,29 @@ func Path2RelayMode(path string) int {
 
 func Path2RelayModeMidjourney(path string) int {
     relayMode := RelayModeUnknown
-    if strings.HasPrefix(path, "/mj/submit/action") {
+    if strings.HasSuffix(path, "/mj/submit/action") {
         // midjourney plus
         relayMode = RelayModeMidjourneyAction
-    } else if strings.HasPrefix(path, "/mj/submit/modal") {
+    } else if strings.HasSuffix(path, "/mj/submit/modal") {
         // midjourney plus
         relayMode = RelayModeMidjourneyModal
-    } else if strings.HasPrefix(path, "/mj/submit/shorten") {
+    } else if strings.HasSuffix(path, "/mj/submit/shorten") {
         // midjourney plus
         relayMode = RelayModeMidjourneyShorten
-    } else if strings.HasPrefix(path, "/mj/insight-face/swap") {
+    } else if strings.HasSuffix(path, "/mj/insight-face/swap") {
         // midjourney plus
         relayMode = RelayModeSwapFace
-    } else if strings.HasPrefix(path, "/mj/submit/imagine") {
+    } else if strings.HasSuffix(path, "/mj/submit/imagine") {
         relayMode = RelayModeMidjourneyImagine
-    } else if strings.HasPrefix(path, "/mj/submit/blend") {
+    } else if strings.HasSuffix(path, "/mj/submit/blend") {
         relayMode = RelayModeMidjourneyBlend
-    } else if strings.HasPrefix(path, "/mj/submit/describe") {
+    } else if strings.HasSuffix(path, "/mj/submit/describe") {
         relayMode = RelayModeMidjourneyDescribe
-    } else if strings.HasPrefix(path, "/mj/notify") {
+    } else if strings.HasSuffix(path, "/mj/notify") {
         relayMode = RelayModeMidjourneyNotify
-    } else if strings.HasPrefix(path, "/mj/submit/change") {
+    } else if strings.HasSuffix(path, "/mj/submit/change") {
         relayMode = RelayModeMidjourneyChange
-    } else if strings.HasPrefix(path, "/mj/submit/simple-change") {
+    } else if strings.HasSuffix(path, "/mj/submit/simple-change") {
         relayMode = RelayModeMidjourneyChange
     } else if strings.HasSuffix(path, "/fetch") {
         relayMode = RelayModeMidjourneyTaskFetch
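This is the complement of the Distribute() change earlier in the diff: with HasSuffix, a mode-prefixed path resolves to the same relay mode as the bare /mj path. A quick check (the /fast prefix is a hypothetical mode value):

package main

import (
    "fmt"
    "strings"
)

func main() {
    // HasPrefix matched only the first path; HasSuffix matches both.
    for _, p := range []string{"/mj/submit/imagine", "/fast/mj/submit/imagine"} {
        fmt.Printf("%-26s suffix match: %v\n", p, strings.HasSuffix(p, "/mj/submit/imagine"))
    }
}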
@@ -173,7 +173,7 @@ func AudioHelper(c *gin.Context, relayMode int) *dto.OpenAIErrorWithStatusCode {
     if strings.HasPrefix(audioRequest.Model, "tts-1") {
         quota = promptTokens
     } else {
-        quota, err, _ = service.CountAudioToken(audioResponse.Text, audioRequest.Model, constant.ShouldCheckCompletionSensitive())
+        quota, err, _ = service.CountAudioToken(audioResponse.Text, audioRequest.Model, false)
     }
     quota = int(float64(quota) * ratio)
     if ratio != 0 && quota <= 0 {
@@ -180,7 +180,7 @@ func RelaySwapFace(c *gin.Context) *dto.MidjourneyResponse {
             Description: "quota_not_enough",
         }
     }
-    requestURL := c.Request.URL.String()
+    requestURL := getMjRequestPath(c.Request.URL.String())
     baseURL := c.GetString("base_url")
     fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL)
     mjResp, _, err := service.DoMidjourneyHttpRequest(c, time.Second*60, fullRequestURL)
@@ -260,7 +260,7 @@ func RelayMidjourneyTaskImageSeed(c *gin.Context) *dto.MidjourneyResponse {
     c.Set("channel_id", originTask.ChannelId)
     c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
 
-    requestURL := c.Request.URL.String()
+    requestURL := getMjRequestPath(c.Request.URL.String())
     fullRequestURL := fmt.Sprintf("%s%s", channel.GetBaseURL(), requestURL)
     midjResponseWithStatus, _, err := service.DoMidjourneyHttpRequest(c, time.Second*30, fullRequestURL)
     if err != nil {
|
|||||||
}
|
}
|
||||||
|
|
||||||
//baseURL := common.ChannelBaseURLs[channelType]
|
//baseURL := common.ChannelBaseURLs[channelType]
|
||||||
requestURL := c.Request.URL.String()
|
requestURL := getMjRequestPath(c.Request.URL.String())
|
||||||
|
|
||||||
baseURL := c.GetString("base_url")
|
baseURL := c.GetString("base_url")
|
||||||
|
|
||||||
@@ -605,3 +605,15 @@ type taskChangeParams struct {
     Action string
     Index  int
 }
+
+func getMjRequestPath(path string) string {
+    requestURL := path
+    if strings.Contains(requestURL, "/mj-") {
+        urls := strings.Split(requestURL, "/mj/")
+        if len(urls) < 2 {
+            return requestURL
+        }
+        requestURL = "/mj/" + urls[1]
+    }
+    return requestURL
+}
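getMjRequestPath normalizes mode-prefixed request URLs back to a plain /mj/ path before they are appended to the channel base URL. A usage sketch with the function copied from the patch; the /mj-fast prefix is an assumed example of an upstream mode path:

package main

import (
    "fmt"
    "strings"
)

// Copied from the diff: strip a mode-prefixed lead-in such as /mj-fast/...
// down to the canonical /mj/... path.
func getMjRequestPath(path string) string {
    requestURL := path
    if strings.Contains(requestURL, "/mj-") {
        urls := strings.Split(requestURL, "/mj/")
        if len(urls) < 2 {
            return requestURL
        }
        requestURL = "/mj/" + urls[1]
    }
    return requestURL
}

func main() {
    fmt.Println(getMjRequestPath("/mj-fast/mj/submit/imagine")) // /mj/submit/imagine
    fmt.Println(getMjRequestPath("/mj/submit/imagine"))         // unchanged
}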
@@ -165,21 +165,12 @@ func TextHelper(c *gin.Context) *dto.OpenAIErrorWithStatusCode {
     return service.OpenAIErrorWrapper(fmt.Errorf("bad response status code: %d", resp.StatusCode), "bad_response_status_code", resp.StatusCode)
 }
 
-    usage, openaiErr, sensitiveResp := adaptor.DoResponse(c, resp, relayInfo)
+    usage, openaiErr := adaptor.DoResponse(c, resp, relayInfo)
     if openaiErr != nil {
-        if sensitiveResp == nil { // no sensitive-word check result
-            returnPreConsumedQuota(c, relayInfo.TokenId, userQuota, preConsumedQuota)
-            return openaiErr
-        } else {
-            // a sensitive-word result exists: keep consuming quota instead of refunding the pre-consumed amount
-            postConsumeQuota(c, relayInfo, *textRequest, usage, ratio, preConsumedQuota, userQuota, modelRatio, groupRatio, modelPrice, sensitiveResp)
-            if constant.StopOnSensitiveEnabled { // whether to return the error directly
-                return openaiErr
-            }
-            return nil
-        }
+        returnPreConsumedQuota(c, relayInfo.TokenId, userQuota, preConsumedQuota)
+        return openaiErr
     }
-    postConsumeQuota(c, relayInfo, *textRequest, usage, ratio, preConsumedQuota, userQuota, modelRatio, groupRatio, modelPrice, nil)
+    postConsumeQuota(c, relayInfo, *textRequest, usage, ratio, preConsumedQuota, userQuota, modelRatio, groupRatio, modelPrice)
     return nil
 }
 
@@ -258,7 +249,7 @@ func returnPreConsumedQuota(c *gin.Context, tokenId int, userQuota int, preConsu
 
 func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, textRequest dto.GeneralOpenAIRequest,
     usage *dto.Usage, ratio float64, preConsumedQuota int, userQuota int, modelRatio float64, groupRatio float64,
-    modelPrice float64, sensitiveResp *dto.SensitiveResponse) {
+    modelPrice float64) {
 
     useTimeSeconds := time.Now().Unix() - relayInfo.StartTime.Unix()
     promptTokens := usage.PromptTokens
@@ -293,9 +284,9 @@ func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, textRe
         logContent += fmt.Sprintf("(可能是上游超时)")
         common.LogError(ctx, fmt.Sprintf("total tokens is 0, cannot consume quota, userId %d, channelId %d, tokenId %d, model %s, pre-consumed quota %d", relayInfo.UserId, relayInfo.ChannelId, relayInfo.TokenId, textRequest.Model, preConsumedQuota))
     } else {
-        if sensitiveResp != nil {
-            logContent += fmt.Sprintf(",敏感词:%s", strings.Join(sensitiveResp.SensitiveWords, ", "))
-        }
+        //if sensitiveResp != nil {
+        //    logContent += fmt.Sprintf(",敏感词:%s", strings.Join(sensitiveResp.SensitiveWords, ", "))
+        //}
         quotaDelta := quota - preConsumedQuota
         err := model.PostConsumeTokenQuota(relayInfo.TokenId, userQuota, quotaDelta, preConsumedQuota, true)
         if err != nil {
@@ -43,7 +43,16 @@ func SetRelayRouter(router *gin.Engine) {
     relayV1Router.DELETE("/models/:model", controller.RelayNotImplemented)
     relayV1Router.POST("/moderations", controller.Relay)
 }
 
 relayMjRouter := router.Group("/mj")
+registerMjRouterGroup(relayMjRouter)
+
+relayMjModeRouter := router.Group("/:mode/mj")
+registerMjRouterGroup(relayMjModeRouter)
+//relayMjRouter.Use()
+}
+
+func registerMjRouterGroup(relayMjRouter *gin.RouterGroup) {
 relayMjRouter.GET("/image/:id", relay.RelayMidjourneyImage)
 relayMjRouter.Use(middleware.TokenAuth(), middleware.Distribute())
 {
@@ -61,5 +70,4 @@ func SetRelayRouter(router *gin.Engine) {
     relayMjRouter.POST("/task/list-by-condition", controller.RelayMidjourney)
     relayMjRouter.POST("/insight-face/swap", controller.RelayMidjourney)
 }
-//relayMjRouter.Use()
 }
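Factoring the registrations into registerMjRouterGroup lets the same handlers mount under both /mj and /:mode/mj. A reduced gin sketch of the pattern with a single demo route (not the project's full route table):

package main

import (
    "fmt"

    "github.com/gin-gonic/gin"
)

// registerDemoGroup plays the role of registerMjRouterGroup: one function,
// invoked once per prefix, so each route exists under both groups.
func registerDemoGroup(g *gin.RouterGroup) {
    g.GET("/image/:id", func(c *gin.Context) {
        c.String(200, "image %s via %s", c.Param("id"), c.FullPath())
    })
}

func main() {
    r := gin.New()
    registerDemoGroup(r.Group("/mj"))
    registerDemoGroup(r.Group("/:mode/mj"))
    for _, route := range r.Routes() {
        fmt.Println(route.Method, route.Path)
    }
    // GET /mj/image/:id
    // GET /:mode/mj/image/:id
}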
@@ -330,21 +330,21 @@ const OperationSetting = () => {
             name='CheckSensitiveOnPromptEnabled'
             onChange={handleInputChange}
           />
-          <Form.Checkbox
-            checked={inputs.CheckSensitiveOnCompletionEnabled === 'true'}
-            label='启用生成内容检查'
-            name='CheckSensitiveOnCompletionEnabled'
-            onChange={handleInputChange}
-          />
-        </Form.Group>
-        <Form.Group inline>
-          <Form.Checkbox
-            checked={inputs.StopOnSensitiveEnabled === 'true'}
-            label='在检测到屏蔽词时,立刻停止生成,否则替换屏蔽词'
-            name='StopOnSensitiveEnabled'
-            onChange={handleInputChange}
-          />
+          {/*<Form.Checkbox*/}
+          {/*  checked={inputs.CheckSensitiveOnCompletionEnabled === 'true'}*/}
+          {/*  label='启用生成内容检查'*/}
+          {/*  name='CheckSensitiveOnCompletionEnabled'*/}
+          {/*  onChange={handleInputChange}*/}
+          {/*/>*/}
         </Form.Group>
+        {/*<Form.Group inline>*/}
+        {/*  <Form.Checkbox*/}
+        {/*    checked={inputs.StopOnSensitiveEnabled === 'true'}*/}
+        {/*    label='在检测到屏蔽词时,立刻停止生成,否则替换屏蔽词'*/}
+        {/*    name='StopOnSensitiveEnabled'*/}
+        {/*    onChange={handleInputChange}*/}
+        {/*  />*/}
+        {/*</Form.Group>*/}
         {/*<Form.Group>*/}
         {/*  <Form.Input*/}
         {/*    label="流模式下缓存队列,默认不缓存,设置越大检测越准确,但是回复会有卡顿感"*/}
@@ -47,6 +47,7 @@ const SystemSetting = () => {
     RegisterEnabled: '',
     UserSelfDeletionEnabled: false,
     EmailDomainRestrictionEnabled: '',
+    SMTPSSLEnabled: '',
     EmailDomainWhitelist: [],
     // telegram login
     TelegramOAuthEnabled: '',
@@ -104,6 +105,7 @@ const SystemSetting = () => {
       case 'TelegramOAuthEnabled':
       case 'TurnstileCheckEnabled':
       case 'EmailDomainRestrictionEnabled':
+      case 'SMTPSSLEnabled':
       case 'RegisterEnabled':
       case 'UserSelfDeletionEnabled':
       case 'PaymentEnabled':
@@ -139,7 +141,7 @@ const SystemSetting = () => {
     }
     if (
       name === 'Notice' ||
-      name.startsWith('SMTP') ||
+      (name.startsWith('SMTP') && name !== 'SMTPSSLEnabled') ||
       name === 'ServerAddress' ||
       name === 'StripeApiSecret' ||
       name === 'StripeWebhookSecret' ||
@@ -652,6 +654,14 @@ const SystemSetting = () => {
             placeholder='敏感信息不会发送到前端显示'
           />
         </Form.Group>
+        <Form.Group widths={3}>
+          <Form.Checkbox
+            label='启用SMTP SSL(465端口强制开启)'
+            name='SMTPSSLEnabled'
+            onChange={handleInputChange}
+            checked={inputs.SMTPSSLEnabled === 'true'}
+          />
+        </Form.Group>
         <Form.Button onClick={submitSMTP}>保存 SMTP 设置</Form.Button>
         <Divider />
         <Header as='h3'>