From 9b64f4a34a2ccaf46ec336613a75a48d32823627 Mon Sep 17 00:00:00 2001
From: CaIon <1808837298@qq.com>
Date: Thu, 21 Mar 2024 15:04:04 +0800
Subject: [PATCH 1/8] fix: fix mj panic

---
 service/midjourney.go | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/service/midjourney.go b/service/midjourney.go
index 11ec5bd..4f43b52 100644
--- a/service/midjourney.go
+++ b/service/midjourney.go
@@ -185,7 +185,12 @@ func DoMidjourneyHttpRequest(c *gin.Context, timeout time.Duration, fullRequestU
     req = req.WithContext(ctx)
     req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
     req.Header.Set("Accept", c.Request.Header.Get("Accept"))
-    req.Header.Set("mj-api-secret", strings.Split(c.Request.Header.Get("Authorization"), " ")[1])
+    auth := c.Request.Header.Get("Authorization")
+    if auth != "" {
+        auth = strings.TrimPrefix(auth, "Bearer ")
+        auth = strings.Split(auth, "-")[0]
+        req.Header.Set("mj-api-secret", auth)
+    }
     defer cancel()
     resp, err := GetHttpClient().Do(req)
     if err != nil {
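
Note on PATCH 1/8: the removed line indexed strings.Split(..., " ")[1] unconditionally, so any request arriving without an Authorization header (or with one containing no space) panicked with an index-out-of-range -- the "mj panic" of the subject. The replacement guards the empty case and trims the "Bearer " prefix instead of splitting on a space. A minimal standalone sketch of the guarded parsing; extractMjSecret is a hypothetical name, and the accepted header shapes are inferred from the diff rather than confirmed against the rest of the codebase:

    package main

    import (
        "fmt"
        "strings"
    )

    // extractMjSecret mirrors the patched logic: tolerate a missing header,
    // strip an optional "Bearer " prefix, and keep only the part before the
    // first "-". An empty result means there is no secret to forward.
    func extractMjSecret(authorization string) string {
        if authorization == "" {
            return "" // the old code would have panicked here
        }
        secret := strings.TrimPrefix(authorization, "Bearer ")
        return strings.Split(secret, "-")[0]
    }

    func main() {
        fmt.Println(extractMjSecret(""))                 // "" -- no panic
        fmt.Println(extractMjSecret("Bearer topsecret")) // "topsecret"
        fmt.Println(extractMjSecret("Bearer abc-123"))   // "abc"
    }
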
From 3f808be254e0dd49ab6a6f59430432f43737d3e5 Mon Sep 17 00:00:00 2001
From: CaIon <1808837298@qq.com>
Date: Thu, 21 Mar 2024 16:26:26 +0800
Subject: [PATCH 2/8] fix: add missing version

---
 controller/misc.go | 1 +
 1 file changed, 1 insertion(+)

diff --git a/controller/misc.go b/controller/misc.go
index 9a51479..ac094b2 100644
--- a/controller/misc.go
+++ b/controller/misc.go
@@ -33,6 +33,7 @@ func GetStatus(c *gin.Context) {
         "success": true,
         "message": "",
         "data": gin.H{
+            "version":            common.Version,
             "start_time":         common.StartTime,
             "email_verification": common.EmailVerificationEnabled,
             "github_oauth":       common.GitHubOAuthEnabled,

From 031957714a546650535e97c24afb6bd27705858c Mon Sep 17 00:00:00 2001
From: CaIon <1808837298@qq.com>
Date: Thu, 21 Mar 2024 17:19:21 +0800
Subject: [PATCH 3/8] refactor: optimize code structure
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 relay/channel/openai/relay-openai.go | 20 +++++++++-----------
 1 file changed, 9 insertions(+), 11 deletions(-)

diff --git a/relay/channel/openai/relay-openai.go b/relay/channel/openai/relay-openai.go
index 349d5d5..3698a70 100644
--- a/relay/channel/openai/relay-openai.go
+++ b/relay/channel/openai/relay-openai.go
@@ -157,7 +157,7 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
 
     if textResponse.Usage.TotalTokens == 0 || checkSensitive {
         completionTokens := 0
-        for _, choice := range textResponse.Choices {
+        for i, choice := range textResponse.Choices {
             stringContent := string(choice.Message.Content)
             ctkm, _, _ := service.CountTokenText(stringContent, model, false)
             completionTokens += ctkm
@@ -167,7 +167,7 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
                     triggerSensitive = true
                     msg := choice.Message
                     msg.Content = common.StringToByteSlice(stringContent)
-                    choice.Message = msg
+                    textResponse.Choices[i].Message = msg
                     sensitiveWords = append(sensitiveWords, words...)
                 }
             }
@@ -179,8 +179,13 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
         }
     }
 
-    if checkSensitive && constant.StopOnSensitiveEnabled && triggerSensitive {
-
+    if checkSensitive && triggerSensitive && constant.StopOnSensitiveEnabled {
+        sensitiveWords = common.RemoveDuplicate(sensitiveWords)
+        return service.OpenAIErrorWrapper(errors.New(fmt.Sprintf("sensitive words detected on response: %s",
+            strings.Join(sensitiveWords, ", "))), "sensitive_words_detected", http.StatusBadRequest),
+            &textResponse.Usage, &dto.SensitiveResponse{
+                SensitiveWords: sensitiveWords,
+            }
     } else {
         responseBody, err = json.Marshal(textResponse)
         // Reset response body
@@ -202,12 +207,5 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
             return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
         }
     }
-
-    if checkSensitive && triggerSensitive {
-        sensitiveWords = common.RemoveDuplicate(sensitiveWords)
-        return service.OpenAIErrorWrapper(errors.New(fmt.Sprintf("sensitive words detected on response: %s", strings.Join(sensitiveWords, ", "))), "sensitive_words_detected", http.StatusBadRequest), &textResponse.Usage, &dto.SensitiveResponse{
-            SensitiveWords: sensitiveWords,
-        }
-    }
     return nil, &textResponse.Usage, nil
 }
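
Note on PATCH 3/8: the handler previously had an empty branch for the sensitive-word case and performed the actual early return at the bottom of the function, after the else branch had already streamed the body to the client. The refactor folds that trailing block into the branch that runs before any bytes are written, and reorders the condition so triggerSensitive is tested ahead of StopOnSensitiveEnabled. The ordering matters because an HTTP handler cannot take back a response it has started writing. A reduced sketch of that rule, using net/http/httptest stand-ins rather than the project's gin types:

    package main

    import (
        "fmt"
        "net/http"
        "net/http/httptest"
    )

    // relay decides error-vs-success before writing any body bytes, as the
    // refactored handler does; once the body has been written, a late error
    // return could no longer reach the client as a clean 400.
    func relay(w http.ResponseWriter, body string, sensitiveWords []string, stopOnSensitive bool) {
        if len(sensitiveWords) > 0 && stopOnSensitive {
            http.Error(w, fmt.Sprintf("sensitive words detected on response: %v", sensitiveWords), http.StatusBadRequest)
            return
        }
        w.WriteHeader(http.StatusOK)
        fmt.Fprint(w, body)
    }

    func main() {
        rec := httptest.NewRecorder()
        relay(rec, `{"ok":true}`, []string{"bad"}, true)
        fmt.Println(rec.Code) // 400 -- the flagged body is never sent
    }
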
From c4b3d3a97546f3a137719e93e571c0658f500ec3 Mon Sep 17 00:00:00 2001
From: CaIon <1808837298@qq.com>
Date: Thu, 21 Mar 2024 17:39:05 +0800
Subject: [PATCH 4/8] fix: fix embedding

---
 dto/text_response.go                 |  6 +-
 relay/channel/ollama/adaptor.go      |  2 +-
 relay/channel/openai/adaptor.go      |  2 +-
 relay/channel/openai/relay-openai.go | 83 +++++++++++++++++-----------
 relay/channel/perplexity/adaptor.go  |  2 +-
 relay/channel/zhipu_4v/adaptor.go    |  2 +-
 6 files changed, 60 insertions(+), 37 deletions(-)

diff --git a/dto/text_response.go b/dto/text_response.go
index 63a344d..4ef06dd 100644
--- a/dto/text_response.go
+++ b/dto/text_response.go
@@ -1,12 +1,16 @@
 package dto
 
 type TextResponseWithError struct {
-    Choices []OpenAITextResponseChoice `json:"choices"`
+    Choices []OpenAITextResponseChoice    `json:"choices"`
+    Object  string                        `json:"object"`
+    Data    []OpenAIEmbeddingResponseItem `json:"data"`
+    Model   string                        `json:"model"`
     Usage   `json:"usage"`
     Error   OpenAIError `json:"error"`
 }
 
 type TextResponse struct {
+    Model   string                     `json:"model"`
     Choices []OpenAITextResponseChoice `json:"choices"`
     Usage   `json:"usage"`
 }

diff --git a/relay/channel/ollama/adaptor.go b/relay/channel/ollama/adaptor.go
index 6ef2f30..69a97e3 100644
--- a/relay/channel/ollama/adaptor.go
+++ b/relay/channel/ollama/adaptor.go
@@ -45,7 +45,7 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycom
         err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
-        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
+        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
     }
     return
 }

diff --git a/relay/channel/openai/adaptor.go b/relay/channel/openai/adaptor.go
index d9c52f8..9e2845d 100644
--- a/relay/channel/openai/adaptor.go
+++ b/relay/channel/openai/adaptor.go
@@ -77,7 +77,7 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycom
         err, responseText = OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
-        err, usage, sensitiveResp = OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
+        err, usage, sensitiveResp = OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
     }
     return
 }

diff --git a/relay/channel/openai/relay-openai.go b/relay/channel/openai/relay-openai.go
index 3698a70..bbb17ef 100644
--- a/relay/channel/openai/relay-openai.go
+++ b/relay/channel/openai/relay-openai.go
@@ -124,8 +124,8 @@ func OpenaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*d
     return nil, responseTextBuilder.String()
 }
 
-func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*dto.OpenAIErrorWithStatusCode, *dto.Usage, *dto.SensitiveResponse) {
-    var textResponseWithError dto.TextResponseWithError
+func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model string, relayMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage, *dto.SensitiveResponse) {
+    var responseWithError dto.TextResponseWithError
     responseBody, err := io.ReadAll(resp.Body)
     if err != nil {
         return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil, nil
@@ -134,62 +134,81 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
     if err != nil {
         return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
     }
-    err = json.Unmarshal(responseBody, &textResponseWithError)
+    err = json.Unmarshal(responseBody, &responseWithError)
     if err != nil {
         log.Printf("unmarshal_response_body_failed: body: %s, err: %v", string(responseBody), err)
         return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil, nil
     }
-    if textResponseWithError.Error.Type != "" {
+    if responseWithError.Error.Type != "" {
         return &dto.OpenAIErrorWithStatusCode{
-            Error:      textResponseWithError.Error,
+            Error:      responseWithError.Error,
             StatusCode: resp.StatusCode,
         }, nil, nil
     }
 
-    textResponse := &dto.TextResponse{
-        Choices: textResponseWithError.Choices,
-        Usage:   textResponseWithError.Usage,
-    }
-
     checkSensitive := constant.ShouldCheckCompletionSensitive()
     sensitiveWords := make([]string, 0)
     triggerSensitive := false
 
-    if textResponse.Usage.TotalTokens == 0 || checkSensitive {
-        completionTokens := 0
-        for i, choice := range textResponse.Choices {
-            stringContent := string(choice.Message.Content)
-            ctkm, _, _ := service.CountTokenText(stringContent, model, false)
-            completionTokens += ctkm
-            if checkSensitive {
-                sensitive, words, stringContent := service.SensitiveWordReplace(stringContent, false)
-                if sensitive {
-                    triggerSensitive = true
-                    msg := choice.Message
-                    msg.Content = common.StringToByteSlice(stringContent)
-                    textResponse.Choices[i].Message = msg
-                    sensitiveWords = append(sensitiveWords, words...)
+    usage := &responseWithError.Usage
+
+    //textResponse := &dto.TextResponse{
+    //	Choices: responseWithError.Choices,
+    //	Usage:   responseWithError.Usage,
+    //}
+    var doResponseBody []byte
+
+    switch relayMode {
+    case relayconstant.RelayModeEmbeddings:
+        embeddingResponse := &dto.OpenAIEmbeddingResponse{
+            Object: responseWithError.Object,
+            Data:   responseWithError.Data,
+            Model:  responseWithError.Model,
+            Usage:  *usage,
+        }
+        doResponseBody, err = json.Marshal(embeddingResponse)
+    default:
+        if responseWithError.Usage.TotalTokens == 0 || checkSensitive {
+            completionTokens := 0
+            for i, choice := range responseWithError.Choices {
+                stringContent := string(choice.Message.Content)
+                ctkm, _, _ := service.CountTokenText(stringContent, model, false)
+                completionTokens += ctkm
+                if checkSensitive {
+                    sensitive, words, stringContent := service.SensitiveWordReplace(stringContent, false)
+                    if sensitive {
+                        triggerSensitive = true
+                        msg := choice.Message
+                        msg.Content = common.StringToByteSlice(stringContent)
+                        responseWithError.Choices[i].Message = msg
+                        sensitiveWords = append(sensitiveWords, words...)
+                    }
                 }
             }
+            responseWithError.Usage = dto.Usage{
+                PromptTokens:     promptTokens,
+                CompletionTokens: completionTokens,
+                TotalTokens:      promptTokens + completionTokens,
+            }
         }
-        textResponse.Usage = dto.Usage{
-            PromptTokens:     promptTokens,
-            CompletionTokens: completionTokens,
-            TotalTokens:      promptTokens + completionTokens,
+        textResponse := &dto.TextResponse{
+            Choices: responseWithError.Choices,
+            Model:   responseWithError.Model,
+            Usage:   *usage,
         }
+        doResponseBody, err = json.Marshal(textResponse)
     }
 
     if checkSensitive && triggerSensitive && constant.StopOnSensitiveEnabled {
         sensitiveWords = common.RemoveDuplicate(sensitiveWords)
         return service.OpenAIErrorWrapper(errors.New(fmt.Sprintf("sensitive words detected on response: %s",
             strings.Join(sensitiveWords, ", "))), "sensitive_words_detected", http.StatusBadRequest),
-            &textResponse.Usage, &dto.SensitiveResponse{
+            usage, &dto.SensitiveResponse{
                 SensitiveWords: sensitiveWords,
             }
     } else {
-        responseBody, err = json.Marshal(textResponse)
         // Reset response body
-        resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
+        resp.Body = io.NopCloser(bytes.NewBuffer(doResponseBody))
         // We shouldn't set the header before we parse the response body, because the parse part may fail.
         // And then we will have to send an error response, but in this case, the header has already been set.
         // So the httpClient will be confused by the response.
@@ -207,5 +226,5 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
             return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
         }
     }
-    return nil, &textResponse.Usage, nil
+    return nil, usage, nil
 }

diff --git a/relay/channel/perplexity/adaptor.go b/relay/channel/perplexity/adaptor.go
index 8d056ec..d04af1e 100644
--- a/relay/channel/perplexity/adaptor.go
+++ b/relay/channel/perplexity/adaptor.go
@@ -49,7 +49,7 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycom
         err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
-        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
+        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
     }
     return
 }

diff --git a/relay/channel/zhipu_4v/adaptor.go b/relay/channel/zhipu_4v/adaptor.go
index dded3c5..c7ea903 100644
--- a/relay/channel/zhipu_4v/adaptor.go
+++ b/relay/channel/zhipu_4v/adaptor.go
@@ -50,7 +50,7 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycom
         err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
         usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
     } else {
-        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
+        err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
     }
     return
 }
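
Note on PATCH 4/8: OpenaiHandler used to re-marshal every non-stream response through dto.TextResponse, which has no "data" field, so embedding responses reached the client with their vectors silently stripped. Threading relayMode through the handler lets it re-serialize embeddings via dto.OpenAIEmbeddingResponse instead. A reduced reproduction of the underlying round-trip loss, with throwaway structs standing in for the project's DTOs:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // upstreamShape carries the embedding payload; proxyShape (like the old
    // TextResponse) lacks a "data" field, so an unmarshal/marshal round trip
    // through it silently drops the vectors.
    type upstreamShape struct {
        Object string    `json:"object"`
        Data   []float64 `json:"data"`
    }

    type proxyShape struct {
        Object string `json:"object"`
    }

    func main() {
        raw := []byte(`{"object":"list","data":[0.12,0.34,0.56]}`)
        var in upstreamShape
        if err := json.Unmarshal(raw, &in); err != nil {
            panic(err)
        }
        out, _ := json.Marshal(proxyShape{Object: in.Object})
        fmt.Println(string(out)) // {"object":"list"} -- the embedding is gone
    }
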
From d9344d79cfd1259266588bcdbf9f154717e7bf14 Mon Sep 17 00:00:00 2001
From: CaIon <1808837298@qq.com>
Date: Thu, 21 Mar 2024 23:25:07 +0800
Subject: [PATCH 5/8] fix: try to fix curl: (18)

---
 relay/channel/openai/relay-openai.go | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/relay/channel/openai/relay-openai.go b/relay/channel/openai/relay-openai.go
index bbb17ef..6b31d8b 100644
--- a/relay/channel/openai/relay-openai.go
+++ b/relay/channel/openai/relay-openai.go
@@ -213,9 +213,17 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
         // And then we will have to send an error response, but in this case, the header has already been set.
         // So the httpClient will be confused by the response.
         // For example, Postman will report error, and we cannot check the response at all.
+        // Copy headers
         for k, v := range resp.Header {
-            c.Writer.Header().Set(k, v[0])
+            // delete any existing header with the same key first, to avoid adding it twice
+            c.Writer.Header().Del(k)
+            for _, vv := range v {
+                c.Writer.Header().Add(k, vv)
+            }
         }
+        // reset content length
+        c.Writer.Header().Del("Content-Length")
+        c.Writer.Header().Set("Content-Length", fmt.Sprintf("%d", len(doResponseBody)))
         c.Writer.WriteHeader(resp.StatusCode)
         _, err = io.Copy(c.Writer, resp.Body)
         if err != nil {
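
Note on PATCH 5/8: curl exits with "curl: (18) transfer closed with outstanding read data remaining" when the Content-Length a server advertises does not match the bytes it actually sends. Since PATCH 4 the handler forwards a re-marshalled body (doResponseBody) whose length can differ from the upstream payload, yet the header loop still copied the upstream Content-Length. The hunk copies every header value and then pins Content-Length to the real body size. A sketch of that invariant, with net/http/httptest standing in for gin's response writer:

    package main

    import (
        "fmt"
        "net/http"
        "net/http/httptest"
    )

    // forward re-sends an upstream response with a rewritten body: whatever
    // we actually send must be the length we advertise, or strict clients
    // abort the transfer early.
    func forward(w http.ResponseWriter, upstream http.Header, body []byte, status int) {
        for k, v := range upstream {
            w.Header().Del(k) // drop stale values, notably the upstream Content-Length
            for _, vv := range v {
                w.Header().Add(k, vv)
            }
        }
        w.Header().Set("Content-Length", fmt.Sprintf("%d", len(body)))
        w.WriteHeader(status)
        _, _ = w.Write(body)
    }

    func main() {
        rec := httptest.NewRecorder()
        upstream := http.Header{"Content-Length": {"9999"}, "Content-Type": {"application/json"}}
        forward(rec, upstream, []byte(`{"ok":true}`), http.StatusOK)
        fmt.Println(rec.Header().Get("Content-Length")) // 11, not 9999
    }
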
From 3e03c5a742cd05c143b0b80affd1b55c747d202d Mon Sep 17 00:00:00 2001
From: CaIon <1808837298@qq.com>
Date: Thu, 21 Mar 2024 23:44:39 +0800
Subject: [PATCH 6/8] fix: add missing id,object,created

---
 dto/text_response.go                 | 7 ++++++-
 relay/channel/openai/relay-openai.go | 2 ++
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/dto/text_response.go b/dto/text_response.go
index 4ef06dd..16deb0d 100644
--- a/dto/text_response.go
+++ b/dto/text_response.go
@@ -1,8 +1,10 @@
 package dto
 
 type TextResponseWithError struct {
-    Choices []OpenAITextResponseChoice    `json:"choices"`
+    Id      string                        `json:"id"`
     Object  string                        `json:"object"`
+    Created int64                         `json:"created"`
+    Choices []OpenAITextResponseChoice    `json:"choices"`
     Data    []OpenAIEmbeddingResponseItem `json:"data"`
     Model   string                        `json:"model"`
     Usage   `json:"usage"`
@@ -10,6 +12,9 @@
 }
 
 type TextResponse struct {
+    Id      string                     `json:"id"`
+    Object  string                     `json:"object"`
+    Created int64                      `json:"created"`
     Model   string                     `json:"model"`
     Choices []OpenAITextResponseChoice `json:"choices"`
     Usage   `json:"usage"`

diff --git a/relay/channel/openai/relay-openai.go b/relay/channel/openai/relay-openai.go
index 6b31d8b..17efad3 100644
--- a/relay/channel/openai/relay-openai.go
+++ b/relay/channel/openai/relay-openai.go
@@ -192,6 +192,8 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
             }
         }
         textResponse := &dto.TextResponse{
+            Id:      responseWithError.Id,
+            Object:  responseWithError.Object,
             Choices: responseWithError.Choices,
             Model:   responseWithError.Model,
             Usage:   *usage,

From ff7da08bad2f5d8dac16ec8c4951f386696549c5 Mon Sep 17 00:00:00 2001
From: CaIon <1808837298@qq.com>
Date: Thu, 21 Mar 2024 23:46:43 +0800
Subject: [PATCH 7/8] fix: add missing created

---
 relay/channel/openai/relay-openai.go | 1 +
 1 file changed, 1 insertion(+)

diff --git a/relay/channel/openai/relay-openai.go b/relay/channel/openai/relay-openai.go
index 17efad3..b8b7d8d 100644
--- a/relay/channel/openai/relay-openai.go
+++ b/relay/channel/openai/relay-openai.go
@@ -193,6 +193,7 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
         }
         textResponse := &dto.TextResponse{
             Id:      responseWithError.Id,
+            Created: responseWithError.Created,
             Object:  responseWithError.Object,
             Choices: responseWithError.Choices,
             Model:   responseWithError.Model,
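
Note on PATCH 6/8 and PATCH 7/8: both are follow-ups to the re-marshalling pattern introduced in PATCH 4 -- every field missing from the intermediate struct is silently dropped from what the client receives, which is how id, object, and created disappeared in the first place. Enumerating fields works but must be kept in sync by hand. One defensive alternative (a sketch of a different design, not what these patches do) is to round-trip through a map of json.RawMessage so unknown fields survive:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        upstream := []byte(`{"id":"chatcmpl-1","object":"chat.completion","created":1711033487,"future_field":42}`)
        var passthrough map[string]json.RawMessage
        if err := json.Unmarshal(upstream, &passthrough); err != nil {
            panic(err)
        }
        // Overwrite only what the proxy must change; everything else is untouched.
        passthrough["usage"], _ = json.Marshal(map[string]int{"total_tokens": 10})
        out, _ := json.Marshal(passthrough)
        fmt.Println(string(out)) // id, object, created, and future_field all survive
    }
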
"/log") notifyUrl, _ := url.Parse(callBackAddress + "/api/user/epay/notify") - tradeNo := strconv.FormatInt(time.Now().Unix(), 10) + tradeNo := fmt.Sprintf("%s%d", common.GetRandomString(6), time.Now().Unix()) client := GetEpayClient() if client == nil { c.JSON(200, gin.H{"message": "error", "data": "当前管理员未配置支付信息"})