From 0b4317ce282b5a761373207e54219ff5213b676a Mon Sep 17 00:00:00 2001
From: HynoR <20227709+HynoR@users.noreply.github.com>
Date: Mon, 2 Sep 2024 06:47:49 +0700
Subject: [PATCH 1/5] Update Cohere Safety Setting

---
 README.md                            | 2 +-
 common/constants.go                  | 3 +++
 relay/channel/cohere/dto.go          | 1 +
 relay/channel/cohere/relay-cohere.go | 2 ++
 4 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index b35edb7..8f43a74 100644
--- a/README.md
+++ b/README.md
@@ -66,7 +66,7 @@
 - `GET_MEDIA_TOKEN_NOT_STREAM`:是否在非流(`stream=false`)情况下统计图片token,默认为 `true`。
 - `UPDATE_TASK`:是否更新异步任务(Midjourney、Suno),默认为 `true`,关闭后将不会更新任务进度。
 - `GEMINI_MODEL_MAP`:Gemini模型指定版本(v1/v1beta),使用“模型:版本”指定,","分隔,例如:-e GEMINI_MODEL_MAP="gemini-1.5-pro-latest:v1beta,gemini-1.5-pro-001:v1beta",为空则使用默认配置
-
+- `COHERE_SAFETY_SETTING`:Cohere模型[安全设置](https://docs.cohere.com/docs/safety-modes#overview),可选值为 `NONE`, `CONTEXTUAL`,`STRICT`,默认为 `NONE`。
 ## 部署
 ### 部署要求
 - 本地数据库(默认):SQLite(Docker 部署默认使用 SQLite,必须挂载 `/data` 目录到宿主机)
diff --git a/common/constants.go b/common/constants.go
index 51144fd..3700fce 100644
--- a/common/constants.go
+++ b/common/constants.go
@@ -112,6 +112,9 @@ var RelayTimeout = GetEnvOrDefault("RELAY_TIMEOUT", 0) // unit is second
 
 var GeminiSafetySetting = GetEnvOrDefaultString("GEMINI_SAFETY_SETTING", "BLOCK_NONE")
 
+// https://docs.cohere.com/docs/safety-modes Type; NONE/CONTEXTUAL/STRICT
+var CohereSafetySetting = GetEnvOrDefaultString("COHERE_SAFETY_SETTING", "NONE")
+
 const (
 	RequestIdKey = "X-Oneapi-Request-Id"
 )
diff --git a/relay/channel/cohere/dto.go b/relay/channel/cohere/dto.go
index b2c2739..7f50a15 100644
--- a/relay/channel/cohere/dto.go
+++ b/relay/channel/cohere/dto.go
@@ -8,6 +8,7 @@ type CohereRequest struct {
 	Message    string `json:"message"`
 	Stream     bool   `json:"stream"`
 	MaxTokens  int    `json:"max_tokens"`
+	SafetyMode string `json:"safety_mode"`
 }
 
 type ChatHistory struct {
diff --git a/relay/channel/cohere/relay-cohere.go b/relay/channel/cohere/relay-cohere.go
index 6c9c238..adec316 100644
--- a/relay/channel/cohere/relay-cohere.go
+++ b/relay/channel/cohere/relay-cohere.go
@@ -22,6 +22,7 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
 		Message:    "",
 		Stream:     textRequest.Stream,
 		MaxTokens:  textRequest.GetMaxTokens(),
+		SafetyMode: common.CohereSafetySetting,
 	}
 	if cohereReq.MaxTokens == 0 {
 		cohereReq.MaxTokens = 4000
@@ -44,6 +45,7 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
 			})
 		}
 	}
+
 	return &cohereReq
 }

From 5863aa80613e0c3b29326ff2c97cc82f712235fd Mon Sep 17 00:00:00 2001
From: CalciumIon <1808837298@qq.com>
Date: Thu, 5 Sep 2024 18:34:04 +0800
Subject: [PATCH 2/5] feat: remove lobe chat link #457

---
 web/src/components/TokensTable.js | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/web/src/components/TokensTable.js b/web/src/components/TokensTable.js
index 9cece1e..64a189f 100644
--- a/web/src/components/TokensTable.js
+++ b/web/src/components/TokensTable.js
@@ -225,14 +225,14 @@ const TokensTable = () => {
             onOpenLink('next-mj', record.key);
           },
         },
-        {
-          node: 'item',
-          key: 'lobe',
-          name: 'Lobe Chat',
-          onClick: () => {
-            onOpenLink('lobe', record.key);
-          },
-        },
+        // {
+        //   node: 'item',
+        //   key: 'lobe',
+        //   name: 'Lobe Chat',
+        //   onClick: () => {
+        //     onOpenLink('lobe', record.key);
+        //   },
+        // },
         {
           node: 'item',
           key: 'ama',

From e3b3fdec48d34c1941927ac93dd9542d99217bcb Mon Sep 17 00:00:00 2001
From: CalciumIon <1808837298@qq.com>
Date: Thu, 5 Sep 2024 18:35:34 +0800
Subject: [PATCH 3/5] feat: update chatgpt-4o token encoder

---
 service/token_counter.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/service/token_counter.go b/service/token_counter.go
index 155ffa2..9a55871 100644
--- a/service/token_counter.go
+++ b/service/token_counter.go
@@ -52,7 +52,7 @@ func InitTokenEncoders() {
 }
 
 func getModelDefaultTokenEncoder(model string) *tiktoken.Tiktoken {
-	if strings.HasPrefix(model, "gpt-4o") {
+	if strings.HasPrefix(model, "gpt-4o") || strings.HasPrefix(model, "chatgpt-4o") {
 		return cl200kTokenEncoder
 	}
 	return defaultTokenEncoder

From d168a685c15c4b2a349f00591e860cc1ac6f3c15 Mon Sep 17 00:00:00 2001
From: CalciumIon <1808837298@qq.com>
Date: Wed, 11 Sep 2024 19:12:32 +0800
Subject: [PATCH 4/5] fix: cohere SafetyMode

---
 relay/channel/cohere/dto.go          | 2 +-
 relay/channel/cohere/relay-cohere.go | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/relay/channel/cohere/dto.go b/relay/channel/cohere/dto.go
index 7f50a15..e7452fd 100644
--- a/relay/channel/cohere/dto.go
+++ b/relay/channel/cohere/dto.go
@@ -8,7 +8,7 @@ type CohereRequest struct {
 	Message    string `json:"message"`
 	Stream     bool   `json:"stream"`
 	MaxTokens  int    `json:"max_tokens"`
-	SafetyMode string `json:"safety_mode"`
+	SafetyMode string `json:"safety_mode,omitempty"`
 }
 
 type ChatHistory struct {
diff --git a/relay/channel/cohere/relay-cohere.go b/relay/channel/cohere/relay-cohere.go
index adec316..132039b 100644
--- a/relay/channel/cohere/relay-cohere.go
+++ b/relay/channel/cohere/relay-cohere.go
@@ -22,7 +22,9 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
 		Message:    "",
 		Stream:     textRequest.Stream,
 		MaxTokens:  textRequest.GetMaxTokens(),
-		SafetyMode: common.CohereSafetySetting,
+	}
+	if common.CohereSafetySetting != "NONE" {
+		cohereReq.SafetyMode = common.CohereSafetySetting
 	}
 	if cohereReq.MaxTokens == 0 {
 		cohereReq.MaxTokens = 4000

From 2650ec9b5943bdd0061c40d5512dce1720e74dea Mon Sep 17 00:00:00 2001
From: CalciumIon <1808837298@qq.com>
Date: Wed, 11 Sep 2024 19:12:55 +0800
Subject: [PATCH 5/5] feat: claude response return model name

---
 dto/text_response.go                 | 1 +
 relay/channel/claude/relay-claude.go | 1 +
 2 files changed, 2 insertions(+)

diff --git a/dto/text_response.go b/dto/text_response.go
index 9b12683..5d13773 100644
--- a/dto/text_response.go
+++ b/dto/text_response.go
@@ -34,6 +34,7 @@ type OpenAITextResponseChoice struct {
 
 type OpenAITextResponse struct {
 	Id      string                     `json:"id"`
+	Model   string                     `json:"model"`
 	Object  string                     `json:"object"`
 	Created int64                      `json:"created"`
 	Choices []OpenAITextResponseChoice `json:"choices"`
diff --git a/relay/channel/claude/relay-claude.go b/relay/channel/claude/relay-claude.go
index 1923e35..874d428 100644
--- a/relay/channel/claude/relay-claude.go
+++ b/relay/channel/claude/relay-claude.go
@@ -341,6 +341,7 @@ func ResponseClaude2OpenAI(reqMode int, claudeResponse *ClaudeResponse) *dto.Ope
 	if len(tools) > 0 {
 		choice.Message.ToolCalls = tools
 	}
+	fullTextResponse.Model = claudeResponse.Model
 	choices = append(choices, choice)
 	fullTextResponse.Choices = choices
 	return &fullTextResponse
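
Taken together, PATCH 1 and PATCH 4 mean `safety_mode` is only serialized when COHERE_SAFETY_SETTING is something other than NONE: the guard in relay-cohere.go leaves SafetyMode empty by default, and the `omitempty` tag then drops the key from the request body instead of sending "safety_mode":"NONE". Below is a minimal, self-contained Go sketch of that encoding behaviour; cohereRequest is a trimmed-down stand-in for the real CohereRequest struct, and the field values are placeholder examples, not values from the repo.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down stand-in for CohereRequest (relay/channel/cohere/dto.go), kept
// only to show the omitempty behaviour; the real struct has more fields.
type cohereRequest struct {
	Message    string `json:"message"`
	Stream     bool   `json:"stream"`
	MaxTokens  int    `json:"max_tokens"`
	SafetyMode string `json:"safety_mode,omitempty"`
}

func main() {
	// COHERE_SAFETY_SETTING=NONE (the default): SafetyMode stays empty, so
	// omitempty drops the key from the payload entirely.
	none, _ := json.Marshal(cohereRequest{Message: "hi", MaxTokens: 4000})
	fmt.Println(string(none)) // {"message":"hi","stream":false,"max_tokens":4000}

	// COHERE_SAFETY_SETTING=STRICT: the key is sent as "safety_mode":"STRICT".
	strict, _ := json.Marshal(cohereRequest{Message: "hi", MaxTokens: 4000, SafetyMode: "STRICT"})
	fmt.Println(string(strict))
}

Presumably the point of the NONE guard is that requests never carry the parameter unless it was explicitly configured; the README entry added in PATCH 1 documents the same three values (NONE, CONTEXTUAL, STRICT).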
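
PATCH 3 is needed because a model id such as "chatgpt-4o-latest" does not share the "gpt-4o" prefix, so the old check fell through to the default encoder. The sketch below illustrates the updated prefix logic; usesCl200k is a hypothetical helper name used only for this example (the real code lives inline in getModelDefaultTokenEncoder), and the model names are example inputs.

package main

import (
	"fmt"
	"strings"
)

// usesCl200k mirrors the updated check in service/token_counter.go:
// "chatgpt-4o-latest" starts with "chatgpt", so it misses the "gpt-4o"
// prefix and needs its own case to be routed to cl200kTokenEncoder
// rather than the default encoder.
func usesCl200k(model string) bool {
	return strings.HasPrefix(model, "gpt-4o") || strings.HasPrefix(model, "chatgpt-4o")
}

func main() {
	for _, m := range []string{"gpt-4o-mini", "chatgpt-4o-latest", "gpt-4-turbo"} {
		fmt.Printf("%-18s -> %v\n", m, usesCl200k(m))
	}
}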