Compare commits

...

7 Commits

Author SHA1 Message Date
wozulong
8d83630d85 merge upstream
Signed-off-by: wozulong <>
2024-09-10 17:28:04 +08:00
wozulong
a60f209c85 optimized blocking issue during bulk log data deletion
Signed-off-by: wozulong <>
2024-09-10 17:24:44 +08:00
Calcium-Ion
5cab06d1ce Merge pull request #459 from HynoR/main
chore: adapt the Cohere safety parameter
2024-09-05 18:37:47 +08:00
CalciumIon
e3b3fdec48 feat: update chatgpt-4o token encoder 2024-09-05 18:35:34 +08:00
CalciumIon
5863aa8061 feat: remove lobe chat link #457 2024-09-05 18:34:04 +08:00
HynoR
416f831a6c Merge remote-tracking branch 'origin/main' 2024-09-02 06:47:58 +07:00
HynoR
0b4317ce28 Update Cohere Safety Setting 2024-09-02 06:47:49 +07:00
8 changed files with 39 additions and 13 deletions

View File

@@ -66,7 +66,7 @@
- `GET_MEDIA_TOKEN_NOT_STREAM`: whether to count image tokens when not streaming (`stream=false`), defaults to `true`
- `UPDATE_TASK`: whether to update asynchronous tasks (Midjourney, Suno), defaults to `true`; when disabled, task progress will not be updated.
- `GEMINI_MODEL_MAP`: pin Gemini models to an API version (v1/v1beta), specified as "model:version" and comma-separated, e.g. `-e GEMINI_MODEL_MAP="gemini-1.5-pro-latest:v1beta,gemini-1.5-pro-001:v1beta"`; leave empty to use the default configuration
- `COHERE_SAFETY_SETTING`: [safety setting](https://docs.cohere.com/docs/safety-modes#overview) for Cohere models; valid values are `NONE`, `CONTEXTUAL`, and `STRICT`, defaults to `NONE`
## Deployment
### Deployment Requirements
- Local database (default): SQLite (Docker deployments use SQLite by default; the `/data` directory must be mounted to the host)
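For a Docker deployment, the new `COHERE_SAFETY_SETTING` variable is passed the same way as the example above, e.g. `-e COHERE_SAFETY_SETTING=CONTEXTUAL` (the value here is only an illustration).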

View File

@@ -129,6 +129,9 @@ var RelayTimeout = GetEnvOrDefault("RELAY_TIMEOUT", 0) // unit is second
 var GeminiSafetySetting = GetEnvOrDefaultString("GEMINI_SAFETY_SETTING", "BLOCK_NONE")
+// https://docs.cohere.com/docs/safety-modes Type; NONE/CONTEXTUAL/STRICT
+var CohereSafetySetting = GetEnvOrDefaultString("COHERE_SAFETY_SETTING", "NONE")
 const (
     RequestIdKey = "X-Oneapi-Request-Id"
 )
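`GetEnvOrDefaultString` is defined elsewhere in the repository and is not part of this diff; as a rough sketch (behavior inferred from the call sites above, not the project's actual code), such a helper typically reads an environment variable and falls back to a default:

```go
package common

import "os"

// GetEnvOrDefaultString returns the value of the named environment variable,
// falling back to defaultValue when it is unset or empty.
// Sketch only; the repository's real implementation may differ.
func GetEnvOrDefaultString(name string, defaultValue string) string {
	if value := os.Getenv(name); value != "" {
		return value
	}
	return defaultValue
}
```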

View File

@@ -192,7 +192,7 @@ func DeleteHistoryLogs(c *gin.Context) {
         })
         return
     }
-    count, err := model.DeleteOldLog(targetTimestamp)
+    count, err := model.DeleteOldLog(c.Request.Context(), targetTimestamp, 100)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
             "success": false,

View File

@@ -247,7 +247,25 @@ func SumUsedToken(logType int, startTimestamp int64, endTimestamp int64, modelNa
     return token
 }

-func DeleteOldLog(targetTimestamp int64) (int64, error) {
-    result := LOG_DB.Where("created_at < ?", targetTimestamp).Delete(&Log{})
-    return result.RowsAffected, result.Error
+func DeleteOldLog(ctx context.Context, targetTimestamp int64, limit int) (int64, error) {
+    var total int64 = 0
+
+    for {
+        if nil != ctx.Err() {
+            return total, ctx.Err()
+        }
+
+        result := LOG_DB.Where("created_at < ?", targetTimestamp).Limit(limit).Delete(&Log{})
+        if nil != result.Error {
+            return total, result.Error
+        }
+
+        if 0 == result.RowsAffected {
+            break
+        }
+
+        total += result.RowsAffected
+    }
+
+    return total, nil
 }
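Together with the controller change above (which now passes `c.Request.Context()` and a batch size of 100), this replaces one unbounded `DELETE` with a loop of bounded deletes, so locks are released between batches and the cleanup stops early if the request is cancelled. A minimal standalone sketch of the same pattern using `database/sql` (table and column names are placeholders, not the project's schema; the `DELETE ... LIMIT` form is MySQL-style):

```go
package logcleanup

import (
	"context"
	"database/sql"
)

// deleteOldRowsBatched deletes rows older than cutoff in batches of at most
// limit rows, stopping early if ctx is cancelled. It returns the total number
// of rows removed.
func deleteOldRowsBatched(ctx context.Context, db *sql.DB, cutoff int64, limit int) (int64, error) {
	var total int64
	for {
		if err := ctx.Err(); err != nil {
			return total, err
		}
		// MySQL-style bounded delete; other databases may need a subquery
		// such as DELETE FROM logs WHERE id IN (SELECT id ... LIMIT ?).
		res, err := db.ExecContext(ctx,
			"DELETE FROM logs WHERE created_at < ? LIMIT ?", cutoff, limit)
		if err != nil {
			return total, err
		}
		affected, err := res.RowsAffected()
		if err != nil {
			return total, err
		}
		if affected == 0 {
			return total, nil
		}
		total += affected
	}
}
```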

View File

@@ -8,6 +8,7 @@ type CohereRequest struct {
     Message string `json:"message"`
     Stream bool `json:"stream"`
     MaxTokens int `json:"max_tokens"`
+    SafetyMode string `json:"safety_mode"`
 }

 type ChatHistory struct {
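With the new field, the configured safety mode travels in the request body as `safety_mode`. A minimal sketch of the resulting JSON, using a trimmed copy of the struct (only the fields shown in this hunk, with example values):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of CohereRequest for illustration; the real struct has more fields.
type cohereRequest struct {
	Message    string `json:"message"`
	Stream     bool   `json:"stream"`
	MaxTokens  int    `json:"max_tokens"`
	SafetyMode string `json:"safety_mode"`
}

func main() {
	body, _ := json.Marshal(cohereRequest{
		Message:    "Hello",
		MaxTokens:  4000,
		SafetyMode: "CONTEXTUAL",
	})
	fmt.Println(string(body))
	// {"message":"Hello","stream":false,"max_tokens":4000,"safety_mode":"CONTEXTUAL"}
}
```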

View File

@@ -22,6 +22,7 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
         Message: "",
         Stream: textRequest.Stream,
         MaxTokens: textRequest.GetMaxTokens(),
+        SafetyMode: common.CohereSafetySetting,
     }
     if cohereReq.MaxTokens == 0 {
         cohereReq.MaxTokens = 4000
@@ -44,6 +45,7 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
             })
         }
     }
     return &cohereReq
 }
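Note that the value of `COHERE_SAFETY_SETTING` is forwarded as-is; nothing in this change validates it. If validation were wanted, a small check (hypothetical helper, not part of the diff) could restrict it to the values the README documents:

```go
package cohere

// isValidCohereSafetyMode reports whether s is one of the documented values:
// NONE, CONTEXTUAL, or STRICT.
func isValidCohereSafetyMode(s string) bool {
	switch s {
	case "NONE", "CONTEXTUAL", "STRICT":
		return true
	default:
		return false
	}
}
```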

View File

@@ -40,6 +40,8 @@ func InitTokenEncoders() {
             tokenEncoderMap[model] = gpt35TokenEncoder
         } else if strings.HasPrefix(model, "gpt-4o") {
             tokenEncoderMap[model] = gpt4oTokenEncoder
+        } else if strings.HasPrefix(model, "chatgpt-4o") {
+            tokenEncoderMap[model] = gpt4oTokenEncoder
         } else if strings.HasPrefix(model, "gpt-4") {
             tokenEncoderMap[model] = gpt4TokenEncoder
         } else {
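The extra branch is needed because `chatgpt-4o*` model names do not start with `gpt-4o`, so the existing branch never matches them even though they use the same tokenizer. A quick illustration (the model name is an example):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// "chatgpt-4o-latest" starts with "chatgpt-4o" but not with "gpt-4o",
	// so without the new branch it would fall through to the default encoder.
	fmt.Println(strings.HasPrefix("chatgpt-4o-latest", "gpt-4o"))     // false
	fmt.Println(strings.HasPrefix("chatgpt-4o-latest", "chatgpt-4o")) // true
}
```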

View File

@@ -225,14 +225,14 @@ const TokensTable = () => {
           onOpenLink('next-mj', record.key);
         },
       },
-      {
-        node: 'item',
-        key: 'lobe',
-        name: 'Lobe Chat',
-        onClick: () => {
-          onOpenLink('lobe', record.key);
-        },
-      },
+      // {
+      //   node: 'item',
+      //   key: 'lobe',
+      //   name: 'Lobe Chat',
+      //   onClick: () => {
+      //     onOpenLink('lobe', record.key);
+      //   },
+      // },
       {
         node: 'item',
         key: 'ama',