Merge pull request #333 from Calcium-Ion/suno

Update Suno
This commit is contained in:
Calcium-Ion 2024-06-26 17:23:39 +08:00 committed by GitHub
commit f6fcb2fd5e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 1799 additions and 1507 deletions

View File

@@ -19,9 +19,6 @@ import (
 )
 func UpdateMidjourneyTaskBulk() {
-	if !common.IsMasterNode {
-		return
-	}
 	//imageModel := "midjourney"
 	ctx := context.TODO()
 	for {

View File

@@ -89,12 +89,14 @@ func main() {
 		}
 		go controller.AutomaticallyTestChannels(frequency)
 	}
+	if common.IsMasterNode {
 		common.SafeGoroutine(func() {
 			controller.UpdateMidjourneyTaskBulk()
 		})
 		common.SafeGoroutine(func() {
			controller.UpdateTaskBulk()
 		})
+	}
 	if os.Getenv("BATCH_UPDATE_ENABLED") == "true" {
 		common.BatchUpdateEnabled = true
 		common.SysLog("batch update enabled with interval " + strconv.Itoa(common.BatchUpdateInterval) + "s")

View File

@@ -187,7 +187,7 @@ func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model
 		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 	}
-	if simpleResponse.Usage.TotalTokens == 0 {
+	if simpleResponse.Usage.TotalTokens == 0 || (simpleResponse.Usage.PromptTokens == 0 && simpleResponse.Usage.CompletionTokens == 0) {
 		completionTokens := 0
 		for _, choice := range simpleResponse.Choices {
 			ctkm, _ := service.CountTokenText(string(choice.Message.Content), model)

File diff suppressed because it is too large Load Diff