🐛 fix: Tokens may not be counted in streaming responses

This commit is contained in:
MartialBE
2024-05-31 14:03:06 +08:00
parent 05adacefff
commit acf61f8b89
9 changed files with 31 additions and 9 deletions

View File

@@ -270,6 +270,9 @@ func (h *minimaxStreamHandler) convertToOpenaiStream(miniResponse *MiniMaxChatRe
if miniResponse.Usage != nil {
h.handleUsage(miniResponse)
} else {
h.Usage.CompletionTokens += common.CountTokenText(miniChoice.Messages[0].Text, h.Request.Model)
h.Usage.TotalTokens = h.Usage.PromptTokens + h.Usage.CompletionTokens
}
}