feat: Add token counting functionality to vision-related functions

- Add function `CountVisionImageToken` to count vision image tokens
- Modify function `imageSize` to handle different image types
- Add function `countVisonTokenMessages` to count tokens in vision messages
- Add logic to count tokens for different types of vision messages in `countVisonTokenMessages`
- Add tokens for role and name in `countVisonTokenMessages`
- Update total token count calculation in `countVisonTokenMessages` to include image tokens and message tokens
- Add constant values for tokens per message and tokens per name in `countVisonTokenMessages`
- Improve the unmarshal error message to include the JSON string that failed to unmarshal
This commit is contained in:
Laisky.Cai
2023-11-17 04:00:49 +00:00
parent d0c0b9b650
commit 08ca72184a
3 changed files with 115 additions and 8 deletions

View File

@@ -202,12 +202,20 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
var completionTokens int
switch relayMode {
case RelayModeChatCompletions:
messages, err := textRequest.TextMessages()
if err != nil {
return errorWrapper(err, "parse_text_messages_failed", http.StatusBadRequest)
// first try to parse as text messages
if messages, err := textRequest.TextMessages(); err != nil {
// then try to parse as vision messages
if messages, err := textRequest.VisionMessages(); err != nil {
return errorWrapper(err, "parse_text_messages_failed", http.StatusBadRequest)
} else {
// vision message
if promptTokens, err = countVisonTokenMessages(messages, textRequest.Model); err != nil {
return errorWrapper(err, "count_token_messages_failed", http.StatusInternalServerError)
}
}
} else {
promptTokens = countTokenMessages(messages, textRequest.Model)
}
promptTokens = countTokenMessages(messages, textRequest.Model)
case RelayModeCompletions:
promptTokens = countTokenInput(textRequest.Prompt, textRequest.Model)
case RelayModeModerations: