feat: support gpt-4o-mini image tokens

CalciumIon 2024-07-19 12:59:37 +08:00
parent 733b374596
commit e2cf6b1e14


@@ -82,11 +82,12 @@ func getTokenNum(tokenEncoder *tiktoken.Tiktoken, text string) int {
 }
 func getImageToken(imageUrl *dto.MessageImageUrl, model string, stream bool) (int, error) {
+	baseTokens := 85
 	if model == "glm-4v" {
 		return 1047, nil
 	}
 	if imageUrl.Detail == "low" {
-		return 85, nil
+		return baseTokens, nil
 	}
 	// TODO: do not count image tokens in non-stream mode
 	if !constant.GetMediaTokenNotStream && !stream {
@@ -100,6 +101,12 @@ func getImageToken(imageUrl *dto.MessageImageUrl, model string, stream bool) (int, error) {
 	if imageUrl.Detail == "auto" || imageUrl.Detail == "" {
 		imageUrl.Detail = "high"
 	}
+	tileTokens := 170
+	if strings.HasPrefix(model, "gpt-4o-mini") {
+		tileTokens = 5667
+		baseTokens = 2833
+	}
 	var config image.Config
 	var err error
 	var format string
@@ -146,7 +153,7 @@ func getImageToken(imageUrl *dto.MessageImageUrl, model string, stream bool) (int, error) {
 	// calculate the image token count (each side length divided by 512, rounded up)
 	tiles := (shortSide + 511) / 512 * ((otherSide + 511) / 512)
 	log.Printf("tiles: %d", tiles)
-	return tiles*170 + 85, nil
+	return tiles*tileTokens + baseTokens, nil
 }
 func CountTokenChatRequest(request dto.GeneralOpenAIRequest, model string) (int, error) {
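
For reference, a minimal standalone sketch of the arithmetic this patch ends up with (the imageTokens helper and the exact-name model check below are simplifications for illustration, not part of the commit; the detail/resize handling of the real function is omitted): a high-detail image costs baseTokens once plus tileTokens per 512x512 tile, and the gpt-4o-mini constants are roughly 33.3x the gpt-4o ones (5667/2833 versus 170/85).

package main

import "fmt"

// imageTokens mirrors the tile arithmetic of getImageToken: every image pays
// a fixed baseTokens overhead plus tileTokens for each 512x512 tile it covers.
// The resize/short-side normalization done by the full function is omitted.
func imageTokens(width, height int, model string) int {
	baseTokens, tileTokens := 85, 170
	if model == "gpt-4o-mini" { // the patch matches by prefix via strings.HasPrefix
		baseTokens, tileTokens = 2833, 5667
	}
	tiles := ((width + 511) / 512) * ((height + 511) / 512)
	return tiles*tileTokens + baseTokens
}

func main() {
	// A 1024x1024 high-detail image covers 2x2 = 4 tiles.
	fmt.Println(imageTokens(1024, 1024, "gpt-4o"))      // 4*170 + 85 = 765
	fmt.Println(imageTokens(1024, 1024, "gpt-4o-mini")) // 4*5667 + 2833 = 25501
}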