fix: o1 does not support system prompt and max_tokens

Laisky.Cai 2025-01-05 02:42:37 +00:00
parent 3915ce9814
commit d7e1b2a231
2 changed files with 16 additions and 1 deletion

@@ -82,6 +82,21 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
         }
         request.StreamOptions.IncludeUsage = true
     }
+    // o1/o1-mini/o1-preview do not support system prompt and max_tokens
+    if strings.HasPrefix(request.Model, "o1") {
+        request.MaxTokens = 0
+        request.Messages = func(raw []model.Message) (filtered []model.Message) {
+            for i := range raw {
+                if raw[i].Role != "system" {
+                    filtered = append(filtered, raw[i])
+                }
+            }
+            return
+        }(request.Messages)
+    }
     return request, nil
 }
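
For reference, a minimal standalone sketch (not part of this commit) of what the added branch does: any model whose name starts with "o1" gets its system messages stripped and MaxTokens reset to 0 before the request is forwarded. The Message and GeneralOpenAIRequest structs below are simplified stand-ins for the repository's model package types.

package main

import (
    "fmt"
    "strings"
)

// Simplified stand-ins for model.Message and model.GeneralOpenAIRequest
// (assumption, for illustration only).
type Message struct {
    Role    string
    Content string
}

type GeneralOpenAIRequest struct {
    Model     string
    MaxTokens int
    Messages  []Message
}

func main() {
    req := GeneralOpenAIRequest{
        Model:     "o1-mini",
        MaxTokens: 1024,
        Messages: []Message{
            {Role: "system", Content: "You are a helpful assistant."},
            {Role: "user", Content: "Hello"},
        },
    }

    // Same logic as the added branch: clear max_tokens and drop
    // system-role messages for o1* models.
    if strings.HasPrefix(req.Model, "o1") {
        req.MaxTokens = 0
        var filtered []Message
        for _, m := range req.Messages {
            if m.Role != "system" {
                filtered = append(filtered, m)
            }
        }
        req.Messages = filtered
    }

    fmt.Printf("%+v\n", req)
    // Prints: {Model:o1-mini MaxTokens:0 Messages:[{Role:user Content:Hello}]}
}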

@@ -466,7 +466,7 @@ func GetCompletionRatio(name string, channelType int) float64 {
         }
         return 2
     }
-    // including o1, o1-preview, o1-mini
+    // including o1/o1-preview/o1-mini
     if strings.HasPrefix(name, "o1") {
         return 4
     }
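
To illustrate the branch this comment documents (a standalone sketch with assumed names, not code from this commit): every model name carrying the "o1" prefix resolves to the same completion ratio of 4.

package main

import (
    "fmt"
    "strings"
)

// completionRatio mirrors only the o1 branch of GetCompletionRatio shown
// above; the fallback value is a placeholder, not the repository's real default.
func completionRatio(name string) float64 {
    if strings.HasPrefix(name, "o1") {
        return 4
    }
    return 1
}

func main() {
    for _, name := range []string{"o1", "o1-preview", "o1-mini"} {
        fmt.Println(name, completionRatio(name))
    }
    // Prints 4 for each name, since all share the "o1" prefix.
}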