fix: update OpenAI adaptor to set default temperature and clarify model limitations

This commit is contained in:
Laisky.Cai 2025-01-24 13:41:00 +00:00
parent bd9372d759
commit 1ea322f371

View File

@ -85,9 +85,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
		request.StreamOptions.IncludeUsage = true
	}
	// o1/o1-mini/o1-preview do not support system prompt/max_tokens/temperature
	if strings.HasPrefix(request.Model, "o1") {
		temperature := float64(1)
		request.Temperature = &temperature // Only the default (1) value is supported
		request.MaxTokens = 0
		request.Messages = func(raw []model.Message) (filtered []model.Message) {
			for i := range raw {
				if raw[i].Role != "system" {