mirror of
https://github.com/songquanpeng/one-api.git
synced 2025-09-17 01:06:37 +08:00
Compare commits
3 Commits
12be617907
...
a9210690a6
Author | SHA1 | Date | |
---|---|---|---|
|
a9210690a6 | ||
|
cb4ee5e86a | ||
|
94f3223fd7 |
@ -65,13 +65,19 @@ type ChatTools struct {
|
||||
FunctionDeclarations any `json:"function_declarations,omitempty"`
|
||||
}
|
||||
|
||||
type ChatGenerationConfig struct {
|
||||
ResponseMimeType string `json:"responseMimeType,omitempty"`
|
||||
ResponseSchema any `json:"responseSchema,omitempty"`
|
||||
Temperature *float64 `json:"temperature,omitempty"`
|
||||
TopP *float64 `json:"topP,omitempty"`
|
||||
TopK float64 `json:"topK,omitempty"`
|
||||
MaxOutputTokens int `json:"maxOutputTokens,omitempty"`
|
||||
CandidateCount int `json:"candidateCount,omitempty"`
|
||||
StopSequences []string `json:"stopSequences,omitempty"`
|
||||
// ThinkingConfig is the Gemini "thinkingConfig" request object controlling
// the model's reasoning ("thinking") behavior.
type ThinkingConfig struct {
	// IncludeThoughts controls whether the model's reasoning is included in
	// the response. Deliberately no omitempty: false is sent explicitly.
	IncludeThoughts bool `json:"includeThoughts"`
	// ThinkingBudget is the token budget for reasoning. Deliberately no
	// omitempty: 0 is sent explicitly — presumably to disable thinking;
	// confirm against the Gemini API documentation.
	ThinkingBudget int `json:"thinkingBudget"`
}
|
||||
|
||||
// ChatGenerationConfig mirrors the Gemini "generationConfig" request object
// (camelCase JSON keys; optional fields are omitted when zero-valued).
type ChatGenerationConfig struct {
	ResponseMimeType string `json:"responseMimeType,omitempty"`
	ResponseSchema   any    `json:"responseSchema,omitempty"`
	// Temperature and TopP are pointers so "unset" can be distinguished
	// from an explicit 0 value.
	Temperature     *float64 `json:"temperature,omitempty"`
	TopP            *float64 `json:"topP,omitempty"`
	TopK            float64  `json:"topK,omitempty"`
	MaxOutputTokens int      `json:"maxOutputTokens,omitempty"`
	CandidateCount  int      `json:"candidateCount,omitempty"`
	StopSequences   []string `json:"stopSequences,omitempty"`
	// ThinkingConfig, when non-nil, carries reasoning options parsed from
	// the request (see ThinkingConfig).
	ThinkingConfig *ThinkingConfig `json:"thinkingConfig,omitempty"`
}
|
||||
|
@ -2,6 +2,9 @@ package vertexai
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/pkg/errors"
|
||||
@ -27,13 +30,47 @@ var ModelList = []string{
|
||||
// Adaptor implements the relay adaptor for Vertex AI's Gemini-compatible
// chat endpoint. It is stateless; all per-request data flows through the
// method arguments and the gin context.
type Adaptor struct {
}
|
||||
|
||||
func (a *Adaptor) parseGeminiChatGenerationThinking(model string) (string, *gemini.ThinkingConfig) {
|
||||
thinkingConfig := &gemini.ThinkingConfig{
|
||||
IncludeThoughts: false,
|
||||
ThinkingBudget: 0,
|
||||
}
|
||||
modelName := model
|
||||
if strings.Contains(model, "?") {
|
||||
parts := strings.Split(model, "?")
|
||||
_modelName := parts[0]
|
||||
if len(parts) >= 2 {
|
||||
modelOptions, err := url.ParseQuery(parts[1])
|
||||
if err == nil && modelOptions != nil {
|
||||
modelName = _modelName
|
||||
hasThinkingFlag := modelOptions.Has("thinking")
|
||||
if hasThinkingFlag {
|
||||
thinkingConfig.IncludeThoughts = modelOptions.Get("thinking") == "1"
|
||||
}
|
||||
thinkingBudget := modelOptions.Get("thinking_budget")
|
||||
if thinkingBudget != "" {
|
||||
thinkingBudgetInt, err := strconv.Atoi(thinkingBudget)
|
||||
if err == nil {
|
||||
thinkingConfig.ThinkingBudget = thinkingBudgetInt
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return modelName, thinkingConfig
|
||||
}
|
||||
|
||||
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
|
||||
if request == nil {
|
||||
return nil, errors.New("request is nil")
|
||||
}
|
||||
|
||||
modelName, thinkingConfig := a.parseGeminiChatGenerationThinking(request.Model)
|
||||
request.Model = modelName
|
||||
geminiRequest := gemini.ConvertRequest(*request)
|
||||
c.Set(ctxkey.RequestModel, request.Model)
|
||||
if thinkingConfig != nil {
|
||||
geminiRequest.GenerationConfig.ThinkingConfig = thinkingConfig
|
||||
}
|
||||
c.Set(ctxkey.RequestModel, modelName)
|
||||
c.Set(ctxkey.ConvertedRequest, geminiRequest)
|
||||
return geminiRequest, nil
|
||||
}
|
||||
|
Loading…
Reference in New Issue
Block a user