feat: add support for gemini-2.0-flash-thinking-exp model

Jinjun Liu 2024-12-21 19:46:35 -06:00
parent 42dfcd6167
commit c4afcc8337
5 changed files with 17 additions and 4 deletions

View File

@@ -25,7 +25,8 @@ func (a *Adaptor) Init(meta *meta.Meta) {
 func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 	defaultVersion := config.GeminiVersion
-	if meta.ActualModelName == "gemini-2.0-flash-exp" {
+	// gemini-2.0-flash-exp and gemini-2.0-flash-thinking-exp use v1beta
+	if meta.ActualModelName == "gemini-2.0-flash-exp" || meta.ActualModelName == "gemini-2.0-flash-thinking-exp" {
 		defaultVersion = "v1beta"
 	}
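
Both experimental 2.0 models are only served under the v1beta Gemini API version, which is why the adaptor overrides the configured default for them. Below is a minimal standalone sketch (not the project's actual GetRequestURL code) of how the selected version ends up in the endpoint path, assuming the public generativelanguage.googleapis.com URL scheme:

// Minimal sketch, not the adaptor's real code: the selected API version is
// spliced into the Gemini endpoint path, so the experimental models are
// requested against /v1beta instead of the configured default version.
package main

import "fmt"

func requestURL(version, model string) string {
	return fmt.Sprintf("https://generativelanguage.googleapis.com/%s/models/%s:generateContent", version, model)
}

func main() {
	fmt.Println(requestURL("v1beta", "gemini-2.0-flash-thinking-exp"))
	// https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash-thinking-exp:generateContent
}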

View File

@@ -6,5 +6,5 @@ var ModelList = []string{
 	"gemini-pro", "gemini-1.0-pro",
 	"gemini-1.5-flash", "gemini-1.5-pro",
 	"text-embedding-004", "aqa",
-	"gemini-2.0-flash-exp",
+	"gemini-2.0-flash-exp", "gemini-2.0-flash-thinking-exp",
 }

View File

@@ -54,6 +54,10 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
 				Category:  "HARM_CATEGORY_DANGEROUS_CONTENT",
 				Threshold: config.GeminiSafetySetting,
 			},
+			{
+				Category:  "HARM_CATEGORY_CIVIC_INTEGRITY",
+				Threshold: config.GeminiSafetySetting,
+			},
 		},
 		GenerationConfig: ChatGenerationConfig{
 			Temperature: textRequest.Temperature,
@@ -246,7 +250,14 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
 			if candidate.Content.Parts[0].FunctionCall != nil {
 				choice.Message.ToolCalls = getToolCalls(&candidate)
 			} else {
-				choice.Message.Content = candidate.Content.Parts[0].Text
+				var builder strings.Builder
+				for i, part := range candidate.Content.Parts {
+					if i > 0 {
+						builder.WriteString("\n")
+					}
+					builder.WriteString(part.Text)
+				}
+				choice.Message.Content = builder.String()
 			}
 		} else {
 			choice.Message.Content = ""

View File

@@ -18,7 +18,7 @@ var ModelList = []string{
 	"gemini-pro", "gemini-pro-vision",
 	"gemini-1.5-pro-001", "gemini-1.5-flash-001",
 	"gemini-1.5-pro-002", "gemini-1.5-flash-002",
-	"gemini-2.0-flash-exp",
+	"gemini-2.0-flash-exp", "gemini-2.0-flash-thinking-exp",
 }
 
 type Adaptor struct {

View File

@@ -117,6 +117,7 @@ var ModelRatio = map[string]float64{
 	"gemini-1.5-flash": 1,
 	"gemini-1.5-flash-001": 1,
 	"gemini-2.0-flash-exp": 1,
+	"gemini-2.0-flash-thinking-exp": 1,
 	"aqa": 1,
 	// https://open.bigmodel.cn/pricing
 	"glm-4": 0.1 * RMB,