Mirror of https://github.com/songquanpeng/one-api.git, synced 2025-11-10 18:43:41 +08:00
Merge branch 'feature/gemini-2.0-flash'
@@ -21,7 +21,6 @@ type Adaptor struct {
 }
 
 func (a *Adaptor) Init(meta *meta.Meta) {
-
 }
 
 func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
@@ -8,8 +8,34 @@ var ModelList = []string{
     "gemini-1.5-flash", "gemini-1.5-flash-8b",
     "gemini-1.5-pro", "gemini-1.5-pro-experimental",
     "text-embedding-004", "aqa",
-    "gemini-2.0-pro-exp-02-05",
     "gemini-2.0-flash", "gemini-2.0-flash-exp",
     "gemini-2.0-flash-lite-preview-02-05",
     "gemini-2.0-flash-thinking-exp-01-21",
+    "gemini-2.0-pro-exp-02-05",
+}
+
+// ModelsSupportSystemInstruction is the list of models that support system instruction.
+//
+// https://cloud.google.com/vertex-ai/generative-ai/docs/learn/prompts/system-instructions
+var ModelsSupportSystemInstruction = []string{
+    // "gemini-1.0-pro-002",
+    // "gemini-1.5-flash", "gemini-1.5-flash-001", "gemini-1.5-flash-002",
+    // "gemini-1.5-flash-8b",
+    // "gemini-1.5-pro", "gemini-1.5-pro-001", "gemini-1.5-pro-002",
+    // "gemini-1.5-pro-experimental",
+    "gemini-2.0-flash", "gemini-2.0-flash-exp",
+    "gemini-2.0-flash-thinking-exp-01-21",
+}
+
+// IsModelSupportSystemInstruction check if the model support system instruction.
+//
+// Because the main version of Go is 1.20, slice.Contains cannot be used
+func IsModelSupportSystemInstruction(model string) bool {
+    for _, m := range ModelsSupportSystemInstruction {
+        if m == model {
+            return true
+        }
+    }
+
+    return false
 }
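The loop-based helper added here exists only because, as its comment notes, the module still targets Go 1.20. On Go 1.21 or newer the same check could use the standard library's slices.Contains; a minimal sketch, assuming the ModelsSupportSystemInstruction slice from this hunk:

package gemini

import "slices" // in the standard library since Go 1.21

// IsModelSupportSystemInstruction reports whether the model accepts a
// system_instruction block, replacing the hand-written scan above.
func IsModelSupportSystemInstruction(model string) bool {
    return slices.Contains(ModelsSupportSystemInstruction, model)
}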
@@ -9,7 +9,6 @@ import (
     "strings"
 
     "github.com/gin-gonic/gin"
-
     "github.com/songquanpeng/one-api/common"
     "github.com/songquanpeng/one-api/common/config"
     "github.com/songquanpeng/one-api/common/helper"
@@ -33,12 +32,6 @@ var mimeTypeMap = map[string]string{
     "text": "text/plain",
 }
 
-var toolChoiceTypeMap = map[string]string{
-    "none":     "NONE",
-    "auto":     "AUTO",
-    "required": "ANY",
-}
-
 // Setting safety to the lowest possible values since Gemini is already powerless enough
 func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
     geminiRequest := ChatRequest{
@@ -97,24 +90,7 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
             },
         }
     }
-    if textRequest.ToolChoice != nil {
-        geminiRequest.ToolConfig = &ToolConfig{
-            FunctionCallingConfig: FunctionCallingConfig{
-                Mode: "auto",
-            },
-        }
-        switch mode := textRequest.ToolChoice.(type) {
-        case string:
-            geminiRequest.ToolConfig.FunctionCallingConfig.Mode = toolChoiceTypeMap[mode]
-        case map[string]interface{}:
-            geminiRequest.ToolConfig.FunctionCallingConfig.Mode = "ANY"
-            if fn, ok := mode["function"].(map[string]interface{}); ok {
-                if name, ok := fn["name"].(string); ok {
-                    geminiRequest.ToolConfig.FunctionCallingConfig.AllowedFunctionNames = []string{name}
-                }
-            }
-        }
-    }
+    shouldAddDummyModelMessage := false
     for _, message := range textRequest.Messages {
         content := ChatContent{
             Role: message.Role,
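The two removals above drop the translation of OpenAI's tool_choice into Gemini's function-calling mode: the strings "none", "auto" and "required" mapped to NONE, AUTO and ANY, and an object naming a specific function forced ANY with a one-element allow-list. A standalone sketch of that mapping, using a hypothetical toolChoiceToMode helper that is not part of the repository:

package main

import "fmt"

// toolChoiceToMode mirrors the removed switch: plain strings go through the
// none/auto/required table (unknown strings become "", as in the removed code),
// and an object selecting one function forces mode ANY plus an allow-list
// containing that function's name.
func toolChoiceToMode(toolChoice interface{}) (string, []string) {
    mode := "AUTO" // the removed code seeded the config with mode "auto"
    var allowed []string
    switch v := toolChoice.(type) {
    case string:
        table := map[string]string{"none": "NONE", "auto": "AUTO", "required": "ANY"}
        mode = table[v]
    case map[string]interface{}:
        mode = "ANY"
        if fn, ok := v["function"].(map[string]interface{}); ok {
            if name, ok := fn["name"].(string); ok {
                allowed = []string{name}
            }
        }
    }
    return mode, allowed
}

func main() {
    mode, allowed := toolChoiceToMode(map[string]interface{}{
        "function": map[string]interface{}{"name": "get_weather"},
    })
    fmt.Println(mode, allowed) // ANY [get_weather]
}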
@@ -152,21 +128,32 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
         if content.Role == "assistant" {
             content.Role = "model"
         }
-        // Converting system prompt to SystemInstructions
+        // Converting system prompt to prompt from user for the same reason
         if content.Role == "system" {
-            geminiRequest.SystemInstruction = &content
-            continue
+            shouldAddDummyModelMessage = true
+            if IsModelSupportSystemInstruction(textRequest.Model) {
+                geminiRequest.SystemInstruction = &content
+                geminiRequest.SystemInstruction.Role = ""
+                continue
+            } else {
+                content.Role = "user"
+            }
         }
-        geminiRequest.Contents = append(geminiRequest.Contents, content)
-    }
+        geminiRequest.Contents = append(geminiRequest.Contents, content)
 
-    // As of 2025-02-06, the newly released gemini 2.0 models do not support system_instruction,
-    // which can reasonably be considered a bug. Google may fix this issue in the future.
-    if geminiRequest.SystemInstruction != nil &&
-        strings.Contains(textRequest.Model, "-2.0") &&
-        textRequest.Model != "gemini-2.0-flash-exp" &&
-        textRequest.Model != "gemini-2.0-flash-thinking-exp-01-21" {
-        geminiRequest.SystemInstruction = nil
-    }
+        // If a system message is the last message, we need to add a dummy model message to make gemini happy
+        if shouldAddDummyModelMessage {
+            geminiRequest.Contents = append(geminiRequest.Contents, ChatContent{
+                Role: "model",
+                Parts: []Part{
+                    {
+                        Text: "Okay",
+                    },
+                },
+            })
+            shouldAddDummyModelMessage = false
+        }
     }
 
     return &geminiRequest
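After this hunk, both strategies coexist: models listed in ModelsSupportSystemInstruction get a real system_instruction with the role cleared, while every other model falls back to downgrading the system prompt to a user turn and appending the dummy "Okay" model turn so the conversation keeps alternating. A sketch of that fallback shape, using the ChatContent and Part types from this package and illustrative message text:

package gemini

// fallbackContents shows the contents array produced for a model without
// system_instruction support when the incoming messages are a system prompt
// ("Be terse.") followed by a user message ("Hi"): the system prompt is sent
// as a user turn, then the dummy "Okay" model turn, then the real user turn.
var fallbackContents = []ChatContent{
    {Role: "user", Parts: []Part{{Text: "Be terse."}}},
    {Role: "model", Parts: []Part{{Text: "Okay"}}},
    {Role: "user", Parts: []Part{{Text: "Hi"}}},
}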
@@ -204,16 +191,10 @@ func (g *ChatResponse) GetResponseText() string {
     if g == nil {
         return ""
     }
-    var builder strings.Builder
-    for _, candidate := range g.Candidates {
-        for idx, part := range candidate.Content.Parts {
-            if idx > 0 {
-                builder.WriteString("\n")
-            }
-            builder.WriteString(part.Text)
-        }
+    if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
+        return g.Candidates[0].Content.Parts[0].Text
     }
-    return builder.String()
+    return ""
 }
 
 type ChatCandidate struct {
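Note the behavioural narrowing here: the removed implementation concatenated every part of every candidate with newlines, while the version kept by the merge returns only the first part of the first candidate. If a middle ground were wanted, a sketch that keeps all parts of the first candidate (not what the repository does) could look like:

package gemini

import "strings"

// firstCandidateText joins every part of the first candidate with newlines,
// unlike the merged GetResponseText, which returns only Parts[0].
func firstCandidateText(g *ChatResponse) string {
    if g == nil || len(g.Candidates) == 0 {
        return ""
    }
    var sb strings.Builder
    for i, part := range g.Candidates[0].Content.Parts {
        if i > 0 {
            sb.WriteString("\n")
        }
        sb.WriteString(part.Text)
    }
    return sb.String()
}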
@@ -276,8 +257,8 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
             choice.Message.ToolCalls = getToolCalls(&candidate)
         } else {
             var builder strings.Builder
-            for idx, part := range candidate.Content.Parts {
-                if idx > 0 {
+            for _, part := range candidate.Content.Parts {
+                if i > 0 {
                     builder.WriteString("\n")
                 }
                 builder.WriteString(part.Text)
@@ -2,11 +2,10 @@ package gemini
 
 type ChatRequest struct {
     Contents          []ChatContent        `json:"contents"`
-    SystemInstruction *ChatContent         `json:"system_instruction,omitempty"`
     SafetySettings    []ChatSafetySettings `json:"safety_settings,omitempty"`
     GenerationConfig  ChatGenerationConfig `json:"generation_config,omitempty"`
     Tools             []ChatTools          `json:"tools,omitempty"`
-    ToolConfig        *ToolConfig          `json:"tool_config,omitempty"`
+    SystemInstruction *ChatContent         `json:"system_instruction,omitempty"`
 }
 
 type EmbeddingRequest struct {
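With SystemInstruction now the last field of ChatRequest and ToolConfig gone, a request that carries a system prompt serializes under the "system_instruction" key shown in the struct tag. A minimal sketch inside this package, with illustrative values:

package gemini

// exampleRequest marshals to JSON containing "contents" and
// "system_instruction"; the nil slice fields (safety_settings, tools) are
// dropped by their omitempty tags.
var exampleRequest = ChatRequest{
    Contents: []ChatContent{
        {Role: "user", Parts: []Part{{Text: "Hi"}}},
    },
    SystemInstruction: &ChatContent{Parts: []Part{{Text: "Be terse."}}},
}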