mirror of
https://github.com/songquanpeng/one-api.git
synced 2025-11-17 13:43:42 +08:00
🐛 fix: Gemini only returns a single tool_call #197
This commit is contained in:
@@ -112,10 +112,13 @@ func convertFromChatOpenai(request *types.ChatCompletionRequest) (*GeminiChatReq
|
||||
MaxOutputTokens: request.MaxTokens,
|
||||
},
|
||||
}
|
||||
if request.Tools != nil {
|
||||
|
||||
functions := request.GetFunctions()
|
||||
|
||||
if functions != nil {
|
||||
var geminiChatTools GeminiChatTools
|
||||
for _, tool := range request.Tools {
|
||||
geminiChatTools.FunctionDeclarations = append(geminiChatTools.FunctionDeclarations, tool.Function)
|
||||
for _, function := range functions {
|
||||
geminiChatTools.FunctionDeclarations = append(geminiChatTools.FunctionDeclarations, *function)
|
||||
}
|
||||
geminiRequest.Tools = append(geminiRequest.Tools, geminiChatTools)
|
||||
}
|
||||
@@ -147,30 +150,8 @@ func (p *GeminiProvider) convertToChatOpenai(response *GeminiChatResponse, reque
|
||||
Model: request.Model,
|
||||
Choices: make([]types.ChatCompletionChoice, 0, len(response.Candidates)),
|
||||
}
|
||||
for i, candidate := range response.Candidates {
|
||||
choice := types.ChatCompletionChoice{
|
||||
Index: i,
|
||||
Message: types.ChatCompletionMessage{
|
||||
Role: "assistant",
|
||||
// Content: "",
|
||||
},
|
||||
FinishReason: types.FinishReasonStop,
|
||||
}
|
||||
if len(candidate.Content.Parts) == 0 {
|
||||
choice.Message.Content = ""
|
||||
openaiResponse.Choices = append(openaiResponse.Choices, choice)
|
||||
continue
|
||||
// choice.Message.Content = candidate.Content.Parts[0].Text
|
||||
}
|
||||
// 开始判断
|
||||
geminiParts := candidate.Content.Parts[0]
|
||||
|
||||
if geminiParts.FunctionCall != nil {
|
||||
choice.Message.ToolCalls = geminiParts.FunctionCall.ToOpenAITool()
|
||||
} else {
|
||||
choice.Message.Content = geminiParts.Text
|
||||
}
|
||||
openaiResponse.Choices = append(openaiResponse.Choices, choice)
|
||||
for _, candidate := range response.Candidates {
|
||||
openaiResponse.Choices = append(openaiResponse.Choices, candidate.ToOpenAIChoice(request))
|
||||
}
|
||||
|
||||
*p.Usage = convertOpenAIUsage(request.Model, response.UsageMetadata)
|
||||
@@ -218,42 +199,11 @@ func (h *geminiStreamHandler) convertToOpenaiStream(geminiResponse *GeminiChatRe
|
||||
|
||||
choices := make([]types.ChatCompletionStreamChoice, 0, len(geminiResponse.Candidates))
|
||||
|
||||
for i, candidate := range geminiResponse.Candidates {
|
||||
parts := candidate.Content.Parts[0]
|
||||
|
||||
choice := types.ChatCompletionStreamChoice{
|
||||
Index: i,
|
||||
Delta: types.ChatCompletionStreamChoiceDelta{
|
||||
Role: types.ChatMessageRoleAssistant,
|
||||
},
|
||||
FinishReason: types.FinishReasonStop,
|
||||
}
|
||||
|
||||
if parts.FunctionCall != nil {
|
||||
if parts.FunctionCall.Args == nil {
|
||||
parts.FunctionCall.Args = map[string]interface{}{}
|
||||
}
|
||||
args, _ := json.Marshal(parts.FunctionCall.Args)
|
||||
|
||||
choice.Delta.ToolCalls = []*types.ChatCompletionToolCalls{
|
||||
{
|
||||
Id: "call_" + common.GetRandomString(24),
|
||||
Type: types.ChatMessageRoleFunction,
|
||||
Index: 0,
|
||||
Function: &types.ChatCompletionToolCallsFunction{
|
||||
Name: parts.FunctionCall.Name,
|
||||
Arguments: string(args),
|
||||
},
|
||||
},
|
||||
}
|
||||
} else {
|
||||
choice.Delta.Content = parts.Text
|
||||
}
|
||||
|
||||
choices = append(choices, choice)
|
||||
for _, candidate := range geminiResponse.Candidates {
|
||||
choices = append(choices, candidate.ToOpenAIStreamChoice(h.Request))
|
||||
}
|
||||
|
||||
if len(choices) > 0 && choices[0].Delta.ToolCalls != nil {
|
||||
if len(choices) > 0 && (choices[0].Delta.ToolCalls != nil || choices[0].Delta.FunctionCall != nil) {
|
||||
choices := choices[0].ConvertOpenaiStream()
|
||||
for _, choice := range choices {
|
||||
chatCompletionCopy := streamResponse
|
||||
|
||||
@@ -32,6 +32,80 @@ type GeminiFunctionCall struct {
|
||||
Args map[string]interface{} `json:"args,omitempty"`
|
||||
}
|
||||
|
||||
func (candidate *GeminiChatCandidate) ToOpenAIStreamChoice(request *types.ChatCompletionRequest) types.ChatCompletionStreamChoice {
|
||||
choice := types.ChatCompletionStreamChoice{
|
||||
Index: int(candidate.Index),
|
||||
Delta: types.ChatCompletionStreamChoiceDelta{
|
||||
Role: types.ChatMessageRoleAssistant,
|
||||
},
|
||||
FinishReason: types.FinishReasonStop,
|
||||
}
|
||||
|
||||
content := ""
|
||||
isTools := false
|
||||
|
||||
for _, part := range candidate.Content.Parts {
|
||||
if part.FunctionCall != nil {
|
||||
if choice.Delta.ToolCalls == nil {
|
||||
choice.Delta.ToolCalls = make([]*types.ChatCompletionToolCalls, 0)
|
||||
}
|
||||
isTools = true
|
||||
choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, part.FunctionCall.ToOpenAITool())
|
||||
} else {
|
||||
content += part.Text
|
||||
}
|
||||
}
|
||||
|
||||
choice.Delta.Content = content
|
||||
|
||||
if isTools {
|
||||
choice.FinishReason = types.FinishReasonToolCalls
|
||||
}
|
||||
choice.CheckChoice(request)
|
||||
|
||||
return choice
|
||||
}
|
||||
|
||||
func (candidate *GeminiChatCandidate) ToOpenAIChoice(request *types.ChatCompletionRequest) types.ChatCompletionChoice {
|
||||
choice := types.ChatCompletionChoice{
|
||||
Index: int(candidate.Index),
|
||||
Message: types.ChatCompletionMessage{
|
||||
Role: "assistant",
|
||||
},
|
||||
FinishReason: types.FinishReasonStop,
|
||||
}
|
||||
|
||||
if len(candidate.Content.Parts) == 0 {
|
||||
choice.Message.Content = ""
|
||||
return choice
|
||||
}
|
||||
|
||||
content := ""
|
||||
useTools := false
|
||||
|
||||
for _, part := range candidate.Content.Parts {
|
||||
if part.FunctionCall != nil {
|
||||
if choice.Message.ToolCalls == nil {
|
||||
choice.Message.ToolCalls = make([]*types.ChatCompletionToolCalls, 0)
|
||||
}
|
||||
useTools = true
|
||||
choice.Message.ToolCalls = append(choice.Message.ToolCalls, part.FunctionCall.ToOpenAITool())
|
||||
} else {
|
||||
content += part.Text
|
||||
}
|
||||
}
|
||||
|
||||
choice.Message.Content = content
|
||||
|
||||
if useTools {
|
||||
choice.FinishReason = types.FinishReasonToolCalls
|
||||
}
|
||||
|
||||
choice.CheckChoice(request)
|
||||
|
||||
return choice
|
||||
}
|
||||
|
||||
type GeminiFunctionResponse struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
Response GeminiFunctionResponseContent `json:"response,omitempty"`
|
||||
@@ -42,18 +116,16 @@ type GeminiFunctionResponseContent struct {
|
||||
Content string `json:"content,omitempty"`
|
||||
}
|
||||
|
||||
func (g *GeminiFunctionCall) ToOpenAITool() []*types.ChatCompletionToolCalls {
|
||||
func (g *GeminiFunctionCall) ToOpenAITool() *types.ChatCompletionToolCalls {
|
||||
args, _ := json.Marshal(g.Args)
|
||||
|
||||
return []*types.ChatCompletionToolCalls{
|
||||
{
|
||||
Id: "",
|
||||
Type: types.ChatMessageRoleFunction,
|
||||
Index: 0,
|
||||
Function: &types.ChatCompletionToolCallsFunction{
|
||||
Name: g.Name,
|
||||
Arguments: string(args),
|
||||
},
|
||||
return &types.ChatCompletionToolCalls{
|
||||
Id: "call_" + common.GetRandomString(24),
|
||||
Type: types.ChatMessageRoleFunction,
|
||||
Index: 0,
|
||||
Function: &types.ChatCompletionToolCallsFunction{
|
||||
Name: g.Name,
|
||||
Arguments: string(args),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user