feat: enhance Gemini API to support image response modalities and update model ratios

Laisky.Cai
2025-03-17 01:22:33 +00:00
parent c893672635
commit 34c7523f01
4 changed files with 66 additions and 13 deletions


@@ -6,6 +6,19 @@ type ChatRequest struct {
 	GenerationConfig  ChatGenerationConfig `json:"generation_config,omitempty"`
 	Tools             []ChatTools          `json:"tools,omitempty"`
 	SystemInstruction *ChatContent         `json:"system_instruction,omitempty"`
+	ModelVersion      string               `json:"model_version,omitempty"`
+	UsageMetadata     *UsageMetadata       `json:"usage_metadata,omitempty"`
 }
+
+type UsageMetadata struct {
+	PromptTokenCount    int                   `json:"promptTokenCount,omitempty"`
+	TotalTokenCount     int                   `json:"totalTokenCount,omitempty"`
+	PromptTokensDetails []PromptTokensDetails `json:"promptTokensDetails,omitempty"`
+}
+
+type PromptTokensDetails struct {
+	Modality   string `json:"modality,omitempty"`
+	TokenCount int    `json:"tokenCount,omitempty"`
+}
 
 type EmbeddingRequest struct {
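
For reference, a minimal standalone sketch of how the new usage-metadata types can be consumed. It is not part of the commit; the JSON payload below is only an illustrative shape that mirrors the promptTokensDetails block Gemini returns, and the struct definitions are copies of the ones added above.

package main

import (
	"encoding/json"
	"fmt"
)

// Copies of the types introduced in this commit, kept local so the
// sketch compiles on its own.
type PromptTokensDetails struct {
	Modality   string `json:"modality,omitempty"`
	TokenCount int    `json:"tokenCount,omitempty"`
}

type UsageMetadata struct {
	PromptTokenCount    int                   `json:"promptTokenCount,omitempty"`
	TotalTokenCount     int                   `json:"totalTokenCount,omitempty"`
	PromptTokensDetails []PromptTokensDetails `json:"promptTokensDetails,omitempty"`
}

func main() {
	// Illustrative usageMetadata payload, not captured from a real response.
	raw := []byte(`{
		"promptTokenCount": 291,
		"totalTokenCount": 1329,
		"promptTokensDetails": [
			{"modality": "TEXT", "tokenCount": 33},
			{"modality": "IMAGE", "tokenCount": 258}
		]
	}`)

	var usage UsageMetadata
	if err := json.Unmarshal(raw, &usage); err != nil {
		panic(err)
	}
	// Per-modality prompt token counts become available once the field exists.
	for _, d := range usage.PromptTokensDetails {
		fmt.Printf("%s prompt tokens: %d\n", d.Modality, d.TokenCount)
	}
	fmt.Println("total tokens:", usage.TotalTokenCount)
}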
@@ -66,12 +79,13 @@ type ChatTools struct {
 }
 
 type ChatGenerationConfig struct {
-	ResponseMimeType string   `json:"responseMimeType,omitempty"`
-	ResponseSchema   any      `json:"responseSchema,omitempty"`
-	Temperature      *float64 `json:"temperature,omitempty"`
-	TopP             *float64 `json:"topP,omitempty"`
-	TopK             float64  `json:"topK,omitempty"`
-	MaxOutputTokens  int      `json:"maxOutputTokens,omitempty"`
-	CandidateCount   int      `json:"candidateCount,omitempty"`
-	StopSequences    []string `json:"stopSequences,omitempty"`
+	ResponseMimeType   string   `json:"responseMimeType,omitempty"`
+	ResponseSchema     any      `json:"responseSchema,omitempty"`
+	Temperature        *float64 `json:"temperature,omitempty"`
+	TopP               *float64 `json:"topP,omitempty"`
+	TopK               float64  `json:"topK,omitempty"`
+	MaxOutputTokens    int      `json:"maxOutputTokens,omitempty"`
+	CandidateCount     int      `json:"candidateCount,omitempty"`
+	StopSequences      []string `json:"stopSequences,omitempty"`
+	ResponseModalities []string `json:"responseModalities,omitempty"`
 }
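
A minimal sketch of how the new ResponseModalities field can be used to request mixed text-and-image output. The struct here is a trimmed copy of ChatGenerationConfig above, and the "TEXT"/"IMAGE" values follow Gemini's documented modality names; treat the exact request wiring as an assumption rather than the adapter's own code path.

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the config struct from this commit, enough to show
// how responseModalities serializes.
type ChatGenerationConfig struct {
	Temperature        *float64 `json:"temperature,omitempty"`
	MaxOutputTokens    int      `json:"maxOutputTokens,omitempty"`
	ResponseModalities []string `json:"responseModalities,omitempty"`
}

func main() {
	temp := 0.7
	cfg := ChatGenerationConfig{
		Temperature:        &temp,
		MaxOutputTokens:    1024,
		ResponseModalities: []string{"TEXT", "IMAGE"}, // ask for both text and image parts
	}
	out, err := json.MarshalIndent(cfg, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}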