mirror of
https://github.com/songquanpeng/one-api.git
synced 2025-09-17 01:06:37 +08:00
Compare commits
2 Commits
89712aca97
...
b3d2a32312
Author | SHA1 | Date | |
---|---|---|---|
|
b3d2a32312 | ||
|
e1ee4fe7d9 |
@ -66,6 +66,23 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
|
|||||||
MaxOutputTokens: textRequest.MaxTokens,
|
MaxOutputTokens: textRequest.MaxTokens,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if textRequest.ReasoningEffort != nil {
|
||||||
|
var thinkBudget int
|
||||||
|
switch *textRequest.ReasoningEffort {
|
||||||
|
case "low":
|
||||||
|
thinkBudget = 1000
|
||||||
|
case "medium":
|
||||||
|
thinkBudget = 8000
|
||||||
|
case "high":
|
||||||
|
thinkBudget = 24000
|
||||||
|
}
|
||||||
|
geminiRequest.GenerationConfig.ThinkingConfig = &ThinkingConfig{
|
||||||
|
ThinkingBudget: thinkBudget,
|
||||||
|
IncludeThoughts: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if textRequest.ResponseFormat != nil {
|
if textRequest.ResponseFormat != nil {
|
||||||
if mimeType, ok := mimeTypeMap[textRequest.ResponseFormat.Type]; ok {
|
if mimeType, ok := mimeTypeMap[textRequest.ResponseFormat.Type]; ok {
|
||||||
geminiRequest.GenerationConfig.ResponseMimeType = mimeType
|
geminiRequest.GenerationConfig.ResponseMimeType = mimeType
|
||||||
@ -199,6 +216,21 @@ func (g *ChatResponse) GetResponseText() string {
|
|||||||
return ""
|
return ""
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (g *ChatResponse) GetResponseTextAndThought() (content string, thought string) {
|
||||||
|
if g == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
|
||||||
|
contentPart := g.Candidates[0].Content.Parts[0]
|
||||||
|
if contentPart.Thought {
|
||||||
|
thought = contentPart.Text
|
||||||
|
return
|
||||||
|
}
|
||||||
|
content = contentPart.Text
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
type ChatCandidate struct {
|
type ChatCandidate struct {
|
||||||
Content ChatContent `json:"content"`
|
Content ChatContent `json:"content"`
|
||||||
FinishReason string `json:"finishReason"`
|
FinishReason string `json:"finishReason"`
|
||||||
@ -263,7 +295,11 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
|
|||||||
if i > 0 {
|
if i > 0 {
|
||||||
builder.WriteString("\n")
|
builder.WriteString("\n")
|
||||||
}
|
}
|
||||||
builder.WriteString(part.Text)
|
if part.Thought {
|
||||||
|
builder.WriteString(fmt.Sprintf("<think>%s</think>\n", part.Text))
|
||||||
|
} else {
|
||||||
|
builder.WriteString(part.Text)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
choice.Message.Content = builder.String()
|
choice.Message.Content = builder.String()
|
||||||
}
|
}
|
||||||
@ -278,7 +314,7 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
|
|||||||
|
|
||||||
func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
|
func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
|
||||||
var choice openai.ChatCompletionsStreamResponseChoice
|
var choice openai.ChatCompletionsStreamResponseChoice
|
||||||
choice.Delta.Content = geminiResponse.GetResponseText()
|
choice.Delta.Content, choice.Delta.ReasoningContent = geminiResponse.GetResponseTextAndThought()
|
||||||
//choice.FinishReason = &constant.StopFinishReason
|
//choice.FinishReason = &constant.StopFinishReason
|
||||||
var response openai.ChatCompletionsStreamResponse
|
var response openai.ChatCompletionsStreamResponse
|
||||||
response.Id = fmt.Sprintf("chatcmpl-%s", random.GetUUID())
|
response.Id = fmt.Sprintf("chatcmpl-%s", random.GetUUID())
|
||||||
|
@ -49,6 +49,7 @@ type Part struct {
|
|||||||
Text string `json:"text,omitempty"`
|
Text string `json:"text,omitempty"`
|
||||||
InlineData *InlineData `json:"inlineData,omitempty"`
|
InlineData *InlineData `json:"inlineData,omitempty"`
|
||||||
FunctionCall *FunctionCall `json:"functionCall,omitempty"`
|
FunctionCall *FunctionCall `json:"functionCall,omitempty"`
|
||||||
|
Thought bool `json:"thought,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type ChatContent struct {
|
type ChatContent struct {
|
||||||
@ -66,12 +67,18 @@ type ChatTools struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type ChatGenerationConfig struct {
|
type ChatGenerationConfig struct {
|
||||||
ResponseMimeType string `json:"responseMimeType,omitempty"`
|
ResponseMimeType string `json:"responseMimeType,omitempty"`
|
||||||
ResponseSchema any `json:"responseSchema,omitempty"`
|
ResponseSchema any `json:"responseSchema,omitempty"`
|
||||||
Temperature *float64 `json:"temperature,omitempty"`
|
Temperature *float64 `json:"temperature,omitempty"`
|
||||||
TopP *float64 `json:"topP,omitempty"`
|
TopP *float64 `json:"topP,omitempty"`
|
||||||
TopK float64 `json:"topK,omitempty"`
|
TopK float64 `json:"topK,omitempty"`
|
||||||
MaxOutputTokens int `json:"maxOutputTokens,omitempty"`
|
MaxOutputTokens int `json:"maxOutputTokens,omitempty"`
|
||||||
CandidateCount int `json:"candidateCount,omitempty"`
|
CandidateCount int `json:"candidateCount,omitempty"`
|
||||||
StopSequences []string `json:"stopSequences,omitempty"`
|
StopSequences []string `json:"stopSequences,omitempty"`
|
||||||
|
ThinkingConfig *ThinkingConfig `json:"thinkingConfig,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ThinkingConfig is the generationConfig.thinkingConfig payload that controls
// Gemini's reasoning ("thinking") output.
type ThinkingConfig struct {
	// ThinkingBudget is the token budget granted to the model's internal
	// reasoning; this adaptor maps OpenAI reasoning_effort low/medium/high
	// to 1000/8000/24000 tokens.
	ThinkingBudget int `json:"thinkingBudget"`
	// IncludeThoughts requests that thought parts be returned in the
	// response (surfaced to clients as reasoning content).
	IncludeThoughts bool `json:"includeThoughts"`
}
|
||||||
|
Loading…
Reference in New Issue
Block a user