Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-09-18 17:46:37 +08:00)

Compare commits: b3d2a32312 ... 89712aca97 (1 commit)
@@ -66,23 +66,6 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
 			MaxOutputTokens: textRequest.MaxTokens,
 		},
 	}
-
-	if textRequest.ReasoningEffort != nil {
-		var thinkBudget int
-		switch *textRequest.ReasoningEffort {
-		case "low":
-			thinkBudget = 1000
-		case "medium":
-			thinkBudget = 8000
-		case "high":
-			thinkBudget = 24000
-		}
-		geminiRequest.GenerationConfig.ThinkingConfig = &ThinkingConfig{
-			ThinkingBudget:  thinkBudget,
-			IncludeThoughts: true,
-		}
-	}
-
 	if textRequest.ResponseFormat != nil {
 		if mimeType, ok := mimeTypeMap[textRequest.ResponseFormat.Type]; ok {
 			geminiRequest.GenerationConfig.ResponseMimeType = mimeType
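For reference, the branch deleted above mapped an OpenAI-style reasoning_effort value onto a Gemini thinking budget before filling GenerationConfig.ThinkingConfig. A minimal sketch of that mapping as a standalone helper (the name effortToThinkingBudget is hypothetical; the budget values are the ones removed by this commit):

// effortToThinkingBudget reproduces the effort-to-budget mapping that the
// removed branch applied inline; values not listed fall through to 0,
// i.e. no explicit thinking budget is set.
func effortToThinkingBudget(effort string) int {
	switch effort {
	case "low":
		return 1000
	case "medium":
		return 8000
	case "high":
		return 24000
	default:
		return 0
	}
}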
@@ -216,21 +199,6 @@ func (g *ChatResponse) GetResponseText() string {
 	return ""
 }
 
-func (g *ChatResponse) GetResponseTextAndThought() (content string, thought string) {
-	if g == nil {
-		return
-	}
-	if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
-		contentPart := g.Candidates[0].Content.Parts[0]
-		if contentPart.Thought {
-			thought = contentPart.Text
-			return
-		}
-		content = contentPart.Text
-	}
-	return
-}
-
 type ChatCandidate struct {
 	Content      ChatContent `json:"content"`
 	FinishReason string      `json:"finishReason"`
@@ -295,12 +263,8 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
 				if i > 0 {
 					builder.WriteString("\n")
 				}
-				if part.Thought {
-					builder.WriteString(fmt.Sprintf("<think>%s</think>\n", part.Text))
-				} else {
 					builder.WriteString(part.Text)
 				}
-			}
 			choice.Message.Content = builder.String()
 		}
 	} else {
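The lines removed in the hunk above are what wrapped Gemini "thought" parts in <think> tags when assembling the non-streaming message content. An illustrative re-creation of that removed formatting, assuming the Part type shown further down plus the strings and fmt packages (the helper name renderParts and the sample text are made up):

// renderParts rebuilds the removed formatting: thought parts were wrapped in
// <think> tags, other parts appended verbatim, with "\n" between parts.
func renderParts(parts []Part) string {
	var builder strings.Builder
	for i, part := range parts {
		if i > 0 {
			builder.WriteString("\n")
		}
		if part.Thought {
			builder.WriteString(fmt.Sprintf("<think>%s</think>\n", part.Text))
		} else {
			builder.WriteString(part.Text)
		}
	}
	return builder.String()
}

// renderParts([]Part{{Text: "Check the constraints first.", Thought: true}, {Text: "The answer is 42."}})
// returns "<think>Check the constraints first.</think>\n\nThe answer is 42."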
@@ -314,7 +278,7 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
 
 func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
 	var choice openai.ChatCompletionsStreamResponseChoice
-	choice.Delta.Content, choice.Delta.ReasoningContent = geminiResponse.GetResponseTextAndThought()
+	choice.Delta.Content = geminiResponse.GetResponseText()
 	//choice.FinishReason = &constant.StopFinishReason
 	var response openai.ChatCompletionsStreamResponse
 	response.Id = fmt.Sprintf("chatcmpl-%s", random.GetUUID())
@@ -49,7 +49,6 @@ type Part struct {
 	Text         string        `json:"text,omitempty"`
 	InlineData   *InlineData   `json:"inlineData,omitempty"`
 	FunctionCall *FunctionCall `json:"functionCall,omitempty"`
-	Thought      bool          `json:"thought,omitempty"`
 }
 
 type ChatContent struct {
@@ -75,10 +74,4 @@ type ChatGenerationConfig struct {
 	MaxOutputTokens int             `json:"maxOutputTokens,omitempty"`
 	CandidateCount  int             `json:"candidateCount,omitempty"`
 	StopSequences   []string        `json:"stopSequences,omitempty"`
-	ThinkingConfig  *ThinkingConfig `json:"thinkingConfig,omitempty"`
-}
-
-type ThinkingConfig struct {
-	ThinkingBudget  int  `json:"thinkingBudget"`
-	IncludeThoughts bool `json:"includeThoughts"`
-}
 }
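Before this commit, the ThinkingConfig struct removed above serialized into the request's generationConfig. A rough sketch of how the deleted fields marshalled, assuming the pre-revert struct definitions and encoding/json (example values; only the JSON tags come from the hunk, and the exact output is approximate since other omitempty fields are left at zero):

// Illustrative only: marshalling the deleted ThinkingConfig fields.
cfg := ChatGenerationConfig{
	MaxOutputTokens: 4096,
	ThinkingConfig: &ThinkingConfig{
		ThinkingBudget:  8000, // "medium" in the mapping removed from ConvertRequest
		IncludeThoughts: true,
	},
}
b, _ := json.Marshal(cfg)
// string(b) comes out roughly as:
// {"maxOutputTokens":4096,"thinkingConfig":{"thinkingBudget":8000,"includeThoughts":true}}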