feat: update README and enhance response handling for Claude model to support reasoning and thinking

Author: Laisky.Cai
Date: 2025-02-25 03:13:21 +00:00
parent f7ec449728
commit f60300e52c
2 changed files with 20 additions and 5 deletions

View File

@@ -26,7 +26,7 @@ Also welcome to register and use my deployed one-api gateway, which supports var
 - [Support o3-mini](#support-o3-mini)
 - [Support gemini-2.0-flash](#support-gemini-20-flash)
 - [Support OpenRouter's reasoning content](#support-openrouters-reasoning-content)
-- [Support claude-3-7-sonnet #2143](#support-claude-3-7-sonnet-2143)
+- [Support claude-3-7-sonnet \& thinking](#support-claude-3-7-sonnet--thinking)
 - [Bug fix](#bug-fix)

 ## New Features
@@ -124,9 +124,12 @@ type UserRequestCost struct {
 - [feat: support OpenRouter reasoning #2108](https://github.com/songquanpeng/one-api/pull/2108)

-### Support claude-3-7-sonnet #2143
+### Support claude-3-7-sonnet & thinking

 - [feat: support claude-3-7-sonnet #2143](https://github.com/songquanpeng/one-api/pull/2143/files)
+- [feat: support claude thinking #2144](https://github.com/songquanpeng/one-api/pull/2144)
+
+![](https://s3.laisky.com/uploads/2025/02/claude-thinking.png)

 ## Bug fix

View File

@@ -2,6 +2,7 @@ package anthropic
 import (
 	"bufio"
+	"context"
 	"encoding/json"
 	"fmt"
 	"io"
@@ -235,9 +236,17 @@ func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
 	tools := make([]model.Tool, 0)
 	for _, v := range claudeResponse.Content {
-		reasoningText += v.Text
-		if v.Thinking != nil {
-			reasoningText += *v.Thinking
+		switch v.Type {
+		case "thinking":
+			if v.Thinking != nil {
+				reasoningText += *v.Thinking
+			} else {
+				logger.Errorf(context.Background(), "thinking is nil in response")
+			}
+		case "text":
+			responseText += v.Text
+		default:
+			logger.Warnf(context.Background(), "unknown response type %q", v.Type)
 		}
 		if v.Type == "tool_use" {
@@ -252,6 +261,7 @@ func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
 			})
 		}
 	}
+
 	choice := openai.TextResponseChoice{
 		Index: 0,
 		Message: model.Message{
@@ -373,6 +383,8 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
 		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 	}

+	logger.Debugf(c.Request.Context(), "response <- %s\n", string(responseBody))
+
 	var claudeResponse Response
 	err = json.Unmarshal(responseBody, &claudeResponse)
 	if err != nil {
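
For reference, the content-type dispatch added above can be exercised in isolation. The sketch below is not part of the commit: contentBlock and splitClaudeContent are hypothetical stand-ins for the adaptor's own types, written only to mirror the switch over v.Type from the diff, where "thinking" blocks feed the reasoning text and "text" blocks feed the visible response.

package main

import (
	"fmt"
	"log"
)

// contentBlock is a hypothetical stand-in for the Anthropic content block the
// adaptor iterates over: Type selects the variant, Text carries visible output,
// and Thinking (optional, hence a pointer) carries the model's reasoning.
type contentBlock struct {
	Type     string
	Text     string
	Thinking *string
}

// splitClaudeContent mirrors the switch introduced in ResponseClaude2OpenAI:
// "thinking" blocks accumulate into the reasoning text, "text" blocks into the
// response text, and anything else is logged and skipped.
func splitClaudeContent(blocks []contentBlock) (responseText, reasoningText string) {
	for _, v := range blocks {
		switch v.Type {
		case "thinking":
			if v.Thinking != nil {
				reasoningText += *v.Thinking
			} else {
				log.Println("thinking block is missing its payload")
			}
		case "text":
			responseText += v.Text
		default:
			log.Printf("unknown response type %q", v.Type)
		}
	}
	return responseText, reasoningText
}

func main() {
	thought := "The user asked for 2+2; basic arithmetic gives 4."
	blocks := []contentBlock{
		{Type: "thinking", Thinking: &thought},
		{Type: "text", Text: "2 + 2 = 4."},
	}
	answer, reasoning := splitClaudeContent(blocks)
	fmt.Printf("answer: %s\nreasoning: %s\n", answer, reasoning)
}

Dispatching on the content type keeps ordinary text out of the reasoning field, which the removed reasoningText += v.Text line would otherwise have mixed in.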