fix: claude thinking for non-stream mode

Laisky.Cai
2025-02-25 03:13:21 +00:00
parent 3a8924d7af
commit c61d6440f9
2 changed files with 16 additions and 4 deletions


@@ -385,7 +385,7 @@ graph LR
     + Example: `NODE_TYPE=slave`
 9. `CHANNEL_UPDATE_FREQUENCY`: when set, channel balances are refreshed periodically; the unit is minutes. If unset, no refresh is performed.
     + Example: `CHANNEL_UPDATE_FREQUENCY=1440`
 10. `CHANNEL_TEST_FREQUENCY`: when set, channels are tested periodically; the unit is minutes. If unset, no testing is performed.
-    +Example: `CHANNEL_TEST_FREQUENCY=1440`
+    + Example: `CHANNEL_TEST_FREQUENCY=1440`
 11. `POLLING_INTERVAL`: request interval when batch-updating channel balances and testing their availability; the unit is seconds. No interval by default.
     + Example: `POLLING_INTERVAL=5`
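These variables share one pattern: an integer interval read from the environment, with the feature disabled when the variable is unset. Below is a minimal Go sketch of that semantics, using `CHANNEL_UPDATE_FREQUENCY` as the example; the `updateInterval` helper and the `main` wrapper are hypothetical illustrations, not code from this repository.

```go
package main

import (
	"fmt"
	"os"
	"strconv"
	"time"
)

// updateInterval sketches the documented semantics of CHANNEL_UPDATE_FREQUENCY:
// the value is a number of minutes, and an unset (or invalid) variable means
// the periodic balance update is disabled.
func updateInterval() (time.Duration, bool) {
	raw := os.Getenv("CHANNEL_UPDATE_FREQUENCY")
	if raw == "" {
		return 0, false // unset: no periodic update
	}
	minutes, err := strconv.Atoi(raw)
	if err != nil || minutes <= 0 {
		return 0, false // treat invalid values as "disabled"
	}
	return time.Duration(minutes) * time.Minute, true
}

func main() {
	// e.g. CHANNEL_UPDATE_FREQUENCY=1440 -> refresh every 24h0m0s
	if d, ok := updateInterval(); ok {
		fmt.Println("refreshing channel balances every", d)
	} else {
		fmt.Println("channel balance refresh disabled")
	}
}
```

`CHANNEL_TEST_FREQUENCY` follows the same shape, and `POLLING_INTERVAL` differs only in its unit (seconds).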


@@ -2,6 +2,7 @@ package anthropic
 import (
 	"bufio"
+	"context"
 	"encoding/json"
 	"fmt"
 	"io"
@@ -235,9 +236,17 @@ func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
 	tools := make([]model.Tool, 0)
 	for _, v := range claudeResponse.Content {
-		reasoningText += v.Text
-		if v.Thinking != nil {
-			reasoningText += *v.Thinking
+		switch v.Type {
+		case "thinking":
+			if v.Thinking != nil {
+				reasoningText += *v.Thinking
+			} else {
+				logger.Errorf(context.Background(), "thinking is nil in response")
+			}
+		case "text":
+			responseText += v.Text
+		default:
+			logger.Warnf(context.Background(), "unknown response type %q", v.Type)
 		}
 		if v.Type == "tool_use" {
@@ -252,6 +261,7 @@ func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
 			})
 		}
 	}
+
 	choice := openai.TextResponseChoice{
 		Index: 0,
 		Message: model.Message{
@@ -374,6 +384,8 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
 		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 	}
 
+	logger.Debugf(c.Request.Context(), "response <- %s\n", string(responseBody))
+
 	var claudeResponse Response
 	err = json.Unmarshal(responseBody, &claudeResponse)
 	if err != nil {
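The core of the non-stream fix is visible in the second hunk: each Claude content block is now routed by its `type`, so "thinking" blocks feed the reasoning buffer and "text" blocks feed the visible answer, instead of every block's `Text` being appended to the reasoning text. The following is a self-contained sketch of that routing; `contentBlock` and `splitClaudeContent` are simplified stand-ins that keep only the `Type`, `Text`, and `Thinking` fields seen in the diff, not the adapter's actual types.

```go
package main

import "fmt"

// contentBlock mirrors the fields of a Claude response content block that the
// fix relies on; all other fields are omitted in this sketch.
type contentBlock struct {
	Type     string
	Text     string
	Thinking *string
}

// splitClaudeContent reproduces the idea behind the reworked loop: "thinking"
// blocks accumulate into the reasoning buffer, "text" blocks into the visible
// answer, and unknown block types are reported rather than silently merged.
func splitClaudeContent(blocks []contentBlock) (responseText, reasoningText string) {
	for _, v := range blocks {
		switch v.Type {
		case "thinking":
			if v.Thinking != nil {
				reasoningText += *v.Thinking
			}
		case "text":
			responseText += v.Text
		default:
			fmt.Printf("unknown response type %q\n", v.Type)
		}
	}
	return responseText, reasoningText
}

func main() {
	thought := "The user asked for 2+2; that is 4."
	blocks := []contentBlock{
		{Type: "thinking", Thinking: &thought},
		{Type: "text", Text: "2 + 2 = 4"},
	}
	answer, reasoning := splitClaudeContent(blocks)
	fmt.Println("answer:   ", answer)
	fmt.Println("reasoning:", reasoning)
}
```

Running it prints the answer and the reasoning separately, which mirrors what the reworked loop produces for a non-streaming response before it is packed into the OpenAI-style `TextResponse`.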