mirror of https://github.com/linux-do/new-api.git
feat: add claude-3.7-sonnet 128k output support
@@ -25,12 +25,12 @@ services:
       - db

   redis:
-    image: redis:latest
+    image: redis:7.4
     container_name: redis
     restart: always

   db:
-    image: mysql:8.2.0
+    image: mysql:8.2
     container_name: mysql
     restart: always
     volumes:
@@ -70,9 +70,9 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, info *relaycommon.RelayInfo, re
 		return nil, errors.New("request is nil")
 	}
 	if a.RequestMode == RequestModeCompletion {
-		return RequestOpenAI2ClaudeComplete(*request), nil
+		return RequestOpenAI2ClaudeComplete(c, *request), nil
 	} else {
-		return RequestOpenAI2ClaudeMessage(*request)
+		return RequestOpenAI2ClaudeMessage(c, *request)
 	}
 }

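Threading the gin context into both converters matters because the message converter below flags Anthropic's 128k-output beta by setting a header on the incoming request. A minimal sketch of how such a flag could later be copied onto the outbound Anthropic request, assuming a hypothetical buildUpstreamHeaders helper; the relay's real header plumbing is not shown in this commit and may differ:

package relay

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// buildUpstreamHeaders is a hypothetical helper, not part of this commit: it
// copies the "anthropic-beta" flag that the converter stored on the inbound
// request over to the outbound Anthropic request.
func buildUpstreamHeaders(c *gin.Context, upstream *http.Request) {
	if beta := c.Request.Header.Get("anthropic-beta"); beta != "" {
		upstream.Header.Set("anthropic-beta", beta)
	}
}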
@@ -28,7 +28,7 @@ func stopReasonClaude2OpenAI(reason string) string {
 	}
 }

-func RequestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeRequest {
+func RequestOpenAI2ClaudeComplete(c *gin.Context, textRequest dto.GeneralOpenAIRequest) *ClaudeRequest {

 	claudeRequest := ClaudeRequest{
 		Model: textRequest.Model,
@@ -59,16 +59,16 @@ func RequestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeR
 	return &claudeRequest
 }

-func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeRequest, error) {
+func RequestOpenAI2ClaudeMessage(c *gin.Context, textRequest dto.GeneralOpenAIRequest) (*ClaudeRequest, error) {
 	claudeTools := make([]Tool, 0, len(textRequest.Tools))

 	if strings.HasSuffix(textRequest.Model, "-thinking") {
 		textRequest.Model = strings.TrimSuffix(textRequest.Model, "-thinking")

-		if textRequest.MaxTokens == 0 {
-			textRequest.MaxTokens = 4096
-		} else if textRequest.MaxTokens < 1280 {
-			textRequest.MaxTokens = 1280
+		if textRequest.MaxTokens < 2048 {
+			textRequest.MaxTokens = 2048
+		} else if textRequest.MaxTokens > 64000 {
+			c.Request.Header.Set("anthropic-beta", "output-128k-2025-02-19")
 		}

 		textRequest.TopP = 0
@@ -76,7 +76,7 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
 		textRequest.Temperature = 0
 		textRequest.Thinking = &dto.Thinking{
 			Type: "enabled",
-			BudgetTokens: int(float64(textRequest.MaxTokens) * 0.8),
+			BudgetTokens: int(float64(textRequest.MaxTokens) * 0.5),
 		}
 	}

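Taken together, these hunks mean a "-thinking" model now gets its MaxTokens floored at 2048, opts into the output-128k beta header whenever the caller asks for more than 64,000 output tokens, and budgets half (down from 80%) of MaxTokens for thinking. A self-contained sketch of that logic, using a stripped-down stand-in struct and the hypothetical names thinkingRequest and applyThinkingLimits rather than the real dto.GeneralOpenAIRequest:

package main

import (
	"fmt"
	"strings"
)

// thinkingRequest is a stripped-down stand-in for dto.GeneralOpenAIRequest,
// carrying only the fields this commit touches.
type thinkingRequest struct {
	Model     string
	MaxTokens int
}

// applyThinkingLimits mirrors the new behaviour: strip the "-thinking" suffix,
// clamp MaxTokens, decide whether the 128k-output beta header is needed, and
// compute the thinking budget.
func applyThinkingLimits(r *thinkingRequest) (budgetTokens int, betaHeader string) {
	if !strings.HasSuffix(r.Model, "-thinking") {
		return 0, ""
	}
	r.Model = strings.TrimSuffix(r.Model, "-thinking")

	if r.MaxTokens < 2048 {
		// Floor raised from the old 1280/4096 handling to a flat 2048.
		r.MaxTokens = 2048
	} else if r.MaxTokens > 64000 {
		// Above the standard 64k output ceiling, opt into the 128k output beta.
		betaHeader = "output-128k-2025-02-19"
	}
	// Thinking budget is now half of MaxTokens (was 0.8 before this commit).
	return int(float64(r.MaxTokens) * 0.5), betaHeader
}

func main() {
	r := thinkingRequest{Model: "claude-3-7-sonnet-20250219-thinking", MaxTokens: 100000}
	budget, beta := applyThinkingLimits(&r)
	fmt.Println(r.Model, r.MaxTokens, budget, beta)
	// claude-3-7-sonnet-20250219 100000 50000 output-128k-2025-02-19
}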