Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-11-20 23:16:49 +08:00)
feat: support Anthropic API protocol

- Add Anthropic adaptor implementation
- Support Anthropic message format conversion
- Add Vertex AI Claude adapter support
- Update relay mode definitions for Anthropic
- Add Anthropic controller and routing

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: zgdmemail@gmail.com
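For context, a minimal client-side sketch (not part of the commit) of what a native Anthropic-protocol call through the relay might look like. The gateway URL, port, and token are placeholders, and the auth header name is an assumption; the body and the anthropic-version header follow the Anthropic Messages API:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Plain Anthropic Messages format; with native protocol support the
	// relay forwards this body without converting it to OpenAI format.
	payload := []byte(`{"model":"claude-3-sonnet-20240229","max_tokens":256,"messages":[{"role":"user","content":"Hello"}]}`)

	// Hypothetical gateway address and token.
	req, err := http.NewRequest(http.MethodPost, "http://localhost:3000/v1/messages", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("x-api-key", "sk-your-one-api-token") // placeholder token
	req.Header.Set("anthropic-version", "2023-06-01")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}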
@@ -11,6 +11,7 @@ import (
 	"github.com/songquanpeng/one-api/relay/adaptor"
 	"github.com/songquanpeng/one-api/relay/meta"
 	"github.com/songquanpeng/one-api/relay/model"
+	"github.com/songquanpeng/one-api/relay/relaymode"
 )
 
 type Adaptor struct {
@@ -21,7 +22,15 @@ func (a *Adaptor) Init(meta *meta.Meta) {
 }
 
 func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
-	return fmt.Sprintf("%s/v1/messages", meta.BaseURL), nil
+	// For the native Anthropic API
+	if strings.Contains(meta.BaseURL, "api.anthropic.com") {
+		return fmt.Sprintf("%s/v1/messages", meta.BaseURL), nil
+	}
+
+	// Third-party providers supporting the Anthropic protocol (like DeepSeek)
+	// typically expose the endpoint at /anthropic/v1/messages
+	baseURL := strings.TrimSuffix(meta.BaseURL, "/")
+	return fmt.Sprintf("%s/anthropic/v1/messages", baseURL), nil
 }
 
 func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
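A quick table-driven sketch of the URL resolution this hunk introduces (a hypothetical test in the same package, assuming only the standard testing import plus the relay/meta package shown above):

func TestGetRequestURL(t *testing.T) {
	a := &Adaptor{}
	cases := []struct {
		base, want string
	}{
		{"https://api.anthropic.com", "https://api.anthropic.com/v1/messages"},
		// Trailing slash is trimmed before the third-party path is appended.
		{"https://api.deepseek.com/", "https://api.deepseek.com/anthropic/v1/messages"},
	}
	for _, tc := range cases {
		got, err := a.GetRequestURL(&meta.Meta{BaseURL: tc.base})
		if err != nil || got != tc.want {
			t.Fatalf("base %q: got %q (err %v), want %q", tc.base, got, err, tc.want)
		}
	}
}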
@@ -47,6 +56,15 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 	if request == nil {
 		return nil, errors.New("request is nil")
 	}
+
+	// For native Anthropic protocol requests, return the request as-is (no conversion needed)
+	if relayMode == relaymode.AnthropicMessages {
+		// The request is already in Anthropic format, so pass it through;
+		// the caller already holds the Anthropic-format request.
+		return request, nil
+	}
+
+	// For OpenAI to Anthropic conversion (existing functionality)
 	return ConvertRequest(*request), nil
 }
@@ -62,6 +80,17 @@ func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Read
 }
 
 func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
+	// For native Anthropic protocol requests, handle the response directly without conversion
+	if meta.Mode == relaymode.AnthropicMessages {
+		if meta.IsStream {
+			err, usage = DirectStreamHandler(c, resp)
+		} else {
+			err, usage = DirectHandler(c, resp, meta.PromptTokens, meta.ActualModelName)
+		}
+		return
+	}
+
+	// For OpenAI to Anthropic conversion (existing functionality)
 	if meta.IsStream {
 		err, usage = StreamHandler(c, resp)
 	} else {
@@ -4,11 +4,12 @@ import (
 	"bufio"
 	"encoding/json"
 	"fmt"
-	"github.com/songquanpeng/one-api/common/render"
 	"io"
 	"net/http"
 	"strings"
 
+	"github.com/songquanpeng/one-api/common/render"
+
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common"
 	"github.com/songquanpeng/one-api/common/helper"
@@ -89,8 +90,12 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
 		claudeRequest.Model = "claude-2.1"
 	}
 	for _, message := range textRequest.Messages {
-		if message.Role == "system" && claudeRequest.System == "" {
-			claudeRequest.System = message.StringContent()
+		if message.Role == "system" && claudeRequest.System.IsEmpty() {
+			// Build the JSON string via json.Marshal so embedded quotes or newlines remain valid JSON
+			systemPrompt := SystemPrompt{}
+			systemData, _ := json.Marshal(message.StringContent())
+			_ = systemPrompt.UnmarshalJSON(systemData)
+			claudeRequest.System = systemPrompt
 			continue
 		}
 		claudeMessage := Message{
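Aside (not in the commit): a minimal demonstration of why the intermediate JSON is built with json.Marshal rather than by wrapping the content in literal quotes — naive quoting breaks as soon as the system prompt itself contains a quote:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	s := `He said "hi"` // content with an embedded quote

	naive := []byte(`"` + s + `"`) // "He said "hi"" — not valid JSON
	safe, _ := json.Marshal(s)     // "He said \"hi\"" — properly escaped

	fmt.Println(json.Valid(naive), json.Valid(safe)) // false true
}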
@@ -377,3 +382,131 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
 	_, err = c.Writer.Write(jsonResponse)
 	return nil, &usage
 }
+
+// DirectHandler handles native Anthropic API responses without conversion to OpenAI format
+func DirectHandler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
+	ctx := c.Request.Context()
+	logger.Infof(ctx, "=== DirectHandler Start ===")
+	logger.Infof(ctx, "Response status: %d", resp.StatusCode)
+	logger.Infof(ctx, "Response headers: %+v", resp.Header)
+
+	responseBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		logger.Errorf(ctx, "Failed to read response body: %s", err.Error())
+		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+	}
+	err = resp.Body.Close()
+	if err != nil {
+		logger.Errorf(ctx, "Failed to close response body: %s", err.Error())
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
+
+	logger.Infof(ctx, "Raw response body: %s", string(responseBody))
+
+	var claudeResponse Response
+	err = json.Unmarshal(responseBody, &claudeResponse)
+	if err != nil {
+		logger.Errorf(ctx, "Failed to unmarshal response: %s", err.Error())
+		// If it cannot be parsed as an Anthropic response, it may be an
+		// upstream error response; write it through to the client verbatim.
+		c.Writer.Header().Set("Content-Type", "application/json")
+		c.Writer.WriteHeader(resp.StatusCode)
+		_, writeErr := c.Writer.Write(responseBody)
+		if writeErr != nil {
+			logger.Errorf(ctx, "Failed to write raw response: %s", writeErr.Error())
+			return openai.ErrorWrapper(writeErr, "write_response_failed", http.StatusInternalServerError), nil
+		}
+		// Return a minimal usage for tracking
+		usage := &model.Usage{PromptTokens: promptTokens, CompletionTokens: 0, TotalTokens: promptTokens}
+		return nil, usage
+	}
+
+	logger.Infof(ctx, "Parsed response - ID: %s, Model: %s, Usage: %+v",
+		claudeResponse.Id, claudeResponse.Model, claudeResponse.Usage)
+
+	if claudeResponse.Error.Type != "" {
+		logger.Errorf(ctx, "Anthropic API error: %s - %s", claudeResponse.Error.Type, claudeResponse.Error.Message)
+		return &model.ErrorWithStatusCode{
+			Error: model.Error{
+				Message: claudeResponse.Error.Message,
+				Type:    claudeResponse.Error.Type,
+				Param:   "",
+				Code:    claudeResponse.Error.Type,
+			},
+			StatusCode: resp.StatusCode,
+		}, nil
+	}
+
+	// For direct mode, return the response as-is without conversion
+	usage := model.Usage{
+		PromptTokens:     claudeResponse.Usage.InputTokens,
+		CompletionTokens: claudeResponse.Usage.OutputTokens,
+		TotalTokens:      claudeResponse.Usage.InputTokens + claudeResponse.Usage.OutputTokens,
+	}
+
+	logger.Infof(ctx, "Usage calculated: %+v", usage)
+
+	// Write the original Anthropic response directly
+	c.Writer.Header().Set("Content-Type", "application/json")
+	c.Writer.WriteHeader(resp.StatusCode)
+	_, err = c.Writer.Write(responseBody)
+	if err != nil {
+		logger.Errorf(ctx, "Failed to write response: %s", err.Error())
+		return openai.ErrorWrapper(err, "write_response_failed", http.StatusInternalServerError), nil
+	}
+
+	logger.Infof(ctx, "Response written successfully")
+	logger.Infof(ctx, "=== DirectHandler End ===")
+	return nil, &usage
+}
+
+// DirectStreamHandler handles native Anthropic API streaming responses without conversion
+func DirectStreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+	defer resp.Body.Close()
+
+	// Set headers for streaming
+	c.Writer.Header().Set("Content-Type", "text/event-stream")
+	c.Writer.Header().Set("Cache-Control", "no-cache")
+	c.Writer.Header().Set("Connection", "keep-alive")
+	c.Writer.Header().Set("Access-Control-Allow-Origin", "*")
+	c.Writer.Header().Set("Access-Control-Allow-Headers", "Content-Type")
+	c.Writer.WriteHeader(resp.StatusCode)
+
+	// Stream the response directly without conversion
+	var usage model.Usage
+	scanner := bufio.NewScanner(resp.Body)
+	for scanner.Scan() {
+		data := scanner.Text()
+		if len(data) < 6 || !strings.HasPrefix(data, "data:") {
+			continue
+		}
+
+		// Parse usage information if available
+		if strings.Contains(data, "\"usage\":") {
+			var eventData map[string]interface{}
+			jsonData := strings.TrimPrefix(data, "data:")
+			jsonData = strings.TrimSpace(jsonData)
+			if err := json.Unmarshal([]byte(jsonData), &eventData); err == nil {
+				if usageData, ok := eventData["usage"].(map[string]interface{}); ok {
+					if inputTokens, ok := usageData["input_tokens"].(float64); ok {
+						usage.PromptTokens = int(inputTokens)
+					}
+					if outputTokens, ok := usageData["output_tokens"].(float64); ok {
+						usage.CompletionTokens = int(outputTokens)
+						usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
+					}
+				}
+			}
+		}
+
+		// Write the event line through to the client unchanged
+		c.Writer.WriteString(data + "\n")
+		c.Writer.Flush()
+	}
+
+	if err := scanner.Err(); err != nil {
+		return openai.ErrorWrapper(err, "stream_read_failed", http.StatusInternalServerError), nil
+	}
+
+	return nil, &usage
+}
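To make the two handlers' behavior concrete, here is a hypothetical test sketch (same package assumed, plus the standard testing, net/http/httptest, io, and strings imports). It checks that DirectHandler relays the body verbatim while extracting usage from the JSON, and that DirectStreamHandler picks usage out of a streamed event; the event shape mirrors the "usage" object the parser above looks for:

func TestDirectHandlers(t *testing.T) {
	gin.SetMode(gin.TestMode)

	// Non-streaming: the body passes through unchanged, usage is parsed.
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest(http.MethodPost, "/v1/messages", nil)
	body := `{"id":"msg_test","model":"claude-3-sonnet","usage":{"input_tokens":10,"output_tokens":5}}`
	resp := &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{},
		Body:       io.NopCloser(strings.NewReader(body)),
	}
	errResp, usage := DirectHandler(c, resp, 10, "claude-3-sonnet")
	if errResp != nil || usage == nil || usage.TotalTokens != 15 {
		t.Fatalf("DirectHandler: err=%+v usage=%+v", errResp, usage)
	}
	if w.Body.String() != body {
		t.Fatalf("DirectHandler: body was not passed through verbatim")
	}

	// Streaming: usage is read from a message_delta-style event.
	w2 := httptest.NewRecorder()
	c2, _ := gin.CreateTestContext(w2)
	c2.Request = httptest.NewRequest(http.MethodPost, "/v1/messages", nil)
	stream := "data: {\"type\":\"message_delta\",\"usage\":{\"input_tokens\":10,\"output_tokens\":25}}\n\n"
	resp2 := &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{},
		Body:       io.NopCloser(strings.NewReader(stream)),
	}
	errResp2, usage2 := DirectStreamHandler(c2, resp2)
	if errResp2 != nil || usage2 == nil || usage2.CompletionTokens != 25 {
		t.Fatalf("DirectStreamHandler: err=%+v usage=%+v", errResp2, usage2)
	}
}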
@@ -1,5 +1,10 @@
 package anthropic
 
+import (
+	"encoding/json"
+	"fmt"
+)
+
 // https://docs.anthropic.com/claude/reference/messages_post
 
 type Metadata struct {
@@ -41,18 +46,92 @@ type InputSchema struct {
 	Required any `json:"required,omitempty"`
 }
 
+// SystemPrompt can handle both string and array formats for the system field
+type SystemPrompt struct {
+	value interface{}
+}
+
+// UnmarshalJSON implements json.Unmarshaler to handle both string and array formats
+func (s *SystemPrompt) UnmarshalJSON(data []byte) error {
+	// Try to unmarshal as a string first
+	var str string
+	if err := json.Unmarshal(data, &str); err == nil {
+		s.value = str
+		return nil
+	}
+
+	// If that fails, try to unmarshal as an array
+	var arr []interface{}
+	if err := json.Unmarshal(data, &arr); err == nil {
+		s.value = arr
+		return nil
+	}
+
+	return fmt.Errorf("system field must be either a string or an array")
+}
+
+// MarshalJSON implements json.Marshaler
+func (s SystemPrompt) MarshalJSON() ([]byte, error) {
+	return json.Marshal(s.value)
+}
+
+// String returns the system prompt as a string
+func (s SystemPrompt) String() string {
+	if s.value == nil {
+		return ""
+	}
+
+	switch v := s.value.(type) {
+	case string:
+		return v
+	case []interface{}:
+		// Convert the array to a string by concatenating its text content
+		var result string
+		for _, item := range v {
+			if itemMap, ok := item.(map[string]interface{}); ok {
+				if text, exists := itemMap["text"]; exists {
+					if textStr, ok := text.(string); ok {
+						result += textStr + " "
+					}
+				}
+			} else if str, ok := item.(string); ok {
+				result += str + " "
+			}
+		}
+		return result
+	default:
+		return fmt.Sprintf("%v", v)
+	}
+}
+
+// IsEmpty returns true if the system prompt is empty
+func (s SystemPrompt) IsEmpty() bool {
+	if s.value == nil {
+		return true
+	}
+
+	switch v := s.value.(type) {
+	case string:
+		return v == ""
+	case []interface{}:
+		return len(v) == 0
+	default:
+		return false
+	}
+}
+
 type Request struct {
-	Model         string    `json:"model"`
-	Messages      []Message `json:"messages"`
-	System        string    `json:"system,omitempty"`
-	MaxTokens     int       `json:"max_tokens,omitempty"`
-	StopSequences []string  `json:"stop_sequences,omitempty"`
-	Stream        bool      `json:"stream,omitempty"`
-	Temperature   *float64  `json:"temperature,omitempty"`
-	TopP          *float64  `json:"top_p,omitempty"`
-	TopK          int       `json:"top_k,omitempty"`
-	Tools         []Tool    `json:"tools,omitempty"`
-	ToolChoice    any       `json:"tool_choice,omitempty"`
+	Model         string       `json:"model"`
+	Messages      []Message    `json:"messages"`
+	System        SystemPrompt `json:"system,omitempty"`
+	MaxTokens     int          `json:"max_tokens,omitempty"`
+	StopSequences []string     `json:"stop_sequences,omitempty"`
+	Stream        bool         `json:"stream,omitempty"`
+	Temperature   *float64     `json:"temperature,omitempty"`
+	TopP          *float64     `json:"top_p,omitempty"`
+	TopK          int          `json:"top_k,omitempty"`
+	Tools         []Tool       `json:"tools,omitempty"`
+	ToolChoice    any          `json:"tool_choice,omitempty"`
 	//Metadata `json:"metadata,omitempty"`
 }
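A short same-package sketch (hypothetical, not part of the commit) of how the dual-format field behaves end to end — both shapes the Anthropic API allows unmarshal into the same type, String() flattens the array form, and MarshalJSON round-trips whichever shape was stored:

func ExampleSystemPrompt() {
	// Plain string form
	var s SystemPrompt
	_ = json.Unmarshal([]byte(`"You are terse."`), &s)
	fmt.Println(s.String(), s.IsEmpty())

	// Array-of-blocks form; String() concatenates the "text" parts
	var a SystemPrompt
	_ = json.Unmarshal([]byte(`[{"type":"text","text":"You are terse."}]`), &a)
	fmt.Println(a.String())

	// MarshalJSON preserves the original shape
	out, _ := json.Marshal(a)
	fmt.Println(string(out))
}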