mirror of https://github.com/linux-do/new-api.git
synced 2025-11-08 15:13:42 +08:00

merge upstream

Signed-off-by: wozulong <>
@@ -85,6 +85,7 @@ var defaultModelRatio = map[string]float64{
 	"claude-2.1":                 4,     // $8 / 1M tokens
 	"claude-3-haiku-20240307":    0.125, // $0.25 / 1M tokens
 	"claude-3-5-sonnet-20240620": 1.5,   // $3 / 1M tokens
+	"claude-3-5-sonnet-20241022": 1.5,   // $3 / 1M tokens
 	"claude-3-sonnet-20240229":   1.5,   // $3 / 1M tokens
 	"claude-3-opus-20240229":     7.5,   // $15 / 1M tokens
 	"ERNIE-4.0-8K":               0.120 * RMB,
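The ratios above follow the convention visible in the inline comments: a ratio of 1.0 appears to correspond to $2 per 1M tokens (1.5 ↔ $3, 0.125 ↔ $0.25, 7.5 ↔ $15). A standalone sketch of that arithmetic, not code from the repository; the $2-per-ratio-unit base is inferred from those comments, not a documented constant:

```go
package main

import "fmt"

// pricePerMillionTokens is a hypothetical helper, not part of new-api:
// it converts a model ratio into USD per 1M tokens, assuming a ratio of
// 1.0 equals $2 / 1M tokens as the inline comments suggest.
func pricePerMillionTokens(ratio float64) float64 {
	const usdPerRatioUnit = 2.0 // inferred from "1.5 // $3 / 1M tokens" etc.
	return ratio * usdPerRatioUnit
}

func main() {
	fmt.Println(pricePerMillionTokens(1.5))   // 3    -> claude-3-5-sonnet-*
	fmt.Println(pricePerMillionTokens(0.125)) // 0.25 -> claude-3-haiku-20240307
	fmt.Println(pricePerMillionTokens(7.5))   // 15   -> claude-3-opus-20240229
}
```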
@@ -1,13 +1,14 @@
 package aws
 
 var awsModelIDMap = map[string]string{
 	"claude-instant-1.2":         "anthropic.claude-instant-v1",
 	"claude-2.0":                 "anthropic.claude-v2",
 	"claude-2.1":                 "anthropic.claude-v2:1",
 	"claude-3-sonnet-20240229":   "anthropic.claude-3-sonnet-20240229-v1:0",
 	"claude-3-opus-20240229":     "anthropic.claude-3-opus-20240229-v1:0",
 	"claude-3-haiku-20240307":    "anthropic.claude-3-haiku-20240307-v1:0",
 	"claude-3-5-sonnet-20240620": "anthropic.claude-3-5-sonnet-20240620-v1:0",
+	"claude-3-5-sonnet-20241022": "anthropic.claude-3-5-sonnet-20241022-v2:0",
 }
 
 var ChannelName = "aws"
@@ -1,6 +1,8 @@
 package aws
 
-import "one-api/relay/channel/claude"
+import (
+	"one-api/relay/channel/claude"
+)
 
 type AwsClaudeRequest struct {
 	// AnthropicVersion should be "bedrock-2023-05-31"
@@ -12,4 +14,6 @@ type AwsClaudeRequest struct {
 	TopP          float64       `json:"top_p,omitempty"`
 	TopK          int           `json:"top_k,omitempty"`
 	StopSequences []string      `json:"stop_sequences,omitempty"`
+	Tools         []claude.Tool `json:"tools,omitempty"`
+	ToolChoice    any           `json:"tool_choice,omitempty"`
 }
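The two added fields let tool definitions and a tool-choice hint ride along on the Bedrock payload. Below is a minimal, self-contained sketch of how such a payload might serialize; the placeholder tool type and its field names are assumptions for illustration, not the repository's actual claude.Tool or AwsClaudeRequest definitions:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Placeholder types for illustration only; the real definitions live in
// one-api/relay/channel/claude and one-api/relay/channel/aws, and their
// exact fields are not shown in this diff.
type tool struct {
	Name        string         `json:"name"`
	Description string         `json:"description,omitempty"`
	InputSchema map[string]any `json:"input_schema,omitempty"`
}

type awsClaudeRequest struct {
	AnthropicVersion string `json:"anthropic_version"` // "bedrock-2023-05-31"
	MaxTokens        int    `json:"max_tokens,omitempty"`
	Tools            []tool `json:"tools,omitempty"`
	ToolChoice       any    `json:"tool_choice,omitempty"`
}

func main() {
	req := awsClaudeRequest{
		AnthropicVersion: "bedrock-2023-05-31",
		MaxTokens:        1024,
		Tools: []tool{{
			Name:        "get_weather",
			Description: "Look up the current weather for a city",
			InputSchema: map[string]any{"type": "object"},
		}},
		ToolChoice: map[string]any{"type": "auto"},
	}
	out, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(out))
}
```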
@@ -53,7 +53,7 @@ func awsModelID(requestModel string) (string, error) {
 		return awsModelID, nil
 	}
 
-	return "", errors.Errorf("model %s not found", requestModel)
+	return requestModel, nil
 }
 
 func awsHandler(c *gin.Context, info *relaycommon.RelayInfo, requestMode int) (*relaymodel.OpenAIErrorWithStatusCode, *relaymodel.Usage) {
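This hunk changes the lookup fallback: an unknown model name is now passed through as-is instead of failing with "model %s not found". An illustrative sketch of that behavior, not the repository's actual awsModelID implementation:

```go
package sketch

// resolveAwsModelID is an illustrative stand-in for the patched awsModelID:
// a known alias resolves to its Bedrock model ID, and an unknown name is now
// passed through unchanged instead of producing an error.
func resolveAwsModelID(awsModelIDMap map[string]string, requestModel string) string {
	if id, ok := awsModelIDMap[requestModel]; ok {
		return id
	}
	return requestModel // previously: "", errors.Errorf("model %s not found", requestModel)
}
```

With the map from the earlier hunk, a lookup of "claude-3-5-sonnet-20241022" would yield "anthropic.claude-3-5-sonnet-20241022-v2:0", while a name missing from the map is now returned unchanged.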
@@ -9,6 +9,7 @@ var ModelList = []string{
 	"claude-3-opus-20240229",
 	"claude-3-haiku-20240307",
 	"claude-3-5-sonnet-20240620",
+	"claude-3-5-sonnet-20241022",
 }
 
 var ChannelName = "claude"
@@ -76,6 +76,7 @@ func TextHelper(c *gin.Context) *dto.OpenAIErrorWithStatusCode {
 	}
 
 	// map model name
+	isModelMapped := false
 	modelMapping := c.GetString("model_mapping")
 	//isModelMapped := false
 	if modelMapping != "" && modelMapping != "{}" {
@@ -85,6 +86,7 @@ func TextHelper(c *gin.Context) *dto.OpenAIErrorWithStatusCode {
 			return service.OpenAIErrorWrapperLocal(err, "unmarshal_model_mapping_failed", http.StatusInternalServerError)
 		}
 		if modelMap[textRequest.Model] != "" {
+			isModelMapped = true
 			textRequest.Model = modelMap[textRequest.Model]
 			// set upstream model name
 			//isModelMapped = true
@@ -159,15 +161,23 @@ func TextHelper(c *gin.Context) *dto.OpenAIErrorWithStatusCode {
 	adaptor.Init(relayInfo)
 	var requestBody io.Reader
 
-	convertedRequest, err := adaptor.ConvertRequest(c, relayInfo, textRequest)
-	if err != nil {
-		return service.OpenAIErrorWrapperLocal(err, "convert_request_failed", http.StatusInternalServerError)
-	}
-	jsonData, err := json.Marshal(convertedRequest)
-	if err != nil {
-		return service.OpenAIErrorWrapperLocal(err, "json_marshal_failed", http.StatusInternalServerError)
-	}
-	requestBody = bytes.NewBuffer(jsonData)
+	if relayInfo.ChannelType == common.ChannelTypeOpenAI && !isModelMapped {
+		body, err := common.GetRequestBody(c)
+		if err != nil {
+			return service.OpenAIErrorWrapperLocal(err, "get_request_body_failed", http.StatusInternalServerError)
+		}
+		requestBody = bytes.NewBuffer(body)
+	} else {
+		convertedRequest, err := adaptor.ConvertRequest(c, relayInfo, textRequest)
+		if err != nil {
+			return service.OpenAIErrorWrapperLocal(err, "convert_request_failed", http.StatusInternalServerError)
+		}
+		jsonData, err := json.Marshal(convertedRequest)
+		if err != nil {
+			return service.OpenAIErrorWrapperLocal(err, "json_marshal_failed", http.StatusInternalServerError)
+		}
+		requestBody = bytes.NewBuffer(jsonData)
+	}
 
 	statusCodeMappingStr := c.GetString("status_code_mapping")
 	resp, err := adaptor.DoRequest(c, relayInfo, requestBody)
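The last hunk makes TextHelper forward the client's original body untouched when the channel is a plain OpenAI channel and no model mapping rewrote the model name, and only re-converts and re-marshals the request otherwise. A simplified, standalone sketch of that decision; the parameters stand in for relayInfo.ChannelType, common.GetRequestBody, and adaptor.ConvertRequest, which are not shown here:

```go
package sketch

import (
	"bytes"
	"io"
)

// buildRequestBody mirrors the new branch in TextHelper in simplified form:
// the raw client body is forwarded verbatim only for an OpenAI-type channel
// with no model mapping applied; every other case still goes through the
// adaptor conversion and a fresh JSON marshal.
func buildRequestBody(isOpenAIChannel, isModelMapped bool, rawBody []byte, convert func() ([]byte, error)) (io.Reader, error) {
	if isOpenAIChannel && !isModelMapped {
		return bytes.NewBuffer(rawBody), nil
	}
	converted, err := convert()
	if err != nil {
		return nil, err
	}
	return bytes.NewBuffer(converted), nil
}
```

The pass-through path avoids a decode/re-encode round trip for requests that already match the upstream format, which is why the new isModelMapped flag from the earlier hunks is needed: a mapped model name forces re-serialization so the rewritten name reaches the upstream.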