Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-11-16 21:23:44 +08:00)

Compare commits: 52eacbfb90 ... dependabot (1 commit)

| Author | SHA1 | Date |
|---|---|---|
| dependabot | 929c2438b6 | |
@@ -44,4 +44,4 @@ COPY --from=builder2 /build/one-api /
EXPOSE 3000
WORKDIR /data
ENTRYPOINT ["/one-api"]
ENTRYPOINT ["/one-api"]
@@ -385,7 +385,7 @@ graph LR
+ Example: `NODE_TYPE=slave`
9. `CHANNEL_UPDATE_FREQUENCY`: when set, channel balances are refreshed periodically at this interval, in minutes; if unset, no refresh is performed.
+ Example: `CHANNEL_UPDATE_FREQUENCY=1440`
10. `CHANNEL_TEST_FREQUENCY`: when set, channels are checked periodically at this interval, in minutes; if unset, no checking is performed.
10. `CHANNEL_TEST_FREQUENCY`: when set, channels are checked periodically at this interval, in minutes; if unset, no checking is performed.
+Example: `CHANNEL_TEST_FREQUENCY=1440`
11. `POLLING_INTERVAL`: request interval, in seconds, used when batch-updating channel balances and testing channel availability; defaults to no interval.
+ Example: `POLLING_INTERVAL=5`
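As a quick orientation for these variables, here is a minimal sketch (not part of this diff) of how they could be read with the `env` helpers visible in the config hunk below; the `common/env` import path and the Go variable names are assumptions for illustration only.

```go
package config

// Sketch only: the env package path is assumed from the module name; the
// env.Int/env.Bool/env.String helpers themselves appear in the config hunk
// that follows this README section.
import "github.com/songquanpeng/one-api/common/env"

var (
	ChannelUpdateFrequency = env.Int("CHANNEL_UPDATE_FREQUENCY", 0) // minutes; unset/0 disables balance refresh
	ChannelTestFrequency   = env.Int("CHANNEL_TEST_FREQUENCY", 0)   // minutes; unset/0 disables channel checks
	PollingInterval        = env.Int("POLLING_INTERVAL", 0)         // seconds between batch requests; 0 = no interval
)
```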
@@ -164,6 +164,3 @@ var UserContentRequestTimeout = env.Int("USER_CONTENT_REQUEST_TIMEOUT", 30)
var EnforceIncludeUsage = env.Bool("ENFORCE_INCLUDE_USAGE", false)
var TestPrompt = env.String("TEST_PROMPT", "Output only your specific model name with no additional text.")

// OpenrouterProviderSort is used to determine the order of the providers in the openrouter
var OpenrouterProviderSort = env.String("OPENROUTER_PROVIDER_SORT", "")
@@ -1,9 +1,6 @@
package conv

func AsString(v any) string {
	if str, ok := v.(string); ok {
		return str
	}

	return ""
	str, _ := v.(string)
	return str
}
go.mod (4 lines changed)
@@ -1,6 +1,6 @@
module github.com/songquanpeng/one-api

go 1.20
go 1.23.0

require (
	cloud.google.com/go/iam v1.1.10
@@ -98,7 +98,7 @@ require (
	go.opentelemetry.io/otel/trace v1.24.0 // indirect
	golang.org/x/arch v0.8.0 // indirect
	golang.org/x/net v0.26.0 // indirect
	golang.org/x/oauth2 v0.21.0 // indirect
	golang.org/x/oauth2 v0.27.0 // indirect
	golang.org/x/sys v0.28.0 // indirect
	golang.org/x/text v0.21.0 // indirect
	golang.org/x/time v0.5.0 // indirect
go.sum (15 lines changed)
@@ -73,6 +73,7 @@ github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ4
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
@@ -111,8 +112,10 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -157,6 +160,7 @@ github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuV
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
@@ -171,8 +175,11 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
@@ -185,6 +192,7 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/smarty/assertions v1.15.0 h1:cR//PqUBUiQRakZWqBiFFQ9wb8emQGDb0HeGdqGByCY=
github.com/smarty/assertions v1.15.0/go.mod h1:yABtdzeQs6l1brC900WlRNwj6ZR55d7B+E8C6HtKdec=
github.com/smartystreets/goconvey v1.8.1 h1:qGjIddxOk4grTu9JPOU31tVfq3cNdBlNa5sSznIX1xY=
@@ -239,8 +247,8 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ=
golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs=
golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -297,8 +305,11 @@ google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6h
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -36,8 +36,8 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me

	// https://x.com/alexalbert__/status/1812921642143900036
	// claude-3-5-sonnet can support 8k context
	if strings.HasPrefix(meta.ActualModelName, "claude-3-7-sonnet") {
		req.Header.Set("anthropic-beta", "output-128k-2025-02-19")
	if strings.HasPrefix(meta.ActualModelName, "claude-3-5-sonnet") {
		req.Header.Set("anthropic-beta", "max-tokens-3-5-sonnet-2024-07-15")
	}

	return nil
@@ -47,7 +47,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return ConvertRequest(c, *request)
	return ConvertRequest(*request), nil
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
@@ -3,13 +3,11 @@ package anthropic
var ModelList = []string{
	"claude-instant-1.2", "claude-2.0", "claude-2.1",
	"claude-3-haiku-20240307",
	"claude-3-5-haiku-latest",
	"claude-3-5-haiku-20241022",
	"claude-3-5-haiku-latest",
	"claude-3-sonnet-20240229",
	"claude-3-opus-20240229",
	"claude-3-5-sonnet-latest",
	"claude-3-5-sonnet-20240620",
	"claude-3-5-sonnet-20241022",
	"claude-3-7-sonnet-latest",
	"claude-3-7-sonnet-20250219",
	"claude-3-5-sonnet-latest",
}
@@ -2,21 +2,18 @@ package anthropic

import (
	"bufio"
	"context"
	"encoding/json"
	"fmt"
	"github.com/songquanpeng/one-api/common/render"
	"io"
	"math"
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/pkg/errors"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/image"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/common/render"
	"github.com/songquanpeng/one-api/relay/adaptor/openai"
	"github.com/songquanpeng/one-api/relay/model"
)
@@ -39,16 +36,7 @@ func stopReasonClaude2OpenAI(reason *string) string {
	}
}

// isModelSupportThinking is used to check if the model supports extended thinking
func isModelSupportThinking(model string) bool {
	if strings.Contains(model, "claude-3-7-sonnet") {
		return true
	}

	return false
}

func ConvertRequest(c *gin.Context, textRequest model.GeneralOpenAIRequest) (*Request, error) {
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
	claudeTools := make([]Tool, 0, len(textRequest.Tools))

	for _, tool := range textRequest.Tools {
@@ -73,27 +61,7 @@ func ConvertRequest(c *gin.Context, textRequest model.GeneralOpenAIRequest) (*Re
		TopK:     textRequest.TopK,
		Stream:   textRequest.Stream,
		Tools:    claudeTools,
		Thinking: textRequest.Thinking,
	}

	if isModelSupportThinking(textRequest.Model) &&
		c.Request.URL.Query().Has("thinking") && claudeRequest.Thinking == nil {
		claudeRequest.Thinking = &model.Thinking{
			Type:         "enabled",
			BudgetTokens: int(math.Min(1024, float64(claudeRequest.MaxTokens/2))),
		}
	}

	if isModelSupportThinking(textRequest.Model) &&
		claudeRequest.Thinking != nil {
		if claudeRequest.MaxTokens <= 1024 {
			return nil, errors.New("max_tokens must be greater than 1024 when using extended thinking")
		}

		// top_p must be nil when using extended thinking
		claudeRequest.TopP = nil
	}

	if len(claudeTools) > 0 {
		claudeToolChoice := struct {
			Type string `json:"type"`
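To make the removed extended-thinking guard above easier to follow, here is a self-contained sketch of the budget clamp it applied, with illustrative max_tokens values; the authoritative logic is the hunk above.

```go
package main

import (
	"fmt"
	"math"
)

// clampThinkingBudget mirrors the removed branch above: the thinking budget is
// capped at 1024 tokens or half of max_tokens, whichever is smaller, and
// max_tokens itself must exceed 1024 for extended thinking to be accepted.
func clampThinkingBudget(maxTokens int) (budget int, ok bool) {
	if maxTokens <= 1024 {
		return 0, false // the original code rejects this with an error
	}
	return int(math.Min(1024, float64(maxTokens/2))), true
}

func main() {
	for _, mt := range []int{800, 1500, 8192} { // illustrative values
		b, ok := clampThinkingBudget(mt)
		fmt.Printf("max_tokens=%d -> budget=%d ok=%v\n", mt, b, ok)
	}
}
```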
@@ -174,14 +142,13 @@ func ConvertRequest(c *gin.Context, textRequest model.GeneralOpenAIRequest) (*Re
		claudeMessage.Content = contents
		claudeRequest.Messages = append(claudeRequest.Messages, claudeMessage)
	}
	return &claudeRequest, nil
	return &claudeRequest
}

// https://docs.anthropic.com/claude/reference/messages-streaming
func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
	var response *Response
	var responseText string
	var reasoningText string
	var stopReason string
	tools := make([]model.Tool, 0)

@@ -191,10 +158,6 @@ func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCo
	case "content_block_start":
		if claudeResponse.ContentBlock != nil {
			responseText = claudeResponse.ContentBlock.Text
			if claudeResponse.ContentBlock.Thinking != nil {
				reasoningText = *claudeResponse.ContentBlock.Thinking
			}

			if claudeResponse.ContentBlock.Type == "tool_use" {
				tools = append(tools, model.Tool{
					Id: claudeResponse.ContentBlock.Id,
@@ -209,10 +172,6 @@ func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCo
	case "content_block_delta":
		if claudeResponse.Delta != nil {
			responseText = claudeResponse.Delta.Text
			if claudeResponse.Delta.Thinking != nil {
				reasoningText = *claudeResponse.Delta.Thinking
			}

			if claudeResponse.Delta.Type == "input_json_delta" {
				tools = append(tools, model.Tool{
					Function: model.Function{
@@ -230,20 +189,9 @@ func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCo
		if claudeResponse.Delta != nil && claudeResponse.Delta.StopReason != nil {
			stopReason = *claudeResponse.Delta.StopReason
		}
	case "thinking_delta":
		if claudeResponse.Delta != nil && claudeResponse.Delta.Thinking != nil {
			reasoningText = *claudeResponse.Delta.Thinking
		}
	case "ping",
		"message_stop",
		"content_block_stop":
	default:
		logger.SysErrorf("unknown stream response type %q", claudeResponse.Type)
	}

	var choice openai.ChatCompletionsStreamResponseChoice
	choice.Delta.Content = responseText
	choice.Delta.Reasoning = &reasoningText
	if len(tools) > 0 {
		choice.Delta.Content = nil // compatible with other OpenAI derivative applications, like LobeOpenAICompatibleFactory ...
		choice.Delta.ToolCalls = tools
@@ -261,23 +209,11 @@ func StreamResponseClaude2OpenAI(claudeResponse *StreamResponse) (*openai.ChatCo

func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
	var responseText string
	var reasoningText string

	if len(claudeResponse.Content) > 0 {
		responseText = claudeResponse.Content[0].Text
	}
	tools := make([]model.Tool, 0)
	for _, v := range claudeResponse.Content {
		switch v.Type {
		case "thinking":
			if v.Thinking != nil {
				reasoningText += *v.Thinking
			} else {
				logger.Errorf(context.Background(), "thinking is nil in response")
			}
		case "text":
			responseText += v.Text
		default:
			logger.Warnf(context.Background(), "unknown response type %q", v.Type)
		}

		if v.Type == "tool_use" {
			args, _ := json.Marshal(v.Input)
			tools = append(tools, model.Tool{
@@ -290,13 +226,11 @@ func ResponseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
			})
		}
	}

	choice := openai.TextResponseChoice{
		Index: 0,
		Message: model.Message{
			Role:      "assistant",
			Content:   responseText,
			Reasoning: &reasoningText,
			Name:      nil,
			ToolCalls: tools,
		},
@@ -343,8 +277,6 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
		data = strings.TrimPrefix(data, "data:")
		data = strings.TrimSpace(data)

		logger.Debugf(c.Request.Context(), "stream <- %q\n", data)

		var claudeResponse StreamResponse
		err := json.Unmarshal([]byte(data), &claudeResponse)
		if err != nil {
@@ -412,9 +344,6 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
	if err != nil {
		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}

	logger.Debugf(c.Request.Context(), "response <- %s\n", string(responseBody))

	var claudeResponse Response
	err = json.Unmarshal(responseBody, &claudeResponse)
	if err != nil {
@@ -1,7 +1,5 @@
package anthropic

import "github.com/songquanpeng/one-api/relay/model"

// https://docs.anthropic.com/claude/reference/messages_post

type Metadata struct {
@@ -24,9 +22,6 @@ type Content struct {
	Input     any    `json:"input,omitempty"`
	Content   string `json:"content,omitempty"`
	ToolUseId string `json:"tool_use_id,omitempty"`
	// https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#implementing-extended-thinking
	Thinking  *string `json:"thinking,omitempty"`
	Signature *string `json:"signature,omitempty"`
}

type Message struct {
@@ -59,7 +54,6 @@ type Request struct {
	Tools      []Tool `json:"tools,omitempty"`
	ToolChoice any    `json:"tool_choice,omitempty"`
	//Metadata `json:"metadata,omitempty"`
	Thinking *model.Thinking `json:"thinking,omitempty"`
}

type Usage struct {
@@ -90,8 +84,6 @@ type Delta struct {
	PartialJson  string  `json:"partial_json,omitempty"`
	StopReason   *string `json:"stop_reason"`
	StopSequence *string `json:"stop_sequence"`
	Thinking     *string `json:"thinking,omitempty"`
	Signature    *string `json:"signature,omitempty"`
}

type StreamResponse struct {
@@ -21,11 +21,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
		return nil, errors.New("request is nil")
	}

	claudeReq, err := anthropic.ConvertRequest(c, *request)
	if err != nil {
		return nil, errors.Wrap(err, "convert request")
	}

	claudeReq := anthropic.ConvertRequest(*request)
	c.Set(ctxkey.RequestModel, request.Model)
	c.Set(ctxkey.ConvertedRequest, claudeReq)
	return claudeReq, nil
@@ -36,8 +36,6 @@ var AwsModelIDMap = map[string]string{
	"claude-3-5-sonnet-20241022": "anthropic.claude-3-5-sonnet-20241022-v2:0",
	"claude-3-5-sonnet-latest":   "anthropic.claude-3-5-sonnet-20241022-v2:0",
	"claude-3-5-haiku-20241022":  "anthropic.claude-3-5-haiku-20241022-v1:0",
	"claude-3-7-sonnet-latest":   "anthropic.claude-3-7-sonnet-20250219-v1:0",
	"claude-3-7-sonnet-20250219": "anthropic.claude-3-7-sonnet-20250219-v1:0",
}

func awsModelID(requestModel string) (string, error) {
@@ -49,14 +47,13 @@ func awsModelID(requestModel string) (string, error) {
}

func Handler(c *gin.Context, awsCli *bedrockruntime.Client, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
	awsModelID, err := awsModelID(c.GetString(ctxkey.RequestModel))
	awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
	if err != nil {
		return utils.WrapErr(errors.Wrap(err, "awsModelID")), nil
	}

	awsModelID = utils.ConvertModelID2CrossRegionProfile(awsModelID, awsCli.Options().Region)
	awsReq := &bedrockruntime.InvokeModelInput{
		ModelId:     aws.String(awsModelID),
		ModelId:     aws.String(awsModelId),
		Accept:      aws.String("application/json"),
		ContentType: aws.String("application/json"),
	}
@@ -1,9 +1,6 @@
package aws

import (
	"github.com/songquanpeng/one-api/relay/adaptor/anthropic"
	"github.com/songquanpeng/one-api/relay/model"
)
import "github.com/songquanpeng/one-api/relay/adaptor/anthropic"

// Request is the request to AWS Claude
//
@@ -20,5 +17,4 @@ type Request struct {
	StopSequences []string         `json:"stop_sequences,omitempty"`
	Tools         []anthropic.Tool `json:"tools,omitempty"`
	ToolChoice    any              `json:"tool_choice,omitempty"`
	Thinking      *model.Thinking  `json:"thinking,omitempty"`
}
@@ -70,14 +70,13 @@ func ConvertRequest(textRequest relaymodel.GeneralOpenAIRequest) *Request {
}

func Handler(c *gin.Context, awsCli *bedrockruntime.Client, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
	awsModelID, err := awsModelID(c.GetString(ctxkey.RequestModel))
	awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
	if err != nil {
		return utils.WrapErr(errors.Wrap(err, "awsModelID")), nil
	}

	awsModelID = utils.ConvertModelID2CrossRegionProfile(awsModelID, awsCli.Options().Region)
	awsReq := &bedrockruntime.InvokeModelInput{
		ModelId:     aws.String(awsModelID),
		ModelId:     aws.String(awsModelId),
		Accept:      aws.String("application/json"),
		ContentType: aws.String("application/json"),
	}
@@ -1,75 +0,0 @@
package utils

import (
	"context"
	"slices"
	"strings"

	"github.com/songquanpeng/one-api/common/logger"
)

// CrossRegionInferences is a list of model IDs that support cross-region inference.
//
// https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html
//
// document.querySelectorAll('pre.programlisting code').forEach((e) => {console.log(e.innerHTML)})
var CrossRegionInferences = []string{
	"us.amazon.nova-lite-v1:0",
	"us.amazon.nova-micro-v1:0",
	"us.amazon.nova-pro-v1:0",
	"us.anthropic.claude-3-5-haiku-20241022-v1:0",
	"us.anthropic.claude-3-5-sonnet-20240620-v1:0",
	"us.anthropic.claude-3-5-sonnet-20241022-v2:0",
	"us.anthropic.claude-3-7-sonnet-20250219-v1:0",
	"us.anthropic.claude-3-haiku-20240307-v1:0",
	"us.anthropic.claude-3-opus-20240229-v1:0",
	"us.anthropic.claude-3-sonnet-20240229-v1:0",
	"us.meta.llama3-1-405b-instruct-v1:0",
	"us.meta.llama3-1-70b-instruct-v1:0",
	"us.meta.llama3-1-8b-instruct-v1:0",
	"us.meta.llama3-2-11b-instruct-v1:0",
	"us.meta.llama3-2-1b-instruct-v1:0",
	"us.meta.llama3-2-3b-instruct-v1:0",
	"us.meta.llama3-2-90b-instruct-v1:0",
	"us.meta.llama3-3-70b-instruct-v1:0",
	"us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0",
	"us-gov.anthropic.claude-3-haiku-20240307-v1:0",
	"eu.amazon.nova-lite-v1:0",
	"eu.amazon.nova-micro-v1:0",
	"eu.amazon.nova-pro-v1:0",
	"eu.anthropic.claude-3-5-sonnet-20240620-v1:0",
	"eu.anthropic.claude-3-haiku-20240307-v1:0",
	"eu.anthropic.claude-3-sonnet-20240229-v1:0",
	"eu.meta.llama3-2-1b-instruct-v1:0",
	"eu.meta.llama3-2-3b-instruct-v1:0",
	"apac.amazon.nova-lite-v1:0",
	"apac.amazon.nova-micro-v1:0",
	"apac.amazon.nova-pro-v1:0",
	"apac.anthropic.claude-3-5-sonnet-20240620-v1:0",
	"apac.anthropic.claude-3-5-sonnet-20241022-v2:0",
	"apac.anthropic.claude-3-haiku-20240307-v1:0",
	"apac.anthropic.claude-3-sonnet-20240229-v1:0",
}

// ConvertModelID2CrossRegionProfile converts the model ID to a cross-region profile ID.
func ConvertModelID2CrossRegionProfile(model, region string) string {
	var regionPrefix string
	switch prefix := strings.Split(region, "-")[0]; prefix {
	case "us", "eu":
		regionPrefix = prefix
	case "ap":
		regionPrefix = "apac"
	default:
		// not supported, return original model
		return model
	}

	newModelID := regionPrefix + "." + model
	if slices.Contains(CrossRegionInferences, newModelID) {
		logger.Debugf(context.TODO(), "convert model %s to cross-region profile %s", model, newModelID)
		return newModelID
	}

	// not found, return original model
	return model
}
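The deleted helper above prefixes a Bedrock model ID with its region group and keeps the result only if it is a known inference profile. Below is a self-contained sketch of that mapping with a trimmed-down profile list; the package layout and list contents here are illustrative, not the repository's.

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// Trimmed, illustrative subset of the CrossRegionInferences list shown above.
var crossRegionProfiles = []string{
	"us.anthropic.claude-3-5-haiku-20241022-v1:0",
	"apac.anthropic.claude-3-5-sonnet-20240620-v1:0",
}

// toCrossRegionProfile mirrors the deleted ConvertModelID2CrossRegionProfile:
// derive a region prefix, build "<prefix>.<model>", and keep it only if it is
// a known profile; otherwise fall back to the original model ID.
func toCrossRegionProfile(model, region string) string {
	var prefix string
	switch p := strings.Split(region, "-")[0]; p {
	case "us", "eu":
		prefix = p
	case "ap":
		prefix = "apac"
	default:
		return model // unsupported region prefix: keep the original ID
	}
	if id := prefix + "." + model; slices.Contains(crossRegionProfiles, id) {
		return id
	}
	return model // no matching profile: keep the original ID
}

func main() {
	fmt.Println(toCrossRegionProfile("anthropic.claude-3-5-haiku-20241022-v1:0", "us-east-1"))
	// us.anthropic.claude-3-5-haiku-20241022-v1:0
	fmt.Println(toCrossRegionProfile("anthropic.claude-3-5-haiku-20241022-v1:0", "sa-east-1"))
	// anthropic.claude-3-5-haiku-20241022-v1:0 (unchanged)
}
```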
@@ -9,8 +9,6 @@ import (

	"github.com/gin-gonic/gin"

	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/relay/adaptor"
	"github.com/songquanpeng/one-api/relay/adaptor/alibailian"
	"github.com/songquanpeng/one-api/relay/adaptor/baiduv2"
@@ -18,7 +16,6 @@ import (
	"github.com/songquanpeng/one-api/relay/adaptor/geminiv2"
	"github.com/songquanpeng/one-api/relay/adaptor/minimax"
	"github.com/songquanpeng/one-api/relay/adaptor/novita"
	"github.com/songquanpeng/one-api/relay/adaptor/openrouter"
	"github.com/songquanpeng/one-api/relay/channeltype"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/model"
@@ -88,29 +85,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
	if request == nil {
		return nil, errors.New("request is nil")
	}

	meta := meta.GetByContext(c)
	switch meta.ChannelType {
	case channeltype.OpenRouter:
		includeReasoning := true
		request.IncludeReasoning = &includeReasoning
		if request.Provider == nil || request.Provider.Sort == "" &&
			config.OpenrouterProviderSort != "" {
			if request.Provider == nil {
				request.Provider = &openrouter.RequestProvider{}
			}

			request.Provider.Sort = config.OpenrouterProviderSort
		}
	default:
	}

	if request.Stream && !config.EnforceIncludeUsage {
		logger.Warn(c.Request.Context(),
			"please set ENFORCE_INCLUDE_USAGE=true to ensure accurate billing in stream mode")
	}

	if config.EnforceIncludeUsage && request.Stream {
	if request.Stream {
		// always return usage in stream mode
		if request.StreamOptions == nil {
			request.StreamOptions = &model.StreamOptions{}
@@ -8,11 +8,12 @@ import (
	"net/http"
	"strings"

	"github.com/songquanpeng/one-api/common/render"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/conv"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/common/render"
	"github.com/songquanpeng/one-api/relay/model"
	"github.com/songquanpeng/one-api/relay/relaymode"
)
@@ -25,7 +26,6 @@ const (

func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
	responseText := ""
	reasoningText := ""
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(bufio.ScanLines)
	var usage *model.Usage
@@ -61,13 +61,6 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
			}
			render.StringData(c, data)
			for _, choice := range streamResponse.Choices {
				if choice.Delta.Reasoning != nil {
					reasoningText += *choice.Delta.Reasoning
				}
				if choice.Delta.ReasoningContent != nil {
					reasoningText += *choice.Delta.ReasoningContent
				}

				responseText += conv.AsString(choice.Delta.Content)
			}
			if streamResponse.Usage != nil {
@@ -100,7 +93,7 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
		return ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), "", nil
	}

	return nil, reasoningText + responseText, usage
	return nil, responseText, usage
}

func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
@@ -143,17 +136,10 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
		return ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}

	if textResponse.Usage.TotalTokens == 0 ||
		(textResponse.Usage.PromptTokens == 0 && textResponse.Usage.CompletionTokens == 0) {
	if textResponse.Usage.TotalTokens == 0 || (textResponse.Usage.PromptTokens == 0 && textResponse.Usage.CompletionTokens == 0) {
		completionTokens := 0
		for _, choice := range textResponse.Choices {
			completionTokens += CountTokenText(choice.Message.StringContent(), modelName)
			if choice.Message.Reasoning != nil {
				completionTokens += CountToken(*choice.Message.Reasoning)
			}
			if choice.ReasoningContent != nil {
				completionTokens += CountToken(*choice.ReasoningContent)
			}
		}
		textResponse.Usage = model.Usage{
			PromptTokens: promptTokens,
@@ -161,6 +147,5 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
			TotalTokens: promptTokens + completionTokens,
		}
	}

	return nil, &textResponse.Usage
}
@@ -1,22 +0,0 @@
package openrouter

// RequestProvider customize how your requests are routed using the provider object
// in the request body for Chat Completions and Completions.
//
// https://openrouter.ai/docs/features/provider-routing
type RequestProvider struct {
	// Order is list of provider names to try in order (e.g. ["Anthropic", "OpenAI"]). Default: empty
	Order []string `json:"order,omitempty"`
	// AllowFallbacks is whether to allow backup providers when the primary is unavailable. Default: true
	AllowFallbacks bool `json:"allow_fallbacks,omitempty"`
	// RequireParameters is only use providers that support all parameters in your request. Default: false
	RequireParameters bool `json:"require_parameters,omitempty"`
	// DataCollection is control whether to use providers that may store data ("allow" or "deny"). Default: "allow"
	DataCollection string `json:"data_collection,omitempty" binding:"omitempty,oneof=allow deny"`
	// Ignore is list of provider names to skip for this request. Default: empty
	Ignore []string `json:"ignore,omitempty"`
	// Quantizations is list of quantization levels to filter by (e.g. ["int4", "int8"]). Default: empty
	Quantizations []string `json:"quantizations,omitempty"`
	// Sort is sort providers by price or throughput (e.g. "price" or "throughput"). Default: empty
	Sort string `json:"sort,omitempty" binding:"omitempty,oneof=price throughput latency"`
}
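Since the removed struct above documents OpenRouter's provider-routing object, here is a minimal sketch of what a populated provider field serializes to; it uses a local mirror of the struct (same JSON tags) so it stays runnable even though this compare deletes the package.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of the deleted RequestProvider struct above, kept to a few
// fields; used only to show the serialized provider routing object.
type requestProvider struct {
	Order          []string `json:"order,omitempty"`
	AllowFallbacks bool     `json:"allow_fallbacks,omitempty"`
	Sort           string   `json:"sort,omitempty"`
}

func main() {
	b, _ := json.Marshal(requestProvider{
		Order:          []string{"Anthropic", "OpenAI"},
		AllowFallbacks: true,
		Sort:           "throughput",
	})
	fmt.Println(string(b))
	// {"order":["Anthropic","OpenAI"],"allow_fallbacks":true,"sort":"throughput"}
}
```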
@@ -19,7 +19,6 @@ var ModelList = []string{
	"claude-3-5-sonnet@20240620",
	"claude-3-5-sonnet-v2@20241022",
	"claude-3-5-haiku@20241022",
	"claude-3-7-sonnet@20250219",
}

const anthropicVersion = "vertex-2023-10-16"
@@ -32,11 +31,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
		return nil, errors.New("request is nil")
	}

	claudeReq, err := anthropic.ConvertRequest(c, *request)
	if err != nil {
		return nil, errors.Wrap(err, "convert request")
	}

	claudeReq := anthropic.ConvertRequest(*request)
	req := Request{
		AnthropicVersion: anthropicVersion,
		// Model: claudeReq.Model,
@@ -98,8 +98,6 @@ var ModelRatio = map[string]float64{
	"claude-3-5-sonnet-20240620": 3.0 / 1000 * USD,
	"claude-3-5-sonnet-20241022": 3.0 / 1000 * USD,
	"claude-3-5-sonnet-latest":   3.0 / 1000 * USD,
	"claude-3-7-sonnet-20250219": 3.0 / 1000 * USD,
	"claude-3-7-sonnet-latest":   3.0 / 1000 * USD,
	"claude-3-opus-20240229":     15.0 / 1000 * USD,
	// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/hlrk4akp7
	"ERNIE-4.0-8K": 0.120 * RMB,
@@ -102,9 +102,6 @@ func postConsumeQuota(ctx context.Context, usage *relaymodel.Usage, meta *meta.M
	var quota int64
	completionRatio := billingratio.GetCompletionRatio(textRequest.Model, meta.ChannelType)
	promptTokens := usage.PromptTokens
	// It appears that DeepSeek's official service automatically merges ReasoningTokens into CompletionTokens,
	// but the behavior of third-party providers may differ, so for now we do not add them manually.
	// completionTokens := usage.CompletionTokens + usage.CompletionTokensDetails.ReasoningTokens
	completionTokens := usage.CompletionTokens
	quota = int64(math.Ceil((float64(promptTokens) + float64(completionTokens)*completionRatio) * ratio))
	if ratio != 0 && quota <= 0 {
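A worked example of the quota formula in the hunk above, using purely illustrative token counts and ratios; real values come from the billing ratio tables.

```go
package main

import (
	"fmt"
	"math"
)

// Same arithmetic as the postConsumeQuota line above, with made-up inputs.
func main() {
	promptTokens := 1000
	completionTokens := 200
	completionRatio := 4.0 // completion tokens weighted 4x prompt tokens (illustrative)
	ratio := 7.5           // combined model/group ratio applied per token (illustrative)

	quota := int64(math.Ceil((float64(promptTokens) + float64(completionTokens)*completionRatio) * ratio))
	fmt.Println(quota) // ceil((1000 + 200*4) * 7.5) = 13500
}
```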
@@ -1,7 +1,5 @@
package model

import "github.com/songquanpeng/one-api/relay/adaptor/openrouter"

type ResponseFormat struct {
	Type       string      `json:"type,omitempty"`
	JsonSchema *JSONSchema `json:"json_schema,omitempty"`
@@ -68,21 +66,6 @@ type GeneralOpenAIRequest struct {
	// Others
	Instruction string `json:"instruction,omitempty"`
	NumCtx      int    `json:"num_ctx,omitempty"`
	// -------------------------------------
	// Openrouter
	// -------------------------------------
	Provider         *openrouter.RequestProvider `json:"provider,omitempty"`
	IncludeReasoning *bool                       `json:"include_reasoning,omitempty"`
	// -------------------------------------
	// Anthropic
	// -------------------------------------
	Thinking *Thinking `json:"thinking,omitempty"`
}

// https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#implementing-extended-thinking
type Thinking struct {
	Type         string `json:"type"`
	BudgetTokens int    `json:"budget_tokens" binding:"omitempty,min=1024"`
}

func (r GeneralOpenAIRequest) ParseInput() []string {
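For reference, a small sketch of the wire format the removed Thinking field carried, reconstructed from the struct's JSON tags above with illustrative values.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the removed Thinking struct's JSON tags; values are illustrative.
type thinking struct {
	Type         string `json:"type"`
	BudgetTokens int    `json:"budget_tokens"`
}

func main() {
	b, _ := json.Marshal(thinking{Type: "enabled", BudgetTokens: 2048})
	fmt.Println(string(b)) // {"type":"enabled","budget_tokens":2048}
}
```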
@@ -1,35 +1,12 @@
package model

type Message struct {
	Role string `json:"role,omitempty"`
	// Content is a string or a list of objects
	Content    any           `json:"content,omitempty"`
	Name       *string       `json:"name,omitempty"`
	ToolCalls  []Tool        `json:"tool_calls,omitempty"`
	ToolCallId string        `json:"tool_call_id,omitempty"`
	Audio      *messageAudio `json:"audio,omitempty"`
	// -------------------------------------
	// DeepSeek-specific fields
	// https://api-docs.deepseek.com/api/create-chat-completion
	// -------------------------------------
	// Prefix forces the model to begin its answer with the supplied prefix in the assistant message.
	// To enable this feature, set base_url to "https://api.deepseek.com/beta".
	Prefix *bool `json:"prefix,omitempty"` // ReasoningContent is Used for the deepseek-reasoner model in the Chat
	// Prefix Completion feature as the input for the CoT in the last assistant message.
	// When using this feature, the prefix parameter must be set to true.
	ReasoningContent *string `json:"reasoning_content,omitempty"`
	// -------------------------------------
	// Openrouter
	// -------------------------------------
	Reasoning *string `json:"reasoning,omitempty"`
	Refusal   *bool   `json:"refusal,omitempty"`
}

type messageAudio struct {
	Id         string `json:"id"`
	Data       string `json:"data,omitempty"`
	ExpiredAt  int    `json:"expired_at,omitempty"`
	Transcript string `json:"transcript,omitempty"`
	Role             string  `json:"role,omitempty"`
	Content          any     `json:"content,omitempty"`
	ReasoningContent any     `json:"reasoning_content,omitempty"`
	Name             *string `json:"name,omitempty"`
	ToolCalls        []Tool  `json:"tool_calls,omitempty"`
	ToolCallId       string  `json:"tool_call_id,omitempty"`
}

func (m Message) IsStringContent() bool {
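The DeepSeek comments above describe the prefix-completion fields. Below is a hedged sketch of how a request's last assistant message might use them; the message shape and values are illustrative, and the beta base_url requirement is taken from the comment above.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative message shape carrying the DeepSeek prefix-completion fields
// described above; assumes the beta base_url (https://api.deepseek.com/beta).
type message struct {
	Role             string  `json:"role"`
	Content          string  `json:"content"`
	Prefix           *bool   `json:"prefix,omitempty"`
	ReasoningContent *string `json:"reasoning_content,omitempty"`
}

func main() {
	prefix := true
	cot := "Let me check the schema first."
	msgs := []message{
		{Role: "user", Content: "Write the SQL for the top 10 users."},
		// Last assistant message: the reply continues from this prefix, with the
		// prior chain of thought supplied as reasoning_content (prefix must be true).
		{Role: "assistant", Content: "SELECT", Prefix: &prefix, ReasoningContent: &cot},
	}
	b, _ := json.Marshal(msgs)
	fmt.Println(string(b))
}
```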
@@ -4,12 +4,14 @@ type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
	// PromptTokensDetails may be empty for some models
	PromptTokensDetails *usagePromptTokensDetails `gorm:"-" json:"prompt_tokens_details,omitempty"`
	// CompletionTokensDetails may be empty for some models
	CompletionTokensDetails *usageCompletionTokensDetails `gorm:"-" json:"completion_tokens_details,omitempty"`
	ServiceTier       string `gorm:"-" json:"service_tier,omitempty"`
	SystemFingerprint string `gorm:"-" json:"system_fingerprint,omitempty"`

	CompletionTokensDetails *CompletionTokensDetails `json:"completion_tokens_details,omitempty"`
}

type CompletionTokensDetails struct {
	ReasoningTokens          int `json:"reasoning_tokens"`
	AcceptedPredictionTokens int `json:"accepted_prediction_tokens"`
	RejectedPredictionTokens int `json:"rejected_prediction_tokens"`
}

type Error struct {
@@ -23,20 +25,3 @@ type ErrorWithStatusCode struct {
	Error
	StatusCode int `json:"status_code"`
}

type usagePromptTokensDetails struct {
	CachedTokens int `json:"cached_tokens"`
	AudioTokens  int `json:"audio_tokens"`
	// TextTokens could be zero for pure text chats
	TextTokens  int `json:"text_tokens"`
	ImageTokens int `json:"image_tokens"`
}

type usageCompletionTokensDetails struct {
	ReasoningTokens          int `json:"reasoning_tokens"`
	AudioTokens              int `json:"audio_tokens"`
	AcceptedPredictionTokens int `json:"accepted_prediction_tokens"`
	RejectedPredictionTokens int `json:"rejected_prediction_tokens"`
	// TextTokens could be zero for pure text chats
	TextTokens int `json:"text_tokens"`
}