Repository: https://github.com/songquanpeng/one-api.git
Commit 7d3e75a0b5: Merge branch 'main' into patch/gpt-4o-audio
@@ -7,19 +7,25 @@ import (
     "log"
     "os"
     "path/filepath"
+    "runtime"
+    "strings"
     "sync"
     "time"
 
     "github.com/gin-gonic/gin"
+
     "github.com/songquanpeng/one-api/common/config"
     "github.com/songquanpeng/one-api/common/helper"
 )
 
+type loggerLevel string
+
 const (
-    loggerDEBUG = "DEBUG"
-    loggerINFO  = "INFO"
-    loggerWarn  = "WARN"
-    loggerError = "ERR"
+    loggerDEBUG loggerLevel = "DEBUG"
+    loggerINFO  loggerLevel = "INFO"
+    loggerWarn  loggerLevel = "WARN"
+    loggerError loggerLevel = "ERROR"
+    loggerFatal loggerLevel = "FATAL"
 )
 
 var setupLogOnce sync.Once
@@ -44,27 +50,26 @@ func SetupLogger() {
 }
 
 func SysLog(s string) {
-    t := time.Now()
-    _, _ = fmt.Fprintf(gin.DefaultWriter, "[SYS] %v | %s \n", t.Format("2006/01/02 - 15:04:05"), s)
+    logHelper(nil, loggerINFO, s)
 }
 
 func SysLogf(format string, a ...any) {
-    SysLog(fmt.Sprintf(format, a...))
+    logHelper(nil, loggerINFO, fmt.Sprintf(format, a...))
 }
 
 func SysError(s string) {
-    t := time.Now()
-    _, _ = fmt.Fprintf(gin.DefaultErrorWriter, "[SYS] %v | %s \n", t.Format("2006/01/02 - 15:04:05"), s)
+    logHelper(nil, loggerError, s)
 }
 
 func SysErrorf(format string, a ...any) {
-    SysError(fmt.Sprintf(format, a...))
+    logHelper(nil, loggerError, fmt.Sprintf(format, a...))
 }
 
 func Debug(ctx context.Context, msg string) {
-    if config.DebugEnabled {
-        logHelper(ctx, loggerDEBUG, msg)
+    if !config.DebugEnabled {
+        return
     }
+    logHelper(ctx, loggerDEBUG, msg)
 }
 
 func Info(ctx context.Context, msg string) {
@@ -80,37 +85,65 @@ func Error(ctx context.Context, msg string) {
 }
 
 func Debugf(ctx context.Context, format string, a ...any) {
-    Debug(ctx, fmt.Sprintf(format, a...))
+    logHelper(ctx, loggerDEBUG, fmt.Sprintf(format, a...))
 }
 
 func Infof(ctx context.Context, format string, a ...any) {
-    Info(ctx, fmt.Sprintf(format, a...))
+    logHelper(ctx, loggerINFO, fmt.Sprintf(format, a...))
 }
 
 func Warnf(ctx context.Context, format string, a ...any) {
-    Warn(ctx, fmt.Sprintf(format, a...))
+    logHelper(ctx, loggerWarn, fmt.Sprintf(format, a...))
 }
 
 func Errorf(ctx context.Context, format string, a ...any) {
-    Error(ctx, fmt.Sprintf(format, a...))
+    logHelper(ctx, loggerError, fmt.Sprintf(format, a...))
 }
 
-func logHelper(ctx context.Context, level string, msg string) {
+func FatalLog(s string) {
+    logHelper(nil, loggerFatal, s)
+}
+
+func FatalLogf(format string, a ...any) {
+    logHelper(nil, loggerFatal, fmt.Sprintf(format, a...))
+}
+
+func logHelper(ctx context.Context, level loggerLevel, msg string) {
     writer := gin.DefaultErrorWriter
     if level == loggerINFO {
         writer = gin.DefaultWriter
     }
-    id := ctx.Value(helper.RequestIdKey)
-    if id == nil {
-        id = helper.GenRequestID()
+    var logId string
+    if ctx != nil {
+        rawLogId := ctx.Value(helper.RequestIdKey)
+        if rawLogId != nil {
+            logId = fmt.Sprintf(" | %s", rawLogId.(string))
+        }
     }
+    lineInfo, funcName := getLineInfo()
     now := time.Now()
-    _, _ = fmt.Fprintf(writer, "[%s] %v | %s | %s \n", level, now.Format("2006/01/02 - 15:04:05"), id, msg)
+    _, _ = fmt.Fprintf(writer, "[%s] %v%s%s %s%s \n", level, now.Format("2006/01/02 - 15:04:05"), logId, lineInfo, funcName, msg)
     SetupLogger()
+    if level == loggerFatal {
+        os.Exit(1)
+    }
 }
 
-func FatalLog(v ...any) {
-    t := time.Now()
-    _, _ = fmt.Fprintf(gin.DefaultErrorWriter, "[FATAL] %v | %v \n", t.Format("2006/01/02 - 15:04:05"), v)
-    os.Exit(1)
+func getLineInfo() (string, string) {
+    funcName := "[unknown] "
+    pc, file, line, ok := runtime.Caller(3)
+    if ok {
+        if fn := runtime.FuncForPC(pc); fn != nil {
+            parts := strings.Split(fn.Name(), ".")
+            funcName = "[" + parts[len(parts)-1] + "] "
+        }
+    } else {
+        file = "unknown"
+        line = 0
+    }
+    parts := strings.Split(file, "one-api/")
+    if len(parts) > 1 {
+        file = parts[1]
+    }
+    return fmt.Sprintf(" | %s:%d", file, line), funcName
 }
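Note on the logger hunks above: every exported helper now funnels into logHelper, which writes the level, a timestamp, an optional request id pulled from the context, and the caller's file:line and function name recovered through runtime.Caller. A minimal usage sketch with invented values (in the real server the id under helper.RequestIdKey is populated by middleware; it is set by hand here only for illustration):

package main

import (
    "context"

    "github.com/songquanpeng/one-api/common/helper"
    "github.com/songquanpeng/one-api/common/logger"
)

func main() {
    // No request context: logHelper(nil, ...) simply omits the request id.
    logger.SysLog("server started")

    // Request-scoped call: the id stored under helper.RequestIdKey is appended
    // after the timestamp (set manually here purely for the sketch).
    ctx := context.WithValue(context.Background(), helper.RequestIdKey, "reqid-123")
    logger.Errorf(ctx, "upstream returned %d", 502)

    // Expected shape of the second line, per the Fprintf format above
    // (request id, path, and line number invented):
    // [ERROR] 2025/01/20 - 15:04:05 | reqid-123 | relay/controller/text.go:42 [main] upstream returned 502
}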
@@ -7,7 +7,6 @@ import (
     "net/http"
 
     "github.com/gin-gonic/gin"
-    "github.com/songquanpeng/one-api/common/config"
     "github.com/songquanpeng/one-api/common/helper"
     channelhelper "github.com/songquanpeng/one-api/relay/adaptor"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
@@ -24,8 +23,11 @@ func (a *Adaptor) Init(meta *meta.Meta) {
 }
 
 func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
-    defaultVersion := config.GeminiVersion
-    if meta.ActualModelName == "gemini-2.0-flash-exp" {
+    var defaultVersion string
+    switch meta.ActualModelName {
+    case "gemini-2.0-flash-exp",
+        "gemini-2.0-flash-thinking-exp",
+        "gemini-2.0-flash-thinking-exp-01-21":
         defaultVersion = "v1beta"
     }
 
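For context on the switch above: the three experimental 2.0 models are routed to the v1beta API version, while the old code keyed only on gemini-2.0-flash-exp and otherwise used config.GeminiVersion, whose import is dropped in the previous hunk. A tiny sketch of the effect; the URL pattern is assumed from Google's public Generative Language API and is not code taken from this adaptor:

package main

import "fmt"

// Sketch only: shows why the model-to-version switch matters. The URL form
// below is an assumption (public Generative Language API pattern), not the
// adaptor's GetRequestURL implementation.
func main() {
    version := "v1beta" // selected by the switch for the 2.0 experimental models
    model := "gemini-2.0-flash-thinking-exp-01-21"
    fmt.Printf("https://generativelanguage.googleapis.com/%s/models/%s:generateContent\n", version, model)
}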
@@ -7,5 +7,5 @@ var ModelList = []string{
     "gemini-1.5-flash", "gemini-1.5-pro",
     "text-embedding-004", "aqa",
     "gemini-2.0-flash-exp",
-    "gemini-2.0-flash-thinking-exp",
+    "gemini-2.0-flash-thinking-exp", "gemini-2.0-flash-thinking-exp-01-21",
 }
@@ -2,16 +2,19 @@ package tencent
 
 import (
     "errors"
+    "io"
+    "net/http"
+    "strconv"
+    "strings"
+
     "github.com/gin-gonic/gin"
+
     "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/relay/adaptor"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
     "github.com/songquanpeng/one-api/relay/meta"
     "github.com/songquanpeng/one-api/relay/model"
-    "io"
-    "net/http"
-    "strconv"
-    "strings"
+    "github.com/songquanpeng/one-api/relay/relaymode"
 )
 
 // https://cloud.tencent.com/document/api/1729/101837
@@ -52,10 +55,18 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
     if err != nil {
         return nil, err
     }
-    tencentRequest := ConvertRequest(*request)
+    var convertedRequest any
+    switch relayMode {
+    case relaymode.Embeddings:
+        a.Action = "GetEmbedding"
+        convertedRequest = ConvertEmbeddingRequest(*request)
+    default:
+        a.Action = "ChatCompletions"
+        convertedRequest = ConvertRequest(*request)
+    }
     // we have to calculate the sign here
-    a.Sign = GetSign(*tencentRequest, a, secretId, secretKey)
-    return tencentRequest, nil
+    a.Sign = GetSign(convertedRequest, a, secretId, secretKey)
+    return convertedRequest, nil
 }
 
 func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
@@ -75,8 +86,13 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
         err, responseText = StreamHandler(c, resp)
         usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
     } else {
+        switch meta.Mode {
+        case relaymode.Embeddings:
+            err, usage = EmbeddingHandler(c, resp)
+        default:
             err, usage = Handler(c, resp)
+        }
     }
     return
 }
 
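To make the embedding path in the two hunks above concrete: for relaymode.Embeddings the adaptor sets Action to "GetEmbedding", converts the OpenAI-style request with ConvertEmbeddingRequest (added later in this diff), signs that payload, and dispatches the response to EmbeddingHandler. A small sketch of just the conversion step, with an invented input string:

package tencent

import (
    "encoding/json"
    "fmt"

    "github.com/songquanpeng/one-api/relay/model"
)

// Sketch only: shows the body that ConvertEmbeddingRequest would produce for a
// simple OpenAI-style embedding request; the input text is invented.
func exampleEmbeddingBody() {
    req := model.GeneralOpenAIRequest{
        Model: "hunyuan-embedding",
        Input: "hello hunyuan",
    }
    body, _ := json.Marshal(ConvertEmbeddingRequest(req))
    fmt.Println(string(body)) // expected shape: {"InputList":["hello hunyuan"]}
}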
@@ -6,4 +6,5 @@ var ModelList = []string{
     "hunyuan-standard-256K",
     "hunyuan-pro",
     "hunyuan-vision",
+    "hunyuan-embedding",
 }
@@ -8,7 +8,6 @@ import (
     "encoding/json"
     "errors"
     "fmt"
-    "github.com/songquanpeng/one-api/common/render"
     "io"
     "net/http"
     "strconv"
@@ -16,11 +15,14 @@ import (
     "time"
 
     "github.com/gin-gonic/gin"
+
     "github.com/songquanpeng/one-api/common"
     "github.com/songquanpeng/one-api/common/conv"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/common/logger"
     "github.com/songquanpeng/one-api/common/random"
+    "github.com/songquanpeng/one-api/common/render"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
     "github.com/songquanpeng/one-api/relay/constant"
     "github.com/songquanpeng/one-api/relay/model"
@@ -44,8 +46,68 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
     }
 }
 
+func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
+    return &EmbeddingRequest{
+        InputList: request.ParseInput(),
+    }
+}
+
+func EmbeddingHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+    var tencentResponseP EmbeddingResponseP
+    err := json.NewDecoder(resp.Body).Decode(&tencentResponseP)
+    if err != nil {
+        return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+    }
+
+    err = resp.Body.Close()
+    if err != nil {
+        return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+    }
+
+    tencentResponse := tencentResponseP.Response
+    if tencentResponse.Error.Code != "" {
+        return &model.ErrorWithStatusCode{
+            Error: model.Error{
+                Message: tencentResponse.Error.Message,
+                Code:    tencentResponse.Error.Code,
+            },
+            StatusCode: resp.StatusCode,
+        }, nil
+    }
+    requestModel := c.GetString(ctxkey.RequestModel)
+    fullTextResponse := embeddingResponseTencent2OpenAI(&tencentResponse)
+    fullTextResponse.Model = requestModel
+    jsonResponse, err := json.Marshal(fullTextResponse)
+    if err != nil {
+        return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+    }
+    c.Writer.Header().Set("Content-Type", "application/json")
+    c.Writer.WriteHeader(resp.StatusCode)
+    _, err = c.Writer.Write(jsonResponse)
+    return nil, &fullTextResponse.Usage
+}
+
+func embeddingResponseTencent2OpenAI(response *EmbeddingResponse) *openai.EmbeddingResponse {
+    openAIEmbeddingResponse := openai.EmbeddingResponse{
+        Object: "list",
+        Data:   make([]openai.EmbeddingResponseItem, 0, len(response.Data)),
+        Model:  "hunyuan-embedding",
+        Usage:  model.Usage{TotalTokens: response.EmbeddingUsage.TotalTokens},
+    }
+
+    for _, item := range response.Data {
+        openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, openai.EmbeddingResponseItem{
+            Object:    item.Object,
+            Index:     item.Index,
+            Embedding: item.Embedding,
+        })
+    }
+    return &openAIEmbeddingResponse
+}
+
 func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse {
     fullTextResponse := openai.TextResponse{
+        Id:      response.ReqID,
         Object:  "chat.completion",
         Created: helper.GetTimestamp(),
         Usage: model.Usage{
|
|||||||
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
|
||||||
}
|
}
|
||||||
TencentResponse = responseP.Response
|
TencentResponse = responseP.Response
|
||||||
if TencentResponse.Error.Code != 0 {
|
if TencentResponse.Error.Code != "" {
|
||||||
return &model.ErrorWithStatusCode{
|
return &model.ErrorWithStatusCode{
|
||||||
Error: model.Error{
|
Error: model.Error{
|
||||||
Message: TencentResponse.Error.Message,
|
Message: TencentResponse.Error.Message,
|
||||||
@@ -195,7 +257,7 @@ func hmacSha256(s, key string) string {
     return string(hashed.Sum(nil))
 }
 
-func GetSign(req ChatRequest, adaptor *Adaptor, secId, secKey string) string {
+func GetSign(req any, adaptor *Adaptor, secId, secKey string) string {
     // build canonical request string
     host := "hunyuan.tencentcloudapi.com"
     httpRequestMethod := "POST"
@@ -35,16 +35,16 @@ type ChatRequest struct {
     // 1. Controls the diversity of the output text; larger values produce more diverse output.
     // 2. Valid range is [0.0, 1.0]; if unset, each model's recommended value is used.
     // 3. Not recommended unless necessary; unreasonable values degrade results.
-    TopP *float64 `json:"TopP"`
+    TopP *float64 `json:"TopP,omitempty"`
     // Notes:
     // 1. Higher values make the output more random; lower values make it more focused and deterministic.
     // 2. Valid range is [0.0, 2.0]; if unset, each model's recommended value is used.
     // 3. Not recommended unless necessary; unreasonable values degrade results.
-    Temperature *float64 `json:"Temperature"`
+    Temperature *float64 `json:"Temperature,omitempty"`
 }
 
 type Error struct {
-    Code    int    `json:"Code"`
+    Code    string `json:"Code"`
     Message string `json:"Message"`
 }
 
@@ -67,9 +67,35 @@ type ChatResponse struct {
     Usage Usage  `json:"Usage,omitempty"` // token count
     Error Error  `json:"Error,omitempty"` // error info; note: this field may be null, meaning no valid value is available
     Note  string `json:"Note,omitempty"`  // note
-    ReqID string `json:"Req_id,omitempty"` // unique request id, returned with every request; used when reporting issues with the call
+    ReqID string `json:"RequestId,omitempty"` // unique request id, returned with every request; used when reporting issues with the call
 }
 
 type ChatResponseP struct {
     Response ChatResponse `json:"Response,omitempty"`
 }
+
+type EmbeddingRequest struct {
+    InputList []string `json:"InputList"`
+}
+
+type EmbeddingData struct {
+    Embedding []float64 `json:"Embedding"`
+    Index     int       `json:"Index"`
+    Object    string    `json:"Object"`
+}
+
+type EmbeddingUsage struct {
+    PromptTokens int `json:"PromptTokens"`
+    TotalTokens  int `json:"TotalTokens"`
+}
+
+type EmbeddingResponse struct {
+    Data           []EmbeddingData `json:"Data"`
+    EmbeddingUsage EmbeddingUsage  `json:"Usage,omitempty"`
+    RequestId      string          `json:"RequestId,omitempty"`
+    Error          Error           `json:"Error,omitempty"`
+}
+
+type EmbeddingResponseP struct {
+    Response EmbeddingResponse `json:"Response,omitempty"`
+}
@@ -18,7 +18,8 @@ var ModelList = []string{
     "gemini-pro", "gemini-pro-vision",
     "gemini-1.5-pro-001", "gemini-1.5-flash-001",
     "gemini-1.5-pro-002", "gemini-1.5-flash-002",
-    "gemini-2.0-flash-exp", "gemini-2.0-flash-thinking-exp",
+    "gemini-2.0-flash-exp",
+    "gemini-2.0-flash-thinking-exp", "gemini-2.0-flash-thinking-exp-01-21",
 }
 
 type Adaptor struct {
@@ -11,6 +11,7 @@ import (
 const (
     USD2RMB = 7
     USD     = 500 // $0.002 = 1 -> $1 = 500
+    MILLI_USD = 1.0 / 1000 * USD
     RMB = USD / USD2RMB
 )
 
@@ -123,6 +124,7 @@ var ModelRatio = map[string]float64{
     "gemini-1.5-flash-001":                1,
     "gemini-2.0-flash-exp":                1,
     "gemini-2.0-flash-thinking-exp":       1,
+    "gemini-2.0-flash-thinking-exp-01-21": 1,
     "aqa":                                 1,
     // https://open.bigmodel.cn/pricing
     "glm-4": 0.1 * RMB,
@@ -284,8 +286,8 @@ var ModelRatio = map[string]float64{
     "command-r":      0.5 / 1000 * USD,
     "command-r-plus": 3.0 / 1000 * USD,
     // https://platform.deepseek.com/api-docs/pricing/
-    "deepseek-chat":  1.0 / 1000 * RMB,
-    "deepseek-coder": 1.0 / 1000 * RMB,
+    "deepseek-chat":     0.14 * MILLI_USD,
+    "deepseek-reasoner": 0.55 * MILLI_USD,
     // https://www.deepl.com/pro?cta=header-prices
     "deepl-zh": 25.0 / 1000 * USD,
     "deepl-en": 25.0 / 1000 * USD,
@@ -407,6 +409,9 @@ var CompletionRatio = map[string]float64{
     "llama3-70b-8192(33)": 0.0035 / 0.00265,
     // whisper
     "whisper-1": 0, // only count input tokens
+    // deepseek
+    "deepseek-chat":     0.28 / 0.14,
+    "deepseek-reasoner": 2.19 / 0.55,
 }
 
 var (
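To unpack the arithmetic in the two ratio hunks above: with the convention noted next to USD (a ratio of 1 means $0.002 per 1K tokens, i.e. $2 per 1M tokens), MILLI_USD = 1.0/1000*USD = 0.5 is the ratio worth $1 per 1M tokens. The new entries therefore price deepseek-chat input at $0.14 per 1M tokens and deepseek-reasoner at $0.55 per 1M, while the completion ratios 0.28/0.14 = 2 and 2.19/0.55 ≈ 3.98 mark output tokens up relative to input. A short sketch of that arithmetic, assuming this reading of the constants:

package main

import "fmt"

// Sketch: re-derives the deepseek entries above under the stated convention
// that a model ratio of 1 equals $0.002 per 1K tokens ($2 per 1M tokens).
func main() {
    const (
        USD       = 500              // $0.002 = 1 -> $1 = 500
        MILLI_USD = 1.0 / 1000 * USD // ratio worth $1 per 1M tokens
    )

    chatInput := 0.14 * MILLI_USD     // 0.07  -> $0.14 per 1M input tokens
    reasonerInput := 0.55 * MILLI_USD // 0.275 -> $0.55 per 1M input tokens
    chatCompletion := 0.28 / 0.14     // 2.0   -> output costs 2x input
    reasonerCompletion := 2.19 / 0.55 // ~3.98 -> output costs ~4x input

    fmt.Println(chatInput, reasonerInput, chatCompletion, reasonerCompletion)
}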
@@ -1,6 +1,6 @@
-import { enqueueSnackbar } from 'notistack';
-import { snackbarConstants } from 'constants/SnackbarConstants';
-import { API } from './api';
+import {enqueueSnackbar} from 'notistack';
+import {snackbarConstants} from 'constants/SnackbarConstants';
+import {API} from './api';
 
 export function getSystemName() {
   let system_name = localStorage.getItem('system_name');
@@ -13,15 +13,15 @@ export function isMobile() {
 }
 
 // eslint-disable-next-line
-export function SnackbarHTMLContent({ htmlContent }) {
-  return <div dangerouslySetInnerHTML={{ __html: htmlContent }} />;
+export function SnackbarHTMLContent({htmlContent}) {
+  return <div dangerouslySetInnerHTML={{__html: htmlContent}}/>;
 }
 
 export function getSnackbarOptions(variant) {
   let options = snackbarConstants.Common[variant];
   if (isMobile()) {
     // merge options with snackbarConstants.Mobile
-    options = { ...options, ...snackbarConstants.Mobile };
+    options = {...options, ...snackbarConstants.Mobile};
   }
   return options;
 }
@@ -51,7 +51,7 @@ export function showError(error) {
 
 export function showNotice(message, isHTML = false) {
   if (isHTML) {
-    enqueueSnackbar(<SnackbarHTMLContent htmlContent={message} />, getSnackbarOptions('NOTICE'));
+    enqueueSnackbar(<SnackbarHTMLContent htmlContent={message}/>, getSnackbarOptions('NOTICE'));
   } else {
     enqueueSnackbar(message, getSnackbarOptions('NOTICE'));
   }
@@ -71,7 +71,7 @@ export function showInfo(message) {
 
 export async function getOAuthState() {
   const res = await API.get('/api/oauth/state');
-  const { success, message, data } = res.data;
+  const {success, message, data} = res.data;
   if (success) {
     return data;
   } else {
@@ -107,8 +107,7 @@ export async function onOidcClicked(auth_url, client_id, openInNewTab = false) {
   const url = `${auth_url}?client_id=${client_id}&redirect_uri=${redirect_uri}&response_type=${response_type}&scope=${scope}&state=${state}`;
   if (openInNewTab) {
     window.open(url);
-  } else
-  {
+  } else {
     window.location.href = url;
   }
 }
@@ -193,7 +192,7 @@ export function renderQuotaWithPrompt(quota, digits) {
 }
 
 export function downloadTextAsFile(text, filename) {
-  let blob = new Blob([text], { type: 'text/plain;charset=utf-8' });
+  let blob = new Blob([text], {type: 'text/plain;charset=utf-8'});
   let url = URL.createObjectURL(blob);
   let a = document.createElement('a');
   a.href = url;
@@ -210,9 +209,10 @@ export function removeTrailingSlash(url) {
 }
 
 let channelModels = undefined;
+
 export async function loadChannelModels() {
   const res = await API.get('/api/models');
-  const { success, data } = res.data;
+  const {success, data} = res.data;
   if (!success) {
     return;
   }
@@ -236,12 +236,25 @@ export function getChannelModels(type) {
 }
 
 export function copy(text, name = '') {
-  try {
-    navigator.clipboard.writeText(text);
-  } catch (error) {
-    text = `复制${name}失败,请手动复制:<br /><br />${text}`;
-    enqueueSnackbar(<SnackbarHTMLContent htmlContent={text} />, getSnackbarOptions('COPY'));
-    return;
+  if (navigator.clipboard && navigator.clipboard.writeText) {
+    navigator.clipboard.writeText(text).then(() => {
+      showNotice(`复制${name}成功!`, true);
+    }, () => {
+      text = `复制${name}失败,请手动复制:<br /><br />${text}`;
+      enqueueSnackbar(<SnackbarHTMLContent htmlContent={text}/>, getSnackbarOptions('COPY'));
+    });
+  } else {
+    const textArea = document.createElement("textarea");
+    textArea.value = text;
+    document.body.appendChild(textArea);
+    textArea.select();
+    try {
+      document.execCommand('copy');
+      showNotice(`复制${name}成功!`, true);
+    } catch (err) {
+      text = `复制${name}失败,请手动复制:<br /><br />${text}`;
+      enqueueSnackbar(<SnackbarHTMLContent htmlContent={text}/>, getSnackbarOptions('COPY'));
+    }
+    document.body.removeChild(textArea);
   }
-  showSuccess(`复制${name}成功!`);
 }