Compare commits

...

18 Commits

Author SHA1 Message Date
haochun
f967eaec1e Merge 8726729ade into 7e51b04221 2024-10-28 00:17:08 +08:00
JustSong
7e51b04221 feat: able to hide test model selector and balance col
2024-10-27 18:31:43 +08:00
JustSong
f75a17f8eb feat: always return usage in stream mode 2024-10-27 17:58:44 +08:00
Wei Tingjiang
6f13a3bb3c feat: update Gemini adaptor to support custom response format (#1892) 2024-10-27 17:10:50 +08:00
shaoyun
f092eed1db feat: add support for Claude Sonnet 3.5 v2 (#1888) 2024-10-27 17:10:02 +08:00
longkeyy
629378691b feat: update groq model and price (#1864) 2024-10-27 17:07:24 +08:00
liangjs
3716e1b0e6 fix: use modelMap when testing a channel (#1855)
Co-authored-by: oliang <oliang@tencent.com>
2024-10-27 17:06:41 +08:00
Pan, Wen-Ming
a4d6e7a886 feat: add Vertex AI gemini-1.5-pro-002 and gemini-1.5-flash-002 (#1854) 2024-10-27 17:04:41 +08:00
千寻简
cb772e5d06 fix: unsuccessful lobechat redirection link (#1843) 2024-10-27 17:03:35 +08:00
lihangfu
e32cb0b844 feat: support SparkDesk-v3.5-32K (#1832)
Co-authored-by: lihangfu <hfli8@iflytek.com>
2024-10-27 17:02:54 +08:00
haochun
8726729ade Adjust the structure of streaming error responses for business-side compatibility 2024-10-10 10:53:18 +08:00
haochun
9267c5f12e Rework the streaming implementation to emit output in streaming format instead of returning directly 2024-10-09 14:46:03 +08:00
haochun
5e3042752e fix: handle Alibaba Cloud's official content-moderation ("green net") responses that were not processed correctly and interrupted the business flow; added compatibility handling 2024-10-08 10:51:44 +08:00
抒情熊
fdd7bf41c0 feat: support multipart/form-data format request (#1690)
* "add parser multipart/form-data"

* chore: fix impl

* chore: update impl

---------

Co-authored-by: JustSong <songquanpeng@foxmail.com>
2024-09-22 17:32:47 +08:00
徐瑞东
29389ed44f fix: modify the type of token models to be text (#1761)
* fix: modify the type of token models to be text

* chore: update receiver name

---------

Co-authored-by: JustSong <songquanpeng@foxmail.com>
2024-09-22 16:51:16 +08:00
byte911
88acc5a614 fix: return the usage info if not null (#1792)
Usage is missing.
2024-09-22 16:41:10 +08:00
TimeTrapzz
a21681096a feat: add siliconflow usage (#1798) 2024-09-22 16:31:26 +08:00
lihangfu
32f90a79a8 feat: support SparkDesk-v3.1-128K (#1732)
* feat: support SparkDesk-v3.1-128K and hunyuan-vision

* feat: support SparkDesk-v3.1-128K and hunyuan-vision

---------

Co-authored-by: lihangfu <hfli8@iflytek.com>
2024-09-22 16:29:09 +08:00
31 changed files with 259 additions and 60 deletions

View File

@@ -5,15 +5,15 @@ COPY ./VERSION .
 COPY ./web .
 WORKDIR /web/default
-RUN npm install
+RUN npm config set registry https://mirrors.huaweicloud.com/repository/npm/ && npm install
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build
 WORKDIR /web/berry
-RUN npm install
+RUN npm config set registry https://mirrors.huaweicloud.com/repository/npm/ && npm install
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build
 WORKDIR /web/air
-RUN npm install
+RUN npm config set registry https://mirrors.huaweicloud.com/repository/npm/ && npm install
 RUN DISABLE_ESLINT_PLUGIN='true' REACT_APP_VERSION=$(cat VERSION) npm run build
 FROM golang:alpine AS builder2

View File

@@ -31,15 +31,15 @@ func UnmarshalBodyReusable(c *gin.Context, v any) error {
     contentType := c.Request.Header.Get("Content-Type")
     if strings.HasPrefix(contentType, "application/json") {
         err = json.Unmarshal(requestBody, &v)
+        c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody))
     } else {
-        // skip for now
-        // TODO: someday non json request have variant model, we will need to implementation this
+        c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody))
+        err = c.ShouldBind(&v)
     }
     if err != nil {
         return err
     }
     // Reset request body
-    c.Request.Body = io.NopCloser(bytes.NewBuffer(requestBody))
     return nil
 }
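For context on the form-binding fallback above, here is a rough, self-contained sketch (not part of the diff) of how gin's ShouldBind reads a form-encoded body into a struct; the form:"model" tag added to ModelRequest further down is what makes the model field bindable. Request path and values are illustrative only.

package main

import (
	"fmt"
	"net/http/httptest"
	"net/url"
	"strings"

	"github.com/gin-gonic/gin"
)

// Mirrors the ModelRequest change below: the form tag lets ShouldBind
// read the model name out of form-encoded (or multipart) bodies.
type ModelRequest struct {
	Model string `json:"model" form:"model"`
}

func main() {
	gin.SetMode(gin.TestMode)
	c, _ := gin.CreateTestContext(httptest.NewRecorder())

	// Hypothetical non-JSON request body.
	form := url.Values{"model": {"gpt-3.5-turbo"}}
	c.Request = httptest.NewRequest("POST", "/v1/chat/completions", strings.NewReader(form.Encode()))
	c.Request.Header.Set("Content-Type", "application/x-www-form-urlencoded")

	var req ModelRequest
	// Same call the patched UnmarshalBodyReusable falls back to for non-JSON bodies.
	if err := c.ShouldBind(&req); err != nil {
		panic(err)
	}
	fmt.Println(req.Model) // gpt-3.5-turbo
}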

View File

@@ -81,6 +81,26 @@ type APGC2DGPTUsageResponse struct {
     TotalUsed float64 `json:"total_used"`
 }
+type SiliconFlowUsageResponse struct {
+    Code    int    `json:"code"`
+    Message string `json:"message"`
+    Status  bool   `json:"status"`
+    Data    struct {
+        ID            string `json:"id"`
+        Name          string `json:"name"`
+        Image         string `json:"image"`
+        Email         string `json:"email"`
+        IsAdmin       bool   `json:"isAdmin"`
+        Balance       string `json:"balance"`
+        Status        string `json:"status"`
+        Introduction  string `json:"introduction"`
+        Role          string `json:"role"`
+        ChargeBalance string `json:"chargeBalance"`
+        TotalBalance  string `json:"totalBalance"`
+        Category      string `json:"category"`
+    } `json:"data"`
+}
 // GetAuthHeader get auth header
 func GetAuthHeader(token string) http.Header {
     h := http.Header{}
@@ -203,6 +223,28 @@ func updateChannelAIGC2DBalance(channel *model.Channel) (float64, error) {
     return response.TotalAvailable, nil
 }
+func updateChannelSiliconFlowBalance(channel *model.Channel) (float64, error) {
+    url := "https://api.siliconflow.cn/v1/user/info"
+    body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key))
+    if err != nil {
+        return 0, err
+    }
+    response := SiliconFlowUsageResponse{}
+    err = json.Unmarshal(body, &response)
+    if err != nil {
+        return 0, err
+    }
+    if response.Code != 20000 {
+        return 0, fmt.Errorf("code: %d, message: %s", response.Code, response.Message)
+    }
+    balance, err := strconv.ParseFloat(response.Data.Balance, 64)
+    if err != nil {
+        return 0, err
+    }
+    channel.UpdateBalance(balance)
+    return balance, nil
+}
 func updateChannelBalance(channel *model.Channel) (float64, error) {
     baseURL := channeltype.ChannelBaseURLs[channel.Type]
     if channel.GetBaseURL() == "" {
@@ -227,6 +269,8 @@ func updateChannelBalance(channel *model.Channel) (float64, error) {
         return updateChannelAPI2GPTBalance(channel)
     case channeltype.AIGC2D:
         return updateChannelAIGC2DBalance(channel)
+    case channeltype.SiliconFlow:
+        return updateChannelSiliconFlowBalance(channel)
     default:
         return 0, errors.New("尚未实现")
     }
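A minimal sketch of the kind of JSON the new SiliconFlowUsageResponse struct is meant to decode from https://api.siliconflow.cn/v1/user/info. The payload values below are invented for illustration; only code, message, and data.balance matter to the balance update above.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func main() {
	// Hypothetical response body; field names follow the struct added above.
	payload := []byte(`{
		"code": 20000,
		"message": "OK",
		"status": true,
		"data": {"id": "u-1", "name": "demo", "balance": "12.34", "totalBalance": "12.34"}
	}`)

	var resp struct {
		Code    int    `json:"code"`
		Message string `json:"message"`
		Data    struct {
			Balance string `json:"balance"`
		} `json:"data"`
	}
	if err := json.Unmarshal(payload, &resp); err != nil {
		panic(err)
	}
	// The balance arrives as a string, hence the ParseFloat step in the handler.
	balance, _ := strconv.ParseFloat(resp.Data.Balance, 64)
	fmt.Println(balance) // 12.34
}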

View File

@@ -76,9 +76,9 @@ func testChannel(channel *model.Channel, request *relaymodel.GeneralOpenAIReques
         if len(modelNames) > 0 {
             modelName = modelNames[0]
         }
-        if modelMap != nil && modelMap[modelName] != "" {
-            modelName = modelMap[modelName]
-        }
     }
+    if modelMap != nil && modelMap[modelName] != "" {
+        modelName = modelMap[modelName]
+    }
     meta.OriginModelName, meta.ActualModelName = request.Model, modelName
     request.Model = modelName

View File

@@ -12,7 +12,7 @@ import (
 )
 type ModelRequest struct {
-    Model string `json:"model"`
+    Model string `json:"model" form:"model"`
 }
 func Distribute() func(c *gin.Context) {

View File

@@ -30,7 +30,7 @@ type Token struct {
     RemainQuota    int64   `json:"remain_quota" gorm:"bigint;default:0"`
     UnlimitedQuota bool    `json:"unlimited_quota" gorm:"default:false"`
     UsedQuota      int64   `json:"used_quota" gorm:"bigint;default:0"` // used quota
-    Models         *string `json:"models" gorm:"default:''"` // allowed models
+    Models         *string `json:"models" gorm:"type:text"` // allowed models
     Subnet         *string `json:"subnet" gorm:"default:''"` // allowed subnet
 }
@@ -121,30 +121,40 @@ func GetTokenById(id int) (*Token, error) {
     return &token, err
 }
-func (token *Token) Insert() error {
+func (t *Token) Insert() error {
     var err error
-    err = DB.Create(token).Error
+    err = DB.Create(t).Error
     return err
 }
 // Update Make sure your token's fields is completed, because this will update non-zero values
-func (token *Token) Update() error {
+func (t *Token) Update() error {
     var err error
-    err = DB.Model(token).Select("name", "status", "expired_time", "remain_quota", "unlimited_quota", "models", "subnet").Updates(token).Error
+    err = DB.Model(t).Select("name", "status", "expired_time", "remain_quota", "unlimited_quota", "models", "subnet").Updates(t).Error
     return err
 }
-func (token *Token) SelectUpdate() error {
+func (t *Token) SelectUpdate() error {
     // This can update zero values
-    return DB.Model(token).Select("accessed_time", "status").Updates(token).Error
+    return DB.Model(t).Select("accessed_time", "status").Updates(t).Error
 }
-func (token *Token) Delete() error {
+func (t *Token) Delete() error {
     var err error
-    err = DB.Delete(token).Error
+    err = DB.Delete(t).Error
     return err
 }
+func (t *Token) GetModels() string {
+    if t == nil {
+        return ""
+    }
+    if t.Models == nil {
+        return ""
+    }
+    return *t.Models
+}
 func DeleteTokenById(id int, userId int) (err error) {
     // Why we need userId here? In case user want to delete other's token.
     if id == 0 || userId == 0 {

one-api (new executable binary file; binary content not shown)

View File

@@ -149,7 +149,24 @@ func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse {
     return &fullTextResponse
 }
-func streamResponseAli2OpenAI(aliResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
+func streamResponseAli2OpenAI(aliResponse *ChatResponse) interface{} {
+    if aliResponse.Code != "" {
+        var choice openai.ChatCompletionsStreamResponseChoice
+        choice.Index = 0
+        choice.Delta = model.Message{
+            Role:    "assistant",
+            Content: "",
+        }
+        response := openai.ChatCompletionsErrorStreamResponse{
+            Id:        aliResponse.RequestId,
+            Object:    "chat.completion.chunk",
+            Created:   helper.GetTimestamp(),
+            Model:     "qwen",
+            ErrorCode: aliResponse.Code,
+            Choices:   []openai.ChatCompletionsStreamResponseChoice{choice},
+        }
+        return &response
+    }
     if len(aliResponse.Output.Choices) == 0 {
         return nil
     }
@@ -201,6 +218,19 @@
             logger.SysError("error unmarshalling stream response: " + err.Error())
             continue
         }
+        // Check for known error codes and handle accordingly
+        if aliResponse.Code != "" {
+            response := streamResponseAli2OpenAI(&aliResponse)
+            err = render.ObjectData(c, response)
+            if err != nil {
+                logger.SysError(err.Error())
+            }
+            render.Done(c)
+            return nil, nil
+        }
         if aliResponse.Usage.OutputTokens != 0 {
             usage.PromptTokens = aliResponse.Usage.InputTokens
             usage.CompletionTokens = aliResponse.Usage.OutputTokens
@@ -245,6 +275,8 @@ func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *
     if err != nil {
         return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
     }
+    // Check for known error codes and handle accordingly
     if aliResponse.Code != "" {
         return &model.ErrorWithStatusCode{
             Error: model.Error{
@@ -256,6 +288,7 @@ func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *
             StatusCode: resp.StatusCode,
         }, nil
     }
     fullTextResponse := responseAli2OpenAI(&aliResponse)
     fullTextResponse.Model = "qwen"
     jsonResponse, err := json.Marshal(fullTextResponse)
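Roughly, the stream error path above converts an upstream Alibaba Cloud error code into one final OpenAI-style chunk before closing the stream, instead of aborting the response. A hedged sketch of what such a chunk could look like on the wire, with field names taken from the ChatCompletionsErrorStreamResponse type added later in this diff and purely illustrative values:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Approximation of the chunk streamResponseAli2OpenAI builds when
	// aliResponse.Code is non-empty; the request id, timestamp, and error
	// code below are made-up examples.
	chunk := map[string]any{
		"id":         "req-123",
		"object":     "chat.completion.chunk",
		"created":    1730000000,
		"model":      "qwen",
		"error_code": "DataInspectionFailed",
		"choices": []map[string]any{
			{"index": 0, "delta": map[string]string{"role": "assistant", "content": ""}},
		},
	}
	b, _ := json.Marshal(chunk)
	// Emitted as a server-sent event, then the stream is terminated.
	fmt.Println("data: " + string(b)) // followed by "data: [DONE]"
}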

View File

@@ -6,4 +6,5 @@ var ModelList = []string{
     "claude-3-sonnet-20240229",
     "claude-3-opus-20240229",
     "claude-3-5-sonnet-20240620",
+    "claude-3-5-sonnet-20241022",
 }

View File

@@ -31,6 +31,7 @@ var AwsModelIDMap = map[string]string{
     "claude-2.1":                "anthropic.claude-v2:1",
     "claude-3-sonnet-20240229":  "anthropic.claude-3-sonnet-20240229-v1:0",
     "claude-3-5-sonnet-20240620": "anthropic.claude-3-5-sonnet-20240620-v1:0",
+    "claude-3-5-sonnet-20241022": "anthropic.claude-3-5-sonnet-20241022-v2:0",
     "claude-3-opus-20240229":    "anthropic.claude-3-opus-20240229-v1:0",
     "claude-3-haiku-20240307":   "anthropic.claude-3-haiku-20240307-v1:0",
 }

View File

@@ -4,11 +4,12 @@ import (
     "bufio"
     "encoding/json"
     "fmt"
-    "github.com/songquanpeng/one-api/common/render"
     "io"
     "net/http"
     "strings"
+    "github.com/songquanpeng/one-api/common/render"
     "github.com/songquanpeng/one-api/common"
     "github.com/songquanpeng/one-api/common/config"
     "github.com/songquanpeng/one-api/common/helper"
@@ -28,6 +29,11 @@ const (
     VisionMaxImageNum = 16
 )
+var mimeTypeMap = map[string]string{
+    "json_object": "application/json",
+    "text":        "text/plain",
+}
 // Setting safety to the lowest possible values since Gemini is already powerless enough
 func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
     geminiRequest := ChatRequest{
@@ -56,6 +62,15 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
             MaxOutputTokens: textRequest.MaxTokens,
         },
     }
+    if textRequest.ResponseFormat != nil {
+        if mimeType, ok := mimeTypeMap[textRequest.ResponseFormat.Type]; ok {
+            geminiRequest.GenerationConfig.ResponseMimeType = mimeType
+        }
+        if textRequest.ResponseFormat.JsonSchema != nil {
+            geminiRequest.GenerationConfig.ResponseSchema = textRequest.ResponseFormat.JsonSchema.Schema
+            geminiRequest.GenerationConfig.ResponseMimeType = mimeTypeMap["json_object"]
+        }
+    }
     if textRequest.Tools != nil {
         functions := make([]model.Function, 0, len(textRequest.Tools))
         for _, tool := range textRequest.Tools {
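The response_format handling above maps the OpenAI-style type onto Gemini's responseMimeType via the new mimeTypeMap (and, when a JSON schema is supplied, also copies it into responseSchema). A small standalone sketch of that mapping, not the adaptor code itself:

package main

import "fmt"

// Same lookup table as the mimeTypeMap added above.
var mimeTypeMap = map[string]string{
	"json_object": "application/json",
	"text":        "text/plain",
}

func main() {
	// An OpenAI-style request asking for JSON output...
	responseFormatType := "json_object"

	// ...becomes Gemini's generationConfig.responseMimeType.
	if mimeType, ok := mimeTypeMap[responseFormatType]; ok {
		fmt.Println(mimeType) // application/json
	}
}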

View File

@@ -65,10 +65,12 @@ type ChatTools struct {
 }
 type ChatGenerationConfig struct {
-    Temperature     float64  `json:"temperature,omitempty"`
-    TopP            float64  `json:"topP,omitempty"`
-    TopK            float64  `json:"topK,omitempty"`
-    MaxOutputTokens int      `json:"maxOutputTokens,omitempty"`
-    CandidateCount  int      `json:"candidateCount,omitempty"`
-    StopSequences   []string `json:"stopSequences,omitempty"`
+    ResponseMimeType string   `json:"responseMimeType,omitempty"`
+    ResponseSchema   any      `json:"responseSchema,omitempty"`
+    Temperature      float64  `json:"temperature,omitempty"`
+    TopP             float64  `json:"topP,omitempty"`
+    TopK             float64  `json:"topK,omitempty"`
+    MaxOutputTokens  int      `json:"maxOutputTokens,omitempty"`
+    CandidateCount   int      `json:"candidateCount,omitempty"`
+    StopSequences    []string `json:"stopSequences,omitempty"`
 }

View File

@@ -4,14 +4,21 @@ package groq
 var ModelList = []string{
     "gemma-7b-it",
-    "mixtral-8x7b-32768",
-    "llama3-8b-8192",
-    "llama3-70b-8192",
     "gemma2-9b-it",
-    "llama-3.1-405b-reasoning",
     "llama-3.1-70b-versatile",
     "llama-3.1-8b-instant",
+    "llama-3.2-11b-text-preview",
+    "llama-3.2-11b-vision-preview",
+    "llama-3.2-1b-preview",
+    "llama-3.2-3b-preview",
+    "llama-3.2-90b-text-preview",
+    "llama-guard-3-8b",
+    "llama3-70b-8192",
+    "llama3-8b-8192",
     "llama3-groq-70b-8192-tool-use-preview",
     "llama3-groq-8b-8192-tool-use-preview",
+    "llava-v1.5-7b-4096-preview",
+    "mixtral-8x7b-32768",
+    "distil-whisper-large-v3-en",
     "whisper-large-v3",
 }

View File

@@ -75,6 +75,13 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
     if request == nil {
         return nil, errors.New("request is nil")
     }
+    if request.Stream {
+        // always return usage in stream mode
+        if request.StreamOptions == nil {
+            request.StreamOptions = &model.StreamOptions{}
+        }
+        request.StreamOptions.IncludeUsage = true
+    }
     return request, nil
 }
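A sketch of what the forced stream option looks like after this ConvertRequest change: whenever the caller asks for streaming, stream_options.include_usage is switched on before the request is forwarded upstream. The types and model name below are illustrative stand-ins for the relay/model definitions added later in this diff.

package main

import (
	"encoding/json"
	"fmt"
)

type StreamOptions struct {
	IncludeUsage bool `json:"include_usage,omitempty"`
}

type request struct {
	Model         string         `json:"model"`
	Stream        bool           `json:"stream"`
	StreamOptions *StreamOptions `json:"stream_options,omitempty"`
}

func main() {
	req := request{Model: "gpt-4o-mini", Stream: true} // model name is illustrative
	if req.Stream {
		// Mirrors the change above: always ask upstream for usage in stream mode.
		if req.StreamOptions == nil {
			req.StreamOptions = &StreamOptions{}
		}
		req.StreamOptions.IncludeUsage = true
	}
	b, _ := json.Marshal(req)
	fmt.Println(string(b)) // {"model":"gpt-4o-mini","stream":true,"stream_options":{"include_usage":true}}
}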

View File

@@ -55,8 +55,8 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
             render.StringData(c, data) // if error happened, pass the data to client
             continue // just ignore the error
         }
-        if len(streamResponse.Choices) == 0 {
-            // but for empty choice, we should not pass it to client, this is for azure
+        if len(streamResponse.Choices) == 0 && streamResponse.Usage == nil {
+            // but for empty choice and no usage, we should not pass it to client, this is for azure
             continue // just ignore empty choice
         }
         render.StringData(c, data)

View File

@@ -97,6 +97,16 @@ type TextResponse struct {
     model.Usage `json:"usage"`
 }
+type ErrorTextResponse struct {
+    Id        string               `json:"id"`
+    Model     string               `json:"model,omitempty"`
+    Object    string               `json:"object"`
+    ErrorCode string               `json:"error_code"`
+    Created   int64                `json:"created"`
+    Choices   []TextResponseChoice `json:"choices"`
+    model.Usage `json:"usage"`
+}
 type EmbeddingResponseItem struct {
     Object string `json:"object"`
     Index  int    `json:"index"`
@@ -137,6 +147,16 @@ type ChatCompletionsStreamResponse struct {
     Usage *model.Usage `json:"usage,omitempty"`
 }
+type ChatCompletionsErrorStreamResponse struct {
+    Id        string                                `json:"id"`
+    Object    string                                `json:"object"`
+    Created   int64                                 `json:"created"`
+    ErrorCode string                                `json:"error_code"`
+    Model     string                                `json:"model"`
+    Choices   []ChatCompletionsStreamResponseChoice `json:"choices"`
+    Usage     *model.Usage                          `json:"usage,omitempty"`
+}
 type CompletionsStreamResponse struct {
     Choices []struct {
         Text string `json:"text"`

View File

@@ -5,4 +5,5 @@ var ModelList = []string{
     "hunyuan-standard",
     "hunyuan-standard-256K",
     "hunyuan-pro",
+    "hunyuan-vision",
 }

View File

@@ -15,7 +15,7 @@ import (
 )
 var ModelList = []string{
-    "gemini-1.5-pro-001", "gemini-1.5-flash-001", "gemini-pro", "gemini-pro-vision",
+    "gemini-1.5-pro-001", "gemini-1.5-flash-001", "gemini-pro", "gemini-pro-vision", "gemini-1.5-pro-002", "gemini-1.5-flash-002",
 }
 type Adaptor struct {

View File

@@ -5,6 +5,8 @@ var ModelList = []string{
     "SparkDesk-v1.1",
     "SparkDesk-v2.1",
     "SparkDesk-v3.1",
+    "SparkDesk-v3.1-128K",
     "SparkDesk-v3.5",
+    "SparkDesk-v3.5-32K",
     "SparkDesk-v4.0",
 }

View File

@@ -272,9 +272,9 @@ func xunfeiMakeRequest(textRequest model.GeneralOpenAIRequest, domain, authUrl,
 }
 func parseAPIVersionByModelName(modelName string) string {
-    parts := strings.Split(modelName, "-")
-    if len(parts) == 2 {
-        return parts[1]
+    index := strings.IndexAny(modelName, "-")
+    if index != -1 {
+        return modelName[index+1:]
     }
     return ""
 }
@@ -288,8 +288,12 @@ func apiVersion2domain(apiVersion string) string {
         return "generalv2"
     case "v3.1":
         return "generalv3"
+    case "v3.1-128K":
+        return "pro-128k"
     case "v3.5":
         return "generalv3.5"
+    case "v3.5-32K":
+        return "max-32k"
     case "v4.0":
         return "4.0Ultra"
     }
@@ -297,7 +301,17 @@ func apiVersion2domain(apiVersion string) string {
 }
 func getXunfeiAuthUrl(apiVersion string, apiKey string, apiSecret string) (string, string) {
+    var authUrl string
     domain := apiVersion2domain(apiVersion)
-    authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
+    switch apiVersion {
+    case "v3.1-128K":
+        authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/pro-128k"), apiKey, apiSecret)
+        break
+    case "v3.5-32K":
+        authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/chat/max-32k"), apiKey, apiSecret)
+        break
+    default:
+        authUrl = buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
+    }
     return domain, authUrl
 }
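A worked example of the revised version parsing: keeping everything after the first '-' preserves multi-part suffixes such as v3.5-32K, which the old strings.Split / len(parts) == 2 check silently dropped, so the new SparkDesk variants can be routed to their own domain and endpoint. The sketch below mirrors that logic outside the adaptor:

package main

import (
	"fmt"
	"strings"
)

// Same idea as the patched parseAPIVersionByModelName: return everything
// after the first '-'.
func parseAPIVersion(modelName string) string {
	if i := strings.IndexAny(modelName, "-"); i != -1 {
		return modelName[i+1:]
	}
	return ""
}

func main() {
	for _, m := range []string{"SparkDesk-v3.5", "SparkDesk-v3.5-32K", "SparkDesk-v3.1-128K"} {
		fmt.Println(m, "->", parseAPIVersion(m))
	}
	// SparkDesk-v3.5 -> v3.5
	// SparkDesk-v3.5-32K -> v3.5-32K   (maps to the max-32k domain)
	// SparkDesk-v3.1-128K -> v3.1-128K (maps to the pro-128k domain)
}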

View File

@@ -81,6 +81,7 @@ var ModelRatio = map[string]float64{
     "claude-3-haiku-20240307":  0.25 / 1000 * USD,
     "claude-3-sonnet-20240229": 3.0 / 1000 * USD,
     "claude-3-5-sonnet-20240620": 3.0 / 1000 * USD,
+    "claude-3-5-sonnet-20241022": 3.0 / 1000 * USD,
     "claude-3-opus-20240229": 15.0 / 1000 * USD,
     // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/hlrk4akp7
     "ERNIE-4.0-8K": 0.120 * RMB,
@@ -128,7 +129,9 @@ var ModelRatio = map[string]float64{
     "SparkDesk-v1.1": 1.2858, // ¥0.018 / 1k tokens
     "SparkDesk-v2.1": 1.2858, // ¥0.018 / 1k tokens
     "SparkDesk-v3.1": 1.2858, // ¥0.018 / 1k tokens
+    "SparkDesk-v3.1-128K": 1.2858, // ¥0.018 / 1k tokens
     "SparkDesk-v3.5": 1.2858, // ¥0.018 / 1k tokens
+    "SparkDesk-v3.5-32K": 1.2858, // ¥0.018 / 1k tokens
     "SparkDesk-v4.0": 1.2858, // ¥0.018 / 1k tokens
     "360GPT_S2_V9": 0.8572, // ¥0.012 / 1k tokens
     "embedding-bert-512-v1": 0.0715, // ¥0.001 / 1k tokens
@@ -160,15 +163,21 @@ var ModelRatio = map[string]float64{
     "mistral-embed": 0.1 / 1000 * USD,
     // https://wow.groq.com/#:~:text=inquiries%C2%A0here.-,Model,-Current%20Speed
     "gemma-7b-it": 0.07 / 1000000 * USD,
-    "mixtral-8x7b-32768": 0.24 / 1000000 * USD,
-    "llama3-8b-8192": 0.05 / 1000000 * USD,
-    "llama3-70b-8192": 0.59 / 1000000 * USD,
     "gemma2-9b-it": 0.20 / 1000000 * USD,
-    "llama-3.1-405b-reasoning": 0.89 / 1000000 * USD,
     "llama-3.1-70b-versatile": 0.59 / 1000000 * USD,
     "llama-3.1-8b-instant": 0.05 / 1000000 * USD,
+    "llama-3.2-11b-text-preview": 0.05 / 1000000 * USD,
+    "llama-3.2-11b-vision-preview": 0.05 / 1000000 * USD,
+    "llama-3.2-1b-preview": 0.05 / 1000000 * USD,
+    "llama-3.2-3b-preview": 0.05 / 1000000 * USD,
+    "llama-3.2-90b-text-preview": 0.59 / 1000000 * USD,
+    "llama-guard-3-8b": 0.05 / 1000000 * USD,
+    "llama3-70b-8192": 0.59 / 1000000 * USD,
+    "llama3-8b-8192": 0.05 / 1000000 * USD,
     "llama3-groq-70b-8192-tool-use-preview": 0.89 / 1000000 * USD,
     "llama3-groq-8b-8192-tool-use-preview": 0.19 / 1000000 * USD,
+    "mixtral-8x7b-32768": 0.24 / 1000000 * USD,
     // https://platform.lingyiwanwu.com/docs#-计费单元
     "yi-34b-chat-0205": 2.5 / 1000 * RMB,
     "yi-34b-chat-200k": 12.0 / 1000 * RMB,

View File

@@ -1,6 +1,7 @@
 package model
 const (
     ContentTypeText     = "text"
     ContentTypeImageURL = "image_url"
+    ContentTypeInputAudio = "input_audio"
 )

View File

@@ -12,9 +12,20 @@ type JSONSchema struct {
     Strict *bool `json:"strict,omitempty"`
 }
+type Audio struct {
+    Voice  string `json:"voice,omitempty"`
+    Format string `json:"format,omitempty"`
+}
+type StreamOptions struct {
+    IncludeUsage bool `json:"include_usage,omitempty"`
+}
 type GeneralOpenAIRequest struct {
     Messages []Message `json:"messages,omitempty"`
     Model    string    `json:"model,omitempty"`
+    Modalities []string `json:"modalities,omitempty"`
+    Audio      *Audio   `json:"audio,omitempty"`
     FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
     MaxTokens        int     `json:"max_tokens,omitempty"`
     N                int     `json:"n,omitempty"`
@@ -23,6 +34,7 @@ type GeneralOpenAIRequest struct {
     Seed   float64 `json:"seed,omitempty"`
     Stop   any     `json:"stop,omitempty"`
     Stream bool    `json:"stream,omitempty"`
+    StreamOptions *StreamOptions `json:"stream_options,omitempty"`
     Temperature float64 `json:"temperature,omitempty"`
     TopP        float64 `json:"top_p,omitempty"`
     TopK        int     `json:"top_k,omitempty"`
@@ -37,7 +49,7 @@ type GeneralOpenAIRequest struct {
     Dimensions  int    `json:"dimensions,omitempty"`
     Instruction string `json:"instruction,omitempty"`
     Size        string `json:"size,omitempty"`
     NumCtx      int    `json:"num_ctx,omitempty"`
 }
 func (r GeneralOpenAIRequest) ParseInput() []string {

View File

@@ -395,7 +395,7 @@ const TokensTable = () => {
         url = mjLink + `/#/?settings={"key":"sk-${key}","url":"${serverAddress}"}`;
         break;
       case 'lobechat':
-        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}"/v1"}}}`;
+        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}/v1"}}}`;
         break;
       default:
         if (!chatLink) {

View File

@@ -63,7 +63,7 @@ const EditChannel = (props) => {
   let localModels = [];
   switch (value) {
     case 14:
-      localModels = ["claude-instant-1.2", "claude-2", "claude-2.0", "claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307", "claude-3-5-sonnet-20240620"];
+      localModels = ["claude-instant-1.2", "claude-2", "claude-2.0", "claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307", "claude-3-5-sonnet-20240620", "claude-3-5-sonnet-20241022"];
       break;
     case 11:
       localModels = ['PaLM-2'];
@@ -78,7 +78,7 @@ const EditChannel = (props) => {
       localModels = ['chatglm_pro', 'chatglm_std', 'chatglm_lite'];
       break;
     case 18:
-      localModels = ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.5', 'SparkDesk-v4.0'];
+      localModels = ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.1-128K', 'SparkDesk-v3.5', 'SparkDesk-v3.5-32K', 'SparkDesk-v4.0'];
       break;
     case 19:
       localModels = ['360GPT_S2_V9', 'embedding-bert-512-v1', 'embedding_s1_v1', 'semantic_similarity_s1_v1'];

View File

@@ -268,6 +268,8 @@ function renderBalance(type, balance) {
       return <span>¥{balance.toFixed(2)}</span>;
     case 13: // AIGC2D
       return <span>{renderNumber(balance)}</span>;
+    case 44: // SiliconFlow
+      return <span>¥{balance.toFixed(2)}</span>;
     default:
       return <span>不支持</span>;
   }

View File

@@ -91,7 +91,7 @@ const typeConfig = {
     other: '版本号'
   },
   input: {
-    models: ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.5', 'SparkDesk-v4.0']
+    models: ['SparkDesk', 'SparkDesk-v1.1', 'SparkDesk-v2.1', 'SparkDesk-v3.1', 'SparkDesk-v3.1-128K', 'SparkDesk-v3.5', 'SparkDesk-v3.5-32K', 'SparkDesk-v4.0']
   },
   prompt: {
     key: '按照如下格式输入APPID|APISecret|APIKey',

View File

@@ -33,7 +33,7 @@ const COPY_OPTIONS = [
   },
   { key: 'ama', text: 'BotGem', url: 'ama://set-api-key?server={serverAddress}&key=sk-{key}', encode: true },
   { key: 'opencat', text: 'OpenCat', url: 'opencat://team/join?domain={serverAddress}&token=sk-{key}', encode: true },
-  { key: 'lobechat', text: 'LobeChat', url: 'https://lobehub.com/?settings={"keyVaults":{"openai":{"apiKey":"user-key","baseURL":"https://your-proxy.com/v1"}}}', encode: true }
+  { key: 'lobechat', text: 'LobeChat', url: 'https://lobehub.com/?settings={"keyVaults":{"openai":{"apiKey":"sk-{key}","baseURL":"{serverAddress}"}}}', encode: true }
 ];
 function replacePlaceholders(text, key, serverAddress) {

View File

@@ -52,11 +52,19 @@ function renderBalance(type, balance) {
       return <span>¥{balance.toFixed(2)}</span>;
     case 13: // AIGC2D
       return <span>{renderNumber(balance)}</span>;
+    case 44: // SiliconFlow
+      return <span>¥{balance.toFixed(2)}</span>;
     default:
       return <span>不支持</span>;
   }
 }
+function isShowDetail() {
+  return localStorage.getItem("show_detail") === "true";
+}
+const promptID = "detail"
 const ChannelsTable = () => {
   const [channels, setChannels] = useState([]);
   const [loading, setLoading] = useState(true);
@@ -64,7 +72,8 @@ const ChannelsTable = () => {
   const [searchKeyword, setSearchKeyword] = useState('');
   const [searching, setSearching] = useState(false);
   const [updatingBalance, setUpdatingBalance] = useState(false);
-  const [showPrompt, setShowPrompt] = useState(shouldShowPrompt("channel-test"));
+  const [showPrompt, setShowPrompt] = useState(shouldShowPrompt(promptID));
+  const [showDetail, setShowDetail] = useState(isShowDetail());
   const loadChannels = async (startIdx) => {
     const res = await API.get(`/api/channel/?p=${startIdx}`);
@@ -118,6 +127,11 @@ const ChannelsTable = () => {
     await loadChannels(activePage - 1);
   };
+  const toggleShowDetail = () => {
+    setShowDetail(!showDetail);
+    localStorage.setItem("show_detail", (!showDetail).toString());
+  }
   useEffect(() => {
     loadChannels(0)
       .then()
@@ -362,11 +376,13 @@ const ChannelsTable = () => {
     showPrompt && (
       <Message onDismiss={() => {
         setShowPrompt(false);
-        setPromptShown("channel-test");
+        setPromptShown(promptID);
       }}>
         OpenAI 渠道已经不再支持通过 key 获取余额因此余额显示为 0对于支持的渠道类型请点击余额进行刷新
         <br/>
         渠道测试仅支持 chat 模型优先使用 gpt-3.5-turbo如果该模型不可用则使用你所配置的模型列表中的第一个模型
+        <br/>
+        点击下方详情按钮可以显示余额以及设置额外的测试模型
       </Message>
     )
   }
@@ -426,6 +442,7 @@ const ChannelsTable = () => {
     onClick={() => {
       sortChannel('balance');
     }}
+    hidden={!showDetail}
   >
     余额
   </Table.HeaderCell>
@@ -437,7 +454,7 @@ const ChannelsTable = () => {
   >
     优先级
   </Table.HeaderCell>
-  <Table.HeaderCell>测试模型</Table.HeaderCell>
+  <Table.HeaderCell hidden={!showDetail}>测试模型</Table.HeaderCell>
   <Table.HeaderCell>操作</Table.HeaderCell>
   </Table.Row>
   </Table.Header>
@@ -465,7 +482,7 @@ const ChannelsTable = () => {
     basic
   />
   </Table.Cell>
-  <Table.Cell>
+  <Table.Cell hidden={!showDetail}>
   <Popup
     trigger={<span onClick={() => {
       updateChannelBalance(channel.id, channel.name, idx);
@@ -492,7 +509,7 @@ const ChannelsTable = () => {
     basic
   />
   </Table.Cell>
-  <Table.Cell>
+  <Table.Cell hidden={!showDetail}>
   <Dropdown
     placeholder='请选择测试模型'
     selection
@@ -571,7 +588,7 @@ const ChannelsTable = () => {
   <Table.Footer>
   <Table.Row>
-  <Table.HeaderCell colSpan='9'>
+  <Table.HeaderCell colSpan={showDetail ? "10" : "8"}>
   <Button size='small' as={Link} to='/channel/add' loading={loading}>
     添加新的渠道
   </Button>
@@ -609,6 +626,7 @@ const ChannelsTable = () => {
     }
   />
   <Button size='small' onClick={refresh} loading={loading}>刷新</Button>
+  <Button size='small' onClick={toggleShowDetail}>{showDetail ? "隐藏详情" : "详情"}</Button>
   </Table.HeaderCell>
   </Table.Row>
   </Table.Footer>

View File

@@ -117,7 +117,7 @@ const TokensTable = () => {
         url = nextUrl;
         break;
       case 'lobechat':
-        url = nextLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}"/v1"}}}`;
+        url = nextLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}/v1"}}}`;
         break;
       default:
         url = `sk-${key}`;
@@ -160,7 +160,7 @@ const TokensTable = () => {
         break;
       case 'lobechat':
-        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}"/v1"}}}`;
+        url = chatLink + `/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${serverAddress}/v1"}}}`;
         break;
       default:

View File

@@ -2,7 +2,7 @@ import React from 'react';
 import { Header, Segment } from 'semantic-ui-react';
 import ChannelsTable from '../../components/ChannelsTable';
-const File = () => (
+const Channel = () => (
   <>
     <Segment>
       <Header as='h3'>管理渠道</Header>
@@ -11,4 +11,4 @@ const File = () => (
   </>
 );
-export default File;
+export default Channel;