Merge remote-tracking branch 'upstream/main'
commit d34b601dae
@@ -229,6 +229,7 @@ const (
     ChannelTypeLingYiWanWu = 31
     ChannelTypeAws         = 33
     ChannelTypeCohere      = 34
+    ChannelTypeMiniMax     = 35

     ChannelTypeDummy // this one is only for count, do not add any channel after this
 )
@@ -269,4 +270,5 @@ var ChannelBaseURLs = []string{
     "", //32
     "", //33
     "https://api.cohere.ai",    //34
+    "https://api.minimax.chat", //35
 }
@@ -5,6 +5,13 @@ import (
     "strings"
 )

+// from songquanpeng/one-api
+const (
+    USD2RMB = 7.3 // tentatively 1 USD = 7.3 RMB
+    USD     = 500 // $0.002 = 1 -> $1 = 500
+    RMB     = USD / USD2RMB
+)
+
 // modelRatio
 // https://platform.openai.com/docs/models/model-endpoint-compatibility
 // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf
@@ -116,9 +123,18 @@ var DefaultModelRatio = map[string]float64{
     "hunyuan": 7.143, // ¥0.1 / 1k tokens // https://cloud.tencent.com/document/product/1729/97731#e0e6be58-60c8-469f-bdeb-6c264ce3b4d0
     // https://platform.lingyiwanwu.com/docs#-计费单元
     // already converted to USD at an exchange rate of 7.2
-    "yi-34b-chat-0205": 0.018,
-    "yi-34b-chat-200k": 0.0864,
-    "yi-vl-plus":       0.0432,
+    "yi-34b-chat-0205":     0.18,
+    "yi-34b-chat-200k":     0.864,
+    "yi-vl-plus":           0.432,
+    "yi-large":             20.0 / 1000 * RMB,
+    "yi-medium":            2.5 / 1000 * RMB,
+    "yi-vision":            6.0 / 1000 * RMB,
+    "yi-medium-200k":       12.0 / 1000 * RMB,
+    "yi-spark":             1.0 / 1000 * RMB,
+    "yi-large-rag":         25.0 / 1000 * RMB,
+    "yi-large-turbo":       12.0 / 1000 * RMB,
+    "yi-large-preview":     20.0 / 1000 * RMB,
+    "yi-large-rag-preview": 25.0 / 1000 * RMB,
     "command":         0.5,
     "command-nightly": 0.5,
     "command-light":   0.5,
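Note (not part of the diff): the new yi-* ratios are built from the USD/RMB constants introduced in the earlier hunk, where a ratio of 1 corresponds to $0.002 per 1k tokens. A minimal standalone sketch, assuming only those constants, of how an entry such as "yi-large": 20.0 / 1000 * RMB maps back to a price:

package main

import "fmt"

const (
    USD2RMB = 7.3
    USD     = 500 // $0.002 = 1 -> $1 = 500
    RMB     = USD / USD2RMB
)

func main() {
    ratio := 20.0 / 1000 * RMB // the new "yi-large" entry
    usdPer1k := ratio * 0.002  // ratio 1 = $0.002 per 1k tokens
    // Prints roughly: ratio 1.3699 = $0.00274 / 1k tokens = ¥0.0200 / 1k tokens, i.e. ¥20 per 1M tokens
    fmt.Printf("ratio %.4f = $%.5f / 1k tokens = ¥%.4f / 1k tokens\n", ratio, usdPer1k, usdPer1k*USD2RMB)
}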
@@ -127,6 +143,11 @@ var DefaultModelRatio = map[string]float64{
     "command-r-plus ":  1.5,
     "deepseek-chat":    0.07,
     "deepseek-coder":   0.07,
+    // Perplexity online models charge extra for search; adjust as needed, search fees are not counted here
+    "llama-3-sonar-small-32k-chat":   0.2 / 1000 * USD,
+    "llama-3-sonar-small-32k-online": 0.2 / 1000 * USD,
+    "llama-3-sonar-large-32k-chat":   1 / 1000 * USD,
+    "llama-3-sonar-large-32k-online": 1 / 1000 * USD,
 }

 var DefaultModelPrice = map[string]float64{
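Note (not part of the diff): Go evaluates these map values as constant expressions before converting them to float64, and constant division of two integer literals truncates. A small sketch, reusing only the USD constant from the earlier hunk, of why a literal written as 1 behaves differently from 1.0 in entries like the llama-3-sonar ones:

package main

import "fmt"

const USD = 500 // $0.002 = 1 -> $1 = 500

func main() {
    ratios := map[string]float64{
        "int-literal":   1 / 1000 * USD,   // 1/1000 is integer constant division -> 0
        "float-literal": 1.0 / 1000 * USD, // 0.5, i.e. $0.001 / 1k tokens = $1 / 1M tokens
    }
    fmt.Println(ratios) // map[float-literal:0.5 int-literal:0]
}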
@@ -11,6 +11,7 @@ import (
     "one-api/relay"
     "one-api/relay/channel/ai360"
     "one-api/relay/channel/lingyiwanwu"
+    "one-api/relay/channel/minimax"
     "one-api/relay/channel/moonshot"
     relaycommon "one-api/relay/common"
     relayconstant "one-api/relay/constant"
@@ -79,7 +80,7 @@ func init() {
             Id:         modelName,
             Object:     "model",
             Created:    1626777600,
-            OwnedBy:    "moonshot",
+            OwnedBy:    moonshot.ChannelName,
             Permission: permission,
             Root:       modelName,
             Parent:     nil,
@@ -90,7 +91,18 @@ func init() {
             Id:         modelName,
             Object:     "model",
             Created:    1626777600,
-            OwnedBy:    "lingyiwanwu",
+            OwnedBy:    lingyiwanwu.ChannelName,
+            Permission: permission,
+            Root:       modelName,
+            Parent:     nil,
+        })
+    }
+    for _, modelName := range minimax.ModelList {
+        openAIModels = append(openAIModels, dto.OpenAIModels{
+            Id:         modelName,
+            Object:     "model",
+            Created:    1626777600,
+            OwnedBy:    minimax.ChannelName,
             Permission: permission,
             Root:       modelName,
             Parent:     nil,
@@ -3,7 +3,7 @@ package lingyiwanwu
 // https://platform.lingyiwanwu.com/docs

 var ModelList = []string{
-    "yi-34b-chat-0205",
-    "yi-34b-chat-200k",
-    "yi-vl-plus",
+    "yi-large", "yi-medium", "yi-vision", "yi-medium-200k", "yi-spark", "yi-large-rag", "yi-large-turbo", "yi-large-preview", "yi-large-rag-preview",
 }
+
+var ChannelName = "lingyiwanwu"
relay/channel/minimax/constants.go (new file, 13 lines)
@@ -0,0 +1,13 @@
+package minimax
+
+// https://www.minimaxi.com/document/guides/chat-model/V2?id=65e0736ab2845de20908e2dd
+
+var ModelList = []string{
+    "abab6.5-chat",
+    "abab6.5s-chat",
+    "abab6-chat",
+    "abab5.5-chat",
+    "abab5.5s-chat",
+}
+
+var ChannelName = "minimax"
relay/channel/minimax/relay-minimax.go (new file, 10 lines)
@@ -0,0 +1,10 @@
+package minimax
+
+import (
+    "fmt"
+    relaycommon "one-api/relay/common"
+)
+
+func GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
+    return fmt.Sprintf("%s/v1/text/chatcompletion_v2", info.BaseUrl), nil
+}
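Note (not part of the diff): together with the base URL registered above ("https://api.minimax.chat", //35), this helper resolves requests to MiniMax's v2 chat-completion endpoint. A rough standalone sketch, using a simplified stand-in for relaycommon.RelayInfo:

package main

import "fmt"

// relayInfo is a simplified stand-in for relaycommon.RelayInfo; only BaseUrl matters here.
type relayInfo struct{ BaseUrl string }

func getRequestURL(info *relayInfo) (string, error) {
    return fmt.Sprintf("%s/v1/text/chatcompletion_v2", info.BaseUrl), nil
}

func main() {
    url, _ := getRequestURL(&relayInfo{BaseUrl: "https://api.minimax.chat"})
    fmt.Println(url) // https://api.minimax.chat/v1/text/chatcompletion_v2
}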
@@ -5,3 +5,5 @@ var ModelList = []string{
     "moonshot-v1-32k",
     "moonshot-v1-128k",
 }
+
+var ChannelName = "moonshot"
@@ -11,6 +11,7 @@ import (
     "one-api/relay/channel"
     "one-api/relay/channel/ai360"
     "one-api/relay/channel/lingyiwanwu"
+    "one-api/relay/channel/minimax"
     "one-api/relay/channel/moonshot"
     relaycommon "one-api/relay/common"
     "one-api/service"
@@ -26,7 +27,8 @@ func (a *Adaptor) Init(info *relaycommon.RelayInfo, request dto.GeneralOpenAIReq
 }

 func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
-    if info.ChannelType == common.ChannelTypeAzure {
+    switch info.ChannelType {
+    case common.ChannelTypeAzure:
         // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
         requestURL := strings.Split(info.RequestURLPath, "?")[0]
         requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, info.ApiVersion)
@@ -37,8 +39,15 @@ func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {

         requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
         return relaycommon.GetFullRequestURL(info.BaseUrl, requestURL, info.ChannelType), nil
+    case common.ChannelTypeMiniMax:
+        return minimax.GetRequestURL(info)
+    //case common.ChannelTypeCustom:
+    //	url := info.BaseUrl
+    //	url = strings.Replace(url, "{model}", info.UpstreamModelName, -1)
+    //	return url, nil
+    default:
+        return relaycommon.GetFullRequestURL(info.BaseUrl, info.RequestURLPath, info.ChannelType), nil
     }
-    return relaycommon.GetFullRequestURL(info.BaseUrl, info.RequestURLPath, info.ChannelType), nil
 }

 func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
@@ -90,11 +99,24 @@ func (a *Adaptor) GetModelList() []string {
         return moonshot.ModelList
     case common.ChannelTypeLingYiWanWu:
         return lingyiwanwu.ModelList
+    case common.ChannelTypeMiniMax:
+        return minimax.ModelList
     default:
         return ModelList
     }
 }

 func (a *Adaptor) GetChannelName() string {
-    return ChannelName
+    switch a.ChannelType {
+    case common.ChannelType360:
+        return ai360.ChannelName
+    case common.ChannelTypeMoonshot:
+        return moonshot.ChannelName
+    case common.ChannelTypeLingYiWanWu:
+        return lingyiwanwu.ChannelName
+    case common.ChannelTypeMiniMax:
+        return minimax.ChannelName
+    default:
+        return ChannelName
+    }
 }
@@ -1,7 +1,7 @@
 package perplexity

 var ModelList = []string{
-    "sonar-small-chat", "sonar-small-online", "sonar-medium-chat", "sonar-medium-online", "mistral-7b-instruct", "mixtral-8x7b-instruct",
+    "llama-3-sonar-small-32k-chat", "llama-3-sonar-small-32k-online", "llama-3-sonar-large-32k-chat", "llama-3-sonar-large-32k-online", "llama-3-8b-instruct", "llama-3-70b-instruct", "mixtral-8x7b-instruct",
 }

 var ChannelName = "perplexity"
@@ -69,7 +69,11 @@ func getTokenNum(tokenEncoder *tiktoken.Tiktoken, text string) int {
     return len(tokenEncoder.Encode(text, nil, nil))
 }

-func getImageToken(imageUrl *dto.MessageImageUrl) (int, error) {
+func getImageToken(imageUrl *dto.MessageImageUrl, model string, stream bool) (int, error) {
+    // TODO: do not count image tokens in non-stream mode
+    if model == "glm-4v" {
+        return 1047, nil
+    }
     if imageUrl.Detail == "low" {
         return 85, nil
     }
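Note (not part of the diff): the glm-4v special case now lives inside getImageToken itself instead of in the caller (see the CountTokenMessages hunk further down). A minimal sketch of the branches visible here, with the tile-based high/auto calculation left out:

package main

import "fmt"

// imageTokenSketch mirrors only the branches shown in the hunk above.
func imageTokenSketch(model, detail string) int {
    if model == "glm-4v" {
        return 1047 // flat per-image charge for glm-4v
    }
    if detail == "low" {
        return 85
    }
    return 0 // placeholder: other detail levels keep the original tile-based calculation
}

func main() {
    fmt.Println(imageTokenSketch("glm-4v", "auto")) // 1047
    fmt.Println(imageTokenSketch("gpt-4o", "low"))  // 85
}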
@@ -125,7 +129,7 @@ func getImageToken(imageUrl *dto.MessageImageUrl) (int, error) {

 func CountTokenChatRequest(request dto.GeneralOpenAIRequest, model string, checkSensitive bool) (int, error, bool) {
     tkm := 0
-    msgTokens, err, b := CountTokenMessages(request.Messages, model, checkSensitive)
+    msgTokens, err, b := CountTokenMessages(request.Messages, model, request.Stream, checkSensitive)
     if err != nil {
         return 0, err, b
     }
@@ -158,7 +162,7 @@ func CountTokenChatRequest(request dto.GeneralOpenAIRequest, model string, check
     return tkm, nil, false
 }

-func CountTokenMessages(messages []dto.Message, model string, checkSensitive bool) (int, error, bool) {
+func CountTokenMessages(messages []dto.Message, model string, stream bool, checkSensitive bool) (int, error, bool) {
     //recover when panic
     tokenEncoder := getTokenEncoder(model)
     // Reference:
@@ -195,19 +199,13 @@ func CountTokenMessages(messages []dto.Message, model string, checkSensitive boo
                 tokenNum += getTokenNum(tokenEncoder, *message.Name)
             }
         } else {
-            var err error
             arrayContent := message.ParseContent()
             for _, m := range arrayContent {
                 if m.Type == "image_url" {
-                    var imageTokenNum int
-                    if model == "glm-4v" {
-                        imageTokenNum = 1047
-                    } else {
-                        imageUrl := m.ImageUrl.(dto.MessageImageUrl)
-                        imageTokenNum, err = getImageToken(&imageUrl)
-                        if err != nil {
-                            return 0, err, false
-                        }
+                    imageUrl := m.ImageUrl.(dto.MessageImageUrl)
+                    imageTokenNum, err := getImageToken(&imageUrl, model, stream)
+                    if err != nil {
+                        return 0, err, false
                     }
                     tokenNum += imageTokenNum
                     log.Printf("image token num: %d", imageTokenNum)
@@ -36,13 +36,6 @@ export const CHANNEL_OPTIONS = [
     color: 'teal',
     label: 'Azure OpenAI',
   },
-  {
-    key: 11,
-    text: 'Google PaLM2',
-    value: 11,
-    color: 'orange',
-    label: 'Google PaLM2',
-  },
   {
     key: 24,
     text: 'Google Gemini',
@@ -92,10 +85,18 @@ export const CHANNEL_OPTIONS = [
     color: 'purple',
     label: '智谱 GLM-4V',
   },
+  {
+    key: 11,
+    text: 'Google PaLM2',
+    value: 11,
+    color: 'orange',
+    label: 'Google PaLM2',
+  },
   { key: 25, text: 'Moonshot', value: 25, color: 'green', label: 'Moonshot' },
   { key: 19, text: '360 智脑', value: 19, color: 'blue', label: '360 智脑' },
   { key: 23, text: '腾讯混元', value: 23, color: 'teal', label: '腾讯混元' },
   { key: 31, text: '零一万物', value: 31, color: 'green', label: '零一万物' },
+  { key: 35, text: 'MiniMax', value: 35, color: 'green', label: 'MiniMax' },
   { key: 8, text: '自定义渠道', value: 8, color: 'pink', label: '自定义渠道' },
   {
     key: 22,
@@ -135,7 +135,7 @@ export default function SettingsMagnification(props) {
           <Row gutter={16}>
             <Col span={16}>
               <Form.TextArea
-                label={'模型补全倍率'}
+                label={'模型补全倍率(仅对自定义模型有效)'}
                 extraText={'仅对自定义模型有效'}
                 placeholder={'为一个 JSON 文本,键为模型名称,值为倍率'}
                 field={'CompletionRatio'}