Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-11-02 06:43:41 +08:00)

Compare commits: 6 commits, v0.6.2-alp ... v0.6.2-alp
Commits (SHA1):
- 6ebc99460e
- 27ad8bfb98
- 8388aa537f
- 2346bf70af
- f05b403ca5
- b33616df44
@@ -78,6 +78,7 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
+ [x] [百川大模型](https://platform.baichuan-ai.com)
+ [ ] [字节云雀大模型](https://www.volcengine.com/product/ark) (WIP)
+ [x] [MINIMAX](https://api.minimax.chat/)
+ [x] [Groq](https://wow.groq.com/)
2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
3. 支持通过**负载均衡**的方式访问多个渠道。
4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。
common/blacklist/main.go (new file, 29 lines)
@@ -0,0 +1,29 @@
package blacklist

import (
	"fmt"
	"sync"
)

var blackList sync.Map

func init() {
	blackList = sync.Map{}
}

func userId2Key(id int) string {
	return fmt.Sprintf("userid_%d", id)
}

func BanUser(id int) {
	blackList.Store(userId2Key(id), true)
}

func UnbanUser(id int) {
	blackList.Delete(userId2Key(id))
}

func IsUserBanned(id int) bool {
	_, ok := blackList.Load(userId2Key(id))
	return ok
}
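
The new blacklist package keeps an in-memory set of banned user IDs keyed by "userid_<id>". A minimal usage sketch of the exported functions above (the main wrapper is illustrative, not part of the diff):

	package main

	import (
		"fmt"

		"github.com/songquanpeng/one-api/common/blacklist"
	)

	func main() {
		blacklist.BanUser(42)                   // disabling or deleting a user stores an entry in the sync.Map
		fmt.Println(blacklist.IsUserBanned(42)) // true
		blacklist.UnbanUser(42)                 // re-enabling a user removes the entry
		fmt.Println(blacklist.IsUserBanned(42)) // false
	}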
@@ -52,6 +52,7 @@ var EmailDomainWhitelist = []string{
}

var DebugEnabled = os.Getenv("DEBUG") == "true"
+var DebugSQLEnabled = os.Getenv("DEBUG_SQL") == "true"
var MemoryCacheEnabled = os.Getenv("MEMORY_CACHE_ENABLED") == "true"

var LogConsumeEnabled = true
@@ -15,6 +15,7 @@ const (
const (
	UserStatusEnabled  = 1 // don't use 0, 0 is the default value!
	UserStatusDisabled = 2 // also don't use 0
+	UserStatusDeleted  = 3
)

const (

@@ -67,6 +68,7 @@ const (
	ChannelTypeBaichuan
	ChannelTypeMinimax
	ChannelTypeMistral
+	ChannelTypeGroq

	ChannelTypeDummy
)

@@ -101,6 +103,7 @@ var ChannelBaseURLs = []string{
	"https://api.baichuan-ai.com", // 26
	"https://api.minimax.chat",    // 27
	"https://api.mistral.ai",      // 28
+	"https://api.groq.com/openai", // 29
}

const (
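
The channel type constants index directly into ChannelBaseURLs, which is why the new Groq entry carries the "// 29" comment. A quick check of that correspondence (illustrative only, assuming the iota ordering shown above):

	package main

	import (
		"fmt"

		"github.com/songquanpeng/one-api/common"
	)

	func main() {
		// ChannelTypeGroq should be 29 given the iota list above, so it selects the Groq base URL.
		fmt.Println(common.ChannelTypeGroq)                         // 29
		fmt.Println(common.ChannelBaseURLs[common.ChannelTypeGroq]) // https://api.groq.com/openai
	}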
@@ -125,6 +125,11 @@ var ModelRatio = map[string]float64{
	"mistral-medium-latest": 2.7 / 1000 * USD,
	"mistral-large-latest":  8.0 / 1000 * USD,
	"mistral-embed":         0.1 / 1000 * USD,
+	// https://wow.groq.com/
+	"llama2-70b-4096":    0.7 / 1000 * USD,
+	"llama2-7b-2048":     0.1 / 1000 * USD,
+	"mixtral-8x7b-32768": 0.27 / 1000 * USD,
+	"gemma-7b-it":        0.1 / 1000 * USD,
}

var CompletionRatio = map[string]float64{}

@@ -209,7 +214,7 @@ func GetCompletionRatio(name string) float64 {
				return 2
			}
		}
-		return 1.333333
+		return 4.0 / 3.0
	}
	if strings.HasPrefix(name, "gpt-4") {
		if strings.HasSuffix(name, "preview") {

@@ -226,5 +231,9 @@ func GetCompletionRatio(name string) float64 {
	if strings.HasPrefix(name, "mistral-") {
		return 3
	}
+	switch name {
+	case "llama2-70b-4096":
+		return 0.8 / 0.7
+	}
	return 1
}
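
GetCompletionRatio scales the price of completion tokens relative to prompt tokens, so the 0.8 / 0.7 entry reflects Groq's asymmetric input/output pricing for llama2-70b-4096. Roughly how a quota estimate would combine the two ratios — a sketch assuming common.GetModelRatio and common.GetCompletionRatio are the lookup helpers, and using one-api's conventional prompt + completion * ratio formula rather than the exact billing code:

	package main

	import (
		"fmt"

		"github.com/songquanpeng/one-api/common"
	)

	// estimateQuota is a hypothetical helper, not part of the repository.
	func estimateQuota(model string, promptTokens, completionTokens int) float64 {
		modelRatio := common.GetModelRatio(model)
		completionRatio := common.GetCompletionRatio(model) // 0.8 / 0.7 for llama2-70b-4096 after this change
		return float64(promptTokens)*modelRatio + float64(completionTokens)*modelRatio*completionRatio
	}

	func main() {
		fmt.Println(estimateQuota("llama2-70b-4096", 1000, 500))
	}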
@@ -4,11 +4,7 @@ import (
	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/relay/channel/ai360"
	"github.com/songquanpeng/one-api/relay/channel/baichuan"
	"github.com/songquanpeng/one-api/relay/channel/minimax"
	"github.com/songquanpeng/one-api/relay/channel/mistral"
	"github.com/songquanpeng/one-api/relay/channel/moonshot"
	"github.com/songquanpeng/one-api/relay/channel/openai"
	"github.com/songquanpeng/one-api/relay/constant"
	"github.com/songquanpeng/one-api/relay/helper"
	relaymodel "github.com/songquanpeng/one-api/relay/model"
@@ -83,60 +79,22 @@ func init() {
			})
		}
	}
-	for _, modelName := range ai360.ModelList {
-		openAIModels = append(openAIModels, OpenAIModels{
-			Id:         modelName,
-			Object:     "model",
-			Created:    1626777600,
-			OwnedBy:    "360",
-			Permission: permission,
-			Root:       modelName,
-			Parent:     nil,
-		})
-	}
-	for _, modelName := range moonshot.ModelList {
-		openAIModels = append(openAIModels, OpenAIModels{
-			Id:         modelName,
-			Object:     "model",
-			Created:    1626777600,
-			OwnedBy:    "moonshot",
-			Permission: permission,
-			Root:       modelName,
-			Parent:     nil,
-		})
-	}
-	for _, modelName := range baichuan.ModelList {
-		openAIModels = append(openAIModels, OpenAIModels{
-			Id:         modelName,
-			Object:     "model",
-			Created:    1626777600,
-			OwnedBy:    "baichuan",
-			Permission: permission,
-			Root:       modelName,
-			Parent:     nil,
-		})
-	}
-	for _, modelName := range minimax.ModelList {
-		openAIModels = append(openAIModels, OpenAIModels{
-			Id:         modelName,
-			Object:     "model",
-			Created:    1626777600,
-			OwnedBy:    "minimax",
-			Permission: permission,
-			Root:       modelName,
-			Parent:     nil,
-		})
-	}
-	for _, modelName := range mistral.ModelList {
-		openAIModels = append(openAIModels, OpenAIModels{
-			Id:         modelName,
-			Object:     "model",
-			Created:    1626777600,
-			OwnedBy:    "mistralai",
-			Permission: permission,
-			Root:       modelName,
-			Parent:     nil,
-		})
-	}
+	for _, channelType := range openai.CompatibleChannels {
+		if channelType == common.ChannelTypeAzure {
+			continue
+		}
+		channelName, channelModelList := openai.GetCompatibleChannelMeta(channelType)
+		for _, modelName := range channelModelList {
+			openAIModels = append(openAIModels, OpenAIModels{
+				Id:         modelName,
+				Object:     "model",
+				Created:    1626777600,
+				OwnedBy:    channelName,
+				Permission: permission,
+				Root:       modelName,
+				Parent:     nil,
+			})
+		}
+	}
	openAIModelsMap = make(map[string]OpenAIModels)
	for _, model := range openAIModels {
@@ -4,6 +4,7 @@ import (
	"github.com/gin-contrib/sessions"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
+	"github.com/songquanpeng/one-api/common/blacklist"
	"github.com/songquanpeng/one-api/model"
	"net/http"
	"strings"
@@ -42,11 +43,14 @@ func authHelper(c *gin.Context, minRole int) {
			return
		}
	}
-	if status.(int) == common.UserStatusDisabled {
+	if status.(int) == common.UserStatusDisabled || blacklist.IsUserBanned(id.(int)) {
		c.JSON(http.StatusOK, gin.H{
			"success": false,
			"message": "用户已被封禁",
		})
+		session := sessions.Default(c)
+		session.Clear()
+		_ = session.Save()
		c.Abort()
		return
	}
@@ -99,7 +103,7 @@ func TokenAuth() func(c *gin.Context) {
		abortWithMessage(c, http.StatusInternalServerError, err.Error())
		return
	}
-	if !userEnabled {
+	if !userEnabled || blacklist.IsUserBanned(token.UserId) {
		abortWithMessage(c, http.StatusForbidden, "用户已被封禁")
		return
	}
@@ -72,7 +72,7 @@ func chooseDB() (*gorm.DB, error) {
func InitDB() (err error) {
	db, err := chooseDB()
	if err == nil {
-		if config.DebugEnabled {
+		if config.DebugSQLEnabled {
			db = db.Debug()
		}
		DB = db
@@ -4,6 +4,7 @@ import (
	"errors"
	"fmt"
	"github.com/songquanpeng/one-api/common"
+	"github.com/songquanpeng/one-api/common/blacklist"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/helper"
	"github.com/songquanpeng/one-api/common/logger"
@@ -40,7 +41,7 @@ func GetMaxUserId() int {
}

func GetAllUsers(startIdx int, num int) (users []*User, err error) {
-	err = DB.Order("id desc").Limit(num).Offset(startIdx).Omit("password").Find(&users).Error
+	err = DB.Order("id desc").Limit(num).Offset(startIdx).Omit("password").Where("status != ?", common.UserStatusDeleted).Find(&users).Error
	return users, err
}
@@ -123,6 +124,11 @@ func (user *User) Update(updatePassword bool) error {
			return err
		}
	}
+	if user.Status == common.UserStatusDisabled {
+		blacklist.BanUser(user.Id)
+	} else if user.Status == common.UserStatusEnabled {
+		blacklist.UnbanUser(user.Id)
+	}
	err = DB.Model(user).Updates(user).Error
	return err
}
@@ -131,7 +137,10 @@ func (user *User) Delete() error {
	if user.Id == 0 {
		return errors.New("id 为空!")
	}
-	err := DB.Delete(user).Error
+	blacklist.BanUser(user.Id)
+	user.Username = fmt.Sprintf("deleted_%s", helper.GetUUID())
+	user.Status = common.UserStatusDeleted
+	err := DB.Model(user).Updates(user).Error
	return err
}
@@ -2,6 +2,7 @@ package baidu

import (
	"errors"
+	"fmt"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/relay/channel"
	"github.com/songquanpeng/one-api/relay/constant"
@@ -9,6 +10,7 @@ import (
	"github.com/songquanpeng/one-api/relay/util"
	"io"
	"net/http"
+	"strings"
)

type Adaptor struct {
@@ -20,25 +22,33 @@ func (a *Adaptor) Init(meta *util.RelayMeta) {
func (a *Adaptor) GetRequestURL(meta *util.RelayMeta) (string, error) {
	// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/clntwmv7t
-	var fullRequestURL string
-	switch meta.ActualModelName {
-	case "ERNIE-Bot-4":
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro"
-	case "ERNIE-Bot-8K":
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_bot_8k"
-	case "ERNIE-Bot":
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
-	case "ERNIE-Speed":
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed"
-	case "ERNIE-Bot-turbo":
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
-	case "BLOOMZ-7B":
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1"
-	case "Embedding-V1":
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1"
-	default:
-		fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/" + meta.ActualModelName
-	}
+	suffix := "chat/"
+	if strings.HasPrefix("Embedding", meta.ActualModelName) {
+		suffix = "embeddings/"
+	}
+	switch meta.ActualModelName {
+	case "ERNIE-4.0":
+		suffix += "completions_pro"
+	case "ERNIE-Bot-4":
+		suffix += "completions_pro"
+	case "ERNIE-3.5-8K":
+		suffix += "completions"
+	case "ERNIE-Bot-8K":
+		suffix += "ernie_bot_8k"
+	case "ERNIE-Bot":
+		suffix += "completions"
+	case "ERNIE-Speed":
+		suffix += "ernie_speed"
+	case "ERNIE-Bot-turbo":
+		suffix += "eb-instant"
+	case "BLOOMZ-7B":
+		suffix += "bloomz_7b1"
+	case "Embedding-V1":
+		suffix += "embedding-v1"
+	default:
+		suffix += meta.ActualModelName
+	}
+	fullRequestURL := fmt.Sprintf("%s/rpc/2.0/ai_custom/v1/wenxinworkshop/%s", meta.BaseURL, suffix)
	var accessToken string
	var err error
	if accessToken, err = GetAccessToken(meta.APIKey); err != nil {
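
With the new logic the endpoint is assembled from meta.BaseURL plus a per-model suffix instead of a hard-coded URL per case. A standalone restatement of that construction (buildBaiduURL is a hypothetical helper covering two of the cases, not the repository function):

	package main

	import "fmt"

	// buildBaiduURL mirrors the suffix logic above for illustration only.
	func buildBaiduURL(baseURL, model string) string {
		suffix := "chat/"
		switch model {
		case "ERNIE-Bot-8K":
			suffix += "ernie_bot_8k"
		case "Embedding-V1":
			suffix = "embeddings/embedding-v1"
		default:
			suffix += model
		}
		return fmt.Sprintf("%s/rpc/2.0/ai_custom/v1/wenxinworkshop/%s", baseURL, suffix)
	}

	func main() {
		fmt.Println(buildBaiduURL("https://aip.baidubce.com", "ERNIE-Bot-8K"))
		// https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_bot_8k
	}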
relay/channel/groq/constants.go (new file, 10 lines)
@@ -0,0 +1,10 @@
package groq

// https://console.groq.com/docs/models

var ModelList = []string{
	"gemma-7b-it",
	"llama2-7b-2048",
	"llama2-70b-4096",
	"mixtral-8x7b-32768",
}
@@ -6,11 +6,7 @@ import (
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/relay/channel"
	"github.com/songquanpeng/one-api/relay/channel/ai360"
	"github.com/songquanpeng/one-api/relay/channel/baichuan"
	"github.com/songquanpeng/one-api/relay/channel/minimax"
	"github.com/songquanpeng/one-api/relay/channel/mistral"
	"github.com/songquanpeng/one-api/relay/channel/moonshot"
	"github.com/songquanpeng/one-api/relay/model"
	"github.com/songquanpeng/one-api/relay/util"
	"io"
@@ -86,37 +82,11 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *util.Rel
}

func (a *Adaptor) GetModelList() []string {
-	switch a.ChannelType {
-	case common.ChannelType360:
-		return ai360.ModelList
-	case common.ChannelTypeMoonshot:
-		return moonshot.ModelList
-	case common.ChannelTypeBaichuan:
-		return baichuan.ModelList
-	case common.ChannelTypeMinimax:
-		return minimax.ModelList
-	case common.ChannelTypeMistral:
-		return mistral.ModelList
-	default:
-		return ModelList
-	}
+	_, modelList := GetCompatibleChannelMeta(a.ChannelType)
+	return modelList
}

func (a *Adaptor) GetChannelName() string {
-	switch a.ChannelType {
-	case common.ChannelTypeAzure:
-		return "azure"
-	case common.ChannelType360:
-		return "360"
-	case common.ChannelTypeMoonshot:
-		return "moonshot"
-	case common.ChannelTypeBaichuan:
-		return "baichuan"
-	case common.ChannelTypeMinimax:
-		return "minimax"
-	case common.ChannelTypeMistral:
-		return "mistralai"
-	default:
-		return "openai"
-	}
+	channelName, _ := GetCompatibleChannelMeta(a.ChannelType)
+	return channelName
}
relay/channel/openai/compatible.go (new file, 42 lines)
@@ -0,0 +1,42 @@
package openai

import (
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/relay/channel/ai360"
	"github.com/songquanpeng/one-api/relay/channel/baichuan"
	"github.com/songquanpeng/one-api/relay/channel/groq"
	"github.com/songquanpeng/one-api/relay/channel/minimax"
	"github.com/songquanpeng/one-api/relay/channel/mistral"
	"github.com/songquanpeng/one-api/relay/channel/moonshot"
)

var CompatibleChannels = []int{
	common.ChannelTypeAzure,
	common.ChannelType360,
	common.ChannelTypeMoonshot,
	common.ChannelTypeBaichuan,
	common.ChannelTypeMinimax,
	common.ChannelTypeMistral,
	common.ChannelTypeGroq,
}

func GetCompatibleChannelMeta(channelType int) (string, []string) {
	switch channelType {
	case common.ChannelTypeAzure:
		return "azure", ModelList
	case common.ChannelType360:
		return "360", ai360.ModelList
	case common.ChannelTypeMoonshot:
		return "moonshot", moonshot.ModelList
	case common.ChannelTypeBaichuan:
		return "baichuan", baichuan.ModelList
	case common.ChannelTypeMinimax:
		return "minimax", minimax.ModelList
	case common.ChannelTypeMistral:
		return "mistralai", mistral.ModelList
	case common.ChannelTypeGroq:
		return "groq", groq.ModelList
	default:
		return "openai", ModelList
	}
}
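
compatible.go is now the single table that maps an OpenAI-compatible channel type to its display name and model list; both the /v1/models listing and the adaptor above read from it. A minimal caller sketch (assuming the import path shown in the diff):

	package main

	import (
		"fmt"

		"github.com/songquanpeng/one-api/relay/channel/openai"
	)

	func main() {
		// Every OpenAI-compatible channel resolves its name and model list from one place.
		for _, channelType := range openai.CompatibleChannels {
			name, models := openai.GetCompatibleChannelMeta(channelType)
			fmt.Printf("channel %d -> %s (%d models)\n", channelType, name, len(models))
		}
	}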
@@ -28,17 +28,6 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
	messages := make([]Message, 0, len(request.Messages))
	for i := 0; i < len(request.Messages); i++ {
		message := request.Messages[i]
-		if message.Role == "system" {
-			messages = append(messages, Message{
-				Role:    "user",
-				Content: message.StringContent(),
-			})
-			messages = append(messages, Message{
-				Role:    "assistant",
-				Content: "Okay",
-			})
-			continue
-		}
		messages = append(messages, Message{
			Content: message.StringContent(),
			Role:    message.Role,
@@ -27,21 +27,10 @@ import (
func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string, domain string) *ChatRequest {
	messages := make([]Message, 0, len(request.Messages))
	for _, message := range request.Messages {
-		if message.Role == "system" {
-			messages = append(messages, Message{
-				Role:    "user",
-				Content: message.StringContent(),
-			})
-			messages = append(messages, Message{
-				Role:    "assistant",
-				Content: "Okay",
-			})
-		} else {
-			messages = append(messages, Message{
-				Role:    message.Role,
-				Content: message.StringContent(),
-			})
-		}
+		messages = append(messages, Message{
+			Role:    message.Role,
+			Content: message.StringContent(),
+		})
	}
	xunfeiRequest := ChatRequest{}
	xunfeiRequest.Header.AppId = xunfeiAppId
@@ -76,21 +76,10 @@ func GetToken(apikey string) string {
func ConvertRequest(request model.GeneralOpenAIRequest) *Request {
	messages := make([]Message, 0, len(request.Messages))
	for _, message := range request.Messages {
-		if message.Role == "system" {
-			messages = append(messages, Message{
-				Role:    "system",
-				Content: message.StringContent(),
-			})
-			messages = append(messages, Message{
-				Role:    "user",
-				Content: "Okay",
-			})
-		} else {
-			messages = append(messages, Message{
-				Role:    message.Role,
-				Content: message.StringContent(),
-			})
-		}
+		messages = append(messages, Message{
+			Role:    message.Role,
+			Content: message.StringContent(),
+		})
	}
	return &Request{
		Prompt: messages,
@@ -83,11 +83,12 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
		logger.Errorf(ctx, "DoRequest failed: %s", err.Error())
		return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
	}
+	meta.IsStream = meta.IsStream || strings.HasPrefix(resp.Header.Get("Content-Type"), "text/event-stream")
-	if resp.StatusCode != http.StatusOK {
+	errorHappened := (resp.StatusCode != http.StatusOK) || (meta.IsStream && resp.Header.Get("Content-Type") == "application/json")
+	if errorHappened {
		util.ReturnPreConsumedQuota(ctx, preConsumedQuota, meta.TokenId)
		return util.RelayErrorHandler(resp)
	}
-	meta.IsStream = meta.IsStream || strings.HasPrefix(resp.Header.Get("Content-Type"), "text/event-stream")

	// do response
	usage, respErr := adaptor.DoResponse(c, resp, meta)
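
The relay now detects the stream flag before error handling and treats an application/json body on a stream request as an upstream failure, since SSE responses should arrive as text/event-stream. A minimal illustration of that check (upstreamFailed is a standalone sketch, not the repository code):

	package main

	import (
		"fmt"
		"net/http"
		"strings"
	)

	// upstreamFailed mirrors the errorHappened condition above, for illustration only.
	func upstreamFailed(resp *http.Response, isStream bool) bool {
		contentType := resp.Header.Get("Content-Type")
		isStream = isStream || strings.HasPrefix(contentType, "text/event-stream")
		return resp.StatusCode != http.StatusOK || (isStream && contentType == "application/json")
	}

	func main() {
		resp := &http.Response{
			StatusCode: http.StatusOK,
			Header:     http.Header{"Content-Type": []string{"application/json"}},
		}
		fmt.Println(upstreamFailed(resp, true)) // true: a JSON body on a stream request signals an upstream error
	}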
@@ -15,7 +15,7 @@ export const CHANNEL_OPTIONS = {
    key: 3,
    text: 'Azure OpenAI',
    value: 3,
-    color: 'orange'
+    color: 'secondary'
  },
  11: {
    key: 11,
@@ -89,6 +89,12 @@ export const CHANNEL_OPTIONS = {
    value: 27,
    color: 'default'
  },
+  29: {
+    key: 29,
+    text: 'Groq',
+    value: 29,
+    color: 'default'
+  },
  8: {
    key: 8,
    text: '自定义渠道',
@@ -163,6 +163,9 @@ const typeConfig = {
    },
    modelGroup: "minimax",
  },
+  29: {
+    modelGroup: "groq",
+  },
};

export { defaultConfig, typeConfig };
@@ -14,6 +14,7 @@ export const CHANNEL_OPTIONS = [
  { key: 23, text: '腾讯混元', value: 23, color: 'teal' },
  { key: 26, text: '百川大模型', value: 26, color: 'orange' },
  { key: 27, text: 'MiniMax', value: 27, color: 'red' },
+  { key: 29, text: 'Groq', value: 29, color: 'orange' },
  { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
  { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
  { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },
@@ -1,7 +1,7 @@
import React, { useEffect, useState } from 'react';
import { Button, Form, Header, Input, Message, Segment } from 'semantic-ui-react';
import { useNavigate, useParams } from 'react-router-dom';
-import { API, getChannelModels, showError, showInfo, showSuccess, verifyJSON } from '../../helpers';
+import { API, copy, getChannelModels, showError, showInfo, showSuccess, verifyJSON } from '../../helpers';
import { CHANNEL_OPTIONS } from '../../constants';

const MODEL_MAPPING_EXAMPLE = {
@@ -214,6 +214,7 @@ const EditChannel = () => {
  label='类型'
  name='type'
  required
+  search
  options={CHANNEL_OPTIONS}
  value={inputs.type}
  onChange={handleInputChange}
@@ -342,6 +343,8 @@ const EditChannel = () => {
  required
  fluid
  multiple
+  search
+  onLabelClick={(e, { value }) => {copy(value).then()}}
  selection
  onChange={handleInputChange}
  value={inputs.models}