Merge commit '3d149fedf45472eff92910324974c762fc37dad6'

Laisky.Cai 2024-04-21 15:05:13 +00:00
commit 7047d9605e
45 changed files with 649 additions and 223 deletions

View File

@ -1,12 +0,0 @@
package config
const (
KeyPrefix = "cfg_"
KeyAPIVersion = KeyPrefix + "api_version"
KeyLibraryID = KeyPrefix + "library_id"
KeyPlugin = KeyPrefix + "plugin"
KeySK = KeyPrefix + "sk"
KeyAK = KeyPrefix + "ak"
KeyRegion = KeyPrefix + "region"
)

View File

@ -4,12 +4,3 @@ import "time"
var StartTime = time.Now().Unix() // unit: second
var Version = "v0.0.0" // this hard coding will be replaced automatically when building, no need to manually change
var (
// CtxKeyChannel is the key to store the channel in the context
CtxKeyChannel string = "channel_docu"
CtxKeyRequestModel string = "request_model"
CtxKeyRawRequest string = "raw_request"
CtxKeyConvertedRequest string = "converted_request"
CtxKeyOriginModel string = "origin_model"
)

common/ctxkey/config.go Normal file
View File

@ -0,0 +1,13 @@
package ctxkey
const (
ConfigPrefix = "cfg_"
ConfigAPIVersion = ConfigPrefix + "api_version"
ConfigLibraryID = ConfigPrefix + "library_id"
ConfigPlugin = ConfigPrefix + "plugin"
ConfigSK = ConfigPrefix + "sk"
ConfigAK = ConfigPrefix + "ak"
ConfigRegion = ConfigPrefix + "region"
ConfigUserID = ConfigPrefix + "user_id"
)

View File

@ -1,7 +1,23 @@
package ctxkey
var (
const (
Id = "id"
Username = "username"
Role = "role"
Status = "status"
ChannelModel = "channel_model"
ChannelRatio = "channel_ratio"
Channel = "channel"
ChannelId = "channel_id"
SpecificChannelId = "specific_channel_id"
RequestModel = "request_model"
ConvertedRequest = "converted_request"
OriginalModel = "original_model"
Group = "group"
ModelMapping = "model_mapping"
ChannelName = "channel_name"
TokenId = "token_id"
TokenName = "token_name"
BaseURL = "base_url"
AvailableModels = "available_models"
)
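
Note: the core refactor in this commit replaces raw string literals used as gin context keys with the shared constants above. A minimal standalone sketch of the set/get round trip (the local constants below merely stand in for the real common/ctxkey package):

package main

import (
    "fmt"
    "net/http/httptest"

    "github.com/gin-gonic/gin"
)

// Stand-ins for ctxkey.Id and ctxkey.TokenId from the hunk above.
const (
    keyId      = "id"
    keyTokenId = "token_id"
)

func main() {
    // Throwaway gin context, just to demonstrate the round trip.
    c, _ := gin.CreateTestContext(httptest.NewRecorder())

    // Middleware stores values under the shared constants...
    c.Set(keyId, 42)
    c.Set(keyTokenId, 7)

    // ...and handlers read them back with the same constants instead of
    // repeating the raw strings "id" and "token_id" everywhere.
    fmt.Println(c.GetInt(keyId), c.GetInt(keyTokenId)) // 42 7
}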

View File

@ -3,15 +3,16 @@ package logger
import (
"context"
"fmt"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/helper"
"github.com/gin-gonic/gin"
"io"
"log"
"os"
"path/filepath"
"sync"
"time"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/helper"
"github.com/gin-gonic/gin"
)
const (
@ -21,20 +22,11 @@ const (
loggerError = "ERR"
)
var setupLogLock sync.Mutex
var setupLogWorking bool
var setupLogOnce sync.Once
func SetupLogger() {
setupLogOnce.Do(func() {
if LogDir != "" {
ok := setupLogLock.TryLock()
if !ok {
log.Println("setup log is already working")
return
}
defer func() {
setupLogLock.Unlock()
setupLogWorking = false
}()
logPath := filepath.Join(LogDir, fmt.Sprintf("oneapi-%s.log", time.Now().Format("20060102")))
fd, err := os.OpenFile(logPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
@ -43,6 +35,7 @@ func SetupLogger() {
gin.DefaultWriter = io.MultiWriter(os.Stdout, fd)
gin.DefaultErrorWriter = io.MultiWriter(os.Stderr, fd)
}
})
}
func SysLog(s string) {
@ -100,12 +93,7 @@ func logHelper(ctx context.Context, level string, msg string) {
}
now := time.Now()
_, _ = fmt.Fprintf(writer, "[%s] %v | %s | %s \n", level, now.Format("2006/01/02 - 15:04:05"), id, msg)
if !setupLogWorking {
setupLogWorking = true
go func() {
SetupLogger()
}()
}
}
func FatalLog(v ...any) {
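
Note: the SetupLogger change above drops the hand-rolled TryLock/boolean guard in favor of sync.Once, which already guarantees the initialization body runs exactly once no matter how many goroutines race into it. A minimal illustrative sketch of that guarantee (not the repo's code):

package main

import (
    "fmt"
    "sync"
)

var setupOnce sync.Once

func setup() {
    setupOnce.Do(func() {
        fmt.Println("initializing log writer") // printed exactly once
    })
}

func main() {
    var wg sync.WaitGroup
    for i := 0; i < 5; i++ {
        wg.Add(1)
        go func() {
            defer wg.Done()
            setup()
        }()
    }
    wg.Wait()
}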

View File

@ -3,14 +3,16 @@ package auth
import (
"encoding/json"
"fmt"
"github.com/Laisky/errors/v2"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/controller"
"github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
"net/http"
"strconv"
"time"
"github.com/Laisky/errors/v2"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/controller"
"github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
)
type wechatLoginResponse struct {
@ -136,7 +138,7 @@ func WeChatBind(c *gin.Context) {
})
return
}
id := c.GetInt("id")
id := c.GetInt(ctxkey.Id)
user := model.User{
Id: id,
}

View File

@ -2,6 +2,7 @@ package controller
import (
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/model"
relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
@ -14,13 +15,13 @@ func GetSubscription(c *gin.Context) {
var token *model.Token
var expiredTime int64
if config.DisplayTokenStatEnabled {
tokenId := c.GetInt("token_id")
tokenId := c.GetInt(ctxkey.TokenId)
token, err = model.GetTokenById(tokenId)
expiredTime = token.ExpiredTime
remainQuota = token.RemainQuota
usedQuota = token.UsedQuota
} else {
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
remainQuota, err = model.GetUserQuota(userId)
if err != nil {
usedQuota, err = model.GetUserUsedQuota(userId)
@ -64,11 +65,11 @@ func GetUsage(c *gin.Context) {
var err error
var token *model.Token
if config.DisplayTokenStatEnabled {
tokenId := c.GetInt("token_id")
tokenId := c.GetInt(ctxkey.TokenId)
token, err = model.GetTokenById(tokenId)
quota = token.UsedQuota
} else {
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
quota, err = model.GetUserUsedQuota(userId)
}
if err != nil {

View File

@ -15,6 +15,7 @@ import (
"github.com/Laisky/errors/v2"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/common/message"
"github.com/Laisky/one-api/middleware"
@ -26,7 +27,6 @@ import (
"github.com/Laisky/one-api/relay/meta"
relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
)
@ -55,8 +55,8 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
}
c.Request.Header.Set("Authorization", "Bearer "+channel.Key)
c.Request.Header.Set("Content-Type", "application/json")
c.Set("channel", channel.Type)
c.Set("base_url", channel.GetBaseURL())
c.Set(ctxkey.Channel, channel.Type)
c.Set(ctxkey.BaseURL, channel.GetBaseURL())
middleware.SetupContextForSelectedChannel(c, channel, "")
meta := meta.GetByContext(c)
apiType := channeltype.ToAPIType(channel.Type)
@ -65,8 +65,12 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
return errors.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
}
adaptor.Init(meta)
modelName := adaptor.GetModelList()[0]
if !strings.Contains(channel.Models, modelName) {
var modelName string
modelList := adaptor.GetModelList()
if len(modelList) != 0 {
modelName = modelList[0]
}
if modelName == "" || !strings.Contains(channel.Models, modelName) {
modelNames := strings.Split(channel.Models, ",")
if len(modelNames) > 0 {
modelName = modelNames[0]
@ -83,6 +87,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
if err != nil {
return err, nil
}
logger.SysLog(string(jsonData))
requestBody := bytes.NewBuffer(jsonData)
c.Request.Body = io.NopCloser(requestBody)
resp, err := adaptor.DoRequest(c, meta, requestBody)
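
Note: the hunk above makes testChannel tolerate adaptors that return an empty model list. An illustrative standalone extraction of that fallback (the name pickTestModel is hypothetical, not the repo's API):

package main

import (
    "fmt"
    "strings"
)

// Prefer the adaptor's first listed model; fall back to the channel's own
// configured models when the list is empty or does not match.
func pickTestModel(adaptorModels []string, channelModels string) string {
    var modelName string
    if len(adaptorModels) != 0 {
        modelName = adaptorModels[0]
    }
    if modelName == "" || !strings.Contains(channelModels, modelName) {
        if names := strings.Split(channelModels, ","); len(names) > 0 {
            modelName = names[0]
        }
    }
    return modelName
}

func main() {
    fmt.Println(pickTestModel(nil, "bot-123456,bot-789"))          // bot-123456
    fmt.Println(pickTestModel([]string{"gpt-3.5-turbo"}, "gpt-4")) // gpt-4
}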

View File

@ -1,11 +1,13 @@
package controller
import (
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
"net/http"
"strconv"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
)
func GetAllLogs(c *gin.Context) {
@ -41,7 +43,7 @@ func GetUserLogs(c *gin.Context) {
if p < 0 {
p = 0
}
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
logType, _ := strconv.Atoi(c.Query("type"))
startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)
@ -83,7 +85,7 @@ func SearchAllLogs(c *gin.Context) {
func SearchUserLogs(c *gin.Context) {
keyword := c.Query("keyword")
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
logs, err := model.SearchUserLogs(userId, keyword)
if err != nil {
c.JSON(http.StatusOK, gin.H{
@ -122,7 +124,7 @@ func GetLogsStat(c *gin.Context) {
}
func GetLogsSelfStat(c *gin.Context) {
username := c.GetString("username")
username := c.GetString(ctxkey.Username)
logType, _ := strconv.Atoi(c.Query("type"))
startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)

View File

@ -2,6 +2,10 @@ package controller
import (
"fmt"
"net/http"
"strings"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/model"
relay "github.com/Laisky/one-api/relay"
"github.com/Laisky/one-api/relay/adaptor/openai"
@ -10,8 +14,6 @@ import (
"github.com/Laisky/one-api/relay/meta"
relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
"net/http"
"strings"
)
// https://platform.openai.com/docs/api-reference/models/list
@ -139,10 +141,10 @@ func ListAllModels(c *gin.Context) {
func ListModels(c *gin.Context) {
ctx := c.Request.Context()
var availableModels []string
if c.GetString("available_models") != "" {
availableModels = strings.Split(c.GetString("available_models"), ",")
if c.GetString(ctxkey.AvailableModels) != "" {
availableModels = strings.Split(c.GetString(ctxkey.AvailableModels), ",")
} else {
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
userGroup, _ := model.CacheGetUserGroup(userId)
availableModels, _ = model.CacheGetGroupModels(ctx, userGroup)
}
@ -194,7 +196,7 @@ func RetrieveModel(c *gin.Context) {
func GetUserAvailableModels(c *gin.Context) {
ctx := c.Request.Context()
id := c.GetInt("id")
id := c.GetInt(ctxkey.Id)
userGroup, err := model.CacheGetUserGroup(id)
if err != nil {
c.JSON(http.StatusOK, gin.H{

View File

@ -1,13 +1,15 @@
package controller
import (
"net/http"
"strconv"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/helper"
"github.com/Laisky/one-api/common/random"
"github.com/Laisky/one-api/model"
"github.com/gin-gonic/gin"
"net/http"
"strconv"
)
func GetAllRedemptions(c *gin.Context) {
@ -109,7 +111,7 @@ func AddRedemption(c *gin.Context) {
for i := 0; i < redemption.Count; i++ {
key := random.GetUUID()
cleanRedemption := model.Redemption{
UserId: c.GetInt("id"),
UserId: c.GetInt(ctxkey.Id),
Name: redemption.Name,
Key: key,
CreatedTime: helper.GetTimestamp(),

View File

@ -48,15 +48,15 @@ func Relay(c *gin.Context) {
requestBody, _ := common.GetRequestBody(c)
logger.Debugf(ctx, "request body: %s", string(requestBody))
}
channelId := c.GetInt("channel_id")
channelId := c.GetInt(ctxkey.ChannelId)
bizErr := relayHelper(c, relayMode)
if bizErr == nil {
monitor.Emit(channelId, true)
return
}
lastFailedChannelId := channelId
channelName := c.GetString("channel_name")
group := c.GetString("group")
channelName := c.GetString(ctxkey.ChannelName)
group := c.GetString(ctxkey.Group)
originalModel := c.GetString(ctxkey.OriginalModel)
go processChannelRelayError(ctx, channelId, channelName, bizErr)
requestId := c.GetString(logger.RequestIdKey)
@ -82,9 +82,9 @@ func Relay(c *gin.Context) {
if bizErr == nil {
return
}
channelId := c.GetInt("channel_id")
channelId := c.GetInt(ctxkey.ChannelId)
lastFailedChannelId = channelId
channelName := c.GetString("channel_name")
channelName := c.GetString(ctxkey.ChannelName)
go processChannelRelayError(ctx, channelId, channelName, bizErr)
}
if bizErr != nil {
@ -100,14 +100,14 @@ func Relay(c *gin.Context) {
// shouldRetry returns nil if should retry, otherwise returns error
func shouldRetry(c *gin.Context, statusCode int) error {
if v, ok := c.Get("specific_channel_id"); ok {
if v, ok := c.Get(ctxkey.SpecificChannelId); ok {
return errors.Errorf("specific channel = %v", v)
}
if statusCode == http.StatusBadRequest {
if statusCode == http.StatusTooManyRequests {
return errors.Errorf("status code = %d", statusCode)
}
if statusCode/100 == 2 {
if statusCode/100 == 5 {
return errors.Errorf("status code = %d", statusCode)
}

View File

@ -7,6 +7,7 @@ import (
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/helper"
"github.com/Laisky/one-api/common/network"
"github.com/Laisky/one-api/common/random"
@ -16,7 +17,7 @@ import (
)
func GetAllTokens(c *gin.Context) {
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
p, _ := strconv.Atoi(c.Query("p"))
if p < 0 {
p = 0
@ -41,7 +42,7 @@ func GetAllTokens(c *gin.Context) {
}
func SearchTokens(c *gin.Context) {
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
keyword := c.Query("keyword")
tokens, err := model.SearchUserTokens(userId, keyword)
if err != nil {
@ -61,7 +62,7 @@ func SearchTokens(c *gin.Context) {
func GetToken(c *gin.Context) {
id, err := strconv.Atoi(c.Param("id"))
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
@ -86,8 +87,8 @@ func GetToken(c *gin.Context) {
}
func GetTokenStatus(c *gin.Context) {
tokenId := c.GetInt("token_id")
userId := c.GetInt("id")
tokenId := c.GetInt(ctxkey.TokenId)
userId := c.GetInt(ctxkey.Id)
token, err := model.GetTokenByIds(tokenId, userId)
if err != nil {
c.JSON(http.StatusOK, gin.H{
@ -145,7 +146,7 @@ func AddToken(c *gin.Context) {
}
cleanToken := model.Token{
UserId: c.GetInt("id"),
UserId: c.GetInt(ctxkey.Id),
Name: token.Name,
Key: random.GenerateKey(),
CreatedTime: helper.GetTimestamp(),
@ -174,7 +175,7 @@ func AddToken(c *gin.Context) {
func DeleteToken(c *gin.Context) {
id, _ := strconv.Atoi(c.Param("id"))
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
err := model.DeleteTokenById(id, userId)
if err != nil {
c.JSON(http.StatusOK, gin.H{
@ -198,7 +199,7 @@ type updateTokenDto struct {
}
func UpdateToken(c *gin.Context) {
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
statusOnly := c.Query("status_only")
tokenPatch := new(updateTokenDto)
err := c.ShouldBindJSON(tokenPatch)

View File

@ -3,14 +3,15 @@ package controller
import (
"encoding/json"
"fmt"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/random"
"github.com/Laisky/one-api/model"
"net/http"
"strconv"
"time"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/random"
"github.com/Laisky/one-api/model"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
)
@ -244,7 +245,7 @@ func GetUser(c *gin.Context) {
})
return
}
myRole := c.GetInt("role")
myRole := c.GetInt(ctxkey.Role)
if myRole <= user.Role && myRole != model.RoleRootUser {
c.JSON(http.StatusOK, gin.H{
"success": false,
@ -261,7 +262,7 @@ func GetUser(c *gin.Context) {
}
func GetUserDashboard(c *gin.Context) {
id := c.GetInt("id")
id := c.GetInt(ctxkey.Id)
now := time.Now()
startOfDay := now.Truncate(24*time.Hour).AddDate(0, 0, -6).Unix()
endOfDay := now.Truncate(24 * time.Hour).Add(24*time.Hour - time.Second).Unix()
@ -284,7 +285,7 @@ func GetUserDashboard(c *gin.Context) {
}
func GenerateAccessToken(c *gin.Context) {
id := c.GetInt("id")
id := c.GetInt(ctxkey.Id)
user, err := model.GetUserById(id, true)
if err != nil {
c.JSON(http.StatusOK, gin.H{
@ -320,7 +321,7 @@ func GenerateAccessToken(c *gin.Context) {
}
func GetAffCode(c *gin.Context) {
id := c.GetInt("id")
id := c.GetInt(ctxkey.Id)
user, err := model.GetUserById(id, true)
if err != nil {
c.JSON(http.StatusOK, gin.H{
@ -358,7 +359,7 @@ func GetSelfByToken(c *gin.Context) {
}
func GetSelf(c *gin.Context) {
id := c.GetInt("id")
id := c.GetInt(ctxkey.Id)
user, err := model.GetUserById(id, false)
if err != nil {
c.JSON(http.StatusOK, gin.H{
@ -403,7 +404,7 @@ func UpdateUser(c *gin.Context) {
})
return
}
myRole := c.GetInt("role")
myRole := c.GetInt(ctxkey.Role)
if myRole <= originUser.Role && myRole != model.RoleRootUser {
c.JSON(http.StatusOK, gin.H{
"success": false,
@ -461,7 +462,7 @@ func UpdateSelf(c *gin.Context) {
}
cleanUser := model.User{
Id: c.GetInt("id"),
Id: c.GetInt(ctxkey.Id),
Username: user.Username,
Password: user.Password,
DisplayName: user.DisplayName,

View File

@ -73,7 +73,7 @@ func main() {
}
if config.MemoryCacheEnabled {
logger.SysLog("memory cache enabled")
logger.SysError(fmt.Sprintf("sync frequency: %d seconds", config.SyncFrequency))
logger.SysLog(fmt.Sprintf("sync frequency: %d seconds", config.SyncFrequency))
model.InitChannelCache()
}
if config.MemoryCacheEnabled {

View File

@ -2,14 +2,16 @@ package middleware
import (
"fmt"
"net/http"
"strings"
"github.com/Laisky/one-api/common/blacklist"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/common/network"
"github.com/Laisky/one-api/model"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
"net/http"
"strings"
)
func authHelper(c *gin.Context, minRole int) {
@ -123,20 +125,20 @@ func TokenAuth() func(c *gin.Context) {
abortWithMessage(c, http.StatusBadRequest, err.Error())
return
}
c.Set("request_model", requestModel)
c.Set(ctxkey.RequestModel, requestModel)
if token.Models != nil && *token.Models != "" {
c.Set("available_models", *token.Models)
c.Set(ctxkey.AvailableModels, *token.Models)
if requestModel != "" && !isModelInList(requestModel, *token.Models) {
abortWithMessage(c, http.StatusForbidden, fmt.Sprintf("该令牌无权使用模型:%s", requestModel))
return
}
}
c.Set("id", token.UserId)
c.Set("token_id", token.Id)
c.Set("token_name", token.Name)
c.Set(ctxkey.Id, token.UserId)
c.Set(ctxkey.TokenId, token.Id)
c.Set(ctxkey.TokenName, token.Name)
if len(parts) > 1 {
if model.IsAdmin(token.UserId) {
c.Set("specific_channel_id", parts[1])
c.Set(ctxkey.SpecificChannelId, parts[1])
} else {
abortWithMessage(c, http.StatusForbidden, "普通用户不支持指定渠道")
return

View File

@ -6,8 +6,6 @@ import (
"strconv"
"strings"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/model"
@ -22,12 +20,12 @@ type ModelRequest struct {
func Distribute() func(c *gin.Context) {
return func(c *gin.Context) {
userId := c.GetInt("id")
userId := c.GetInt(ctxkey.Id)
userGroup, _ := model.CacheGetUserGroup(userId)
c.Set("group", userGroup)
c.Set(ctxkey.Group, userGroup)
var requestModel string
var channel *model.Channel
channelId, ok := c.Get("specific_channel_id")
channelId, ok := c.Get(ctxkey.SpecificChannelId)
if ok {
id, err := strconv.Atoi(channelId.(string))
if err != nil {
@ -44,7 +42,7 @@ func Distribute() func(c *gin.Context) {
return
}
} else {
requestModel = c.GetString("request_model")
requestModel = c.GetString(ctxkey.RequestModel)
var err error
channel, err = model.CacheGetRandomSatisfiedChannel(userGroup, requestModel, false)
if err != nil {
@ -74,30 +72,31 @@ func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, mode
}
}
logger.Info(c.Request.Context(), fmt.Sprintf("set channel %s ratio to %f", channel.Name, minimalRatio))
c.Set("channel_ratio", minimalRatio)
c.Set(common.CtxKeyChannel, channel)
c.Set("channel", channel.Type)
c.Set("channel_id", channel.Id)
c.Set("channel_name", channel.Name)
c.Set("model_mapping", channel.GetModelMapping())
c.Set(ctxkey.ChannelRatio, minimalRatio)
c.Set(ctxkey.ChannelModel, channel)
c.Set(ctxkey.Channel, channel.Type)
c.Set(ctxkey.ChannelId, channel.Id)
c.Set(ctxkey.ChannelName, channel.Name)
c.Set(ctxkey.ModelMapping, channel.GetModelMapping())
c.Set(ctxkey.OriginalModel, modelName) // for retry
c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
c.Set("base_url", channel.GetBaseURL())
c.Set(ctxkey.BaseURL, channel.GetBaseURL())
// this is for backward compatibility
switch channel.Type {
case channeltype.Azure:
c.Set(config.KeyAPIVersion, channel.Other)
c.Set(ctxkey.ConfigAPIVersion, channel.Other)
case channeltype.Xunfei:
c.Set(config.KeyAPIVersion, channel.Other)
c.Set(ctxkey.ConfigAPIVersion, channel.Other)
case channeltype.Gemini:
c.Set(config.KeyAPIVersion, channel.Other)
c.Set(ctxkey.ConfigAPIVersion, channel.Other)
case channeltype.AIProxyLibrary:
c.Set(config.KeyLibraryID, channel.Other)
c.Set(ctxkey.ConfigLibraryID, channel.Other)
case channeltype.Ali:
c.Set(config.KeyPlugin, channel.Other)
c.Set(ctxkey.ConfigPlugin, channel.Other)
}
cfg, _ := channel.LoadConfig()
for k, v := range cfg {
c.Set(config.KeyPrefix+k, v)
c.Set(ctxkey.ConfigPrefix+k, v)
}
}
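
Note: the loop above is what ties the new ctxkey.Config* constants to per-channel JSON config: every config field is stored in the gin context under the "cfg_" prefix, so ConfigAK ("cfg_ak"), ConfigRegion ("cfg_region") and the new ConfigUserID ("cfg_user_id") resolve to whatever the channel's config contains. A self-contained sketch of the pattern, assuming a hypothetical config blob with ak/sk/region/user_id fields:

package main

import (
    "encoding/json"
    "fmt"
    "net/http/httptest"

    "github.com/gin-gonic/gin"
)

const configPrefix = "cfg_" // mirrors ctxkey.ConfigPrefix

func main() {
    // Hypothetical channel config, as it might be stored on a channel row.
    raw := `{"ak":"AKIA...","sk":"secret","region":"us-east-1","user_id":"42"}`

    var cfg map[string]string
    if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
        panic(err)
    }

    c, _ := gin.CreateTestContext(httptest.NewRecorder())
    for k, v := range cfg {
        c.Set(configPrefix+k, v) // same pattern as SetupContextForSelectedChannel
    }

    // Adaptors later read the values back through the ctxkey.Config* constants.
    fmt.Println(c.GetString("cfg_region"), c.GetString("cfg_user_id")) // us-east-1 42
}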

View File

@ -7,6 +7,7 @@ import (
"github.com/Laisky/one-api/relay/adaptor/anthropic"
"github.com/Laisky/one-api/relay/adaptor/aws"
"github.com/Laisky/one-api/relay/adaptor/baidu"
"github.com/Laisky/one-api/relay/adaptor/coze"
"github.com/Laisky/one-api/relay/adaptor/gemini"
"github.com/Laisky/one-api/relay/adaptor/ollama"
"github.com/Laisky/one-api/relay/adaptor/openai"
@ -43,6 +44,8 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
return &zhipu.Adaptor{}
case apitype.Ollama:
return &ollama.Adaptor{}
case apitype.Coze:
return &coze.Adaptor{}
}
return nil

View File

@ -2,14 +2,15 @@ package aiproxy
import (
"fmt"
"io"
"net/http"
"github.com/Laisky/errors/v2"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/relay/adaptor"
"github.com/Laisky/one-api/relay/meta"
"github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
"io"
"net/http"
)
type Adaptor struct {
@ -34,7 +35,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
return nil, errors.New("request is nil")
}
aiProxyLibraryRequest := ConvertRequest(*request)
aiProxyLibraryRequest.LibraryId = c.GetString(config.KeyLibraryID)
aiProxyLibraryRequest.LibraryId = c.GetString(ctxkey.ConfigLibraryID)
return aiProxyLibraryRequest, nil
}

View File

@ -6,7 +6,7 @@ import (
"io"
"net/http"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/relay/adaptor"
"github.com/Laisky/one-api/relay/meta"
"github.com/Laisky/one-api/relay/model"
@ -48,8 +48,8 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
if meta.Mode == relaymode.ImagesGenerations {
req.Header.Set("X-DashScope-Async", "enable")
}
if c.GetString(config.KeyPlugin) != "" {
req.Header.Set("X-DashScope-Plugin", c.GetString(config.KeyPlugin))
if c.GetString(ctxkey.ConfigPlugin) != "" {
req.Header.Set("X-DashScope-Plugin", c.GetString(ctxkey.ConfigPlugin))
}
return nil
}

View File

@ -9,7 +9,6 @@ import (
"net/http"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/helper"
"github.com/Laisky/one-api/common/logger"
@ -25,9 +24,9 @@ import (
)
func newAwsClient(c *gin.Context) (*bedrockruntime.Client, error) {
ak := c.GetString(config.KeyAK)
sk := c.GetString(config.KeySK)
region := c.GetString(config.KeyRegion)
ak := c.GetString(ctxkey.ConfigAK)
sk := c.GetString(ctxkey.ConfigSK)
region := c.GetString(ctxkey.ConfigRegion)
client := bedrockruntime.New(bedrockruntime.Options{
Region: region,
Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(ak, sk, "")),

View File

@ -1,7 +1,7 @@
package azure
import (
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/gin-gonic/gin"
)
@ -9,7 +9,7 @@ func GetAPIVersion(c *gin.Context) string {
query := c.Request.URL.Query()
apiVersion := query.Get("api-version")
if apiVersion == "" {
apiVersion = c.GetString(config.KeyAPIVersion)
apiVersion = c.GetString(ctxkey.ConfigAPIVersion)
}
return apiVersion
}

View File

@ -0,0 +1,76 @@
package coze
import (
"errors"
"fmt"
"io"
"net/http"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/relay/adaptor"
"github.com/Laisky/one-api/relay/adaptor/openai"
"github.com/Laisky/one-api/relay/meta"
"github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
)
type Adaptor struct {
}
func (a *Adaptor) Init(meta *meta.Meta) {
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
return fmt.Sprintf("%s/open_api/v2/chat", meta.BaseURL), nil
}
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
adaptor.SetupCommonRequestHeader(c, req, meta)
req.Header.Set("Authorization", "Bearer "+meta.APIKey)
return nil
}
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
request.User = c.GetString(ctxkey.ConfigUserID)
return ConvertRequest(*request), nil
}
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
return request, nil
}
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
return adaptor.DoRequestHelper(a, c, meta, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
var responseText *string
if meta.IsStream {
err, responseText = StreamHandler(c, resp)
} else {
err, responseText = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
}
if responseText != nil {
usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens)
} else {
usage = &model.Usage{}
}
usage.PromptTokens = meta.PromptTokens
usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
return
}
func (a *Adaptor) GetModelList() []string {
return ModelList
}
func (a *Adaptor) GetChannelName() string {
return "coze"
}

View File

@ -0,0 +1,5 @@
package contenttype
const (
Text = "text"
)

View File

@ -0,0 +1,7 @@
package event
const (
Message = "message"
Done = "done"
Error = "error"
)

View File

@ -0,0 +1,6 @@
package messagetype
const (
Answer = "answer"
FollowUp = "follow_up"
)

View File

@ -0,0 +1,3 @@
package coze
var ModelList = []string{}

View File

@ -0,0 +1,10 @@
package coze
import "github.com/Laisky/one-api/relay/adaptor/coze/constant/event"
func event2StopReason(e *string) string {
if e == nil || *e == event.Message {
return ""
}
return "stop"
}

relay/adaptor/coze/main.go Normal file
View File

@ -0,0 +1,215 @@
package coze
import (
"bufio"
"encoding/json"
"fmt"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/conv"
"github.com/Laisky/one-api/common/helper"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/relay/adaptor/coze/constant/messagetype"
"github.com/Laisky/one-api/relay/adaptor/openai"
"github.com/Laisky/one-api/relay/model"
"github.com/gin-gonic/gin"
"io"
"net/http"
"strings"
)
// https://www.coze.com/open
func stopReasonCoze2OpenAI(reason *string) string {
if reason == nil {
return ""
}
switch *reason {
case "end_turn":
return "stop"
case "stop_sequence":
return "stop"
case "max_tokens":
return "length"
default:
return *reason
}
}
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
cozeRequest := Request{
Stream: textRequest.Stream,
User: textRequest.User,
BotId: strings.TrimPrefix(textRequest.Model, "bot-"),
}
for i, message := range textRequest.Messages {
if i == len(textRequest.Messages)-1 {
cozeRequest.Query = message.StringContent()
continue
}
cozeMessage := Message{
Role: message.Role,
Content: message.StringContent(),
}
cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage)
}
return &cozeRequest
}
func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
var response *Response
var stopReason string
var choice openai.ChatCompletionsStreamResponseChoice
if cozeResponse.Message != nil {
if cozeResponse.Message.Type != messagetype.Answer {
return nil, nil
}
choice.Delta.Content = cozeResponse.Message.Content
}
choice.Delta.Role = "assistant"
finishReason := stopReasonCoze2OpenAI(&stopReason)
if finishReason != "null" {
choice.FinishReason = &finishReason
}
var openaiResponse openai.ChatCompletionsStreamResponse
openaiResponse.Object = "chat.completion.chunk"
openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
openaiResponse.Id = cozeResponse.ConversationId
return &openaiResponse, response
}
func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
var responseText string
for _, message := range cozeResponse.Messages {
if message.Type == messagetype.Answer {
responseText = message.Content
break
}
}
choice := openai.TextResponseChoice{
Index: 0,
Message: model.Message{
Role: "assistant",
Content: responseText,
Name: nil,
},
FinishReason: "stop",
}
fullTextResponse := openai.TextResponse{
Id: fmt.Sprintf("chatcmpl-%s", cozeResponse.ConversationId),
Model: "coze-bot",
Object: "chat.completion",
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
}
return &fullTextResponse
}
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
var responseText string
createdTime := helper.GetTimestamp()
scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := strings.Index(string(data), "\n"); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
})
dataChan := make(chan string)
stopChan := make(chan bool)
go func() {
for scanner.Scan() {
data := scanner.Text()
if len(data) < 5 {
continue
}
if !strings.HasPrefix(data, "data:") {
continue
}
data = strings.TrimPrefix(data, "data:")
dataChan <- data
}
stopChan <- true
}()
common.SetEventStreamHeaders(c)
var modelName string
c.Stream(func(w io.Writer) bool {
select {
case data := <-dataChan:
// some implementations may add \r at the end of data
data = strings.TrimSuffix(data, "\r")
var cozeResponse StreamResponse
err := json.Unmarshal([]byte(data), &cozeResponse)
if err != nil {
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
response, _ := StreamResponseCoze2OpenAI(&cozeResponse)
if response == nil {
return true
}
for _, choice := range response.Choices {
responseText += conv.AsString(choice.Delta.Content)
}
response.Model = modelName
response.Created = createdTime
jsonStr, err := json.Marshal(response)
if err != nil {
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
return true
case <-stopChan:
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
return false
}
})
_ = resp.Body.Close()
return nil, &responseText
}
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
var cozeResponse Response
err = json.Unmarshal(responseBody, &cozeResponse)
if err != nil {
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
if cozeResponse.Code != 0 {
return &model.ErrorWithStatusCode{
Error: model.Error{
Message: cozeResponse.Msg,
Code: cozeResponse.Code,
},
StatusCode: resp.StatusCode,
}, nil
}
fullTextResponse := ResponseCoze2OpenAI(&cozeResponse)
fullTextResponse.Model = modelName
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
var responseText string
if len(fullTextResponse.Choices) > 0 {
responseText = fullTextResponse.Choices[0].Message.StringContent()
}
return nil, &responseText
}

View File

@ -0,0 +1,38 @@
package coze
type Message struct {
Role string `json:"role"`
Type string `json:"type"`
Content string `json:"content"`
ContentType string `json:"content_type"`
}
type ErrorInformation struct {
Code int `json:"code"`
Msg string `json:"msg"`
}
type Request struct {
ConversationId string `json:"conversation_id,omitempty"`
BotId string `json:"bot_id"`
User string `json:"user"`
Query string `json:"query"`
ChatHistory []Message `json:"chat_history,omitempty"`
Stream bool `json:"stream"`
}
type Response struct {
ConversationId string `json:"conversation_id,omitempty"`
Messages []Message `json:"messages,omitempty"`
Code int `json:"code,omitempty"`
Msg string `json:"msg,omitempty"`
}
type StreamResponse struct {
Event string `json:"event,omitempty"`
Message *Message `json:"message,omitempty"`
IsFinish bool `json:"is_finish,omitempty"`
Index int `json:"index,omitempty"`
ConversationId string `json:"conversation_id,omitempty"`
ErrorInformation *ErrorInformation `json:"error_information,omitempty"`
}
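
Note: putting the types above together with ConvertRequest from main.go: the OpenAI model name "bot-<id>" supplies the Coze bot id, the last message becomes the query, and all earlier messages become chat history. A trimmed, self-contained sketch of that mapping (field set reduced; the helper name convert is hypothetical):

package main

import (
    "fmt"
    "strings"
)

type cozeMessage struct {
    Role    string `json:"role"`
    Content string `json:"content"`
}

type cozeRequest struct {
    BotId       string        `json:"bot_id"`
    Query       string        `json:"query"`
    ChatHistory []cozeMessage `json:"chat_history,omitempty"`
    Stream      bool          `json:"stream"`
}

func convert(model string, messages []cozeMessage, stream bool) cozeRequest {
    req := cozeRequest{BotId: strings.TrimPrefix(model, "bot-"), Stream: stream}
    for i, m := range messages {
        if i == len(messages)-1 {
            req.Query = m.Content // last message is the query
            continue
        }
        req.ChatHistory = append(req.ChatHistory, m) // earlier messages become history
    }
    return req
}

func main() {
    req := convert("bot-123456", []cozeMessage{
        {Role: "user", Content: "hi"},
        {Role: "assistant", Content: "hello"},
        {Role: "user", Content: "what can you do?"},
    }, true)
    fmt.Printf("%+v\n", req)
    // {BotId:123456 Query:what can you do? ChatHistory:[{Role:user Content:hi} {Role:assistant Content:hello}] Stream:true}
}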

View File

@ -7,4 +7,6 @@ var ModelList = []string{
"llama2-7b-2048",
"llama2-70b-4096",
"mixtral-8x7b-32768",
"llama3-8b-8192",
"llama3-70b-8192",
}

View File

@ -16,6 +16,12 @@ import (
"github.com/gin-gonic/gin"
)
const (
dataPrefix = "data: "
done = "[DONE]"
dataPrefixLength = len(dataPrefix)
)
func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
responseText := ""
scanner := bufio.NewScanner(resp.Body)
@ -37,23 +43,30 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
go func() {
for scanner.Scan() {
data := scanner.Text()
if len(data) < 6 { // ignore blank line or wrong format
if len(data) < dataPrefixLength { // ignore blank line or wrong format
continue
}
if data[:6] != "data: " && data[:6] != "[DONE]" {
if data[:dataPrefixLength] != dataPrefix && data[:dataPrefixLength] != done {
continue
}
if strings.HasPrefix(data[dataPrefixLength:], done) {
dataChan <- data
data = data[6:]
if !strings.HasPrefix(data, "[DONE]") {
continue
}
switch relayMode {
case relaymode.ChatCompletions:
var streamResponse ChatCompletionsStreamResponse
err := json.Unmarshal([]byte(data), &streamResponse)
err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
if err != nil {
logger.SysError("error unmarshalling stream response: " + err.Error())
dataChan <- data // if error happened, pass the data to client
continue // just ignore the error
}
if len(streamResponse.Choices) == 0 {
// but for empty choice, we should not pass it to client, this is for azure
continue // just ignore empty choice
}
dataChan <- data
for _, choice := range streamResponse.Choices {
responseText += conv.AsString(choice.Delta.Content)
}
@ -61,8 +74,9 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
usage = streamResponse.Usage
}
case relaymode.Completions:
dataChan <- data
var streamResponse CompletionsStreamResponse
err := json.Unmarshal([]byte(data), &streamResponse)
err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
if err != nil {
logger.SysError("error unmarshalling stream response: " + err.Error())
continue
@ -72,7 +86,6 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
}
}
}
}
stopChan <- true
}()
common.SetEventStreamHeaders(c)
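
Note: the hunk above names the SSE magic strings ("data: " prefix, "[DONE]" sentinel) and unmarshals only the payload after the prefix. A simplified standalone sketch of that per-line filtering (classify is a hypothetical helper, not a copy of the handler's control flow):

package main

import (
    "fmt"
    "strings"
)

const (
    dataPrefix       = "data: "
    done             = "[DONE]"
    dataPrefixLength = len(dataPrefix)
)

// classify drops lines without the "data: " prefix, flags the "[DONE]"
// sentinel, and otherwise returns the JSON payload after the prefix.
func classify(line string) (payload string, isDone, ok bool) {
    if len(line) < dataPrefixLength || line[:dataPrefixLength] != dataPrefix {
        return "", false, false
    }
    if strings.HasPrefix(line[dataPrefixLength:], done) {
        return "", true, true
    }
    return line[dataPrefixLength:], false, true
}

func main() {
    for _, line := range []string{"", ": keep-alive", `data: {"id":"x"}`, "data: [DONE]"} {
        payload, isDone, ok := classify(line)
        fmt.Printf("%-20q ok=%v done=%v payload=%q\n", line, ok, isDone, payload)
    }
}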

View File

@ -13,7 +13,7 @@ import (
"time"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/helper"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/common/random"
@ -281,7 +281,7 @@ func getAPIVersion(c *gin.Context, modelName string) string {
return apiVersion
}
apiVersion = c.GetString(config.KeyAPIVersion)
apiVersion = c.GetString(ctxkey.ConfigAPIVersion)
if apiVersion != "" {
return apiVersion
}

View File

@ -13,6 +13,7 @@ const (
Gemini
Ollama
AwsClaude
Coze
Dummy // this one is only for count, do not add any channel after this
)

View File

@ -148,11 +148,13 @@ var ModelRatio = map[string]float64{
"mistral-medium-latest": 2.7 / 1000 * USD,
"mistral-large-latest": 8.0 / 1000 * USD,
"mistral-embed": 0.1 / 1000 * USD,
// https://wow.groq.com/
"llama2-70b-4096": 0.7 / 1000 * USD,
"llama2-7b-2048": 0.1 / 1000 * USD,
// https://wow.groq.com/#:~:text=inquiries%C2%A0here.-,Model,-Current%20Speed
"llama3-70b-8192": 0.59 / 1000 * USD,
"mixtral-8x7b-32768": 0.27 / 1000 * USD,
"llama3-8b-8192": 0.05 / 1000 * USD,
"gemma-7b-it": 0.1 / 1000 * USD,
"llama2-70b-4096": 0.64 / 1000 * USD,
"llama2-7b-2048": 0.1 / 1000 * USD,
// https://platform.lingyiwanwu.com/docs#-计费单元
"yi-34b-chat-0205": 2.5 / 1000 * RMB,
"yi-34b-chat-200k": 12.0 / 1000 * RMB,
@ -262,7 +264,7 @@ func GetCompletionRatio(name string) float64 {
return 4.0 / 3.0
}
if strings.HasPrefix(name, "gpt-4") {
if strings.HasPrefix(name, "gpt-4-turbo") {
if strings.HasPrefix(name, "gpt-4-turbo") || strings.HasSuffix(name, "preview") {
return 3
}
return 2
@ -281,7 +283,11 @@ func GetCompletionRatio(name string) float64 {
}
switch name {
case "llama2-70b-4096":
return 0.8 / 0.7
return 0.8 / 0.64
case "llama3-8b-8192":
return 2
case "llama3-70b-8192":
return 0.79 / 0.59
}
return 1
}
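
Note on the new Groq entries: with the repo's USD scaling constant, a model ratio written as "X / 1000 * USD" works out to X dollars per million input tokens, and completion tokens are billed at that price times GetCompletionRatio, so the numbers in this hunk imply $0.59 in / $0.79 out per million tokens for llama3-70b-8192 (and twice the $0.05 input price for llama3-8b-8192 output). Illustrative arithmetic only:

package main

import "fmt"

func main() {
    inputPerM := 0.59              // "llama3-70b-8192": 0.59 / 1000 * USD
    completionRatio := 0.79 / 0.59 // case "llama3-70b-8192" in GetCompletionRatio
    fmt.Printf("llama3-70b-8192: $%.2f in / $%.2f out per 1M tokens\n",
        inputPerM, inputPerM*completionRatio)
    // prints: llama3-70b-8192: $0.59 in / $0.79 out per 1M tokens
}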

View File

@ -35,6 +35,7 @@ const (
LingYiWanWu
StepFun
AwsClaude
Coze
Dummy
)

View File

@ -27,6 +27,8 @@ func ToAPIType(channelType int) int {
apiType = apitype.Ollama
case AwsClaude:
apiType = apitype.AwsClaude
case Coze:
apiType = apitype.Coze
}
return apiType

View File

@ -35,6 +35,7 @@ var ChannelBaseURLs = []string{
"https://api.lingyiwanwu.com", // 31
"https://api.stepfun.com", // 32
"", // 33
"https://api.coze.com", // 34
}
func init() {

View File

@ -6,9 +6,14 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/Laisky/errors/v2"
"github.com/Laisky/one-api/common"
"github.com/Laisky/one-api/common/config"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/model"
"github.com/Laisky/one-api/relay/adaptor/azure"
@ -20,21 +25,18 @@ import (
relaymodel "github.com/Laisky/one-api/relay/model"
"github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
"io"
"net/http"
"strings"
)
func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
ctx := c.Request.Context()
audioModel := "whisper-1"
tokenId := c.GetInt("token_id")
channelType := c.GetInt("channel")
channelId := c.GetInt("channel_id")
userId := c.GetInt("id")
// group := c.GetString("group")
tokenName := c.GetString("token_name")
tokenId := c.GetInt(ctxkey.TokenId)
channelType := c.GetInt(ctxkey.Channel)
channelId := c.GetInt(ctxkey.ChannelId)
userId := c.GetInt(ctxkey.Id)
// group := c.GetString(ctxkey.Group)
tokenName := c.GetString(ctxkey.TokenName)
var ttsRequest openai.TextToSpeechRequest
if relayMode == relaymode.AudioSpeech {
@ -53,7 +55,7 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
modelRatio := billingratio.GetModelRatio(audioModel)
// groupRatio := billingratio.GetGroupRatio(group)
groupRatio := c.GetFloat64("channel_ratio") // get minimal ratio from multiple groups
groupRatio := c.GetFloat64(ctxkey.ChannelRatio) // get minimal ratio from multiple groups
ratio := modelRatio * groupRatio
var quota int64
@ -109,7 +111,7 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
}()
// map model name
modelMapping := c.GetString("model_mapping")
modelMapping := c.GetString(ctxkey.ModelMapping)
if modelMapping != "" {
modelMap := make(map[string]string)
err := json.Unmarshal([]byte(modelMapping), &modelMap)
@ -123,8 +125,8 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
baseURL := channeltype.ChannelBaseURLs[channelType]
requestURL := c.Request.URL.String()
if c.GetString("base_url") != "" {
baseURL = c.GetString("base_url")
if c.GetString(ctxkey.BaseURL) != "" {
baseURL = c.GetString(ctxkey.BaseURL)
}
fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType)

View File

@ -9,6 +9,7 @@ import (
"net/http"
"github.com/Laisky/errors/v2"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/common/logger"
"github.com/Laisky/one-api/model"
"github.com/Laisky/one-api/relay"
@ -90,7 +91,7 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
modelRatio := billingratio.GetModelRatio(imageRequest.Model)
// groupRatio := billingratio.GetGroupRatio(meta.Group)
groupRatio := c.GetFloat64("channel_ratio") // pre-selected cheapest channel ratio
groupRatio := c.GetFloat64(ctxkey.ChannelRatio) // pre-selected cheapest channel ratio
ratio := modelRatio * groupRatio
userQuota, err := model.CacheGetUserQuota(ctx, meta.UserId)
@ -122,11 +123,11 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
logger.SysError("error update user quota cache: " + err.Error())
}
if quota >= 0 {
tokenName := c.GetString("token_name")
tokenName := c.GetString(ctxkey.TokenName)
logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio)
model.RecordConsumeLog(ctx, meta.UserId, meta.ChannelId, 0, 0, imageRequest.Model, tokenName, quota, logContent)
model.UpdateUserUsedQuotaAndRequestCount(meta.UserId, quota)
channelId := c.GetInt("channel_id")
channelId := c.GetInt(ctxkey.ChannelId)
model.UpdateChannelUsedQuota(channelId, quota)
}
}(c.Request.Context())

View File

@ -1,12 +1,13 @@
package meta
import (
"github.com/Laisky/one-api/common/config"
"strings"
"github.com/Laisky/one-api/common/ctxkey"
"github.com/Laisky/one-api/relay/adaptor/azure"
"github.com/Laisky/one-api/relay/channeltype"
"github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
"strings"
)
type Meta struct {
@ -34,19 +35,19 @@ type Meta struct {
func GetByContext(c *gin.Context) *Meta {
meta := Meta{
Mode: relaymode.GetByPath(c.Request.URL.Path),
ChannelType: c.GetInt("channel"),
ChannelId: c.GetInt("channel_id"),
TokenId: c.GetInt("token_id"),
TokenName: c.GetString("token_name"),
UserId: c.GetInt("id"),
Group: c.GetString("group"),
ModelMapping: c.GetStringMapString("model_mapping"),
BaseURL: c.GetString("base_url"),
APIVersion: c.GetString(config.KeyAPIVersion),
ChannelType: c.GetInt(ctxkey.Channel),
ChannelId: c.GetInt(ctxkey.ChannelId),
TokenId: c.GetInt(ctxkey.TokenId),
TokenName: c.GetString(ctxkey.TokenName),
UserId: c.GetInt(ctxkey.Id),
Group: c.GetString(ctxkey.Group),
ModelMapping: c.GetStringMapString(ctxkey.ModelMapping),
BaseURL: c.GetString(ctxkey.BaseURL),
APIVersion: c.GetString(ctxkey.ConfigAPIVersion),
APIKey: strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "),
Config: nil,
RequestURLPath: c.Request.URL.String(),
ChannelRatio: c.GetFloat64("channel_ratio"),
ChannelRatio: c.GetFloat64(ctxkey.ChannelRatio),
}
if meta.ChannelType == channeltype.Azure {
meta.APIVersion = azure.GetAPIVersion(c)

View File

@ -33,7 +33,7 @@ function renderType(type) {
}
type2label[0] = { value: 0, text: '未知类型', color: 'grey' };
}
return <Label basic color={type2label[type]?.color}>{type2label[type]?.text}</Label>;
return <Label basic color={type2label[type]?.color}>{type2label[type] ? type2label[type].text : type}</Label>;
}
function renderBalance(type, balance) {

View File

@ -12,6 +12,7 @@ const COPY_OPTIONS = [
];
const OPEN_LINK_OPTIONS = [
{ key: 'next', text: 'ChatGPT Next Web', value: 'next' },
{ key: 'ama', text: 'BotGem', value: 'ama' },
{ key: 'opencat', text: 'OpenCat', value: 'opencat' },
];

View File

@ -19,6 +19,7 @@ export const CHANNEL_OPTIONS = [
{ key: 30, text: 'Ollama', value: 30, color: 'black' },
{ key: 31, text: '零一万物', value: 31, color: 'green' },
{ key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
{ key: 34, text: 'Coze', value: 34, color: 'blue' },
{ key: 8, text: '自定义渠道', value: 8, color: 'pink' },
{ key: 22, text: '知识库FastGPT', value: 22, color: 'blue' },
{ key: 21, text: '知识库AI Proxy', value: 21, color: 'purple' },

View File

@ -57,7 +57,8 @@ const EditChannel = () => {
const [config, setConfig] = useState({
region: '',
sk: '',
ak: ''
ak: '',
user_id: ''
});
const handleInputChange = (e, { name, value }) => {
setInputs((inputs) => ({ ...inputs, [name]: value }));
@ -156,13 +157,11 @@ const EditChannel = () => {
}, []);
const submit = async () => {
// some provider as AWS need both AK and SK rather than a single key,
// so we need to combine them into a single key to achieve the best compatibility.
if (inputs.ak && inputs.sk) {
console.log(`combine ak ${inputs.ak} and sk ${inputs.sk}`, inputs.ak, inputs.sk);
inputs.key = `${inputs.ak}\n${inputs.sk}`;
if (inputs.key === '') {
if (config.ak !== '' && config.sk !== '' && config.region !== '') {
inputs.key = `${config.ak}|${config.sk}|${config.region}`;
}
}
if (!isEdit && (inputs.name === '' || inputs.key === '')) {
showInfo('请填写渠道名称和渠道密钥!');
return;
@ -356,6 +355,13 @@ const EditChannel = () => {
</Form.Field>
)
}
{
inputs.type === 34 && (
<Message>
对于 Coze 而言,模型名称即 Bot ID,你可以添加一个前缀 `bot-`,例如:`bot-123456`
</Message>
)
}
<Form.Field>
<Form.Dropdown
label='模型'
@ -446,6 +452,18 @@ const EditChannel = () => {
</Form.Field>
)
}
{
inputs.type === 34 && (
<Form.Input
label='User ID'
name='user_id'
required
placeholder={'生成该密钥的用户 ID'}
onChange={handleConfigChange}
value={config.user_id}
autoComplete=''
/>)
}
{
inputs.type !== 33 && (batch ? <Form.Field>
<Form.TextArea