Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-10-28 12:23:42 +08:00)

Compare commits: v0.6.6-alp ... v0.6.6-alp (9 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 24f026d18e |  |
|  | cb33e8aad5 |  |
|  | 779b747e9e |  |
|  | 3d149fedf4 |  |
|  | 83517f687c |  |
|  | e30ebda0fe |  |
|  | d87c55f542 |  |
|  | e5b3e37c46 |  |
|  | 8de489cf06 |  |
@@ -82,6 +82,8 @@ _✨ Access all major models through the standard OpenAI API format, ready to use out of the box
+ [x] [Ollama](https://github.com/ollama/ollama)
+ [x] [零一万物](https://platform.lingyiwanwu.com/)
+ [x] [阶跃星辰](https://platform.stepfun.com/)
+ [x] [Coze](https://www.coze.com/)
+ [x] [Cohere](https://cohere.com/)
2. Supports configuring mirrors and many [third-party proxy services](https://iamazing.cn/page/openai-api-third-party-services).
3. Supports accessing multiple channels via **load balancing**.
4. Supports **stream mode**, enabling a typewriter effect through streamed responses.
Removed file (12 lines):
@@ -1,12 +0,0 @@
-package config
-
-const (
-    KeyPrefix = "cfg_"
-
-    KeyAPIVersion = KeyPrefix + "api_version"
-    KeyLibraryID  = KeyPrefix + "library_id"
-    KeyPlugin     = KeyPrefix + "plugin"
-    KeySK         = KeyPrefix + "sk"
-    KeyAK         = KeyPrefix + "ak"
-    KeyRegion     = KeyPrefix + "region"
-)
common/ctxkey/config.go (new file, 13 lines)
@@ -0,0 +1,13 @@
+package ctxkey
+
+const (
+    ConfigPrefix = "cfg_"
+
+    ConfigAPIVersion = ConfigPrefix + "api_version"
+    ConfigLibraryID  = ConfigPrefix + "library_id"
+    ConfigPlugin     = ConfigPrefix + "plugin"
+    ConfigSK         = ConfigPrefix + "sk"
+    ConfigAK         = ConfigPrefix + "ak"
+    ConfigRegion     = ConfigPrefix + "region"
+    ConfigUserID     = ConfigPrefix + "user_id"
+)
@@ -1,7 +1,21 @@
 package ctxkey

-var (
-    RequestModel     = "request_model"
-    ConvertedRequest = "converted_request"
-    OriginalModel    = "original_model"
-)
+const (
+    Id                = "id"
+    Username          = "username"
+    Role              = "role"
+    Status            = "status"
+    Channel           = "channel"
+    ChannelId         = "channel_id"
+    SpecificChannelId = "specific_channel_id"
+    RequestModel      = "request_model"
+    ConvertedRequest  = "converted_request"
+    OriginalModel     = "original_model"
+    Group             = "group"
+    ModelMapping      = "model_mapping"
+    ChannelName       = "channel_name"
+    TokenId           = "token_id"
+    TokenName         = "token_name"
+    BaseURL           = "base_url"
+    AvailableModels   = "available_models"
+)
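The point of these new constants is that middleware and handlers now share one set of typed keys instead of repeating raw string literals such as "id" or "token_id" on the gin context. A minimal sketch of the pattern (the route, the port, and the fakeAuth middleware below are illustrative and not part of one-api):

```go
package main

import (
	"fmt"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/ctxkey"
)

// fakeAuth stands in for one-api's auth middleware: it stores the caller's
// user id under the shared ctxkey constant instead of a bare "id" literal.
func fakeAuth() gin.HandlerFunc {
	return func(c *gin.Context) {
		c.Set(ctxkey.Id, 42)
		c.Next()
	}
}

func main() {
	r := gin.New()
	r.Use(fakeAuth())
	r.GET("/whoami", func(c *gin.Context) {
		// Handlers read the same constant, so a misspelled key now fails at
		// compile time instead of silently returning gin's zero value.
		userId := c.GetInt(ctxkey.Id)
		c.String(200, fmt.Sprintf("user %d", userId))
	})
	_ = r.Run(":8080")
}
```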
@@ -6,6 +6,7 @@ import (
     "fmt"
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/controller"
     "github.com/songquanpeng/one-api/model"
     "net/http"
@@ -136,7 +137,7 @@ func WeChatBind(c *gin.Context) {
         })
         return
     }
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user := model.User{
         Id: id,
     }
@@ -3,6 +3,7 @@ package controller
 import (
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/model"
     relaymodel "github.com/songquanpeng/one-api/relay/model"
 )
@@ -14,13 +15,13 @@ func GetSubscription(c *gin.Context) {
     var token *model.Token
     var expiredTime int64
     if config.DisplayTokenStatEnabled {
-        tokenId := c.GetInt("token_id")
+        tokenId := c.GetInt(ctxkey.TokenId)
         token, err = model.GetTokenById(tokenId)
         expiredTime = token.ExpiredTime
         remainQuota = token.RemainQuota
         usedQuota = token.UsedQuota
     } else {
-        userId := c.GetInt("id")
+        userId := c.GetInt(ctxkey.Id)
         remainQuota, err = model.GetUserQuota(userId)
         if err != nil {
         usedQuota, err = model.GetUserUsedQuota(userId)
@@ -64,11 +65,11 @@ func GetUsage(c *gin.Context) {
     var err error
     var token *model.Token
     if config.DisplayTokenStatEnabled {
-        tokenId := c.GetInt("token_id")
+        tokenId := c.GetInt(ctxkey.TokenId)
         token, err = model.GetTokenById(tokenId)
         quota = token.UsedQuota
     } else {
-        userId := c.GetInt("id")
+        userId := c.GetInt(ctxkey.Id)
         quota, err = model.GetUserUsedQuota(userId)
     }
     if err != nil {
@@ -6,6 +6,7 @@ import (
     "errors"
     "fmt"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/logger"
     "github.com/songquanpeng/one-api/common/message"
     "github.com/songquanpeng/one-api/middleware"
@@ -54,8 +55,8 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
     }
     c.Request.Header.Set("Authorization", "Bearer "+channel.Key)
     c.Request.Header.Set("Content-Type", "application/json")
-    c.Set("channel", channel.Type)
-    c.Set("base_url", channel.GetBaseURL())
+    c.Set(ctxkey.Channel, channel.Type)
+    c.Set(ctxkey.BaseURL, channel.GetBaseURL())
     middleware.SetupContextForSelectedChannel(c, channel, "")
     meta := meta.GetByContext(c)
     apiType := channeltype.ToAPIType(channel.Type)
@@ -64,8 +65,12 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
         return fmt.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
     }
     adaptor.Init(meta)
-    modelName := adaptor.GetModelList()[0]
-    if !strings.Contains(channel.Models, modelName) {
+    var modelName string
+    modelList := adaptor.GetModelList()
+    if len(modelList) != 0 {
+        modelName = modelList[0]
+    }
+    if modelName == "" || !strings.Contains(channel.Models, modelName) {
         modelNames := strings.Split(channel.Models, ",")
         if len(modelNames) > 0 {
             modelName = modelNames[0]
@@ -82,6 +87,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
     if err != nil {
         return err, nil
     }
+    logger.SysLog(string(jsonData))
     requestBody := bytes.NewBuffer(jsonData)
     c.Request.Body = io.NopCloser(requestBody)
     resp, err := adaptor.DoRequest(c, meta, requestBody)
@@ -3,6 +3,7 @@ package controller
 import (
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/model"
     "net/http"
     "strconv"
@@ -41,7 +42,7 @@ func GetUserLogs(c *gin.Context) {
     if p < 0 {
         p = 0
     }
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     logType, _ := strconv.Atoi(c.Query("type"))
     startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
     endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)
@@ -83,7 +84,7 @@ func SearchAllLogs(c *gin.Context) {

 func SearchUserLogs(c *gin.Context) {
     keyword := c.Query("keyword")
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     logs, err := model.SearchUserLogs(userId, keyword)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -122,7 +123,7 @@ func GetLogsStat(c *gin.Context) {
 }

 func GetLogsSelfStat(c *gin.Context) {
-    username := c.GetString("username")
+    username := c.GetString(ctxkey.Username)
     logType, _ := strconv.Atoi(c.Query("type"))
     startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
     endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)
@@ -3,6 +3,7 @@ package controller
 import (
     "fmt"
     "github.com/gin-gonic/gin"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/model"
     relay "github.com/songquanpeng/one-api/relay"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
@@ -131,10 +132,10 @@ func ListAllModels(c *gin.Context) {
 func ListModels(c *gin.Context) {
     ctx := c.Request.Context()
     var availableModels []string
-    if c.GetString("available_models") != "" {
-        availableModels = strings.Split(c.GetString("available_models"), ",")
+    if c.GetString(ctxkey.AvailableModels) != "" {
+        availableModels = strings.Split(c.GetString(ctxkey.AvailableModels), ",")
     } else {
-        userId := c.GetInt("id")
+        userId := c.GetInt(ctxkey.Id)
         userGroup, _ := model.CacheGetUserGroup(userId)
         availableModels, _ = model.CacheGetGroupModels(ctx, userGroup)
     }
@@ -186,7 +187,7 @@ func RetrieveModel(c *gin.Context) {

 func GetUserAvailableModels(c *gin.Context) {
     ctx := c.Request.Context()
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     userGroup, err := model.CacheGetUserGroup(id)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -3,6 +3,7 @@ package controller
 import (
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/common/random"
     "github.com/songquanpeng/one-api/model"
@@ -109,7 +110,7 @@ func AddRedemption(c *gin.Context) {
     for i := 0; i < redemption.Count; i++ {
         key := random.GetUUID()
         cleanRedemption := model.Redemption{
-            UserId:      c.GetInt("id"),
+            UserId:      c.GetInt(ctxkey.Id),
             Name:        redemption.Name,
             Key:         key,
             CreatedTime: helper.GetTimestamp(),
@@ -46,15 +46,15 @@ func Relay(c *gin.Context) {
         requestBody, _ := common.GetRequestBody(c)
         logger.Debugf(ctx, "request body: %s", string(requestBody))
     }
-    channelId := c.GetInt("channel_id")
+    channelId := c.GetInt(ctxkey.ChannelId)
     bizErr := relayHelper(c, relayMode)
     if bizErr == nil {
         monitor.Emit(channelId, true)
         return
     }
     lastFailedChannelId := channelId
-    channelName := c.GetString("channel_name")
-    group := c.GetString("group")
+    channelName := c.GetString(ctxkey.ChannelName)
+    group := c.GetString(ctxkey.Group)
     originalModel := c.GetString(ctxkey.OriginalModel)
     go processChannelRelayError(ctx, channelId, channelName, bizErr)
     requestId := c.GetString(logger.RequestIdKey)
@@ -80,9 +80,9 @@ func Relay(c *gin.Context) {
         if bizErr == nil {
             return
         }
-        channelId := c.GetInt("channel_id")
+        channelId := c.GetInt(ctxkey.ChannelId)
         lastFailedChannelId = channelId
-        channelName := c.GetString("channel_name")
+        channelName := c.GetString(ctxkey.ChannelName)
         go processChannelRelayError(ctx, channelId, channelName, bizErr)
     }
     if bizErr != nil {
@@ -97,7 +97,7 @@ func Relay(c *gin.Context) {
 }

 func shouldRetry(c *gin.Context, statusCode int) bool {
-    if _, ok := c.Get("specific_channel_id"); ok {
+    if _, ok := c.Get(ctxkey.SpecificChannelId); ok {
         return false
     }
     if statusCode == http.StatusTooManyRequests {
@@ -4,6 +4,7 @@ import (
     "fmt"
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/common/network"
     "github.com/songquanpeng/one-api/common/random"
@@ -13,7 +14,7 @@ import (
 )

 func GetAllTokens(c *gin.Context) {
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     p, _ := strconv.Atoi(c.Query("p"))
     if p < 0 {
         p = 0
@@ -38,7 +39,7 @@ func GetAllTokens(c *gin.Context) {
 }

 func SearchTokens(c *gin.Context) {
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     keyword := c.Query("keyword")
     tokens, err := model.SearchUserTokens(userId, keyword)
     if err != nil {
@@ -58,7 +59,7 @@ func SearchTokens(c *gin.Context) {

 func GetToken(c *gin.Context) {
     id, err := strconv.Atoi(c.Param("id"))
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
             "success": false,
@@ -83,8 +84,8 @@ func GetToken(c *gin.Context) {
 }

 func GetTokenStatus(c *gin.Context) {
-    tokenId := c.GetInt("token_id")
-    userId := c.GetInt("id")
+    tokenId := c.GetInt(ctxkey.TokenId)
+    userId := c.GetInt(ctxkey.Id)
     token, err := model.GetTokenByIds(tokenId, userId)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -139,7 +140,7 @@ func AddToken(c *gin.Context) {
     }

     cleanToken := model.Token{
-        UserId:      c.GetInt("id"),
+        UserId:      c.GetInt(ctxkey.Id),
         Name:        token.Name,
         Key:         random.GenerateKey(),
         CreatedTime: helper.GetTimestamp(),
@@ -168,7 +169,7 @@ func AddToken(c *gin.Context) {

 func DeleteToken(c *gin.Context) {
     id, _ := strconv.Atoi(c.Param("id"))
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     err := model.DeleteTokenById(id, userId)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -185,7 +186,7 @@ func DeleteToken(c *gin.Context) {
 }

 func UpdateToken(c *gin.Context) {
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     statusOnly := c.Query("status_only")
     token := model.Token{}
     err := c.ShouldBindJSON(&token)
@@ -5,6 +5,7 @@ import (
     "fmt"
     "github.com/songquanpeng/one-api/common"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/random"
     "github.com/songquanpeng/one-api/model"
     "net/http"
@@ -238,7 +239,7 @@ func GetUser(c *gin.Context) {
         })
         return
     }
-    myRole := c.GetInt("role")
+    myRole := c.GetInt(ctxkey.Role)
     if myRole <= user.Role && myRole != model.RoleRootUser {
         c.JSON(http.StatusOK, gin.H{
             "success": false,
@@ -255,7 +256,7 @@ func GetUser(c *gin.Context) {
 }

 func GetUserDashboard(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     now := time.Now()
     startOfDay := now.Truncate(24*time.Hour).AddDate(0, 0, -6).Unix()
     endOfDay := now.Truncate(24 * time.Hour).Add(24*time.Hour - time.Second).Unix()
@@ -278,7 +279,7 @@ func GetUserDashboard(c *gin.Context) {
 }

 func GenerateAccessToken(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user, err := model.GetUserById(id, true)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -314,7 +315,7 @@ func GenerateAccessToken(c *gin.Context) {
 }

 func GetAffCode(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user, err := model.GetUserById(id, true)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -342,7 +343,7 @@ func GetAffCode(c *gin.Context) {
 }

 func GetSelf(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user, err := model.GetUserById(id, false)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -387,7 +388,7 @@ func UpdateUser(c *gin.Context) {
         })
         return
     }
-    myRole := c.GetInt("role")
+    myRole := c.GetInt(ctxkey.Role)
     if myRole <= originUser.Role && myRole != model.RoleRootUser {
         c.JSON(http.StatusOK, gin.H{
             "success": false,
@@ -445,7 +446,7 @@ func UpdateSelf(c *gin.Context) {
     }

     cleanUser := model.User{
-        Id:          c.GetInt("id"),
+        Id:          c.GetInt(ctxkey.Id),
         Username:    user.Username,
         Password:    user.Password,
         DisplayName: user.DisplayName,
@@ -5,6 +5,7 @@ import (
     "github.com/gin-contrib/sessions"
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/blacklist"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/network"
     "github.com/songquanpeng/one-api/model"
     "net/http"
@@ -120,20 +121,20 @@ func TokenAuth() func(c *gin.Context) {
         abortWithMessage(c, http.StatusBadRequest, err.Error())
         return
     }
-    c.Set("request_model", requestModel)
+    c.Set(ctxkey.RequestModel, requestModel)
     if token.Models != nil && *token.Models != "" {
-        c.Set("available_models", *token.Models)
+        c.Set(ctxkey.AvailableModels, *token.Models)
         if requestModel != "" && !isModelInList(requestModel, *token.Models) {
             abortWithMessage(c, http.StatusForbidden, fmt.Sprintf("该令牌无权使用模型:%s", requestModel))
             return
         }
     }
-    c.Set("id", token.UserId)
-    c.Set("token_id", token.Id)
-    c.Set("token_name", token.Name)
+    c.Set(ctxkey.Id, token.UserId)
+    c.Set(ctxkey.TokenId, token.Id)
+    c.Set(ctxkey.TokenName, token.Name)
     if len(parts) > 1 {
         if model.IsAdmin(token.UserId) {
-            c.Set("specific_channel_id", parts[1])
+            c.Set(ctxkey.SpecificChannelId, parts[1])
         } else {
             abortWithMessage(c, http.StatusForbidden, "普通用户不支持指定渠道")
             return
@@ -3,7 +3,6 @@ package middleware
 import (
     "fmt"
     "github.com/gin-gonic/gin"
-    "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/logger"
     "github.com/songquanpeng/one-api/model"
@@ -18,12 +17,12 @@ type ModelRequest struct {

 func Distribute() func(c *gin.Context) {
     return func(c *gin.Context) {
-        userId := c.GetInt("id")
+        userId := c.GetInt(ctxkey.Id)
         userGroup, _ := model.CacheGetUserGroup(userId)
-        c.Set("group", userGroup)
+        c.Set(ctxkey.Group, userGroup)
         var requestModel string
         var channel *model.Channel
-        channelId, ok := c.Get("specific_channel_id")
+        channelId, ok := c.Get(ctxkey.SpecificChannelId)
         if ok {
             id, err := strconv.Atoi(channelId.(string))
             if err != nil {
@@ -40,7 +39,7 @@ func Distribute() func(c *gin.Context) {
                 return
             }
         } else {
-            requestModel = c.GetString("request_model")
+            requestModel = c.GetString(ctxkey.RequestModel)
             var err error
             channel, err = model.CacheGetRandomSatisfiedChannel(userGroup, requestModel, false)
             if err != nil {
@@ -59,28 +58,28 @@ func Distribute() func(c *gin.Context) {
 }

 func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, modelName string) {
-    c.Set("channel", channel.Type)
-    c.Set("channel_id", channel.Id)
-    c.Set("channel_name", channel.Name)
-    c.Set("model_mapping", channel.GetModelMapping())
+    c.Set(ctxkey.Channel, channel.Type)
+    c.Set(ctxkey.ChannelId, channel.Id)
+    c.Set(ctxkey.ChannelName, channel.Name)
+    c.Set(ctxkey.ModelMapping, channel.GetModelMapping())
     c.Set(ctxkey.OriginalModel, modelName) // for retry
     c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
-    c.Set("base_url", channel.GetBaseURL())
+    c.Set(ctxkey.BaseURL, channel.GetBaseURL())
     // this is for backward compatibility
     switch channel.Type {
     case channeltype.Azure:
-        c.Set(config.KeyAPIVersion, channel.Other)
+        c.Set(ctxkey.ConfigAPIVersion, channel.Other)
     case channeltype.Xunfei:
-        c.Set(config.KeyAPIVersion, channel.Other)
+        c.Set(ctxkey.ConfigAPIVersion, channel.Other)
     case channeltype.Gemini:
-        c.Set(config.KeyAPIVersion, channel.Other)
+        c.Set(ctxkey.ConfigAPIVersion, channel.Other)
     case channeltype.AIProxyLibrary:
-        c.Set(config.KeyLibraryID, channel.Other)
+        c.Set(ctxkey.ConfigLibraryID, channel.Other)
     case channeltype.Ali:
-        c.Set(config.KeyPlugin, channel.Other)
+        c.Set(ctxkey.ConfigPlugin, channel.Other)
     }
     cfg, _ := channel.LoadConfig()
     for k, v := range cfg {
-        c.Set(config.KeyPrefix+k, v)
+        c.Set(ctxkey.ConfigPrefix+k, v)
     }
 }
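Beyond the hard-coded switch above, SetupContextForSelectedChannel copies every entry of channel.LoadConfig() into the gin context under ctxkey.ConfigPrefix + key, and adaptors later read the derived constants back. A hedged sketch of that round trip (the helper names and sample values are made up for illustration; only the ctxkey constants come from this diff):

```go
package channelcfg

import (
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common/ctxkey"
)

// applyChannelConfig mirrors the LoadConfig loop above: each entry of the
// channel's config map is stored under ctxkey.ConfigPrefix + key.
func applyChannelConfig(c *gin.Context, cfg map[string]string) {
	for k, v := range cfg {
		c.Set(ctxkey.ConfigPrefix+k, v) // e.g. "cfg_ak", "cfg_sk", "cfg_region"
	}
}

// readAwsCredentials mirrors how the AWS adaptor's newAwsClient (further down
// in this diff) reads the same values back by their derived constants.
func readAwsCredentials(c *gin.Context) (ak, sk, region string) {
	return c.GetString(ctxkey.ConfigAK), c.GetString(ctxkey.ConfigSK), c.GetString(ctxkey.ConfigRegion)
}
```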
@@ -7,6 +7,8 @@ import (
     "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
     "github.com/songquanpeng/one-api/relay/adaptor/aws"
     "github.com/songquanpeng/one-api/relay/adaptor/baidu"
+    "github.com/songquanpeng/one-api/relay/adaptor/cohere"
+    "github.com/songquanpeng/one-api/relay/adaptor/coze"
     "github.com/songquanpeng/one-api/relay/adaptor/gemini"
     "github.com/songquanpeng/one-api/relay/adaptor/ollama"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
@@ -43,6 +45,10 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
         return &zhipu.Adaptor{}
     case apitype.Ollama:
         return &ollama.Adaptor{}
+    case apitype.Coze:
+        return &coze.Adaptor{}
+    case apitype.Cohere:
+        return &cohere.Adaptor{}
     }
     return nil
 }
@@ -4,7 +4,7 @@ import (
     "errors"
     "fmt"
     "github.com/gin-gonic/gin"
-    "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/relay/adaptor"
     "github.com/songquanpeng/one-api/relay/meta"
     "github.com/songquanpeng/one-api/relay/model"
@@ -34,7 +34,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
         return nil, errors.New("request is nil")
     }
     aiProxyLibraryRequest := ConvertRequest(*request)
-    aiProxyLibraryRequest.LibraryId = c.GetString(config.KeyLibraryID)
+    aiProxyLibraryRequest.LibraryId = c.GetString(ctxkey.ConfigLibraryID)
     return aiProxyLibraryRequest, nil
 }
@@ -4,7 +4,7 @@ import (
     "errors"
     "fmt"
     "github.com/gin-gonic/gin"
-    "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/relay/adaptor"
     "github.com/songquanpeng/one-api/relay/meta"
     "github.com/songquanpeng/one-api/relay/model"
@@ -47,8 +47,8 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
     if meta.Mode == relaymode.ImagesGenerations {
         req.Header.Set("X-DashScope-Async", "enable")
     }
-    if c.GetString(config.KeyPlugin) != "" {
-        req.Header.Set("X-DashScope-Plugin", c.GetString(config.KeyPlugin))
+    if c.GetString(ctxkey.ConfigPlugin) != "" {
+        req.Header.Set("X-DashScope-Plugin", c.GetString(ctxkey.ConfigPlugin))
     }
     return nil
 }
@@ -5,7 +5,6 @@ import (
     "bytes"
     "encoding/json"
     "fmt"
-    "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "io"
     "net/http"
@@ -25,9 +24,9 @@ import (
 )

 func newAwsClient(c *gin.Context) (*bedrockruntime.Client, error) {
-    ak := c.GetString(config.KeyAK)
-    sk := c.GetString(config.KeySK)
-    region := c.GetString(config.KeyRegion)
+    ak := c.GetString(ctxkey.ConfigAK)
+    sk := c.GetString(ctxkey.ConfigSK)
+    region := c.GetString(ctxkey.ConfigRegion)
     client := bedrockruntime.New(bedrockruntime.Options{
         Region:      region,
         Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(ak, sk, "")),
@@ -2,14 +2,14 @@ package azure

 import (
     "github.com/gin-gonic/gin"
-    "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
 )

 func GetAPIVersion(c *gin.Context) string {
     query := c.Request.URL.Query()
     apiVersion := query.Get("api-version")
     if apiVersion == "" {
-        apiVersion = c.GetString(config.KeyAPIVersion)
+        apiVersion = c.GetString(ctxkey.ConfigAPIVersion)
     }
     return apiVersion
 }
relay/adaptor/cohere/adaptor.go (new file, 64 lines)
@@ -0,0 +1,64 @@
package cohere

import (
    "errors"
    "fmt"
    "io"
    "net/http"

    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/relay/adaptor"
    "github.com/songquanpeng/one-api/relay/meta"
    "github.com/songquanpeng/one-api/relay/model"
)

type Adaptor struct{}

// ConvertImageRequest implements adaptor.Adaptor.
func (*Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
    return nil, errors.New("not implemented")
}

// ConvertImageRequest implements adaptor.Adaptor.

func (a *Adaptor) Init(meta *meta.Meta) {

}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
    return fmt.Sprintf("%s/v1/chat", meta.BaseURL), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
    adaptor.SetupCommonRequestHeader(c, req, meta)
    req.Header.Set("Authorization", "Bearer "+meta.APIKey)
    return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
    if request == nil {
        return nil, errors.New("request is nil")
    }
    return ConvertRequest(*request), nil
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
    return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
    if meta.IsStream {
        err, usage = StreamHandler(c, resp)
    } else {
        err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
    }
    return
}

func (a *Adaptor) GetModelList() []string {
    return ModelList
}

func (a *Adaptor) GetChannelName() string {
    return "Cohere"
}
relay/adaptor/cohere/constant.go (new file, 7 lines)
@@ -0,0 +1,7 @@
package cohere

var ModelList = []string{
    "command", "command-nightly",
    "command-light", "command-light-nightly",
    "command-r", "command-r-plus",
}
relay/adaptor/cohere/main.go (new file, 233 lines)
@@ -0,0 +1,233 @@
package cohere

import (
    "bufio"
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "strings"

    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common"
    "github.com/songquanpeng/one-api/common/helper"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/model"
)

func stopReasonCohere2OpenAI(reason *string) string {
    if reason == nil {
        return ""
    }
    switch *reason {
    case "COMPLETE":
        return "stop"
    default:
        return *reason
    }
}

func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
    cohereRequest := Request{
        Model:            textRequest.Model,
        Message:          "",
        MaxTokens:        textRequest.MaxTokens,
        Temperature:      textRequest.Temperature,
        P:                textRequest.TopP,
        K:                textRequest.TopK,
        Stream:           textRequest.Stream,
        FrequencyPenalty: textRequest.FrequencyPenalty,
        PresencePenalty:  textRequest.FrequencyPenalty,
        Seed:             int(textRequest.Seed),
    }
    if cohereRequest.Model == "" {
        cohereRequest.Model = "command-r"
    }
    for _, message := range textRequest.Messages {
        if message.Role == "user" {
            cohereRequest.Message = message.Content.(string)
        } else {
            var role string
            if message.Role == "assistant" {
                role = "CHATBOT"
            } else if message.Role == "system" {
                role = "SYSTEM"
            } else {
                role = "USER"
            }
            cohereRequest.ChatHistory = append(cohereRequest.ChatHistory, ChatMessage{
                Role:    role,
                Message: message.Content.(string),
            })
        }
    }
    return &cohereRequest
}

func StreamResponseCohere2OpenAI(cohereResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
    var response *Response
    var responseText string
    var finishReason string

    switch cohereResponse.EventType {
    case "stream-start":
        return nil, nil
    case "text-generation":
        responseText += cohereResponse.Text
    case "stream-end":
        usage := cohereResponse.Response.Meta.Tokens
        response = &Response{
            Meta: Meta{
                Tokens: Usage{
                    InputTokens:  usage.InputTokens,
                    OutputTokens: usage.OutputTokens,
                },
            },
        }
        finishReason = *cohereResponse.Response.FinishReason
    default:
        return nil, nil
    }

    var choice openai.ChatCompletionsStreamResponseChoice
    choice.Delta.Content = responseText
    choice.Delta.Role = "assistant"
    if finishReason != "" {
        choice.FinishReason = &finishReason
    }
    var openaiResponse openai.ChatCompletionsStreamResponse
    openaiResponse.Object = "chat.completion.chunk"
    openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
    return &openaiResponse, response
}

func ResponseCohere2OpenAI(cohereResponse *Response) *openai.TextResponse {
    choice := openai.TextResponseChoice{
        Index: 0,
        Message: model.Message{
            Role:    "assistant",
            Content: cohereResponse.Text,
            Name:    nil,
        },
        FinishReason: stopReasonCohere2OpenAI(cohereResponse.FinishReason),
    }
    fullTextResponse := openai.TextResponse{
        Id:      fmt.Sprintf("chatcmpl-%s", cohereResponse.ResponseID),
        Model:   "model",
        Object:  "chat.completion",
        Created: helper.GetTimestamp(),
        Choices: []openai.TextResponseChoice{choice},
    }
    return &fullTextResponse
}

func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
    createdTime := helper.GetTimestamp()
    scanner := bufio.NewScanner(resp.Body)
    scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
        if atEOF && len(data) == 0 {
            return 0, nil, nil
        }
        if i := bytes.IndexByte(data, '\n'); i >= 0 {
            return i + 1, data[0:i], nil
        }
        if atEOF {
            return len(data), data, nil
        }
        return 0, nil, nil
    })

    dataChan := make(chan string)
    stopChan := make(chan bool)
    go func() {
        for scanner.Scan() {
            data := scanner.Text()
            dataChan <- data
        }
        stopChan <- true
    }()
    common.SetEventStreamHeaders(c)
    var usage model.Usage
    c.Stream(func(w io.Writer) bool {
        select {
        case data := <-dataChan:
            // some implementations may add \r at the end of data
            data = strings.TrimSuffix(data, "\r")
            var cohereResponse StreamResponse
            err := json.Unmarshal([]byte(data), &cohereResponse)
            if err != nil {
                logger.SysError("error unmarshalling stream response: " + err.Error())
                return true
            }
            response, meta := StreamResponseCohere2OpenAI(&cohereResponse)
            if meta != nil {
                usage.PromptTokens += meta.Meta.Tokens.InputTokens
                usage.CompletionTokens += meta.Meta.Tokens.OutputTokens
                return true
            }
            if response == nil {
                return true
            }
            response.Id = fmt.Sprintf("chatcmpl-%d", createdTime)
            response.Model = c.GetString("original_model")
            response.Created = createdTime
            jsonStr, err := json.Marshal(response)
            if err != nil {
                logger.SysError("error marshalling stream response: " + err.Error())
                return true
            }
            c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
            return true
        case <-stopChan:
            c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
            return false
        }
    })
    _ = resp.Body.Close()
    return nil, &usage
}

func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
    responseBody, err := io.ReadAll(resp.Body)
    if err != nil {
        return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
    }
    err = resp.Body.Close()
    if err != nil {
        return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
    }
    var cohereResponse Response
    err = json.Unmarshal(responseBody, &cohereResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
    }
    if cohereResponse.ResponseID == "" {
        return &model.ErrorWithStatusCode{
            Error: model.Error{
                Message: cohereResponse.Message,
                Type:    cohereResponse.Message,
                Param:   "",
                Code:    resp.StatusCode,
            },
            StatusCode: resp.StatusCode,
        }, nil
    }
    fullTextResponse := ResponseCohere2OpenAI(&cohereResponse)
    fullTextResponse.Model = modelName
    usage := model.Usage{
        PromptTokens:     cohereResponse.Meta.Tokens.InputTokens,
        CompletionTokens: cohereResponse.Meta.Tokens.OutputTokens,
        TotalTokens:      cohereResponse.Meta.Tokens.InputTokens + cohereResponse.Meta.Tokens.OutputTokens,
    }
    fullTextResponse.Usage = usage
    jsonResponse, err := json.Marshal(fullTextResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
    }
    c.Writer.Header().Set("Content-Type", "application/json")
    c.Writer.WriteHeader(resp.StatusCode)
    _, err = c.Writer.Write(jsonResponse)
    return nil, &usage
}
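To make the request mapping concrete, here is a small usage sketch of ConvertRequest; the sample messages are invented, and the behavior noted in the comments follows the function above:

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/cohere"
	"github.com/songquanpeng/one-api/relay/model"
)

func main() {
	// Every "user" turn is written to Message (so the last one wins); other
	// roles are appended to ChatHistory with SYSTEM/CHATBOT/USER roles, and an
	// empty model falls back to "command-r".
	req := model.GeneralOpenAIRequest{
		Model: "",
		Messages: []model.Message{
			{Role: "system", Content: "You are terse."},
			{Role: "user", Content: "Hello"},
		},
	}
	cohereReq := cohere.ConvertRequest(req)
	fmt.Println(cohereReq.Model)            // "command-r" (default applied)
	fmt.Println(cohereReq.Message)          // "Hello"
	fmt.Println(len(cohereReq.ChatHistory)) // 1: the system turn, stored as role "SYSTEM"
}
```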
relay/adaptor/cohere/model.go (new file, 147 lines)
@@ -0,0 +1,147 @@
package cohere

type Request struct {
    Message          string        `json:"message" required:"true"`
    Model            string        `json:"model,omitempty"`             // defaults to "command-r"
    Stream           bool          `json:"stream,omitempty"`            // defaults to false
    Preamble         string        `json:"preamble,omitempty"`
    ChatHistory      []ChatMessage `json:"chat_history,omitempty"`
    ConversationID   string        `json:"conversation_id,omitempty"`
    PromptTruncation string        `json:"prompt_truncation,omitempty"` // defaults to "AUTO"
    Connectors       []Connector   `json:"connectors,omitempty"`
    Documents        []Document    `json:"documents,omitempty"`
    Temperature      float64       `json:"temperature,omitempty"`       // defaults to 0.3
    MaxTokens        int           `json:"max_tokens,omitempty"`
    MaxInputTokens   int           `json:"max_input_tokens,omitempty"`
    K                int           `json:"k,omitempty"`                 // defaults to 0
    P                float64       `json:"p,omitempty"`                 // defaults to 0.75
    Seed             int           `json:"seed,omitempty"`
    StopSequences    []string      `json:"stop_sequences,omitempty"`
    FrequencyPenalty float64       `json:"frequency_penalty,omitempty"` // defaults to 0.0
    PresencePenalty  float64       `json:"presence_penalty,omitempty"`  // defaults to 0.0
    Tools            []Tool        `json:"tools,omitempty"`
    ToolResults      []ToolResult  `json:"tool_results,omitempty"`
}

type ChatMessage struct {
    Role    string `json:"role" required:"true"`
    Message string `json:"message" required:"true"`
}

type Tool struct {
    Name                 string                   `json:"name" required:"true"`
    Description          string                   `json:"description" required:"true"`
    ParameterDefinitions map[string]ParameterSpec `json:"parameter_definitions"`
}

type ParameterSpec struct {
    Description string `json:"description"`
    Type        string `json:"type" required:"true"`
    Required    bool   `json:"required"`
}

type ToolResult struct {
    Call    ToolCall                 `json:"call"`
    Outputs []map[string]interface{} `json:"outputs"`
}

type ToolCall struct {
    Name       string                 `json:"name" required:"true"`
    Parameters map[string]interface{} `json:"parameters" required:"true"`
}

type StreamResponse struct {
    IsFinished    bool            `json:"is_finished"`
    EventType     string          `json:"event_type"`
    GenerationID  string          `json:"generation_id,omitempty"`
    SearchQueries []*SearchQuery  `json:"search_queries,omitempty"`
    SearchResults []*SearchResult `json:"search_results,omitempty"`
    Documents     []*Document     `json:"documents,omitempty"`
    Text          string          `json:"text,omitempty"`
    Citations     []*Citation     `json:"citations,omitempty"`
    Response      *Response       `json:"response,omitempty"`
    FinishReason  string          `json:"finish_reason,omitempty"`
}

type SearchQuery struct {
    Text         string `json:"text"`
    GenerationID string `json:"generation_id"`
}

type SearchResult struct {
    SearchQuery *SearchQuery `json:"search_query"`
    DocumentIDs []string     `json:"document_ids"`
    Connector   *Connector   `json:"connector"`
}

type Connector struct {
    ID string `json:"id"`
}

type Document struct {
    ID        string `json:"id"`
    Snippet   string `json:"snippet"`
    Timestamp string `json:"timestamp"`
    Title     string `json:"title"`
    URL       string `json:"url"`
}

type Citation struct {
    Start       int      `json:"start"`
    End         int      `json:"end"`
    Text        string   `json:"text"`
    DocumentIDs []string `json:"document_ids"`
}

type Response struct {
    ResponseID    string          `json:"response_id"`
    Text          string          `json:"text"`
    GenerationID  string          `json:"generation_id"`
    ChatHistory   []*Message      `json:"chat_history"`
    FinishReason  *string         `json:"finish_reason"`
    Meta          Meta            `json:"meta"`
    Citations     []*Citation     `json:"citations"`
    Documents     []*Document     `json:"documents"`
    SearchResults []*SearchResult `json:"search_results"`
    SearchQueries []*SearchQuery  `json:"search_queries"`
    Message       string          `json:"message"`
}

type Message struct {
    Role    string `json:"role"`
    Message string `json:"message"`
}

type Version struct {
    Version string `json:"version"`
}

type Units struct {
    InputTokens  int `json:"input_tokens"`
    OutputTokens int `json:"output_tokens"`
}

type ChatEntry struct {
    Role    string `json:"role"`
    Message string `json:"message"`
}

type Meta struct {
    APIVersion  APIVersion  `json:"api_version"`
    BilledUnits BilledUnits `json:"billed_units"`
    Tokens      Usage       `json:"tokens"`
}

type APIVersion struct {
    Version string `json:"version"`
}

type BilledUnits struct {
    InputTokens  int `json:"input_tokens"`
    OutputTokens int `json:"output_tokens"`
}

type Usage struct {
    InputTokens  int `json:"input_tokens"`
    OutputTokens int `json:"output_tokens"`
}
relay/adaptor/coze/adaptor.go (new file, 75 lines)
@@ -0,0 +1,75 @@
package coze

import (
    "errors"
    "fmt"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "github.com/songquanpeng/one-api/relay/adaptor"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/meta"
    "github.com/songquanpeng/one-api/relay/model"
    "io"
    "net/http"
)

type Adaptor struct {
}

func (a *Adaptor) Init(meta *meta.Meta) {

}

func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
    return fmt.Sprintf("%s/open_api/v2/chat", meta.BaseURL), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
    adaptor.SetupCommonRequestHeader(c, req, meta)
    req.Header.Set("Authorization", "Bearer "+meta.APIKey)
    return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
    if request == nil {
        return nil, errors.New("request is nil")
    }
    request.User = c.GetString(ctxkey.ConfigUserID)
    return ConvertRequest(*request), nil
}

func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
    if request == nil {
        return nil, errors.New("request is nil")
    }
    return request, nil
}

func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
    return adaptor.DoRequestHelper(a, c, meta, requestBody)
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
    var responseText *string
    if meta.IsStream {
        err, responseText = StreamHandler(c, resp)
    } else {
        err, responseText = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
    }
    if responseText != nil {
        usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens)
    } else {
        usage = &model.Usage{}
    }
    usage.PromptTokens = meta.PromptTokens
    usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
    return
}

func (a *Adaptor) GetModelList() []string {
    return ModelList
}

func (a *Adaptor) GetChannelName() string {
    return "coze"
}
relay/adaptor/coze/constant/contenttype/define.go (new file, 5 lines)
@@ -0,0 +1,5 @@
package contenttype

const (
    Text = "text"
)
relay/adaptor/coze/constant/event/define.go (new file, 7 lines)
@@ -0,0 +1,7 @@
package event

const (
    Message = "message"
    Done    = "done"
    Error   = "error"
)
relay/adaptor/coze/constant/messagetype/define.go (new file, 6 lines)
@@ -0,0 +1,6 @@
package messagetype

const (
    Answer   = "answer"
    FollowUp = "follow_up"
)
relay/adaptor/coze/constants.go (new file, 3 lines)
@@ -0,0 +1,3 @@
package coze

var ModelList = []string{}
relay/adaptor/coze/helper.go (new file, 10 lines)
@@ -0,0 +1,10 @@
package coze

import "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"

func event2StopReason(e *string) string {
    if e == nil || *e == event.Message {
        return ""
    }
    return "stop"
}
relay/adaptor/coze/main.go (new file, 215 lines)
@@ -0,0 +1,215 @@
package coze

import (
    "bufio"
    "encoding/json"
    "fmt"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common"
    "github.com/songquanpeng/one-api/common/conv"
    "github.com/songquanpeng/one-api/common/helper"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/model"
    "io"
    "net/http"
    "strings"
)

// https://www.coze.com/open

func stopReasonCoze2OpenAI(reason *string) string {
    if reason == nil {
        return ""
    }
    switch *reason {
    case "end_turn":
        return "stop"
    case "stop_sequence":
        return "stop"
    case "max_tokens":
        return "length"
    default:
        return *reason
    }
}

func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
    cozeRequest := Request{
        Stream: textRequest.Stream,
        User:   textRequest.User,
        BotId:  strings.TrimPrefix(textRequest.Model, "bot-"),
    }
    for i, message := range textRequest.Messages {
        if i == len(textRequest.Messages)-1 {
            cozeRequest.Query = message.StringContent()
            continue
        }
        cozeMessage := Message{
            Role:    message.Role,
            Content: message.StringContent(),
        }
        cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage)
    }
    return &cozeRequest
}

func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
    var response *Response
    var stopReason string
    var choice openai.ChatCompletionsStreamResponseChoice

    if cozeResponse.Message != nil {
        if cozeResponse.Message.Type != messagetype.Answer {
            return nil, nil
        }
        choice.Delta.Content = cozeResponse.Message.Content
    }
    choice.Delta.Role = "assistant"
    finishReason := stopReasonCoze2OpenAI(&stopReason)
    if finishReason != "null" {
        choice.FinishReason = &finishReason
    }
    var openaiResponse openai.ChatCompletionsStreamResponse
    openaiResponse.Object = "chat.completion.chunk"
    openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
    openaiResponse.Id = cozeResponse.ConversationId
    return &openaiResponse, response
}

func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
    var responseText string
    for _, message := range cozeResponse.Messages {
        if message.Type == messagetype.Answer {
            responseText = message.Content
            break
        }
    }
    choice := openai.TextResponseChoice{
        Index: 0,
        Message: model.Message{
            Role:    "assistant",
            Content: responseText,
            Name:    nil,
        },
        FinishReason: "stop",
    }
    fullTextResponse := openai.TextResponse{
        Id:      fmt.Sprintf("chatcmpl-%s", cozeResponse.ConversationId),
        Model:   "coze-bot",
        Object:  "chat.completion",
        Created: helper.GetTimestamp(),
        Choices: []openai.TextResponseChoice{choice},
    }
    return &fullTextResponse
}

func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
    var responseText string
    createdTime := helper.GetTimestamp()
    scanner := bufio.NewScanner(resp.Body)
    scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
        if atEOF && len(data) == 0 {
            return 0, nil, nil
        }
        if i := strings.Index(string(data), "\n"); i >= 0 {
            return i + 1, data[0:i], nil
        }
        if atEOF {
            return len(data), data, nil
        }
        return 0, nil, nil
    })
    dataChan := make(chan string)
    stopChan := make(chan bool)
    go func() {
        for scanner.Scan() {
            data := scanner.Text()
            if len(data) < 5 {
                continue
            }
            if !strings.HasPrefix(data, "data:") {
                continue
            }
            data = strings.TrimPrefix(data, "data:")
            dataChan <- data
        }
        stopChan <- true
    }()
    common.SetEventStreamHeaders(c)
    var modelName string
    c.Stream(func(w io.Writer) bool {
        select {
        case data := <-dataChan:
            // some implementations may add \r at the end of data
            data = strings.TrimSuffix(data, "\r")
            var cozeResponse StreamResponse
            err := json.Unmarshal([]byte(data), &cozeResponse)
            if err != nil {
                logger.SysError("error unmarshalling stream response: " + err.Error())
                return true
            }
            response, _ := StreamResponseCoze2OpenAI(&cozeResponse)
            if response == nil {
                return true
            }
            for _, choice := range response.Choices {
                responseText += conv.AsString(choice.Delta.Content)
            }
            response.Model = modelName
            response.Created = createdTime
            jsonStr, err := json.Marshal(response)
            if err != nil {
                logger.SysError("error marshalling stream response: " + err.Error())
                return true
            }
            c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
            return true
        case <-stopChan:
            c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
            return false
        }
    })
    _ = resp.Body.Close()
    return nil, &responseText
}

func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
    responseBody, err := io.ReadAll(resp.Body)
    if err != nil {
        return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
    }
    err = resp.Body.Close()
    if err != nil {
        return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
    }
    var cozeResponse Response
    err = json.Unmarshal(responseBody, &cozeResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
    }
    if cozeResponse.Code != 0 {
        return &model.ErrorWithStatusCode{
            Error: model.Error{
                Message: cozeResponse.Msg,
                Code:    cozeResponse.Code,
            },
            StatusCode: resp.StatusCode,
        }, nil
    }
    fullTextResponse := ResponseCoze2OpenAI(&cozeResponse)
    fullTextResponse.Model = modelName
    jsonResponse, err := json.Marshal(fullTextResponse)
    if err != nil {
        return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
    }
    c.Writer.Header().Set("Content-Type", "application/json")
    c.Writer.WriteHeader(resp.StatusCode)
    _, err = c.Writer.Write(jsonResponse)
    var responseText string
    if len(fullTextResponse.Choices) > 0 {
        responseText = fullTextResponse.Choices[0].Message.StringContent()
    }
    return nil, &responseText
}
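Worth noting from ConvertRequest above: the Coze bot id rides on the OpenAI model name. A short usage sketch (the bot id and messages are placeholders):

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/coze"
	"github.com/songquanpeng/one-api/relay/model"
)

func main() {
	req := model.GeneralOpenAIRequest{
		Model: "bot-1234567890", // "bot-" prefix + the Coze bot id (placeholder)
		Messages: []model.Message{
			{Role: "user", Content: "Earlier question"},
			{Role: "assistant", Content: "Earlier answer"},
			{Role: "user", Content: "Latest question"},
		},
	}
	cozeReq := coze.ConvertRequest(req)
	fmt.Println(cozeReq.BotId)            // "1234567890"
	fmt.Println(cozeReq.Query)            // "Latest question" (the final message)
	fmt.Println(len(cozeReq.ChatHistory)) // 2: everything before the final message
}
```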
relay/adaptor/coze/model.go (new file, 38 lines)
@@ -0,0 +1,38 @@
package coze

type Message struct {
    Role        string `json:"role"`
    Type        string `json:"type"`
    Content     string `json:"content"`
    ContentType string `json:"content_type"`
}

type ErrorInformation struct {
    Code int    `json:"code"`
    Msg  string `json:"msg"`
}

type Request struct {
    ConversationId string    `json:"conversation_id,omitempty"`
    BotId          string    `json:"bot_id"`
    User           string    `json:"user"`
    Query          string    `json:"query"`
    ChatHistory    []Message `json:"chat_history,omitempty"`
    Stream         bool      `json:"stream"`
}

type Response struct {
    ConversationId string    `json:"conversation_id,omitempty"`
    Messages       []Message `json:"messages,omitempty"`
    Code           int       `json:"code,omitempty"`
    Msg            string    `json:"msg,omitempty"`
}

type StreamResponse struct {
    Event            string            `json:"event,omitempty"`
    Message          *Message          `json:"message,omitempty"`
    IsFinish         bool              `json:"is_finish,omitempty"`
    Index            int               `json:"index,omitempty"`
    ConversationId   string            `json:"conversation_id,omitempty"`
    ErrorInformation *ErrorInformation `json:"error_information,omitempty"`
}
@@ -4,6 +4,10 @@ import (
     "bufio"
     "encoding/json"
     "fmt"
+    "io"
+    "net/http"
+    "strings"
+
     "github.com/songquanpeng/one-api/common"
     "github.com/songquanpeng/one-api/common/config"
     "github.com/songquanpeng/one-api/common/helper"
@@ -13,9 +17,6 @@ import (
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
     "github.com/songquanpeng/one-api/relay/constant"
     "github.com/songquanpeng/one-api/relay/model"
-    "io"
-    "net/http"
-    "strings"

     "github.com/gin-gonic/gin"
 )
@@ -54,7 +55,17 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
             MaxOutputTokens: textRequest.MaxTokens,
         },
     }
-    if textRequest.Functions != nil {
+    if textRequest.Tools != nil {
+        functions := make([]model.Function, 0, len(textRequest.Tools))
+        for _, tool := range textRequest.Tools {
+            functions = append(functions, tool.Function)
+        }
+        geminiRequest.Tools = []ChatTools{
+            {
+                FunctionDeclarations: functions,
+            },
+        }
+    } else if textRequest.Functions != nil {
         geminiRequest.Tools = []ChatTools{
             {
                 FunctionDeclarations: textRequest.Functions,
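To illustrate the new branch, this is roughly how an OpenAI-style tools request flows through the conversion; the weather tool is a made-up example, and any ChatRequest fields beyond Tools are simply whatever gemini.ConvertRequest fills in:

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/gemini"
	"github.com/songquanpeng/one-api/relay/model"
)

func main() {
	req := model.GeneralOpenAIRequest{
		Model:    "gemini-pro",
		Messages: []model.Message{{Role: "user", Content: "What's the weather in Paris?"}},
		Tools: []model.Tool{{
			Type:     "function",
			Function: model.Function{Name: "get_weather"}, // made-up tool
		}},
	}
	geminiReq := gemini.ConvertRequest(req)
	// All tool functions are collected under a single ChatTools entry, which the
	// model.go change further down serializes as "function_declarations".
	fmt.Println(len(geminiReq.Tools)) // 1
}
```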
@@ -154,6 +165,30 @@ type ChatPromptFeedback struct {
     SafetyRatings []ChatSafetyRating `json:"safetyRatings"`
 }

+func getToolCalls(candidate *ChatCandidate) []model.Tool {
+    var toolCalls []model.Tool
+
+    item := candidate.Content.Parts[0]
+    if item.FunctionCall == nil {
+        return toolCalls
+    }
+    argsBytes, err := json.Marshal(item.FunctionCall.Arguments)
+    if err != nil {
+        logger.FatalLog("getToolCalls failed: " + err.Error())
+        return toolCalls
+    }
+    toolCall := model.Tool{
+        Id:   fmt.Sprintf("call_%s", random.GetUUID()),
+        Type: "function",
+        Function: model.Function{
+            Arguments: string(argsBytes),
+            Name:      item.FunctionCall.FunctionName,
+        },
+    }
+    toolCalls = append(toolCalls, toolCall)
+    return toolCalls
+}
+
 func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
     fullTextResponse := openai.TextResponse{
         Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
@@ -165,13 +200,19 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
         choice := openai.TextResponseChoice{
             Index: i,
             Message: model.Message{
-                Role:    "assistant",
-                Content: "",
+                Role: "assistant",
             },
             FinishReason: constant.StopFinishReason,
         }
         if len(candidate.Content.Parts) > 0 {
-            choice.Message.Content = candidate.Content.Parts[0].Text
+            if candidate.Content.Parts[0].FunctionCall != nil {
+                choice.Message.ToolCalls = getToolCalls(&candidate)
+            } else {
+                choice.Message.Content = candidate.Content.Parts[0].Text
+            }
+        } else {
+            choice.Message.Content = ""
+            choice.FinishReason = candidate.FinishReason
         }
         fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
     }
@@ -12,9 +12,15 @@ type InlineData struct {
     Data string `json:"data"`
 }

+type FunctionCall struct {
+    FunctionName string `json:"name"`
+    Arguments    any    `json:"args"`
+}
+
 type Part struct {
-    Text       string      `json:"text,omitempty"`
-    InlineData *InlineData `json:"inlineData,omitempty"`
+    Text         string        `json:"text,omitempty"`
+    InlineData   *InlineData   `json:"inlineData,omitempty"`
+    FunctionCall *FunctionCall `json:"functionCall,omitempty"`
 }

 type ChatContent struct {
@@ -28,7 +34,7 @@ type ChatSafetySettings struct {
|
||||
}
|
||||
|
||||
type ChatTools struct {
|
||||
FunctionDeclarations any `json:"functionDeclarations,omitempty"`
|
||||
FunctionDeclarations any `json:"function_declarations,omitempty"`
|
||||
}
|
||||
|
||||
type ChatGenerationConfig struct {
|
||||
|
||||
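
For reference, a self-contained sketch of the wire shape these tags describe; the sample functionCall part is illustrative only (InlineData omitted, payload not copied from Gemini's documentation):

package main

import (
    "encoding/json"
    "fmt"
)

// Local copies of the structs above so the sketch runs on its own.
type FunctionCall struct {
    FunctionName string `json:"name"`
    Arguments    any    `json:"args"`
}

type Part struct {
    Text         string        `json:"text,omitempty"`
    FunctionCall *FunctionCall `json:"functionCall,omitempty"`
}

func main() {
    // A trimmed-down candidate part carrying a function call.
    raw := `{"functionCall":{"name":"get_weather","args":{"city":"Paris"}}}`
    var p Part
    if err := json.Unmarshal([]byte(raw), &p); err != nil {
        panic(err)
    }
    fmt.Println(p.FunctionCall.FunctionName) // get_weather

    // getToolCalls above re-marshals the args into the OpenAI-style arguments string.
    args, _ := json.Marshal(p.FunctionCall.Arguments)
    fmt.Println(string(args)) // {"city":"Paris"}
}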
@@ -9,7 +9,7 @@ import (
    "github.com/gin-gonic/gin"
    "github.com/gorilla/websocket"
    "github.com/songquanpeng/one-api/common"
    "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "github.com/songquanpeng/one-api/common/helper"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/common/random"
@@ -280,7 +280,7 @@ func getAPIVersion(c *gin.Context, modelName string) string {
        return apiVersion
    }
    apiVersion = c.GetString(config.KeyAPIVersion)
    apiVersion = c.GetString(ctxkey.ConfigAPIVersion)
    if apiVersion != "" {
        return apiVersion
    }
@@ -13,6 +13,8 @@ const (
    Gemini
    Ollama
    AwsClaude
    Coze
    Cohere

    Dummy // this one is only for count, do not add any channel after this
)
@@ -2,8 +2,9 @@ package ratio

import (
    "encoding/json"
    "github.com/songquanpeng/one-api/common/logger"
    "strings"

    "github.com/songquanpeng/one-api/common/logger"
)

const (
@@ -162,6 +163,13 @@ var ModelRatio = map[string]float64{
    "step-1v-32k": 0.024 * RMB,
    "step-1-32k":  0.024 * RMB,
    "step-1-200k": 0.15 * RMB,
    // https://cohere.com/pricing
    "command":               0.5,
    "command-nightly":       0.5,
    "command-light":         0.5,
    "command-light-nightly": 0.5,
    "command-r":             0.5 / 1000 * USD,
    "command-r-plus":        3.0 / 1000 * USD,
}

var CompletionRatio = map[string]float64{}
@@ -284,6 +292,12 @@ func GetCompletionRatio(name string) float64 {
        return 2
    case "llama3-70b-8192":
        return 0.79 / 0.59
    case "command", "command-light", "command-nightly", "command-light-nightly":
        return 2
    case "command-r":
        return 3
    case "command-r-plus":
        return 5
    }
    return 1
}
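
A back-of-envelope check of the new Cohere entries, assuming the USD constant in this file's const block still means a ratio of 1.0 equals $0.002 per 1K tokens:

package main

import "fmt"

func main() {
    // Assumption: ratio.go defines USD = 500, so ratio 1.0 == $0.002 per 1K tokens.
    const usd = 500.0
    const dollarsPerRatioPer1M = 0.002 * 1000 // $ per 1M tokens for a ratio of 1.0

    models := []struct {
        name            string
        promptRatio     float64
        completionRatio float64
    }{
        {"command-r", 0.5 / 1000 * usd, 3},
        {"command-r-plus", 3.0 / 1000 * usd, 5},
    }
    for _, m := range models {
        prompt := m.promptRatio * dollarsPerRatioPer1M
        fmt.Printf("%-15s $%.2f prompt / $%.2f completion per 1M tokens\n",
            m.name, prompt, prompt*m.completionRatio)
    }
    // Which should line up with the cohere.com/pricing figures the comment references:
    // command-r       $0.50 prompt / $1.50 completion per 1M tokens
    // command-r-plus  $3.00 prompt / $15.00 completion per 1M tokens
}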
@@ -35,6 +35,8 @@ const (
    LingYiWanWu
    StepFun
    AwsClaude
    Coze
    Cohere

    Dummy
)
@@ -27,6 +27,10 @@ func ToAPIType(channelType int) int {
        apiType = apitype.Ollama
    case AwsClaude:
        apiType = apitype.AwsClaude
    case Coze:
        apiType = apitype.Coze
    case Cohere:
        apiType = apitype.Cohere
    }

    return apiType
@@ -35,6 +35,8 @@ var ChannelBaseURLs = []string{
    "https://api.lingyiwanwu.com", // 31
    "https://api.stepfun.com",     // 32
    "",                            // 33
    "https://api.coze.com",        // 34
    "https://api.cohere.ai",       // 35
}

func init() {
@@ -10,6 +10,7 @@ import (
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common"
    "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/model"
    "github.com/songquanpeng/one-api/relay/adaptor/azure"
@@ -29,12 +30,12 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
    ctx := c.Request.Context()
    audioModel := "whisper-1"

    tokenId := c.GetInt("token_id")
    channelType := c.GetInt("channel")
    channelId := c.GetInt("channel_id")
    userId := c.GetInt("id")
    group := c.GetString("group")
    tokenName := c.GetString("token_name")
    tokenId := c.GetInt(ctxkey.TokenId)
    channelType := c.GetInt(ctxkey.Channel)
    channelId := c.GetInt(ctxkey.ChannelId)
    userId := c.GetInt(ctxkey.Id)
    group := c.GetString(ctxkey.Group)
    tokenName := c.GetString(ctxkey.TokenName)

    var ttsRequest openai.TextToSpeechRequest
    if relayMode == relaymode.AudioSpeech {
@@ -107,7 +108,7 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
    }()

    // map model name
    modelMapping := c.GetString("model_mapping")
    modelMapping := c.GetString(ctxkey.ModelMapping)
    if modelMapping != "" {
        modelMap := make(map[string]string)
        err := json.Unmarshal([]byte(modelMapping), &modelMap)
@@ -121,8 +122,8 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus

    baseURL := channeltype.ChannelBaseURLs[channelType]
    requestURL := c.Request.URL.String()
    if c.GetString("base_url") != "" {
        baseURL = c.GetString("base_url")
    if c.GetString(ctxkey.BaseURL) != "" {
        baseURL = c.GetString(ctxkey.BaseURL)
    }

    fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType)
@@ -7,6 +7,7 @@ import (
    "errors"
    "fmt"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/model"
    "github.com/songquanpeng/one-api/relay"
@@ -119,11 +120,11 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
            logger.SysError("error update user quota cache: " + err.Error())
        }
        if quota != 0 {
            tokenName := c.GetString("token_name")
            tokenName := c.GetString(ctxkey.TokenName)
            logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio)
            model.RecordConsumeLog(ctx, meta.UserId, meta.ChannelId, 0, 0, imageRequest.Model, tokenName, quota, logContent)
            model.UpdateUserUsedQuotaAndRequestCount(meta.UserId, quota)
            channelId := c.GetInt("channel_id")
            channelId := c.GetInt(ctxkey.ChannelId)
            model.UpdateChannelUsedQuota(channelId, quota)
        }
    }(c.Request.Context())
@@ -2,7 +2,7 @@ package meta

import (
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "github.com/songquanpeng/one-api/relay/adaptor/azure"
    "github.com/songquanpeng/one-api/relay/channeltype"
    "github.com/songquanpeng/one-api/relay/relaymode"
@@ -33,15 +33,15 @@ type Meta struct {

func GetByContext(c *gin.Context) *Meta {
    meta := Meta{
        Mode:           relaymode.GetByPath(c.Request.URL.Path),
        ChannelType:    c.GetInt("channel"),
        ChannelId:      c.GetInt("channel_id"),
        TokenId:        c.GetInt("token_id"),
        TokenName:      c.GetString("token_name"),
        UserId:         c.GetInt("id"),
        Group:          c.GetString("group"),
        ModelMapping:   c.GetStringMapString("model_mapping"),
        BaseURL:        c.GetString("base_url"),
        APIVersion:     c.GetString(config.KeyAPIVersion),
        ChannelType:    c.GetInt(ctxkey.Channel),
        ChannelId:      c.GetInt(ctxkey.ChannelId),
        TokenId:        c.GetInt(ctxkey.TokenId),
        TokenName:      c.GetString(ctxkey.TokenName),
        UserId:         c.GetInt(ctxkey.Id),
        Group:          c.GetString(ctxkey.Group),
        ModelMapping:   c.GetStringMapString(ctxkey.ModelMapping),
        BaseURL:        c.GetString(ctxkey.BaseURL),
        APIVersion:     c.GetString(ctxkey.ConfigAPIVersion),
        APIKey:         strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "),
        Config:         nil,
        RequestURLPath: c.Request.URL.String(),
@@ -33,7 +33,7 @@ function renderType(type) {
    }
    type2label[0] = { value: 0, text: '未知类型', color: 'grey' };
  }
  return <Label basic color={type2label[type]?.color}>{type2label[type]?.text}</Label>;
  return <Label basic color={type2label[type]?.color}>{type2label[type] ? type2label[type].text : type}</Label>;
}

function renderBalance(type, balance) {
@@ -19,6 +19,8 @@ export const CHANNEL_OPTIONS = [
  { key: 30, text: 'Ollama', value: 30, color: 'black' },
  { key: 31, text: '零一万物', value: 31, color: 'green' },
  { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
  { key: 34, text: 'Coze', value: 34, color: 'blue' },
  { key: 35, text: 'Cohere', value: 35, color: 'blue' },
  { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
  { key: 22, text: '知识库:FastGPT', value: 22, color: 'blue' },
  { key: 21, text: '知识库:AI Proxy', value: 21, color: 'purple' },
@@ -174,7 +174,7 @@ const EditChannel = () => {
      showInfo('模型映射必须是合法的 JSON 格式!');
      return;
    }
    let localInputs = inputs;
    let localInputs = {...inputs};
    if (localInputs.base_url && localInputs.base_url.endsWith('/')) {
      localInputs.base_url = localInputs.base_url.slice(0, localInputs.base_url.length - 1);
    }
@@ -355,6 +355,13 @@ const EditChannel = () => {
          </Form.Field>
        )
      }
      {
        inputs.type === 34 && (
          <Message>
            对于 Coze 而言,模型名称即 Bot ID,你可以添加一个前缀 `bot-`,例如:`bot-123456`。
          </Message>
        )
      }
      <Form.Field>
        <Form.Dropdown
          label='模型'