Compare commits

..

35 Commits

Author SHA1 Message Date
JustSong
f965469e8a chore: update dependencies version 2024-05-22 00:52:23 +08:00
JustSong
03ea60532a fix: fix html lang attribute (close #1433) 2024-05-21 01:20:37 +08:00
Qiying Wang
2457d00afb feat: support gpt-4o (#1431) 2024-05-21 01:14:22 +08:00
JustSong
91b80ae879 fix: remove extra space 2024-05-07 23:57:34 +08:00
JustSong
2720e1a358 feat: support minimax's 6.5 models (close #1395) 2024-04-30 02:23:14 +08:00
JustSong
71f4403fd5 feat: add together.ai support (#1298) 2024-04-30 02:16:53 +08:00
JustSong
1f76c80553 fix: fix aws claude panic (#1384) 2024-04-29 22:49:06 +08:00
JustSong
7e027d2bd0 fix: fix minimax prompt & completion tokens is empty (#1391) 2024-04-29 22:35:47 +08:00
JustSong
30f373b623 fix: fix usage is empty (close #1391) 2024-04-29 22:29:13 +08:00
plusye
1c2654320e fix: fix getPreConsumedQuota (#1312) 2024-04-27 16:07:06 +08:00
caixinjiang
6cffb116b7 fix: fix zhipu embedding error when input is array but not string (#1306)
* fix zhipu embedding error when input is array but not string

* fix: only use the first one

---------

Co-authored-by: 蔡新疆 <cxj@icc.link>
Co-authored-by: JustSong <songquanpeng@foxmail.com>
2024-04-27 16:05:14 +08:00
Qiying Wang
a84c7b38b7 fix: claude stream response parse (#1334) 2024-04-27 15:58:07 +08:00
tylinux
1bd14af47b feat: use mapped model name to test (#1370) 2024-04-27 15:53:20 +08:00
NongMO
6170b91d1c feat: support for the ollama vision model (#1376)
* feat: support for the ollama vision model

`llava` model, pass test

* Update main.go

format code

* chore: remove useless log

---------

Co-authored-by: nongqiqin <nongqiqin@tipdm.com>
Co-authored-by: JustSong <songquanpeng@foxmail.com>
2024-04-27 15:47:27 +08:00
JustSong
04b49aa0ec chore: use StringContent() to convert response to text 2024-04-27 15:41:02 +08:00
Wei Tingjiang
ef88497f25 fix: refactor Gemini adaptor to support streaming content generation (#1382) 2024-04-27 15:39:59 +08:00
JustSong
007906216d feat: support DeepL's model (close #1126) 2024-04-27 13:37:22 +08:00
JustSong
e64e7707a0 feat: support cohere's web search 2024-04-27 00:06:43 +08:00
JustSong
ea210b6ed7 chore: update ollama models 2024-04-26 23:12:39 +08:00
JustSong
9026ec7510 feat: support cloudflare now 2024-04-26 23:05:48 +08:00
JustSong
c317872097 feat: support deepseek now 2024-04-26 00:48:53 +08:00
JustSong
da0842272c fix: add model to response (close #1362) 2024-04-24 22:19:58 +08:00
JustSong
0a650b85b4 chore: update berry 2024-04-24 22:08:47 +08:00
Ghostz
24f026d18e feat: add cohere support (#1355)
* support cohere

* chore: tiny improvements

---------

Co-authored-by: JustSong <songquanpeng@foxmail.com>
2024-04-24 21:50:01 +08:00
tylinux
cb33e8aad5 fix: fix default theme blank screen when edit channel again (#1363)
* fix: throw exception after submit channel edit

* fix: replace with destructuring assignment
2024-04-24 21:29:48 +08:00
Wei Tingjiang
779b747e9e feat: add function and tools support for Gemini (#1358)
* Update model.go

* Support Gemini tool_calls.

* Fix gemini tool calls (also keep support functions).

* Fixed the problem of arguments not being stringified.

Fix panic: candidate.Content.Parts out of range
2024-04-24 21:26:45 +08:00
JustSong
3d149fedf4 chore: do not hardcode context key 2024-04-21 19:43:23 +08:00
JustSong
83517f687c chore: move config key to package ctxkey 2024-04-21 18:55:25 +08:00
JustSong
e30ebda0fe chore: move config key to package ctxkey 2024-04-21 18:55:13 +08:00
JustSong
d87c55f542 chore: render unknown channel type 2024-04-21 18:54:35 +08:00
JustSong
e5b3e37c46 feat: support bot prefix for coze 2024-04-21 18:04:56 +08:00
JustSong
8de489cf06 feat: support coze now 2024-04-21 17:59:57 +08:00
JustSong
d14e4aa01b fix: key is wrongly updated 2024-04-21 17:38:39 +08:00
JustSong
541182102e fix: ignore empty choice response for azure (close #1324) 2024-04-21 16:22:28 +08:00
JustSong
b2679cca65 fix: fix preview completion ratio (close #1326) 2024-04-21 15:57:01 +08:00
90 changed files with 2097 additions and 374 deletions

View File

@@ -82,6 +82,12 @@ _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用
 + [x] [Ollama](https://github.com/ollama/ollama)
 + [x] [零一万物](https://platform.lingyiwanwu.com/)
 + [x] [阶跃星辰](https://platform.stepfun.com/)
++ [x] [Coze](https://www.coze.com/)
++ [x] [Cohere](https://cohere.com/)
++ [x] [DeepSeek](https://www.deepseek.com/)
++ [x] [Cloudflare Workers AI](https://developers.cloudflare.com/workers-ai/)
++ [x] [DeepL](https://www.deepl.com/)
++ [x] [together.ai](https://www.together.ai/)
 2. 支持配置镜像以及众多[第三方代理服务](https://iamazing.cn/page/openai-api-third-party-services)。
 3. 支持通过**负载均衡**的方式访问多个渠道。
 4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。

View File

@@ -1,12 +0,0 @@
-package config
-
-const (
-    KeyPrefix = "cfg_"
-
-    KeyAPIVersion = KeyPrefix + "api_version"
-    KeyLibraryID  = KeyPrefix + "library_id"
-    KeyPlugin     = KeyPrefix + "plugin"
-    KeySK         = KeyPrefix + "sk"
-    KeyAK         = KeyPrefix + "ak"
-    KeyRegion     = KeyPrefix + "region"
-)

View File

@@ -1,7 +1,22 @@
 package ctxkey

-var (
-    RequestModel     = "request_model"
-    ConvertedRequest = "converted_request"
-    OriginalModel    = "original_model"
+const (
+    Config            = "config"
+    Id                = "id"
+    Username          = "username"
+    Role              = "role"
+    Status            = "status"
+    Channel           = "channel"
+    ChannelId         = "channel_id"
+    SpecificChannelId = "specific_channel_id"
+    RequestModel      = "request_model"
+    ConvertedRequest  = "converted_request"
+    OriginalModel     = "original_model"
+    Group             = "group"
+    ModelMapping      = "model_mapping"
+    ChannelName       = "channel_name"
+    TokenId           = "token_id"
+    TokenName         = "token_name"
+    BaseURL           = "base_url"
+    AvailableModels   = "available_models"
 )
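
The const block above, read together with the controller and middleware hunks further down, replaces raw string keys on the gin context with named constants. A minimal sketch of the resulting pattern, assuming only the ctxkey constants listed above (the whoAmI handler itself is hypothetical and not part of this change):

package example

import (
    "net/http"

    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/ctxkey"
)

// whoAmI is a hypothetical handler: middleware stores the user id under
// ctxkey.Id, and handlers read it back through the same constant, so a typo
// such as c.GetInt("ID") can no longer silently return the zero value.
func whoAmI(c *gin.Context) {
    userId := c.GetInt(ctxkey.Id)              // was c.GetInt("id")
    tokenName := c.GetString(ctxkey.TokenName) // was c.GetString("token_name")
    c.JSON(http.StatusOK, gin.H{"user_id": userId, "token_name": tokenName})
}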

View File

@@ -2,6 +2,7 @@ package helper
 import (
     "fmt"
+    "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/random"
     "html/template"
     "log"
@@ -105,6 +106,11 @@
     return GetTimeString() + random.GetRandomNumberString(8)
 }
+
+func GetResponseID(c *gin.Context) string {
+    logID := c.GetString(RequestIdKey)
+    return fmt.Sprintf("chatcmpl-%s", logID)
+}

 func Max(a int, b int) int {
     if a >= b {
         return a
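
The new GetResponseID helper derives a "chatcmpl-"-prefixed id from the per-request id that the RequestId middleware (see the middleware hunks near the end) stores under helper.RequestIdKey. A usage sketch, with a hypothetical handler and trimmed-down response struct for illustration only:

package example

import (
    "net/http"

    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/helper"
)

// chatCompletion is a hypothetical, minimal response shape.
type chatCompletion struct {
    Id     string `json:"id"`
    Object string `json:"object"`
}

// respond reuses the request's trace id as the OpenAI-style completion id
// instead of generating a fresh one, keeping logs and responses linked.
func respond(c *gin.Context) {
    c.JSON(http.StatusOK, chatCompletion{
        Id:     helper.GetResponseID(c), // "chatcmpl-<request id>"
        Object: "chat.completion",
    })
}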

5
common/helper/key.go Normal file
View File

@@ -0,0 +1,5 @@
+package helper
+
+const (
+    RequestIdKey = "X-Oneapi-Request-Id"
+)

View File

@@ -1,7 +1,3 @@
 package logger

-const (
-    RequestIdKey = "X-Oneapi-Request-Id"
-)
-
 var LogDir string

View File

@@ -87,7 +87,7 @@ func logHelper(ctx context.Context, level string, msg string) {
     if level == loggerINFO {
         writer = gin.DefaultWriter
     }
-    id := ctx.Value(RequestIdKey)
+    id := ctx.Value(helper.RequestIdKey)
     if id == nil {
         id = helper.GenRequestID()
     }

View File

@@ -6,6 +6,7 @@ import (
     "fmt"
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/controller"
     "github.com/songquanpeng/one-api/model"
     "net/http"
@@ -136,7 +137,7 @@
         })
         return
     }
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user := model.User{
         Id: id,
     }

View File

@@ -3,6 +3,7 @@ package controller
 import (
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/model"
     relaymodel "github.com/songquanpeng/one-api/relay/model"
 )
@@ -14,13 +15,13 @@
     var token *model.Token
     var expiredTime int64
     if config.DisplayTokenStatEnabled {
-        tokenId := c.GetInt("token_id")
+        tokenId := c.GetInt(ctxkey.TokenId)
         token, err = model.GetTokenById(tokenId)
         expiredTime = token.ExpiredTime
         remainQuota = token.RemainQuota
         usedQuota = token.UsedQuota
     } else {
-        userId := c.GetInt("id")
+        userId := c.GetInt(ctxkey.Id)
         remainQuota, err = model.GetUserQuota(userId)
         if err != nil {
             usedQuota, err = model.GetUserUsedQuota(userId)
@@ -64,11 +65,11 @@ func GetUsage(c *gin.Context) {
     var err error
     var token *model.Token
     if config.DisplayTokenStatEnabled {
-        tokenId := c.GetInt("token_id")
+        tokenId := c.GetInt(ctxkey.TokenId)
         token, err = model.GetTokenById(tokenId)
         quota = token.UsedQuota
     } else {
-        userId := c.GetInt("id")
+        userId := c.GetInt(ctxkey.Id)
         quota, err = model.GetUserUsedQuota(userId)
     }
     if err != nil {

View File

@@ -5,7 +5,17 @@
     "encoding/json"
     "errors"
     "fmt"
+    "io"
+    "net/http"
+    "net/http/httptest"
+    "net/url"
+    "strconv"
+    "strings"
+    "sync"
+    "time"
+
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/logger"
     "github.com/songquanpeng/one-api/common/message"
     "github.com/songquanpeng/one-api/middleware"
@@ -17,14 +27,6 @@
     "github.com/songquanpeng/one-api/relay/meta"
     relaymodel "github.com/songquanpeng/one-api/relay/model"
     "github.com/songquanpeng/one-api/relay/relaymode"
-    "io"
-    "net/http"
-    "net/http/httptest"
-    "net/url"
-    "strconv"
-    "strings"
-    "sync"
-    "time"

     "github.com/gin-gonic/gin"
 )
@@ -54,8 +56,10 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
     }
     c.Request.Header.Set("Authorization", "Bearer "+channel.Key)
     c.Request.Header.Set("Content-Type", "application/json")
-    c.Set("channel", channel.Type)
-    c.Set("base_url", channel.GetBaseURL())
+    c.Set(ctxkey.Channel, channel.Type)
+    c.Set(ctxkey.BaseURL, channel.GetBaseURL())
+    cfg, _ := channel.LoadConfig()
+    c.Set(ctxkey.Config, cfg)
     middleware.SetupContextForSelectedChannel(c, channel, "")
     meta := meta.GetByContext(c)
     apiType := channeltype.ToAPIType(channel.Type)
@@ -64,12 +68,20 @@
         return fmt.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
     }
     adaptor.Init(meta)
-    modelName := adaptor.GetModelList()[0]
-    if !strings.Contains(channel.Models, modelName) {
+    var modelName string
+    modelList := adaptor.GetModelList()
+    modelMap := channel.GetModelMapping()
+    if len(modelList) != 0 {
+        modelName = modelList[0]
+    }
+    if modelName == "" || !strings.Contains(channel.Models, modelName) {
         modelNames := strings.Split(channel.Models, ",")
        if len(modelNames) > 0 {
            modelName = modelNames[0]
        }
+        if modelMap != nil && modelMap[modelName] != "" {
+            modelName = modelMap[modelName]
+        }
     }
     request := buildTestRequest()
     request.Model = modelName
@@ -82,6 +94,7 @@ func testChannel(channel *model.Channel) (err error, openaiErr *relaymodel.Error
     if err != nil {
         return err, nil
     }
+    logger.SysLog(string(jsonData))
     requestBody := bytes.NewBuffer(jsonData)
     c.Request.Body = io.NopCloser(requestBody)
     resp, err := adaptor.DoRequest(c, meta, requestBody)
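
The model-selection hunk above ("feat: use mapped model name to test") is easier to follow as a standalone function. The sketch below mirrors its logic under the assumptions that channel.Models is a comma-separated list and GetModelMapping() yields a map[string]string; pickTestModel itself is a hypothetical helper, not code from the PR:

package main

import (
    "fmt"
    "strings"
)

// pickTestModel mirrors the hunk above: prefer the adaptor's first listed
// model, fall back to the channel's own model list, and finally apply the
// channel's model mapping so the test request uses the mapped name.
func pickTestModel(adaptorModels []string, channelModels string, modelMap map[string]string) string {
    var modelName string
    if len(adaptorModels) != 0 {
        modelName = adaptorModels[0]
    }
    if modelName == "" || !strings.Contains(channelModels, modelName) {
        names := strings.Split(channelModels, ",")
        if len(names) > 0 {
            modelName = names[0]
        }
        if modelMap != nil && modelMap[modelName] != "" {
            modelName = modelMap[modelName]
        }
    }
    return modelName
}

func main() {
    // A channel that only exposes "my-gpt" and maps it to an upstream name.
    got := pickTestModel([]string{"gpt-3.5-turbo"}, "my-gpt", map[string]string{"my-gpt": "gpt-4o"})
    fmt.Println(got) // gpt-4o
}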

View File

@@ -3,6 +3,7 @@ package controller
 import (
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/model"
     "net/http"
     "strconv"
@@ -41,7 +42,7 @@ func GetUserLogs(c *gin.Context) {
     if p < 0 {
         p = 0
     }
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     logType, _ := strconv.Atoi(c.Query("type"))
     startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
     endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)
@@ -83,7 +84,7 @@ func SearchAllLogs(c *gin.Context) {
 func SearchUserLogs(c *gin.Context) {
     keyword := c.Query("keyword")
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     logs, err := model.SearchUserLogs(userId, keyword)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -122,7 +123,7 @@ func GetLogsStat(c *gin.Context) {
 }
 func GetLogsSelfStat(c *gin.Context) {
-    username := c.GetString("username")
+    username := c.GetString(ctxkey.Username)
     logType, _ := strconv.Atoi(c.Query("type"))
     startTimestamp, _ := strconv.ParseInt(c.Query("start_timestamp"), 10, 64)
     endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)

View File

@@ -3,6 +3,7 @@ package controller
 import (
     "fmt"
     "github.com/gin-gonic/gin"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/model"
     relay "github.com/songquanpeng/one-api/relay"
     "github.com/songquanpeng/one-api/relay/adaptor/openai"
@@ -131,10 +132,10 @@ func ListAllModels(c *gin.Context) {
 func ListModels(c *gin.Context) {
     ctx := c.Request.Context()
     var availableModels []string
-    if c.GetString("available_models") != "" {
-        availableModels = strings.Split(c.GetString("available_models"), ",")
+    if c.GetString(ctxkey.AvailableModels) != "" {
+        availableModels = strings.Split(c.GetString(ctxkey.AvailableModels), ",")
     } else {
-        userId := c.GetInt("id")
+        userId := c.GetInt(ctxkey.Id)
         userGroup, _ := model.CacheGetUserGroup(userId)
         availableModels, _ = model.CacheGetGroupModels(ctx, userGroup)
     }
@@ -186,7 +187,7 @@ func RetrieveModel(c *gin.Context) {
 func GetUserAvailableModels(c *gin.Context) {
     ctx := c.Request.Context()
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     userGroup, err := model.CacheGetUserGroup(id)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{

View File

@@ -3,6 +3,7 @@ package controller
 import (
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/common/random"
     "github.com/songquanpeng/one-api/model"
@@ -109,7 +110,7 @@ func AddRedemption(c *gin.Context) {
     for i := 0; i < redemption.Count; i++ {
         key := random.GetUUID()
         cleanRedemption := model.Redemption{
-            UserId:      c.GetInt("id"),
+            UserId:      c.GetInt(ctxkey.Id),
             Name:        redemption.Name,
             Key:         key,
             CreatedTime: helper.GetTimestamp(),

View File

@@ -46,18 +46,18 @@ func Relay(c *gin.Context) {
         requestBody, _ := common.GetRequestBody(c)
         logger.Debugf(ctx, "request body: %s", string(requestBody))
     }
-    channelId := c.GetInt("channel_id")
+    channelId := c.GetInt(ctxkey.ChannelId)
     bizErr := relayHelper(c, relayMode)
     if bizErr == nil {
         monitor.Emit(channelId, true)
         return
     }
     lastFailedChannelId := channelId
-    channelName := c.GetString("channel_name")
-    group := c.GetString("group")
+    channelName := c.GetString(ctxkey.ChannelName)
+    group := c.GetString(ctxkey.Group)
     originalModel := c.GetString(ctxkey.OriginalModel)
     go processChannelRelayError(ctx, channelId, channelName, bizErr)
-    requestId := c.GetString(logger.RequestIdKey)
+    requestId := c.GetString(helper.RequestIdKey)
     retryTimes := config.RetryTimes
     if !shouldRetry(c, bizErr.StatusCode) {
         logger.Errorf(ctx, "relay error happen, status code is %d, won't retry in this case", bizErr.StatusCode)
@@ -80,9 +80,9 @@
         if bizErr == nil {
             return
         }
-        channelId := c.GetInt("channel_id")
+        channelId := c.GetInt(ctxkey.ChannelId)
         lastFailedChannelId = channelId
-        channelName := c.GetString("channel_name")
+        channelName := c.GetString(ctxkey.ChannelName)
         go processChannelRelayError(ctx, channelId, channelName, bizErr)
     }
     if bizErr != nil {
@@ -97,7 +97,7 @@
 }
 func shouldRetry(c *gin.Context, statusCode int) bool {
-    if _, ok := c.Get("specific_channel_id"); ok {
+    if _, ok := c.Get(ctxkey.SpecificChannelId); ok {
         return false
     }
     if statusCode == http.StatusTooManyRequests {

View File

@@ -4,6 +4,7 @@ import (
     "fmt"
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/helper"
     "github.com/songquanpeng/one-api/common/network"
     "github.com/songquanpeng/one-api/common/random"
@@ -13,7 +14,7 @@
 )
 func GetAllTokens(c *gin.Context) {
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     p, _ := strconv.Atoi(c.Query("p"))
     if p < 0 {
         p = 0
@@ -38,7 +39,7 @@
 }
 func SearchTokens(c *gin.Context) {
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     keyword := c.Query("keyword")
     tokens, err := model.SearchUserTokens(userId, keyword)
     if err != nil {
@@ -58,7 +59,7 @@
 func GetToken(c *gin.Context) {
     id, err := strconv.Atoi(c.Param("id"))
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
             "success": false,
@@ -83,8 +84,8 @@
 }
 func GetTokenStatus(c *gin.Context) {
-    tokenId := c.GetInt("token_id")
-    userId := c.GetInt("id")
+    tokenId := c.GetInt(ctxkey.TokenId)
+    userId := c.GetInt(ctxkey.Id)
     token, err := model.GetTokenByIds(tokenId, userId)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -139,7 +140,7 @@ func AddToken(c *gin.Context) {
     }
     cleanToken := model.Token{
-        UserId:      c.GetInt("id"),
+        UserId:      c.GetInt(ctxkey.Id),
         Name:        token.Name,
         Key:         random.GenerateKey(),
         CreatedTime: helper.GetTimestamp(),
@@ -168,7 +169,7 @@
 func DeleteToken(c *gin.Context) {
     id, _ := strconv.Atoi(c.Param("id"))
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     err := model.DeleteTokenById(id, userId)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -185,7 +186,7 @@
 }
 func UpdateToken(c *gin.Context) {
-    userId := c.GetInt("id")
+    userId := c.GetInt(ctxkey.Id)
     statusOnly := c.Query("status_only")
     token := model.Token{}
     err := c.ShouldBindJSON(&token)

View File

@@ -5,6 +5,7 @@ import (
     "fmt"
     "github.com/songquanpeng/one-api/common"
     "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/random"
     "github.com/songquanpeng/one-api/model"
     "net/http"
@@ -238,7 +239,7 @@ func GetUser(c *gin.Context) {
         })
         return
     }
-    myRole := c.GetInt("role")
+    myRole := c.GetInt(ctxkey.Role)
     if myRole <= user.Role && myRole != model.RoleRootUser {
         c.JSON(http.StatusOK, gin.H{
             "success": false,
@@ -255,7 +256,7 @@
 }
 func GetUserDashboard(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     now := time.Now()
     startOfDay := now.Truncate(24*time.Hour).AddDate(0, 0, -6).Unix()
     endOfDay := now.Truncate(24 * time.Hour).Add(24*time.Hour - time.Second).Unix()
@@ -278,7 +279,7 @@
 }
 func GenerateAccessToken(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user, err := model.GetUserById(id, true)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -314,7 +315,7 @@
 }
 func GetAffCode(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user, err := model.GetUserById(id, true)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -342,7 +343,7 @@
 }
 func GetSelf(c *gin.Context) {
-    id := c.GetInt("id")
+    id := c.GetInt(ctxkey.Id)
     user, err := model.GetUserById(id, false)
     if err != nil {
         c.JSON(http.StatusOK, gin.H{
@@ -387,7 +388,7 @@ func UpdateUser(c *gin.Context) {
         })
         return
     }
-    myRole := c.GetInt("role")
+    myRole := c.GetInt(ctxkey.Role)
     if myRole <= originUser.Role && myRole != model.RoleRootUser {
         c.JSON(http.StatusOK, gin.H{
             "success": false,
@@ -445,7 +446,7 @@ func UpdateSelf(c *gin.Context) {
     }
     cleanUser := model.User{
-        Id:          c.GetInt("id"),
+        Id:          c.GetInt(ctxkey.Id),
         Username:    user.Username,
         Password:    user.Password,
         DisplayName: user.DisplayName,

48
go.mod
View File

@@ -4,42 +4,42 @@ module github.com/songquanpeng/one-api
 go 1.20

 require (
-    github.com/aws/aws-sdk-go-v2 v1.26.1
+    github.com/aws/aws-sdk-go-v2 v1.27.0
-    github.com/aws/aws-sdk-go-v2/credentials v1.17.11
+    github.com/aws/aws-sdk-go-v2/credentials v1.17.15
-    github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4
+    github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.8.3
-    github.com/gin-contrib/cors v1.7.1
+    github.com/gin-contrib/cors v1.7.2
-    github.com/gin-contrib/gzip v1.0.0
+    github.com/gin-contrib/gzip v1.0.1
-    github.com/gin-contrib/sessions v1.0.0
+    github.com/gin-contrib/sessions v1.0.1
-    github.com/gin-contrib/static v1.1.1
+    github.com/gin-contrib/static v1.1.2
-    github.com/gin-gonic/gin v1.9.1
+    github.com/gin-gonic/gin v1.10.0
-    github.com/go-playground/validator/v10 v10.19.0
+    github.com/go-playground/validator/v10 v10.20.0
     github.com/go-redis/redis/v8 v8.11.5
     github.com/golang-jwt/jwt v3.2.2+incompatible
     github.com/google/uuid v1.6.0
     github.com/gorilla/websocket v1.5.1
     github.com/jinzhu/copier v0.4.0
     github.com/pkg/errors v0.9.1
-    github.com/pkoukk/tiktoken-go v0.1.6
+    github.com/pkoukk/tiktoken-go v0.1.7
     github.com/smartystreets/goconvey v1.8.1
     github.com/stretchr/testify v1.9.0
-    golang.org/x/crypto v0.22.0
+    golang.org/x/crypto v0.23.0
-    golang.org/x/image v0.15.0
+    golang.org/x/image v0.16.0
     gorm.io/driver/mysql v1.5.6
     gorm.io/driver/postgres v1.5.7
     gorm.io/driver/sqlite v1.5.5
-    gorm.io/gorm v1.25.9
+    gorm.io/gorm v1.25.10
 )

 require (
     filippo.io/edwards25519 v1.1.0 // indirect
     github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 // indirect
-    github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect
+    github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.7 // indirect
-    github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect
+    github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.7 // indirect
     github.com/aws/smithy-go v1.20.2 // indirect
-    github.com/bytedance/sonic v1.11.5 // indirect
+    github.com/bytedance/sonic v1.11.6 // indirect
     github.com/bytedance/sonic/loader v0.1.1 // indirect
     github.com/cespare/xxhash/v2 v2.3.0 // indirect
-    github.com/cloudwego/base64x v0.1.3 // indirect
+    github.com/cloudwego/base64x v0.1.4 // indirect
     github.com/cloudwego/iasm v0.2.0 // indirect
     github.com/davecgh/go-spew v1.1.1 // indirect
     github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
@@ -50,7 +50,7 @@ require (
     github.com/go-playground/locales v0.14.1 // indirect
     github.com/go-playground/universal-translator v0.18.1 // indirect
     github.com/go-sql-driver/mysql v1.8.1 // indirect
-    github.com/goccy/go-json v0.10.2 // indirect
+    github.com/goccy/go-json v0.10.3 // indirect
     github.com/gopherjs/gopherjs v1.17.2 // indirect
     github.com/gorilla/context v1.1.2 // indirect
     github.com/gorilla/securecookie v1.1.2 // indirect
@@ -70,16 +70,16 @@ require (
     github.com/mattn/go-sqlite3 v2.0.3+incompatible // indirect
     github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
     github.com/modern-go/reflect2 v1.0.2 // indirect
-    github.com/pelletier/go-toml/v2 v2.2.1 // indirect
+    github.com/pelletier/go-toml/v2 v2.2.2 // indirect
     github.com/pmezard/go-difflib v1.0.0 // indirect
     github.com/smarty/assertions v1.15.0 // indirect
     github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
     github.com/ugorji/go/codec v1.2.12 // indirect
-    golang.org/x/arch v0.7.0 // indirect
+    golang.org/x/arch v0.8.0 // indirect
-    golang.org/x/net v0.24.0 // indirect
+    golang.org/x/net v0.25.0 // indirect
     golang.org/x/sync v0.7.0 // indirect
-    golang.org/x/sys v0.19.0 // indirect
+    golang.org/x/sys v0.20.0 // indirect
-    golang.org/x/text v0.14.0 // indirect
+    golang.org/x/text v0.15.0 // indirect
-    google.golang.org/protobuf v1.33.0 // indirect
+    google.golang.org/protobuf v1.34.1 // indirect
     gopkg.in/yaml.v3 v3.0.1 // indirect
 )

48
go.sum
View File

@@ -2,20 +2,32 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
 filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
 github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA=
 github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
+github.com/aws/aws-sdk-go-v2 v1.27.0 h1:7bZWKoXhzI+mMR/HjdMx8ZCC5+6fY0lS5tr0bbgiLlo=
+github.com/aws/aws-sdk-go-v2 v1.27.0/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM=
 github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 h1:x6xsQXGSmW6frevwDA+vi/wqhp1ct18mVXYN08/93to=
 github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2/go.mod h1:lPprDr1e6cJdyYeGXnRaJoP4Md+cDBvi2eOj00BlGmg=
 github.com/aws/aws-sdk-go-v2/credentials v1.17.11 h1:YuIB1dJNf1Re822rriUOTxopaHHvIq0l/pX3fwO+Tzs=
 github.com/aws/aws-sdk-go-v2/credentials v1.17.11/go.mod h1:AQtFPsDH9bI2O+71anW6EKL+NcD7LG3dpKGMV4SShgo=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.15 h1:YDexlvDRCA8ems2T5IP1xkMtOZ1uLJOCJdTr0igs5zo=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.15/go.mod h1:vxHggqW6hFNaeNC0WyXS3VdyjcV0a4KMUY4dKJ96buU=
 github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 h1:aw39xVGeRWlWx9EzGVnhOR4yOjQDHPQ6o6NmBlscyQg=
 github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5/go.mod h1:FSaRudD0dXiMPK2UjknVwwTYyZMRsHv3TtkabsZih5I=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.7 h1:lf/8VTF2cM+N4SLzaYJERKEWAXq8MOMpZfU6wEPWsPk=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.7/go.mod h1:4SjkU7QiqK2M9oozyMzfZ/23LmUY+h3oFqhdeP5OMiI=
 github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 h1:PG1F3OD1szkuQPzDw3CIQsRIrtTlUC3lP84taWzHlq0=
 github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5/go.mod h1:jU1li6RFryMz+so64PpKtudI+QzbKoIEivqdf6LNpOc=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.7 h1:4OYVp0705xu8yjdyoWix0r9wPIRXnIzzOoUpQVHIJ/g=
+github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.7/go.mod h1:vd7ESTEvI76T2Na050gODNmNU7+OyKrIKroYTu4ABiI=
 github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4 h1:JgHnonzbnA3pbqj76wYsSZIZZQYBxkmMEjvL6GHy8XU=
 github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4/go.mod h1:nZspkhg+9p8iApLFoyAqfyuMP0F38acy2Hm3r5r95Cg=
+github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.8.3 h1:Fihjyd6DeNjcawBEGLH9dkIEUi6AdhucDKPE9nJ4QiY=
+github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.8.3/go.mod h1:opvUj3ismqSCxYc+m4WIjPL0ewZGtvp0ess7cKvBPOQ=
 github.com/aws/smithy-go v1.20.2 h1:tbp628ireGtzcHDDmLT/6ADHidqnwgF57XOXZe6tp4Q=
 github.com/aws/smithy-go v1.20.2/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
 github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k=
 github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw=
+github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
+github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
 github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY=
 github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
 github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
@@ -23,6 +35,8 @@ github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UF
 github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg=
 github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8=
+github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
+github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
 github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
 github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
@@ -39,16 +53,26 @@ github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uq
 github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
 github.com/gin-contrib/cors v1.7.1 h1:s9SIppU/rk8enVvkzwiC2VK3UZ/0NNGsWfUKvV55rqs=
 github.com/gin-contrib/cors v1.7.1/go.mod h1:n/Zj7B4xyrgk/cX1WCX2dkzFfaNm/xJb6oIUk7WTtps=
+github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw=
+github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E=
 github.com/gin-contrib/gzip v1.0.0 h1:UKN586Po/92IDX6ie5CWLgMI81obiIp5nSP85T3wlTk=
 github.com/gin-contrib/gzip v1.0.0/go.mod h1:CtG7tQrPB3vIBo6Gat9FVUsis+1emjvQqd66ME5TdnE=
+github.com/gin-contrib/gzip v1.0.1 h1:HQ8ENHODeLY7a4g1Au/46Z92bdGFl74OhxcZble9WJE=
+github.com/gin-contrib/gzip v1.0.1/go.mod h1:njt428fdUNRvjuJf16tZMYZ2Yl+WQB53X5wmhDwXvC4=
 github.com/gin-contrib/sessions v1.0.0 h1:r5GLta4Oy5xo9rAwMHx8B4wLpeRGHMdz9NafzJAdP8Y=
 github.com/gin-contrib/sessions v1.0.0/go.mod h1:DN0f4bvpqMQElDdi+gNGScrP2QEI04IErRyMFyorUOI=
+github.com/gin-contrib/sessions v1.0.1 h1:3hsJyNs7v7N8OtelFmYXFrulAf6zSR7nW/putcPEHxI=
+github.com/gin-contrib/sessions v1.0.1/go.mod h1:ouxSFM24/OgIud5MJYQJLpy6AwxQ5EYO9yLhbtObGkM=
 github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
 github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
 github.com/gin-contrib/static v1.1.1 h1:XEvBd4DDLG1HBlyPBQU1XO8NlTpw6mgdqcPteetYA5k=
 github.com/gin-contrib/static v1.1.1/go.mod h1:yRGmar7+JYvbMLRPIi4H5TVVSBwULfT9vetnVD0IO74=
+github.com/gin-contrib/static v1.1.2 h1:c3kT4bFkUJn2aoRU3s6XnMjJT8J6nNWJkR0NglqmlZ4=
+github.com/gin-contrib/static v1.1.2/go.mod h1:Fw90ozjHCmZBWbgrsqrDvO28YbhKEKzKp8GixhR4yLw=
 github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
 github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
+github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
+github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
 github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
 github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
 github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
@@ -56,6 +80,8 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
 github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
 github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
 github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
+github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
+github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
 github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
 github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
 github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
@@ -63,6 +89,8 @@ github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpv
 github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg=
 github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
 github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
+github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
 github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
 github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
 github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
@@ -121,10 +149,14 @@ github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
 github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
 github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg=
 github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
+github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
+github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkoukk/tiktoken-go v0.1.6 h1:JF0TlJzhTbrI30wCvFuiw6FzP2+/bR+FIxUdgEAcUsw=
 github.com/pkoukk/tiktoken-go v0.1.6/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg=
+github.com/pkoukk/tiktoken-go v0.1.7 h1:qOBHXX4PHtvIvmOtyg1EeKlwFRiMKAcoMp4Q+bLQDmw=
+github.com/pkoukk/tiktoken-go v0.1.7/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
@@ -151,23 +183,37 @@ github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZ
 golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
 golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc=
 golang.org/x/arch v0.7.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
+golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
+golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
 golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
 golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
+golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
+golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
 golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
 golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
+golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw=
+golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs=
 golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
 golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
+golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
+golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
 golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
 golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
 golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
 golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
+golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
 golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
+golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
 google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
 google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
+google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
 gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
@@ -184,5 +230,7 @@ gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATa
 gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
 gorm.io/gorm v1.25.9 h1:wct0gxZIELDk8+ZqF/MVnHLkA1rvYlBWUMv2EdsK1g8=
 gorm.io/gorm v1.25.9/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
+gorm.io/gorm v1.25.10 h1:dQpO+33KalOA+aFYGlK+EfxcI5MbO7EP2yYygwh9h+s=
+gorm.io/gorm v1.25.10/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
 nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
 rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

@@ -5,6 +5,7 @@ import (
     "github.com/gin-contrib/sessions"
     "github.com/gin-gonic/gin"
     "github.com/songquanpeng/one-api/common/blacklist"
+    "github.com/songquanpeng/one-api/common/ctxkey"
     "github.com/songquanpeng/one-api/common/network"
     "github.com/songquanpeng/one-api/model"
     "net/http"
@@ -120,20 +121,20 @@ func TokenAuth() func(c *gin.Context) {
         abortWithMessage(c, http.StatusBadRequest, err.Error())
         return
     }
-    c.Set("request_model", requestModel)
+    c.Set(ctxkey.RequestModel, requestModel)
     if token.Models != nil && *token.Models != "" {
-        c.Set("available_models", *token.Models)
+        c.Set(ctxkey.AvailableModels, *token.Models)
         if requestModel != "" && !isModelInList(requestModel, *token.Models) {
             abortWithMessage(c, http.StatusForbidden, fmt.Sprintf("该令牌无权使用模型:%s", requestModel))
             return
         }
     }
-    c.Set("id", token.UserId)
-    c.Set("token_id", token.Id)
-    c.Set("token_name", token.Name)
+    c.Set(ctxkey.Id, token.UserId)
+    c.Set(ctxkey.TokenId, token.Id)
+    c.Set(ctxkey.TokenName, token.Name)
     if len(parts) > 1 {
         if model.IsAdmin(token.UserId) {
-            c.Set("specific_channel_id", parts[1])
+            c.Set(ctxkey.SpecificChannelId, parts[1])
         } else {
             abortWithMessage(c, http.StatusForbidden, "普通用户不支持指定渠道")
             return

View File

@@ -3,7 +3,6 @@ package middleware
import (
    "fmt"
    "github.com/gin-gonic/gin"
-   "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/model"
@@ -18,12 +17,12 @@ type ModelRequest struct {
func Distribute() func(c *gin.Context) {
    return func(c *gin.Context) {
-       userId := c.GetInt("id")
+       userId := c.GetInt(ctxkey.Id)
        userGroup, _ := model.CacheGetUserGroup(userId)
-       c.Set("group", userGroup)
+       c.Set(ctxkey.Group, userGroup)
        var requestModel string
        var channel *model.Channel
-       channelId, ok := c.Get("specific_channel_id")
+       channelId, ok := c.Get(ctxkey.SpecificChannelId)
        if ok {
            id, err := strconv.Atoi(channelId.(string))
            if err != nil {
@@ -40,7 +39,7 @@ func Distribute() func(c *gin.Context) {
                return
            }
        } else {
-           requestModel = c.GetString("request_model")
+           requestModel = c.GetString(ctxkey.RequestModel)
            var err error
            channel, err = model.CacheGetRandomSatisfiedChannel(userGroup, requestModel, false)
            if err != nil {
@@ -59,28 +58,36 @@ func Distribute() func(c *gin.Context) {
}
func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, modelName string) {
-   c.Set("channel", channel.Type)
-   c.Set("channel_id", channel.Id)
-   c.Set("channel_name", channel.Name)
-   c.Set("model_mapping", channel.GetModelMapping())
+   c.Set(ctxkey.Channel, channel.Type)
+   c.Set(ctxkey.ChannelId, channel.Id)
+   c.Set(ctxkey.ChannelName, channel.Name)
+   c.Set(ctxkey.ModelMapping, channel.GetModelMapping())
    c.Set(ctxkey.OriginalModel, modelName) // for retry
    c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
-   c.Set("base_url", channel.GetBaseURL())
+   c.Set(ctxkey.BaseURL, channel.GetBaseURL())
+   cfg, _ := channel.LoadConfig()
    // this is for backward compatibility
    switch channel.Type {
    case channeltype.Azure:
-       c.Set(config.KeyAPIVersion, channel.Other)
+       if cfg.APIVersion == "" {
+           cfg.APIVersion = channel.Other
+       }
    case channeltype.Xunfei:
-       c.Set(config.KeyAPIVersion, channel.Other)
+       if cfg.APIVersion == "" {
+           cfg.APIVersion = channel.Other
+       }
    case channeltype.Gemini:
-       c.Set(config.KeyAPIVersion, channel.Other)
+       if cfg.APIVersion == "" {
+           cfg.APIVersion = channel.Other
+       }
    case channeltype.AIProxyLibrary:
-       c.Set(config.KeyLibraryID, channel.Other)
+       if cfg.LibraryID == "" {
+           cfg.LibraryID = channel.Other
+       }
    case channeltype.Ali:
-       c.Set(config.KeyPlugin, channel.Other)
+       if cfg.Plugin == "" {
+           cfg.Plugin = channel.Other
+       }
    }
-   cfg, _ := channel.LoadConfig()
-   for k, v := range cfg {
-       c.Set(config.KeyPrefix+k, v)
-   }
+   c.Set(ctxkey.Config, cfg)
}
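As a side note, the rewritten switch above changes the precedence: the typed config parsed from the channel's JSON wins, and the legacy Other column only backfills empty fields before everything is stored once under ctxkey.Config. A minimal standalone sketch of that rule (field names taken from the diff; the concrete values are hypothetical):

package main

import "fmt"

// ChannelConfig mirrors the typed config introduced in model/channel.go further down.
type ChannelConfig struct {
    APIVersion string
    LibraryID  string
    Plugin     string
}

// backfillAPIVersion applies the backward-compatibility rule from the switch above:
// the legacy Other column is used only when the JSON config left the field empty.
func backfillAPIVersion(cfg *ChannelConfig, other string) {
    if cfg.APIVersion == "" {
        cfg.APIVersion = other
    }
}

func main() {
    fromJSON := ChannelConfig{APIVersion: "2024-02-01"}
    backfillAPIVersion(&fromJSON, "2023-05-15")
    fmt.Println(fromJSON.APIVersion) // 2024-02-01 (JSON config wins)

    empty := ChannelConfig{}
    backfillAPIVersion(&empty, "2023-05-15")
    fmt.Println(empty.APIVersion) // 2023-05-15 (legacy value backfills)
}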


@@ -3,14 +3,14 @@ package middleware
import (
    "fmt"
    "github.com/gin-gonic/gin"
-   "github.com/songquanpeng/one-api/common/logger"
+   "github.com/songquanpeng/one-api/common/helper"
)
func SetUpLogger(server *gin.Engine) {
    server.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {
        var requestID string
        if param.Keys != nil {
-           requestID = param.Keys[logger.RequestIdKey].(string)
+           requestID = param.Keys[helper.RequestIdKey].(string)
        }
        return fmt.Sprintf("[GIN] %s | %s | %3d | %13v | %15s | %7s %s\n",
            param.TimeStamp.Format("2006/01/02 - 15:04:05"),


@@ -4,16 +4,15 @@ import (
    "context"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/helper"
-   "github.com/songquanpeng/one-api/common/logger"
)
func RequestId() func(c *gin.Context) {
    return func(c *gin.Context) {
        id := helper.GenRequestID()
-       c.Set(logger.RequestIdKey, id)
-       ctx := context.WithValue(c.Request.Context(), logger.RequestIdKey, id)
+       c.Set(helper.RequestIdKey, id)
+       ctx := context.WithValue(c.Request.Context(), helper.RequestIdKey, id)
        c.Request = c.Request.WithContext(ctx)
-       c.Header(logger.RequestIdKey, id)
+       c.Header(helper.RequestIdKey, id)
        c.Next()
    }
}


@@ -12,7 +12,7 @@ import (
func abortWithMessage(c *gin.Context, statusCode int, message string) {
    c.JSON(statusCode, gin.H{
        "error": gin.H{
-           "message": helper.MessageWithRequestId(message, c.GetString(logger.RequestIdKey)),
+           "message": helper.MessageWithRequestId(message, c.GetString(helper.RequestIdKey)),
            "type": "one_api_error",
        },
    })


@@ -38,6 +38,16 @@ type Channel struct {
    Config string `json:"config"`
}
+type ChannelConfig struct {
+   Region     string `json:"region,omitempty"`
+   SK         string `json:"sk,omitempty"`
+   AK         string `json:"ak,omitempty"`
+   UserID     string `json:"user_id,omitempty"`
+   APIVersion string `json:"api_version,omitempty"`
+   LibraryID  string `json:"library_id,omitempty"`
+   Plugin     string `json:"plugin,omitempty"`
+}
func GetAllChannels(startIdx int, num int, scope string) ([]*Channel, error) {
    var channels []*Channel
    var err error
@@ -161,14 +171,14 @@ func (channel *Channel) Delete() error {
    return err
}
-func (channel *Channel) LoadConfig() (map[string]string, error) {
+func (channel *Channel) LoadConfig() (ChannelConfig, error) {
+   var cfg ChannelConfig
    if channel.Config == "" {
-       return nil, nil
+       return cfg, nil
    }
-   cfg := make(map[string]string)
    err := json.Unmarshal([]byte(channel.Config), &cfg)
    if err != nil {
-       return nil, err
+       return cfg, err
    }
    return cfg, nil
}
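For reference, a hedged sketch of what the typed LoadConfig above now expects in the config column: a JSON object whose keys match the struct tags (the concrete values here are placeholders).

package main

import (
    "encoding/json"
    "fmt"
)

// ChannelConfig copies the struct added above.
type ChannelConfig struct {
    Region     string `json:"region,omitempty"`
    SK         string `json:"sk,omitempty"`
    AK         string `json:"ak,omitempty"`
    UserID     string `json:"user_id,omitempty"`
    APIVersion string `json:"api_version,omitempty"`
    LibraryID  string `json:"library_id,omitempty"`
    Plugin     string `json:"plugin,omitempty"`
}

func main() {
    // Example channel.Config payload; all values are placeholders.
    raw := `{"region":"us-east-1","ak":"AKIAEXAMPLE","sk":"secret","api_version":"2024-02-01"}`
    var cfg ChannelConfig
    if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
        panic(err)
    }
    fmt.Println(cfg.Region, cfg.APIVersion) // us-east-1 2024-02-01
}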


@@ -7,6 +7,10 @@ import (
    "github.com/songquanpeng/one-api/relay/adaptor/anthropic"
    "github.com/songquanpeng/one-api/relay/adaptor/aws"
    "github.com/songquanpeng/one-api/relay/adaptor/baidu"
+   "github.com/songquanpeng/one-api/relay/adaptor/cloudflare"
+   "github.com/songquanpeng/one-api/relay/adaptor/cohere"
+   "github.com/songquanpeng/one-api/relay/adaptor/coze"
+   "github.com/songquanpeng/one-api/relay/adaptor/deepl"
    "github.com/songquanpeng/one-api/relay/adaptor/gemini"
    "github.com/songquanpeng/one-api/relay/adaptor/ollama"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
@@ -43,6 +47,14 @@ func GetAdaptor(apiType int) adaptor.Adaptor {
        return &zhipu.Adaptor{}
    case apitype.Ollama:
        return &ollama.Adaptor{}
+   case apitype.Coze:
+       return &coze.Adaptor{}
+   case apitype.Cohere:
+       return &cohere.Adaptor{}
+   case apitype.Cloudflare:
+       return &cloudflare.Adaptor{}
+   case apitype.DeepL:
+       return &deepl.Adaptor{}
    }
    return nil
}


@@ -4,7 +4,6 @@ import (
    "errors"
    "fmt"
    "github.com/gin-gonic/gin"
-   "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/relay/adaptor"
    "github.com/songquanpeng/one-api/relay/meta"
    "github.com/songquanpeng/one-api/relay/model"
@@ -13,10 +12,11 @@ import (
)
type Adaptor struct {
+   meta *meta.Meta
}
func (a *Adaptor) Init(meta *meta.Meta) {
+   a.meta = meta
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
@@ -34,7 +34,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
        return nil, errors.New("request is nil")
    }
    aiProxyLibraryRequest := ConvertRequest(*request)
-   aiProxyLibraryRequest.LibraryId = c.GetString(config.KeyLibraryID)
+   aiProxyLibraryRequest.LibraryId = a.meta.Config.LibraryID
    return aiProxyLibraryRequest, nil
}


@@ -4,7 +4,6 @@ import (
    "errors"
    "fmt"
    "github.com/gin-gonic/gin"
-   "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/relay/adaptor"
    "github.com/songquanpeng/one-api/relay/meta"
    "github.com/songquanpeng/one-api/relay/model"
@@ -16,10 +15,11 @@ import (
// https://help.aliyun.com/zh/dashscope/developer-reference/api-details
type Adaptor struct {
+   meta *meta.Meta
}
func (a *Adaptor) Init(meta *meta.Meta) {
+   a.meta = meta
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
@@ -47,8 +47,8 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
    if meta.Mode == relaymode.ImagesGenerations {
        req.Header.Set("X-DashScope-Async", "enable")
    }
-   if c.GetString(config.KeyPlugin) != "" {
-       req.Header.Set("X-DashScope-Plugin", c.GetString(config.KeyPlugin))
+   if a.meta.Config.Plugin != "" {
+       req.Header.Set("X-DashScope-Plugin", a.meta.Config.Plugin)
    }
    return nil
}


@@ -4,6 +4,10 @@ import (
    "bufio"
    "encoding/json"
    "fmt"
+   "io"
+   "net/http"
+   "strings"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common"
    "github.com/songquanpeng/one-api/common/helper"
@@ -11,9 +15,6 @@ import (
    "github.com/songquanpeng/one-api/common/logger"
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/model"
-   "io"
-   "net/http"
-   "strings"
)
func stopReasonClaude2OpenAI(reason *string) string {
@@ -176,10 +177,10 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
            if len(data) < 6 {
                continue
            }
-           if !strings.HasPrefix(data, "data: ") {
+           if !strings.HasPrefix(data, "data:") {
                continue
            }
-           data = strings.TrimPrefix(data, "data: ")
+           data = strings.TrimPrefix(data, "data:")
            dataChan <- data
        }
        stopChan <- true
@@ -192,7 +193,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
        select {
        case data := <-dataChan:
            // some implementations may add \r at the end of data
-           data = strings.TrimSuffix(data, "\r")
+           data = strings.TrimSpace(data)
            var claudeResponse StreamResponse
            err := json.Unmarshal([]byte(data), &claudeResponse)
            if err != nil {
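The prefix change above is about tolerance: some Claude-compatible streams emit data:{...} without a space after the colon, so the handler now strips only "data:" and lets TrimSpace absorb the optional space and any trailing \r. A tiny standalone sketch of that normalization (standard library only):

package main

import (
    "fmt"
    "strings"
)

// normalize mimics the new prefix handling: accept "data: {...}" and "data:{...}" alike.
func normalize(line string) string {
    line = strings.TrimPrefix(line, "data:")
    return strings.TrimSpace(line)
}

func main() {
    fmt.Printf("%q\n", normalize("data: {\"type\":\"ping\"}\r")) // "{\"type\":\"ping\"}"
    fmt.Printf("%q\n", normalize("data:{\"type\":\"ping\"}"))    // "{\"type\":\"ping\"}"
}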


@@ -1,6 +1,9 @@
package aws
import (
+   "github.com/aws/aws-sdk-go-v2/aws"
+   "github.com/aws/aws-sdk-go-v2/credentials"
+   "github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "io"
    "net/http"
@@ -16,10 +19,16 @@ import (
var _ adaptor.Adaptor = new(Adaptor)
type Adaptor struct {
+   meta      *meta.Meta
+   awsClient *bedrockruntime.Client
}
func (a *Adaptor) Init(meta *meta.Meta) {
+   a.meta = meta
+   a.awsClient = bedrockruntime.New(bedrockruntime.Options{
+       Region:      meta.Config.Region,
+       Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(meta.Config.AK, meta.Config.SK, "")),
+   })
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
@@ -54,9 +63,9 @@ func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Read
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
    if meta.IsStream {
-       err, usage = StreamHandler(c, resp)
+       err, usage = StreamHandler(c, a.awsClient)
    } else {
-       err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
+       err, usage = Handler(c, a.awsClient, meta.ActualModelName)
    }
    return
}
@@ -65,7 +74,6 @@ func (a *Adaptor) GetModelList() (models []string) {
    for n := range awsModelIDMap {
        models = append(models, n)
    }
    return
}
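The shape of the change above: the Bedrock runtime client is now built once in Init from the typed channel config and shared by both handlers, instead of being reconstructed per request from context keys. A minimal sketch of the construction (region and credentials are placeholders; the SDK calls are the ones used in the diff):

package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go-v2/aws"
    "github.com/aws/aws-sdk-go-v2/credentials"
    "github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
)

// newBedrockClient builds the client the adaptor caches on Init.
func newBedrockClient(region, ak, sk string) *bedrockruntime.Client {
    return bedrockruntime.New(bedrockruntime.Options{
        Region:      region,
        Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(ak, sk, "")),
    })
}

func main() {
    client := newBedrockClient("us-east-1", "AKIAEXAMPLE", "secret-example")
    fmt.Println(client != nil) // true; reuse this client for Handler and StreamHandler
}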


@@ -5,13 +5,11 @@ import (
    "bytes"
    "encoding/json"
    "fmt"
-   "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/common/ctxkey"
    "io"
    "net/http"
    "github.com/aws/aws-sdk-go-v2/aws"
-   "github.com/aws/aws-sdk-go-v2/credentials"
    "github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
    "github.com/aws/aws-sdk-go-v2/service/bedrockruntime/types"
    "github.com/gin-gonic/gin"
@@ -24,18 +22,6 @@ import (
    relaymodel "github.com/songquanpeng/one-api/relay/model"
)
-func newAwsClient(c *gin.Context) (*bedrockruntime.Client, error) {
-   ak := c.GetString(config.KeyAK)
-   sk := c.GetString(config.KeySK)
-   region := c.GetString(config.KeyRegion)
-   client := bedrockruntime.New(bedrockruntime.Options{
-       Region:      region,
-       Credentials: aws.NewCredentialsCache(credentials.NewStaticCredentialsProvider(ak, sk, "")),
-   })
-   return client, nil
-}
func wrapErr(err error) *relaymodel.ErrorWithStatusCode {
    return &relaymodel.ErrorWithStatusCode{
        StatusCode: http.StatusInternalServerError,
@@ -63,12 +49,7 @@ func awsModelID(requestModel string) (string, error) {
    return "", errors.Errorf("model %s not found", requestModel)
}
-func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
-   awsCli, err := newAwsClient(c)
-   if err != nil {
-       return wrapErr(errors.Wrap(err, "newAwsClient")), nil
-   }
+func Handler(c *gin.Context, awsCli *bedrockruntime.Client, modelName string) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
    awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
    if err != nil {
        return wrapErr(errors.Wrap(err, "awsModelID")), nil
@@ -121,13 +102,8 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
    return nil, &usage
}
-func StreamHandler(c *gin.Context, resp *http.Response) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
+func StreamHandler(c *gin.Context, awsCli *bedrockruntime.Client) (*relaymodel.ErrorWithStatusCode, *relaymodel.Usage) {
    createdTime := helper.GetTimestamp()
-   awsCli, err := newAwsClient(c)
-   if err != nil {
-       return wrapErr(errors.Wrap(err, "newAwsClient")), nil
-   }
    awsModelId, err := awsModelID(c.GetString(ctxkey.RequestModel))
    if err != nil {
        return wrapErr(errors.Wrap(err, "awsModelID")), nil


@@ -1,15 +0,0 @@
package azure
import (
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common/config"
)
func GetAPIVersion(c *gin.Context) string {
query := c.Request.URL.Query()
apiVersion := query.Get("api-version")
if apiVersion == "" {
apiVersion = c.GetString(config.KeyAPIVersion)
}
return apiVersion
}


@@ -0,0 +1,66 @@
package cloudflare
import (
"errors"
"fmt"
"io"
"net/http"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/relay/adaptor"
"github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model"
)
type Adaptor struct {
meta *meta.Meta
}
// ConvertImageRequest implements adaptor.Adaptor.
func (*Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
return nil, errors.New("not implemented")
}
// Init implements adaptor.Adaptor.
func (a *Adaptor) Init(meta *meta.Meta) {
a.meta = meta
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
return fmt.Sprintf("%s/client/v4/accounts/%s/ai/run/%s", meta.BaseURL, meta.Config.UserID, meta.ActualModelName), nil
}
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
adaptor.SetupCommonRequestHeader(c, req, meta)
req.Header.Set("Authorization", "Bearer "+meta.APIKey)
return nil
}
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
return ConvertRequest(*request), nil
}
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
return adaptor.DoRequestHelper(a, c, meta, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
if meta.IsStream {
err, usage = StreamHandler(c, resp, meta.PromptTokens, meta.ActualModelName)
} else {
err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
}
return
}
func (a *Adaptor) GetModelList() []string {
return ModelList
}
func (a *Adaptor) GetChannelName() string {
return "cloudflare"
}
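Given the format string in GetRequestURL above, a chat request for @cf/meta/llama-3-8b-instruct ends up at a URL of roughly this shape; the base URL and account ID below are placeholders (the account ID comes from meta.Config.UserID).

package main

import "fmt"

func main() {
    baseURL := "https://api.cloudflare.com" // assumed default base URL for the channel
    accountID := "0123456789abcdef"         // placeholder for meta.Config.UserID
    model := "@cf/meta/llama-3-8b-instruct"
    fmt.Printf("%s/client/v4/accounts/%s/ai/run/%s\n", baseURL, accountID, model)
}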


@@ -0,0 +1,36 @@
package cloudflare
var ModelList = []string{
"@cf/meta/llama-2-7b-chat-fp16",
"@cf/meta/llama-2-7b-chat-int8",
"@cf/mistral/mistral-7b-instruct-v0.1",
"@hf/thebloke/deepseek-coder-6.7b-base-awq",
"@hf/thebloke/deepseek-coder-6.7b-instruct-awq",
"@cf/deepseek-ai/deepseek-math-7b-base",
"@cf/deepseek-ai/deepseek-math-7b-instruct",
"@cf/thebloke/discolm-german-7b-v1-awq",
"@cf/tiiuae/falcon-7b-instruct",
"@cf/google/gemma-2b-it-lora",
"@hf/google/gemma-7b-it",
"@cf/google/gemma-7b-it-lora",
"@hf/nousresearch/hermes-2-pro-mistral-7b",
"@hf/thebloke/llama-2-13b-chat-awq",
"@cf/meta-llama/llama-2-7b-chat-hf-lora",
"@cf/meta/llama-3-8b-instruct",
"@hf/thebloke/llamaguard-7b-awq",
"@hf/thebloke/mistral-7b-instruct-v0.1-awq",
"@hf/mistralai/mistral-7b-instruct-v0.2",
"@cf/mistral/mistral-7b-instruct-v0.2-lora",
"@hf/thebloke/neural-chat-7b-v3-1-awq",
"@cf/openchat/openchat-3.5-0106",
"@hf/thebloke/openhermes-2.5-mistral-7b-awq",
"@cf/microsoft/phi-2",
"@cf/qwen/qwen1.5-0.5b-chat",
"@cf/qwen/qwen1.5-1.8b-chat",
"@cf/qwen/qwen1.5-14b-chat-awq",
"@cf/qwen/qwen1.5-7b-chat-awq",
"@cf/defog/sqlcoder-7b-2",
"@hf/nexusflow/starling-lm-7b-beta",
"@cf/tinyllama/tinyllama-1.1b-chat-v1.0",
"@hf/thebloke/zephyr-7b-beta-awq",
}


@@ -0,0 +1,152 @@
package cloudflare
import (
"bufio"
"bytes"
"encoding/json"
"io"
"net/http"
"strings"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/model"
)
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
lastMessage := textRequest.Messages[len(textRequest.Messages)-1]
return &Request{
MaxTokens: textRequest.MaxTokens,
Prompt: lastMessage.StringContent(),
Stream: textRequest.Stream,
Temperature: textRequest.Temperature,
}
}
func ResponseCloudflare2OpenAI(cloudflareResponse *Response) *openai.TextResponse {
choice := openai.TextResponseChoice{
Index: 0,
Message: model.Message{
Role: "assistant",
Content: cloudflareResponse.Result.Response,
},
FinishReason: "stop",
}
fullTextResponse := openai.TextResponse{
Object: "chat.completion",
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
}
return &fullTextResponse
}
func StreamResponseCloudflare2OpenAI(cloudflareResponse *StreamResponse) *openai.ChatCompletionsStreamResponse {
var choice openai.ChatCompletionsStreamResponseChoice
choice.Delta.Content = cloudflareResponse.Response
choice.Delta.Role = "assistant"
openaiResponse := openai.ChatCompletionsStreamResponse{
Object: "chat.completion.chunk",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
Created: helper.GetTimestamp(),
}
return &openaiResponse
}
func StreamHandler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\n'); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
})
dataChan := make(chan string)
stopChan := make(chan bool)
go func() {
for scanner.Scan() {
data := scanner.Text()
if len(data) < len("data: ") {
continue
}
data = strings.TrimPrefix(data, "data: ")
dataChan <- data
}
stopChan <- true
}()
common.SetEventStreamHeaders(c)
id := helper.GetResponseID(c)
responseModel := c.GetString("original_model")
var responseText string
c.Stream(func(w io.Writer) bool {
select {
case data := <-dataChan:
// some implementations may add \r at the end of data
data = strings.TrimSuffix(data, "\r")
var cloudflareResponse StreamResponse
err := json.Unmarshal([]byte(data), &cloudflareResponse)
if err != nil {
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
response := StreamResponseCloudflare2OpenAI(&cloudflareResponse)
if response == nil {
return true
}
responseText += cloudflareResponse.Response
response.Id = id
response.Model = responseModel
jsonStr, err := json.Marshal(response)
if err != nil {
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
return true
case <-stopChan:
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
return false
}
})
_ = resp.Body.Close()
usage := openai.ResponseText2Usage(responseText, responseModel, promptTokens)
return nil, usage
}
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
var cloudflareResponse Response
err = json.Unmarshal(responseBody, &cloudflareResponse)
if err != nil {
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
fullTextResponse := ResponseCloudflare2OpenAI(&cloudflareResponse)
fullTextResponse.Model = modelName
usage := openai.ResponseText2Usage(cloudflareResponse.Result.Response, modelName, promptTokens)
fullTextResponse.Usage = *usage
fullTextResponse.Id = helper.GetResponseID(c)
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
return nil, usage
}


@@ -0,0 +1,25 @@
package cloudflare
type Request struct {
Lora string `json:"lora,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
Prompt string `json:"prompt,omitempty"`
Raw bool `json:"raw,omitempty"`
Stream bool `json:"stream,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
}
type Result struct {
Response string `json:"response"`
}
type Response struct {
Result Result `json:"result"`
Success bool `json:"success"`
Errors []string `json:"errors"`
Messages []string `json:"messages"`
}
type StreamResponse struct {
Response string `json:"response"`
}


@@ -0,0 +1,64 @@
package cohere
import (
"errors"
"fmt"
"io"
"net/http"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/relay/adaptor"
"github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model"
)
type Adaptor struct{}
// ConvertImageRequest implements adaptor.Adaptor.
func (*Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
return nil, errors.New("not implemented")
}
// Init implements adaptor.Adaptor.
func (a *Adaptor) Init(meta *meta.Meta) {
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
return fmt.Sprintf("%s/v1/chat", meta.BaseURL), nil
}
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
adaptor.SetupCommonRequestHeader(c, req, meta)
req.Header.Set("Authorization", "Bearer "+meta.APIKey)
return nil
}
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
return ConvertRequest(*request), nil
}
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
return adaptor.DoRequestHelper(a, c, meta, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
if meta.IsStream {
err, usage = StreamHandler(c, resp)
} else {
err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
}
return
}
func (a *Adaptor) GetModelList() []string {
return ModelList
}
func (a *Adaptor) GetChannelName() string {
return "Cohere"
}


@@ -0,0 +1,14 @@
package cohere
var ModelList = []string{
"command", "command-nightly",
"command-light", "command-light-nightly",
"command-r", "command-r-plus",
}
func init() {
num := len(ModelList)
for i := 0; i < num; i++ {
ModelList = append(ModelList, ModelList[i]+"-internet")
}
}
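The init above doubles the list: every base model gains a -internet twin, and ConvertRequest in relay/adaptor/cohere/main.go later strips that suffix and attaches the web-search connector. A small illustration of the expansion:

package main

import "fmt"

func main() {
    models := []string{"command-r", "command-r-plus"}
    num := len(models) // iterate only over the original entries, as the init above does
    for i := 0; i < num; i++ {
        models = append(models, models[i]+"-internet")
    }
    fmt.Println(models) // [command-r command-r-plus command-r-internet command-r-plus-internet]
}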


@@ -0,0 +1,241 @@
package cohere
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"strings"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/model"
)
var (
WebSearchConnector = Connector{ID: "web-search"}
)
func stopReasonCohere2OpenAI(reason *string) string {
if reason == nil {
return ""
}
switch *reason {
case "COMPLETE":
return "stop"
default:
return *reason
}
}
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
cohereRequest := Request{
Model: textRequest.Model,
Message: "",
MaxTokens: textRequest.MaxTokens,
Temperature: textRequest.Temperature,
P: textRequest.TopP,
K: textRequest.TopK,
Stream: textRequest.Stream,
FrequencyPenalty: textRequest.FrequencyPenalty,
PresencePenalty: textRequest.PresencePenalty,
Seed: int(textRequest.Seed),
}
if cohereRequest.Model == "" {
cohereRequest.Model = "command-r"
}
if strings.HasSuffix(cohereRequest.Model, "-internet") {
cohereRequest.Model = strings.TrimSuffix(cohereRequest.Model, "-internet")
cohereRequest.Connectors = append(cohereRequest.Connectors, WebSearchConnector)
}
for _, message := range textRequest.Messages {
if message.Role == "user" {
cohereRequest.Message = message.Content.(string)
} else {
var role string
if message.Role == "assistant" {
role = "CHATBOT"
} else if message.Role == "system" {
role = "SYSTEM"
} else {
role = "USER"
}
cohereRequest.ChatHistory = append(cohereRequest.ChatHistory, ChatMessage{
Role: role,
Message: message.Content.(string),
})
}
}
return &cohereRequest
}
func StreamResponseCohere2OpenAI(cohereResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
var response *Response
var responseText string
var finishReason string
switch cohereResponse.EventType {
case "stream-start":
return nil, nil
case "text-generation":
responseText += cohereResponse.Text
case "stream-end":
usage := cohereResponse.Response.Meta.Tokens
response = &Response{
Meta: Meta{
Tokens: Usage{
InputTokens: usage.InputTokens,
OutputTokens: usage.OutputTokens,
},
},
}
finishReason = *cohereResponse.Response.FinishReason
default:
return nil, nil
}
var choice openai.ChatCompletionsStreamResponseChoice
choice.Delta.Content = responseText
choice.Delta.Role = "assistant"
if finishReason != "" {
choice.FinishReason = &finishReason
}
var openaiResponse openai.ChatCompletionsStreamResponse
openaiResponse.Object = "chat.completion.chunk"
openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
return &openaiResponse, response
}
func ResponseCohere2OpenAI(cohereResponse *Response) *openai.TextResponse {
choice := openai.TextResponseChoice{
Index: 0,
Message: model.Message{
Role: "assistant",
Content: cohereResponse.Text,
Name: nil,
},
FinishReason: stopReasonCohere2OpenAI(cohereResponse.FinishReason),
}
fullTextResponse := openai.TextResponse{
Id: fmt.Sprintf("chatcmpl-%s", cohereResponse.ResponseID),
Model: "model",
Object: "chat.completion",
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
}
return &fullTextResponse
}
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
createdTime := helper.GetTimestamp()
scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\n'); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
})
dataChan := make(chan string)
stopChan := make(chan bool)
go func() {
for scanner.Scan() {
data := scanner.Text()
dataChan <- data
}
stopChan <- true
}()
common.SetEventStreamHeaders(c)
var usage model.Usage
c.Stream(func(w io.Writer) bool {
select {
case data := <-dataChan:
// some implementations may add \r at the end of data
data = strings.TrimSuffix(data, "\r")
var cohereResponse StreamResponse
err := json.Unmarshal([]byte(data), &cohereResponse)
if err != nil {
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
response, meta := StreamResponseCohere2OpenAI(&cohereResponse)
if meta != nil {
usage.PromptTokens += meta.Meta.Tokens.InputTokens
usage.CompletionTokens += meta.Meta.Tokens.OutputTokens
return true
}
if response == nil {
return true
}
response.Id = fmt.Sprintf("chatcmpl-%d", createdTime)
response.Model = c.GetString("original_model")
response.Created = createdTime
jsonStr, err := json.Marshal(response)
if err != nil {
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
return true
case <-stopChan:
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
return false
}
})
_ = resp.Body.Close()
return nil, &usage
}
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *model.Usage) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
var cohereResponse Response
err = json.Unmarshal(responseBody, &cohereResponse)
if err != nil {
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
if cohereResponse.ResponseID == "" {
return &model.ErrorWithStatusCode{
Error: model.Error{
Message: cohereResponse.Message,
Type: cohereResponse.Message,
Param: "",
Code: resp.StatusCode,
},
StatusCode: resp.StatusCode,
}, nil
}
fullTextResponse := ResponseCohere2OpenAI(&cohereResponse)
fullTextResponse.Model = modelName
usage := model.Usage{
PromptTokens: cohereResponse.Meta.Tokens.InputTokens,
CompletionTokens: cohereResponse.Meta.Tokens.OutputTokens,
TotalTokens: cohereResponse.Meta.Tokens.InputTokens + cohereResponse.Meta.Tokens.OutputTokens,
}
fullTextResponse.Usage = usage
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
return nil, &usage
}


@@ -0,0 +1,147 @@
package cohere
type Request struct {
Message string `json:"message" required:"true"`
Model            string        `json:"model,omitempty"`             // defaults to "command-r"
Stream           bool          `json:"stream,omitempty"`            // defaults to false
Preamble         string        `json:"preamble,omitempty"`
ChatHistory      []ChatMessage `json:"chat_history,omitempty"`
ConversationID   string        `json:"conversation_id,omitempty"`
PromptTruncation string        `json:"prompt_truncation,omitempty"` // defaults to "AUTO"
Connectors       []Connector   `json:"connectors,omitempty"`
Documents        []Document    `json:"documents,omitempty"`
Temperature      float64       `json:"temperature,omitempty"`       // defaults to 0.3
MaxTokens        int           `json:"max_tokens,omitempty"`
MaxInputTokens   int           `json:"max_input_tokens,omitempty"`
K                int           `json:"k,omitempty"`                 // defaults to 0
P                float64       `json:"p,omitempty"`                 // defaults to 0.75
Seed             int           `json:"seed,omitempty"`
StopSequences    []string      `json:"stop_sequences,omitempty"`
FrequencyPenalty float64       `json:"frequency_penalty,omitempty"` // defaults to 0.0
PresencePenalty  float64       `json:"presence_penalty,omitempty"`  // defaults to 0.0
Tools []Tool `json:"tools,omitempty"`
ToolResults []ToolResult `json:"tool_results,omitempty"`
}
type ChatMessage struct {
Role string `json:"role" required:"true"`
Message string `json:"message" required:"true"`
}
type Tool struct {
Name string `json:"name" required:"true"`
Description string `json:"description" required:"true"`
ParameterDefinitions map[string]ParameterSpec `json:"parameter_definitions"`
}
type ParameterSpec struct {
Description string `json:"description"`
Type string `json:"type" required:"true"`
Required bool `json:"required"`
}
type ToolResult struct {
Call ToolCall `json:"call"`
Outputs []map[string]interface{} `json:"outputs"`
}
type ToolCall struct {
Name string `json:"name" required:"true"`
Parameters map[string]interface{} `json:"parameters" required:"true"`
}
type StreamResponse struct {
IsFinished bool `json:"is_finished"`
EventType string `json:"event_type"`
GenerationID string `json:"generation_id,omitempty"`
SearchQueries []*SearchQuery `json:"search_queries,omitempty"`
SearchResults []*SearchResult `json:"search_results,omitempty"`
Documents []*Document `json:"documents,omitempty"`
Text string `json:"text,omitempty"`
Citations []*Citation `json:"citations,omitempty"`
Response *Response `json:"response,omitempty"`
FinishReason string `json:"finish_reason,omitempty"`
}
type SearchQuery struct {
Text string `json:"text"`
GenerationID string `json:"generation_id"`
}
type SearchResult struct {
SearchQuery *SearchQuery `json:"search_query"`
DocumentIDs []string `json:"document_ids"`
Connector *Connector `json:"connector"`
}
type Connector struct {
ID string `json:"id"`
}
type Document struct {
ID string `json:"id"`
Snippet string `json:"snippet"`
Timestamp string `json:"timestamp"`
Title string `json:"title"`
URL string `json:"url"`
}
type Citation struct {
Start int `json:"start"`
End int `json:"end"`
Text string `json:"text"`
DocumentIDs []string `json:"document_ids"`
}
type Response struct {
ResponseID string `json:"response_id"`
Text string `json:"text"`
GenerationID string `json:"generation_id"`
ChatHistory []*Message `json:"chat_history"`
FinishReason *string `json:"finish_reason"`
Meta Meta `json:"meta"`
Citations []*Citation `json:"citations"`
Documents []*Document `json:"documents"`
SearchResults []*SearchResult `json:"search_results"`
SearchQueries []*SearchQuery `json:"search_queries"`
Message string `json:"message"`
}
type Message struct {
Role string `json:"role"`
Message string `json:"message"`
}
type Version struct {
Version string `json:"version"`
}
type Units struct {
InputTokens int `json:"input_tokens"`
OutputTokens int `json:"output_tokens"`
}
type ChatEntry struct {
Role string `json:"role"`
Message string `json:"message"`
}
type Meta struct {
APIVersion APIVersion `json:"api_version"`
BilledUnits BilledUnits `json:"billed_units"`
Tokens Usage `json:"tokens"`
}
type APIVersion struct {
Version string `json:"version"`
}
type BilledUnits struct {
InputTokens int `json:"input_tokens"`
OutputTokens int `json:"output_tokens"`
}
type Usage struct {
InputTokens int `json:"input_tokens"`
OutputTokens int `json:"output_tokens"`
}


@@ -0,0 +1,75 @@
package coze
import (
"errors"
"fmt"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/relay/adaptor"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
type Adaptor struct {
meta *meta.Meta
}
func (a *Adaptor) Init(meta *meta.Meta) {
a.meta = meta
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
return fmt.Sprintf("%s/open_api/v2/chat", meta.BaseURL), nil
}
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
adaptor.SetupCommonRequestHeader(c, req, meta)
req.Header.Set("Authorization", "Bearer "+meta.APIKey)
return nil
}
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
request.User = a.meta.Config.UserID
return ConvertRequest(*request), nil
}
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
return request, nil
}
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
return adaptor.DoRequestHelper(a, c, meta, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
var responseText *string
if meta.IsStream {
err, responseText = StreamHandler(c, resp)
} else {
err, responseText = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
}
if responseText != nil {
usage = openai.ResponseText2Usage(*responseText, meta.ActualModelName, meta.PromptTokens)
} else {
usage = &model.Usage{}
}
usage.PromptTokens = meta.PromptTokens
usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
return
}
func (a *Adaptor) GetModelList() []string {
return ModelList
}
func (a *Adaptor) GetChannelName() string {
return "coze"
}


@@ -0,0 +1,5 @@
package contenttype
const (
Text = "text"
)


@@ -0,0 +1,7 @@
package event
const (
Message = "message"
Done = "done"
Error = "error"
)


@@ -0,0 +1,6 @@
package messagetype
const (
Answer = "answer"
FollowUp = "follow_up"
)


@@ -0,0 +1,3 @@
package coze
var ModelList = []string{}


@@ -0,0 +1,10 @@
package coze
import "github.com/songquanpeng/one-api/relay/adaptor/coze/constant/event"
func event2StopReason(e *string) string {
if e == nil || *e == event.Message {
return ""
}
return "stop"
}

relay/adaptor/coze/main.go Normal file

@@ -0,0 +1,215 @@
package coze
import (
"bufio"
"encoding/json"
"fmt"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/conv"
"github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
"strings"
)
// https://www.coze.com/open
func stopReasonCoze2OpenAI(reason *string) string {
if reason == nil {
return ""
}
switch *reason {
case "end_turn":
return "stop"
case "stop_sequence":
return "stop"
case "max_tokens":
return "length"
default:
return *reason
}
}
func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
cozeRequest := Request{
Stream: textRequest.Stream,
User: textRequest.User,
BotId: strings.TrimPrefix(textRequest.Model, "bot-"),
}
for i, message := range textRequest.Messages {
if i == len(textRequest.Messages)-1 {
cozeRequest.Query = message.StringContent()
continue
}
cozeMessage := Message{
Role: message.Role,
Content: message.StringContent(),
}
cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage)
}
return &cozeRequest
}
func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
var response *Response
var stopReason string
var choice openai.ChatCompletionsStreamResponseChoice
if cozeResponse.Message != nil {
if cozeResponse.Message.Type != messagetype.Answer {
return nil, nil
}
choice.Delta.Content = cozeResponse.Message.Content
}
choice.Delta.Role = "assistant"
finishReason := stopReasonCoze2OpenAI(&stopReason)
if finishReason != "null" {
choice.FinishReason = &finishReason
}
var openaiResponse openai.ChatCompletionsStreamResponse
openaiResponse.Object = "chat.completion.chunk"
openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
openaiResponse.Id = cozeResponse.ConversationId
return &openaiResponse, response
}
func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
var responseText string
for _, message := range cozeResponse.Messages {
if message.Type == messagetype.Answer {
responseText = message.Content
break
}
}
choice := openai.TextResponseChoice{
Index: 0,
Message: model.Message{
Role: "assistant",
Content: responseText,
Name: nil,
},
FinishReason: "stop",
}
fullTextResponse := openai.TextResponse{
Id: fmt.Sprintf("chatcmpl-%s", cozeResponse.ConversationId),
Model: "coze-bot",
Object: "chat.completion",
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
}
return &fullTextResponse
}
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
var responseText string
createdTime := helper.GetTimestamp()
scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := strings.Index(string(data), "\n"); i >= 0 {
return i + 1, data[0:i], nil
}
if atEOF {
return len(data), data, nil
}
return 0, nil, nil
})
dataChan := make(chan string)
stopChan := make(chan bool)
go func() {
for scanner.Scan() {
data := scanner.Text()
if len(data) < 5 {
continue
}
if !strings.HasPrefix(data, "data:") {
continue
}
data = strings.TrimPrefix(data, "data:")
dataChan <- data
}
stopChan <- true
}()
common.SetEventStreamHeaders(c)
var modelName string
c.Stream(func(w io.Writer) bool {
select {
case data := <-dataChan:
// some implementations may add \r at the end of data
data = strings.TrimSuffix(data, "\r")
var cozeResponse StreamResponse
err := json.Unmarshal([]byte(data), &cozeResponse)
if err != nil {
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
response, _ := StreamResponseCoze2OpenAI(&cozeResponse)
if response == nil {
return true
}
for _, choice := range response.Choices {
responseText += conv.AsString(choice.Delta.Content)
}
response.Model = modelName
response.Created = createdTime
jsonStr, err := json.Marshal(response)
if err != nil {
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
return true
case <-stopChan:
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
return false
}
})
_ = resp.Body.Close()
return nil, &responseText
}
func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
var cozeResponse Response
err = json.Unmarshal(responseBody, &cozeResponse)
if err != nil {
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
if cozeResponse.Code != 0 {
return &model.ErrorWithStatusCode{
Error: model.Error{
Message: cozeResponse.Msg,
Code: cozeResponse.Code,
},
StatusCode: resp.StatusCode,
}, nil
}
fullTextResponse := ResponseCoze2OpenAI(&cozeResponse)
fullTextResponse.Model = modelName
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
var responseText string
if len(fullTextResponse.Choices) > 0 {
responseText = fullTextResponse.Choices[0].Message.StringContent()
}
return nil, &responseText
}
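One convention worth calling out from ConvertRequest above: the Coze bot ID rides in the OpenAI model field with a bot- prefix, which is stripped before the upstream call. For example (the bot ID is a placeholder):

package main

import (
    "fmt"
    "strings"
)

func main() {
    model := "bot-7345678901234567890" // placeholder bot ID passed as the OpenAI model name
    fmt.Println(strings.TrimPrefix(model, "bot-"))
}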


@@ -0,0 +1,38 @@
package coze
type Message struct {
Role string `json:"role"`
Type string `json:"type"`
Content string `json:"content"`
ContentType string `json:"content_type"`
}
type ErrorInformation struct {
Code int `json:"code"`
Msg string `json:"msg"`
}
type Request struct {
ConversationId string `json:"conversation_id,omitempty"`
BotId string `json:"bot_id"`
User string `json:"user"`
Query string `json:"query"`
ChatHistory []Message `json:"chat_history,omitempty"`
Stream bool `json:"stream"`
}
type Response struct {
ConversationId string `json:"conversation_id,omitempty"`
Messages []Message `json:"messages,omitempty"`
Code int `json:"code,omitempty"`
Msg string `json:"msg,omitempty"`
}
type StreamResponse struct {
Event string `json:"event,omitempty"`
Message *Message `json:"message,omitempty"`
IsFinish bool `json:"is_finish,omitempty"`
Index int `json:"index,omitempty"`
ConversationId string `json:"conversation_id,omitempty"`
ErrorInformation *ErrorInformation `json:"error_information,omitempty"`
}


@@ -0,0 +1,73 @@
package deepl
import (
"errors"
"fmt"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/relay/adaptor"
"github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
type Adaptor struct {
meta *meta.Meta
promptText string
}
func (a *Adaptor) Init(meta *meta.Meta) {
a.meta = meta
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
return fmt.Sprintf("%s/v2/translate", meta.BaseURL), nil
}
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
adaptor.SetupCommonRequestHeader(c, req, meta)
req.Header.Set("Authorization", "DeepL-Auth-Key "+meta.APIKey)
return nil
}
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
convertedRequest, text := ConvertRequest(*request)
a.promptText = text
return convertedRequest, nil
}
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
return request, nil
}
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
return adaptor.DoRequestHelper(a, c, meta, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
if meta.IsStream {
err = StreamHandler(c, resp, meta.ActualModelName)
} else {
err = Handler(c, resp, meta.ActualModelName)
}
promptTokens := len(a.promptText)
usage = &model.Usage{
PromptTokens: promptTokens,
TotalTokens: promptTokens,
}
return
}
func (a *Adaptor) GetModelList() []string {
return ModelList
}
func (a *Adaptor) GetChannelName() string {
return "deepl"
}


@@ -0,0 +1,9 @@
package deepl
// https://developers.deepl.com/docs/api-reference/glossaries
var ModelList = []string{
"deepl-zh",
"deepl-en",
"deepl-ja",
}


@@ -0,0 +1,11 @@
package deepl
import "strings"
func parseLangFromModelName(modelName string) string {
parts := strings.Split(modelName, "-")
if len(parts) == 1 {
return "ZH"
}
return parts[1]
}
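So the target language is simply whatever follows the first dash of the model name, with ZH as the fallback; a quick standalone check of the helper above:

package main

import (
    "fmt"
    "strings"
)

// parseLangFromModelName is copied from the helper above.
func parseLangFromModelName(modelName string) string {
    parts := strings.Split(modelName, "-")
    if len(parts) == 1 {
        return "ZH"
    }
    return parts[1]
}

func main() {
    fmt.Println(parseLangFromModelName("deepl-ja")) // ja
    fmt.Println(parseLangFromModelName("deepl"))    // ZH
}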

relay/adaptor/deepl/main.go Normal file

@@ -0,0 +1,137 @@
package deepl
import (
"encoding/json"
"github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/constant"
"github.com/songquanpeng/one-api/relay/constant/finishreason"
"github.com/songquanpeng/one-api/relay/constant/role"
"github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
)
// https://developers.deepl.com/docs/getting-started/your-first-api-request
func ConvertRequest(textRequest model.GeneralOpenAIRequest) (*Request, string) {
var text string
if len(textRequest.Messages) != 0 {
text = textRequest.Messages[len(textRequest.Messages)-1].StringContent()
}
deeplRequest := Request{
TargetLang: parseLangFromModelName(textRequest.Model),
Text: []string{text},
}
return &deeplRequest, text
}
func StreamResponseDeepL2OpenAI(deeplResponse *Response) *openai.ChatCompletionsStreamResponse {
var choice openai.ChatCompletionsStreamResponseChoice
if len(deeplResponse.Translations) != 0 {
choice.Delta.Content = deeplResponse.Translations[0].Text
}
choice.Delta.Role = role.Assistant
choice.FinishReason = &constant.StopFinishReason
openaiResponse := openai.ChatCompletionsStreamResponse{
Object: constant.StreamObject,
Created: helper.GetTimestamp(),
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
return &openaiResponse
}
func ResponseDeepL2OpenAI(deeplResponse *Response) *openai.TextResponse {
var responseText string
if len(deeplResponse.Translations) != 0 {
responseText = deeplResponse.Translations[0].Text
}
choice := openai.TextResponseChoice{
Index: 0,
Message: model.Message{
Role: role.Assistant,
Content: responseText,
Name: nil,
},
FinishReason: finishreason.Stop,
}
fullTextResponse := openai.TextResponse{
Object: constant.NonStreamObject,
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
}
return &fullTextResponse
}
func StreamHandler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
}
err = resp.Body.Close()
if err != nil {
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
}
var deeplResponse Response
err = json.Unmarshal(responseBody, &deeplResponse)
if err != nil {
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
}
fullTextResponse := StreamResponseDeepL2OpenAI(&deeplResponse)
fullTextResponse.Model = modelName
fullTextResponse.Id = helper.GetResponseID(c)
jsonData, err := json.Marshal(fullTextResponse)
if err != nil {
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
}
common.SetEventStreamHeaders(c)
c.Stream(func(w io.Writer) bool {
if jsonData != nil {
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonData)})
jsonData = nil
return true
}
c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
return false
})
_ = resp.Body.Close()
return nil
}
func Handler(c *gin.Context, resp *http.Response, modelName string) *model.ErrorWithStatusCode {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
}
err = resp.Body.Close()
if err != nil {
return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
}
var deeplResponse Response
err = json.Unmarshal(responseBody, &deeplResponse)
if err != nil {
return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
}
if deeplResponse.Message != "" {
return &model.ErrorWithStatusCode{
Error: model.Error{
Message: deeplResponse.Message,
Code: "deepl_error",
},
StatusCode: resp.StatusCode,
}
}
fullTextResponse := ResponseDeepL2OpenAI(&deeplResponse)
fullTextResponse.Model = modelName
fullTextResponse.Id = helper.GetResponseID(c)
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
return nil
}


@@ -0,0 +1,16 @@
package deepl
type Request struct {
Text []string `json:"text"`
TargetLang string `json:"target_lang"`
}
type Translation struct {
DetectedSourceLanguage string `json:"detected_source_language,omitempty"`
Text string `json:"text,omitempty"`
}
type Response struct {
Translations []Translation `json:"translations,omitempty"`
Message string `json:"message,omitempty"`
}


@@ -0,0 +1,6 @@
package deepseek
var ModelList = []string{
"deepseek-chat",
"deepseek-coder",
}


@@ -3,6 +3,9 @@ package gemini
import (
    "errors"
    "fmt"
+   "io"
+   "net/http"
    "github.com/gin-gonic/gin"
    "github.com/songquanpeng/one-api/common/config"
    "github.com/songquanpeng/one-api/common/helper"
@@ -10,8 +13,6 @@ import (
    "github.com/songquanpeng/one-api/relay/adaptor/openai"
    "github.com/songquanpeng/one-api/relay/meta"
    "github.com/songquanpeng/one-api/relay/model"
-   "io"
-   "net/http"
)
type Adaptor struct {
@@ -22,10 +23,10 @@ func (a *Adaptor) Init(meta *meta.Meta) {
}
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
-   version := helper.AssignOrDefault(meta.APIVersion, config.GeminiVersion)
+   version := helper.AssignOrDefault(meta.Config.APIVersion, config.GeminiVersion)
    action := "generateContent"
    if meta.IsStream {
-       action = "streamGenerateContent"
+       action = "streamGenerateContent?alt=sse"
    }
    return fmt.Sprintf("%s/%s/models/%s:%s", meta.BaseURL, version, meta.ActualModelName, action), nil
}
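With the ?alt=sse suffix above, a streaming request for gemini-pro is sent to a URL of roughly the following shape; the base URL and the v1beta default are assumptions here (the real values come from the channel's base URL and config.GeminiVersion).

package main

import "fmt"

func main() {
    baseURL := "https://generativelanguage.googleapis.com" // assumed default base URL
    version := "v1beta"                                     // assumed default for config.GeminiVersion
    model := "gemini-pro"
    fmt.Printf("%s/%s/models/%s:streamGenerateContent?alt=sse\n", baseURL, version, model)
}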


@@ -4,6 +4,10 @@ import (
"bufio" "bufio"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io"
"net/http"
"strings"
"github.com/songquanpeng/one-api/common" "github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/config" "github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/helper" "github.com/songquanpeng/one-api/common/helper"
@@ -13,9 +17,6 @@ import (
"github.com/songquanpeng/one-api/relay/adaptor/openai" "github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/constant" "github.com/songquanpeng/one-api/relay/constant"
"github.com/songquanpeng/one-api/relay/model" "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
"strings"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
) )
@@ -54,7 +55,17 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *ChatRequest {
MaxOutputTokens: textRequest.MaxTokens, MaxOutputTokens: textRequest.MaxTokens,
}, },
} }
if textRequest.Functions != nil { if textRequest.Tools != nil {
functions := make([]model.Function, 0, len(textRequest.Tools))
for _, tool := range textRequest.Tools {
functions = append(functions, tool.Function)
}
geminiRequest.Tools = []ChatTools{
{
FunctionDeclarations: functions,
},
}
} else if textRequest.Functions != nil {
geminiRequest.Tools = []ChatTools{ geminiRequest.Tools = []ChatTools{
{ {
FunctionDeclarations: textRequest.Functions, FunctionDeclarations: textRequest.Functions,
@@ -154,6 +165,30 @@ type ChatPromptFeedback struct {
SafetyRatings []ChatSafetyRating `json:"safetyRatings"` SafetyRatings []ChatSafetyRating `json:"safetyRatings"`
} }
func getToolCalls(candidate *ChatCandidate) []model.Tool {
var toolCalls []model.Tool
item := candidate.Content.Parts[0]
if item.FunctionCall == nil {
return toolCalls
}
argsBytes, err := json.Marshal(item.FunctionCall.Arguments)
if err != nil {
logger.FatalLog("getToolCalls failed: " + err.Error())
return toolCalls
}
toolCall := model.Tool{
Id: fmt.Sprintf("call_%s", random.GetUUID()),
Type: "function",
Function: model.Function{
Arguments: string(argsBytes),
Name: item.FunctionCall.FunctionName,
},
}
toolCalls = append(toolCalls, toolCall)
return toolCalls
}
func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse { func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
fullTextResponse := openai.TextResponse{ fullTextResponse := openai.TextResponse{
Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()), Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
@@ -165,13 +200,19 @@ func responseGeminiChat2OpenAI(response *ChatResponse) *openai.TextResponse {
 		choice := openai.TextResponseChoice{
 			Index: i,
 			Message: model.Message{
 				Role: "assistant",
-				Content: "",
 			},
 			FinishReason: constant.StopFinishReason,
 		}
 		if len(candidate.Content.Parts) > 0 {
-			choice.Message.Content = candidate.Content.Parts[0].Text
+			if candidate.Content.Parts[0].FunctionCall != nil {
+				choice.Message.ToolCalls = getToolCalls(&candidate)
+			} else {
+				choice.Message.Content = candidate.Content.Parts[0].Text
+			}
+		} else {
+			choice.Message.Content = ""
+			choice.FinishReason = candidate.FinishReason
 		}
 		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
 	}
@@ -191,8 +232,6 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *ChatResponse) *openai.ChatC
func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) { func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
responseText := "" responseText := ""
dataChan := make(chan string)
stopChan := make(chan bool)
scanner := bufio.NewScanner(resp.Body) scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 { if atEOF && len(data) == 0 {
@@ -206,14 +245,16 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
} }
return 0, nil, nil return 0, nil, nil
}) })
dataChan := make(chan string)
stopChan := make(chan bool)
go func() { go func() {
for scanner.Scan() { for scanner.Scan() {
data := scanner.Text() data := scanner.Text()
data = strings.TrimSpace(data) data = strings.TrimSpace(data)
-			if !strings.HasPrefix(data, "\"text\": \"") {
+			if !strings.HasPrefix(data, "data: ") {
 				continue
 			}
-			data = strings.TrimPrefix(data, "\"text\": \"")
+			data = strings.TrimPrefix(data, "data: ")
data = strings.TrimSuffix(data, "\"") data = strings.TrimSuffix(data, "\"")
dataChan <- data dataChan <- data
} }
@@ -223,23 +264,17 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, string) {
 	c.Stream(func(w io.Writer) bool {
 		select {
 		case data := <-dataChan:
-			// this is used to prevent annoying \ related format bug
-			data = fmt.Sprintf("{\"content\": \"%s\"}", data)
-			type dummyStruct struct {
-				Content string `json:"content"`
-			}
-			var dummy dummyStruct
-			err := json.Unmarshal([]byte(data), &dummy)
-			responseText += dummy.Content
-			var choice openai.ChatCompletionsStreamResponseChoice
-			choice.Delta.Content = dummy.Content
-			response := openai.ChatCompletionsStreamResponse{
-				Id:      fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
-				Object:  "chat.completion.chunk",
-				Created: helper.GetTimestamp(),
-				Model:   "gemini-pro",
-				Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
-			}
+			var geminiResponse ChatResponse
+			err := json.Unmarshal([]byte(data), &geminiResponse)
+			if err != nil {
+				logger.SysError("error unmarshalling stream response: " + err.Error())
+				return true
+			}
+			response := streamResponseGeminiChat2OpenAI(&geminiResponse)
+			if response == nil {
+				return true
+			}
+			responseText += response.Choices[0].Delta.StringContent()
 			jsonResponse, err := json.Marshal(response)
 			if err != nil {
 				logger.SysError("error marshalling stream response: " + err.Error())
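For context, a small sketch of what the new SSE handling amounts to: with ?alt=sse the upstream emits "data: {json}" lines, so the handler strips the prefix and unmarshals whole chunks instead of reassembling quoted "text" fragments. The sample payload below is invented for illustration.

package main

import (
	"bufio"
	"fmt"
	"strings"
)

func main() {
	// Invented sample of an SSE body; real chunks are full Gemini ChatResponse objects.
	body := "data: {\"text\": \"Hello\"}\n\ndata: {\"text\": \" world\"}\n"
	scanner := bufio.NewScanner(strings.NewReader(body))
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if !strings.HasPrefix(line, "data: ") {
			continue // skip blank lines between events
		}
		payload := strings.TrimPrefix(line, "data: ")
		fmt.Println("chunk:", payload) // would be json.Unmarshal'ed into ChatResponse
	}
}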

View File

@@ -12,9 +12,15 @@ type InlineData struct {
Data string `json:"data"` Data string `json:"data"`
} }
type FunctionCall struct {
FunctionName string `json:"name"`
Arguments any `json:"args"`
}
type Part struct { type Part struct {
Text string `json:"text,omitempty"` Text string `json:"text,omitempty"`
InlineData *InlineData `json:"inlineData,omitempty"` InlineData *InlineData `json:"inlineData,omitempty"`
FunctionCall *FunctionCall `json:"functionCall,omitempty"`
} }
type ChatContent struct { type ChatContent struct {
@@ -28,7 +34,7 @@ type ChatSafetySettings struct {
} }
type ChatTools struct { type ChatTools struct {
-	FunctionDeclarations any `json:"functionDeclarations,omitempty"`
+	FunctionDeclarations any `json:"function_declarations,omitempty"`
} }
type ChatGenerationConfig struct { type ChatGenerationConfig struct {

View File

@@ -1,7 +1,11 @@
 package minimax

+// https://www.minimaxi.com/document/guides/chat-model/V2?id=65e0736ab2845de20908e2dd
+
 var ModelList = []string{
-	"abab5.5s-chat",
-	"abab5.5-chat",
+	"abab6.5-chat",
+	"abab6.5s-chat",
 	"abab6-chat",
+	"abab5.5-chat",
+	"abab5.5s-chat",
 }

View File

@@ -1,5 +1,11 @@
package ollama package ollama
var ModelList = []string{ var ModelList = []string{
"codellama:7b-instruct",
"llama2:7b",
"llama2:latest",
"llama3:latest",
"phi3:latest",
"qwen:0.5b-chat", "qwen:0.5b-chat",
"qwen:7b",
} }

View File

@@ -13,6 +13,7 @@ import (
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common" "github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/image"
"github.com/songquanpeng/one-api/common/logger" "github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay/adaptor/openai" "github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/constant" "github.com/songquanpeng/one-api/relay/constant"
@@ -32,9 +33,22 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
Stream: request.Stream, Stream: request.Stream,
} }
for _, message := range request.Messages { for _, message := range request.Messages {
openaiContent := message.ParseContent()
var imageUrls []string
var contentText string
for _, part := range openaiContent {
switch part.Type {
case model.ContentTypeText:
contentText = part.Text
case model.ContentTypeImageURL:
_, data, _ := image.GetImageFromUrl(part.ImageURL.Url)
imageUrls = append(imageUrls, data)
}
}
 		ollamaRequest.Messages = append(ollamaRequest.Messages, Message{
 			Role:    message.Role,
-			Content: message.StringContent(),
+			Content: contentText,
+			Images:  imageUrls,
 		})
} }
return &ollamaRequest return &ollamaRequest
@@ -53,6 +67,7 @@ func responseOllama2OpenAI(response *ChatResponse) *openai.TextResponse {
} }
fullTextResponse := openai.TextResponse{ fullTextResponse := openai.TextResponse{
Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()), Id: fmt.Sprintf("chatcmpl-%s", random.GetUUID()),
Model: response.Model,
Object: "chat.completion", Object: "chat.completion",
Created: helper.GetTimestamp(), Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice}, Choices: []openai.TextResponseChoice{choice},
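A rough sketch of the multimodal split the new ConvertRequest loop performs: text parts become the message content and image parts are collected separately. The types below are simplified stand-ins; in the real code the parts come from message.ParseContent() and images are fetched and base64-encoded via image.GetImageFromUrl.

package main

import "fmt"

type ContentPart struct {
	Type     string // "text" or "image_url"
	Text     string
	ImageURL string
}

type OllamaMessage struct {
	Role    string   `json:"role"`
	Content string   `json:"content"`
	Images  []string `json:"images,omitempty"`
}

// splitParts keeps the text part as the message content and collects image
// data into the Images field, roughly mirroring the loop above.
func splitParts(role string, parts []ContentPart) OllamaMessage {
	msg := OllamaMessage{Role: role}
	for _, part := range parts {
		switch part.Type {
		case "text":
			msg.Content = part.Text
		case "image_url":
			msg.Images = append(msg.Images, part.ImageURL) // already base64 data in the real flow
		}
	}
	return msg
}

func main() {
	msg := splitParts("user", []ContentPart{
		{Type: "text", Text: "What is in this picture?"},
		{Type: "image_url", ImageURL: "<base64 image data>"},
	})
	fmt.Printf("%+v\n", msg)
}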

View File

@@ -29,13 +29,13 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
if meta.Mode == relaymode.ImagesGenerations { if meta.Mode == relaymode.ImagesGenerations {
// https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api // https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api
// https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview // https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview
-		fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.APIVersion)
+		fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.Config.APIVersion)
return fullRequestURL, nil return fullRequestURL, nil
} }
// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
requestURL := strings.Split(meta.RequestURLPath, "?")[0] requestURL := strings.Split(meta.RequestURLPath, "?")[0]
-	requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.APIVersion)
+	requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.Config.APIVersion)
task := strings.TrimPrefix(requestURL, "/v1/") task := strings.TrimPrefix(requestURL, "/v1/")
model_ := meta.ActualModelName model_ := meta.ActualModelName
model_ = strings.Replace(model_, ".", "", -1) model_ = strings.Replace(model_, ".", "", -1)
@@ -86,9 +86,13 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
if meta.IsStream { if meta.IsStream {
var responseText string var responseText string
err, responseText, usage = StreamHandler(c, resp, meta.Mode) err, responseText, usage = StreamHandler(c, resp, meta.Mode)
-		if usage == nil {
+		if usage == nil || usage.TotalTokens == 0 {
usage = ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens) usage = ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
} }
if usage.TotalTokens != 0 && usage.PromptTokens == 0 { // some channels don't return prompt tokens & completion tokens
usage.PromptTokens = meta.PromptTokens
usage.CompletionTokens = usage.TotalTokens - meta.PromptTokens
}
} else { } else {
switch meta.Mode { switch meta.Mode {
case relaymode.ImagesGenerations: case relaymode.ImagesGenerations:
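The usage fallback added here can be summarised with a small sketch; the helper name below is illustrative, not the project's.

package main

import "fmt"

type Usage struct {
	PromptTokens     int
	CompletionTokens int
	TotalTokens      int
}

// fillUsage mirrors the new guard: when the upstream reports only a total,
// derive the split from the locally counted prompt tokens.
func fillUsage(usage *Usage, promptTokens int) {
	if usage.TotalTokens != 0 && usage.PromptTokens == 0 {
		usage.PromptTokens = promptTokens
		usage.CompletionTokens = usage.TotalTokens - promptTokens
	}
}

func main() {
	u := &Usage{TotalTokens: 120}
	fillUsage(u, 50)
	fmt.Printf("%+v\n", u) // {PromptTokens:50 CompletionTokens:70 TotalTokens:120}
}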

View File

@@ -3,12 +3,14 @@ package openai
import ( import (
"github.com/songquanpeng/one-api/relay/adaptor/ai360" "github.com/songquanpeng/one-api/relay/adaptor/ai360"
"github.com/songquanpeng/one-api/relay/adaptor/baichuan" "github.com/songquanpeng/one-api/relay/adaptor/baichuan"
"github.com/songquanpeng/one-api/relay/adaptor/deepseek"
"github.com/songquanpeng/one-api/relay/adaptor/groq" "github.com/songquanpeng/one-api/relay/adaptor/groq"
"github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu" "github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu"
"github.com/songquanpeng/one-api/relay/adaptor/minimax" "github.com/songquanpeng/one-api/relay/adaptor/minimax"
"github.com/songquanpeng/one-api/relay/adaptor/mistral" "github.com/songquanpeng/one-api/relay/adaptor/mistral"
"github.com/songquanpeng/one-api/relay/adaptor/moonshot" "github.com/songquanpeng/one-api/relay/adaptor/moonshot"
"github.com/songquanpeng/one-api/relay/adaptor/stepfun" "github.com/songquanpeng/one-api/relay/adaptor/stepfun"
"github.com/songquanpeng/one-api/relay/adaptor/togetherai"
"github.com/songquanpeng/one-api/relay/channeltype" "github.com/songquanpeng/one-api/relay/channeltype"
) )
@@ -22,6 +24,8 @@ var CompatibleChannels = []int{
channeltype.Groq, channeltype.Groq,
channeltype.LingYiWanWu, channeltype.LingYiWanWu,
channeltype.StepFun, channeltype.StepFun,
channeltype.DeepSeek,
channeltype.TogetherAI,
} }
func GetCompatibleChannelMeta(channelType int) (string, []string) { func GetCompatibleChannelMeta(channelType int) (string, []string) {
@@ -44,6 +48,10 @@ func GetCompatibleChannelMeta(channelType int) (string, []string) {
return "lingyiwanwu", lingyiwanwu.ModelList return "lingyiwanwu", lingyiwanwu.ModelList
case channeltype.StepFun: case channeltype.StepFun:
return "stepfun", stepfun.ModelList return "stepfun", stepfun.ModelList
case channeltype.DeepSeek:
return "deepseek", deepseek.ModelList
case channeltype.TogetherAI:
return "together.ai", togetherai.ModelList
default: default:
return "openai", ModelList return "openai", ModelList
} }

View File

@@ -7,6 +7,7 @@ var ModelList = []string{
"gpt-4", "gpt-4-0314", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-0125-preview", "gpt-4", "gpt-4-0314", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-0125-preview",
"gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-0613", "gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-0613",
"gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09", "gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
"gpt-4o", "gpt-4o-2024-05-13",
"gpt-4-vision-preview", "gpt-4-vision-preview",
"text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large", "text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large",
"text-curie-001", "text-babbage-001", "text-ada-001", "text-davinci-002", "text-davinci-003", "text-curie-001", "text-babbage-001", "text-ada-001", "text-davinci-002", "text-davinci-003",

View File

@@ -15,6 +15,12 @@ import (
"strings" "strings"
) )
const (
dataPrefix = "data: "
done = "[DONE]"
dataPrefixLength = len(dataPrefix)
)
func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) { func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
responseText := "" responseText := ""
scanner := bufio.NewScanner(resp.Body) scanner := bufio.NewScanner(resp.Body)
@@ -36,39 +42,46 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.ErrorWithStatusCode, string, *model.Usage) {
 	go func() {
 		for scanner.Scan() {
 			data := scanner.Text()
-			if len(data) < 6 { // ignore blank line or wrong format
+			if len(data) < dataPrefixLength { // ignore blank line or wrong format
 				continue
 			}
-			if data[:6] != "data: " && data[:6] != "[DONE]" {
+			if data[:dataPrefixLength] != dataPrefix && data[:dataPrefixLength] != done {
 				continue
 			}
-			dataChan <- data
-			data = data[6:]
-			if !strings.HasPrefix(data, "[DONE]") {
-				switch relayMode {
-				case relaymode.ChatCompletions:
-					var streamResponse ChatCompletionsStreamResponse
-					err := json.Unmarshal([]byte(data), &streamResponse)
-					if err != nil {
-						logger.SysError("error unmarshalling stream response: " + err.Error())
-						continue // just ignore the error
-					}
-					for _, choice := range streamResponse.Choices {
-						responseText += conv.AsString(choice.Delta.Content)
-					}
-					if streamResponse.Usage != nil {
-						usage = streamResponse.Usage
-					}
-				case relaymode.Completions:
-					var streamResponse CompletionsStreamResponse
-					err := json.Unmarshal([]byte(data), &streamResponse)
-					if err != nil {
-						logger.SysError("error unmarshalling stream response: " + err.Error())
-						continue
-					}
-					for _, choice := range streamResponse.Choices {
-						responseText += choice.Text
-					}
-				}
-			}
+			if strings.HasPrefix(data[dataPrefixLength:], done) {
+				dataChan <- data
+				continue
+			}
+			switch relayMode {
+			case relaymode.ChatCompletions:
+				var streamResponse ChatCompletionsStreamResponse
+				err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
+				if err != nil {
+					logger.SysError("error unmarshalling stream response: " + err.Error())
+					dataChan <- data // if error happened, pass the data to client
+					continue // just ignore the error
+				}
+				if len(streamResponse.Choices) == 0 {
+					// but for empty choice, we should not pass it to client, this is for azure
+					continue // just ignore empty choice
+				}
+				dataChan <- data
+				for _, choice := range streamResponse.Choices {
+					responseText += conv.AsString(choice.Delta.Content)
+				}
+				if streamResponse.Usage != nil {
+					usage = streamResponse.Usage
+				}
+			case relaymode.Completions:
+				dataChan <- data
+				var streamResponse CompletionsStreamResponse
+				err := json.Unmarshal([]byte(data[dataPrefixLength:]), &streamResponse)
+				if err != nil {
+					logger.SysError("error unmarshalling stream response: " + err.Error())
+					continue
+				}
+				for _, choice := range streamResponse.Choices {
+					responseText += choice.Text
+				}
+			}
 		}
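A compact sketch of the forwarding rules this rewrite introduces, assuming the same data-prefix constants; shouldForward is an illustrative helper, not a function in the repository.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

const (
	dataPrefix = "data: "
	done       = "[DONE]"
)

type streamChunk struct {
	Choices []struct {
		Delta struct {
			Content string `json:"content"`
		} `json:"delta"`
	} `json:"choices"`
}

// shouldForward: [DONE] and malformed chunks are passed through, while chunks
// with an empty choices array (as Azure sometimes sends) are dropped.
func shouldForward(line string) bool {
	if !strings.HasPrefix(line, dataPrefix) {
		return false
	}
	payload := strings.TrimPrefix(line, dataPrefix)
	if strings.HasPrefix(payload, done) {
		return true
	}
	var chunk streamChunk
	if err := json.Unmarshal([]byte(payload), &chunk); err != nil {
		return true // forward as-is and let the client decide
	}
	return len(chunk.Choices) > 0
}

func main() {
	fmt.Println(shouldForward(`data: {"choices":[]}`))                           // false
	fmt.Println(shouldForward(`data: {"choices":[{"delta":{"content":"hi"}}]}`)) // true
	fmt.Println(shouldForward("data: [DONE]"))                                   // true
}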

View File

@@ -134,7 +134,7 @@ type ChatCompletionsStreamResponse struct {
Created int64 `json:"created"` Created int64 `json:"created"`
Model string `json:"model"` Model string `json:"model"`
Choices []ChatCompletionsStreamResponseChoice `json:"choices"` Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
-	Usage   *model.Usage                          `json:"usage"`
+	Usage   *model.Usage                          `json:"usage,omitempty"`
} }
type CompletionsStreamResponse struct { type CompletionsStreamResponse struct {

View File

@@ -206,3 +206,7 @@ func CountTokenText(text string, model string) int {
tokenEncoder := getTokenEncoder(model) tokenEncoder := getTokenEncoder(model)
return getTokenNum(tokenEncoder, text) return getTokenNum(tokenEncoder, text)
} }
func CountToken(text string) int {
return CountTokenInput(text, "gpt-3.5-turbo")
}

View File

@@ -0,0 +1,10 @@
package togetherai
// https://docs.together.ai/docs/inference-models
var ModelList = []string{
"meta-llama/Llama-3-70b-chat-hf",
"deepseek-ai/deepseek-coder-33b-instruct",
"mistralai/Mixtral-8x22B-Instruct-v0.1",
"Qwen/Qwen1.5-72B-Chat",
}

View File

@@ -14,10 +14,11 @@ import (
type Adaptor struct { type Adaptor struct {
request *model.GeneralOpenAIRequest request *model.GeneralOpenAIRequest
meta *meta.Meta
} }
func (a *Adaptor) Init(meta *meta.Meta) { func (a *Adaptor) Init(meta *meta.Meta) {
a.meta = meta
} }
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) { func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
@@ -26,6 +27,14 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error { func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
adaptor.SetupCommonRequestHeader(c, req, meta) adaptor.SetupCommonRequestHeader(c, req, meta)
version := parseAPIVersionByModelName(meta.ActualModelName)
if version == "" {
version = a.meta.Config.APIVersion
}
if version == "" {
version = "v1.1"
}
a.meta.Config.APIVersion = version
// check DoResponse for auth part // check DoResponse for auth part
return nil return nil
} }
@@ -61,9 +70,9 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
return nil, openai.ErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest) return nil, openai.ErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
} }
 	if meta.IsStream {
-		err, usage = StreamHandler(c, *a.request, splits[0], splits[1], splits[2])
+		err, usage = StreamHandler(c, meta, *a.request, splits[0], splits[1], splits[2])
 	} else {
-		err, usage = Handler(c, *a.request, splits[0], splits[1], splits[2])
+		err, usage = Handler(c, meta, *a.request, splits[0], splits[1], splits[2])
 	}
return return
} }

View File

@@ -9,12 +9,12 @@ import (
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/gorilla/websocket" "github.com/gorilla/websocket"
"github.com/songquanpeng/one-api/common" "github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/helper" "github.com/songquanpeng/one-api/common/helper"
"github.com/songquanpeng/one-api/common/logger" "github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/common/random" "github.com/songquanpeng/one-api/common/random"
"github.com/songquanpeng/one-api/relay/adaptor/openai" "github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/constant" "github.com/songquanpeng/one-api/relay/constant"
"github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model" "github.com/songquanpeng/one-api/relay/model"
"io" "io"
"net/http" "net/http"
@@ -149,8 +149,8 @@ func buildXunfeiAuthUrl(hostUrl string, apiKey, apiSecret string) string {
return callUrl return callUrl
} }
-func StreamHandler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
-	domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
+func StreamHandler(c *gin.Context, meta *meta.Meta, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
+	domain, authUrl := getXunfeiAuthUrl(meta.Config.APIVersion, apiKey, apiSecret)
dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId) dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
if err != nil { if err != nil {
return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
@@ -179,8 +179,8 @@ func StreamHandler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId
return nil, &usage return nil, &usage
} }
-func Handler(c *gin.Context, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
-	domain, authUrl := getXunfeiAuthUrl(c, apiKey, apiSecret, textRequest.Model)
+func Handler(c *gin.Context, meta *meta.Meta, textRequest model.GeneralOpenAIRequest, appId string, apiSecret string, apiKey string) (*model.ErrorWithStatusCode, *model.Usage) {
+	domain, authUrl := getXunfeiAuthUrl(meta.Config.APIVersion, apiKey, apiSecret)
dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId) dataChan, stopChan, err := xunfeiMakeRequest(textRequest, domain, authUrl, appId)
if err != nil { if err != nil {
return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil return openai.ErrorWrapper(err, "xunfei_request_failed", http.StatusInternalServerError), nil
@@ -268,25 +268,12 @@ func xunfeiMakeRequest(textRequest model.GeneralOpenAIRequest, domain, authUrl,
return dataChan, stopChan, nil return dataChan, stopChan, nil
} }
-func getAPIVersion(c *gin.Context, modelName string) string {
-	query := c.Request.URL.Query()
-	apiVersion := query.Get("api-version")
-	if apiVersion != "" {
-		return apiVersion
-	}
+func parseAPIVersionByModelName(modelName string) string {
 	parts := strings.Split(modelName, "-")
 	if len(parts) == 2 {
-		apiVersion = parts[1]
-		return apiVersion
+		return parts[1]
 	}
-	apiVersion = c.GetString(config.KeyAPIVersion)
-	if apiVersion != "" {
-		return apiVersion
-	}
-	apiVersion = "v1.1"
-	logger.SysLog("api_version not found, using default: " + apiVersion)
-	return apiVersion
+	return ""
 }
// https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E // https://www.xfyun.cn/doc/spark/Web.html#_1-%E6%8E%A5%E5%8F%A3%E8%AF%B4%E6%98%8E
@@ -304,8 +291,7 @@ func apiVersion2domain(apiVersion string) string {
return "general" + apiVersion return "general" + apiVersion
} }
-func getXunfeiAuthUrl(c *gin.Context, apiKey string, apiSecret string, modelName string) (string, string) {
-	apiVersion := getAPIVersion(c, modelName)
+func getXunfeiAuthUrl(apiVersion string, apiKey string, apiSecret string) (string, string) {
domain := apiVersion2domain(apiVersion) domain := apiVersion2domain(apiVersion)
authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret) authUrl := buildXunfeiAuthUrl(fmt.Sprintf("wss://spark-api.xf-yun.com/%s/chat", apiVersion), apiKey, apiSecret)
return domain, authUrl return domain, authUrl
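To make the new version resolution concrete: parseAPIVersionByModelName only looks at the mapped model name, and the fallbacks (channel config, then "v1.1") now happen in SetupRequestHeader. The model names in the demo below are assumed examples of the Spark naming scheme.

package main

import (
	"fmt"
	"strings"
)

// Copied from the hunk above: anything after the first "-" is treated as the
// Spark API version.
func parseAPIVersionByModelName(modelName string) string {
	parts := strings.Split(modelName, "-")
	if len(parts) == 2 {
		return parts[1]
	}
	return ""
}

func main() {
	fmt.Println(parseAPIVersionByModelName("SparkDesk-v3.5")) // v3.5
	fmt.Println(parseAPIVersionByModelName("SparkDesk"))      // "" -> falls back to channel config or v1.1
}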

View File

@@ -62,8 +62,8 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
} }
switch relayMode { switch relayMode {
case relaymode.Embeddings: case relaymode.Embeddings:
-		baiduEmbeddingRequest := ConvertEmbeddingRequest(*request)
-		return baiduEmbeddingRequest, nil
+		baiduEmbeddingRequest, err := ConvertEmbeddingRequest(*request)
+		return baiduEmbeddingRequest, err
default: default:
// TopP (0.0, 1.0) // TopP (0.0, 1.0)
request.TopP = math.Min(0.99, request.TopP) request.TopP = math.Min(0.99, request.TopP)
@@ -129,11 +129,15 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
return return
} }
-func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
+func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) (*EmbeddingRequest, error) {
+	inputs := request.ParseInput()
+	if len(inputs) != 1 {
+		return nil, errors.New("invalid input length, zhipu only support one input")
+	}
 	return &EmbeddingRequest{
-		Model: "embedding-2",
-		Input: request.Input.(string),
-	}
+		Model: request.Model,
+		Input: inputs[0],
+	}, nil
 }
func (a *Adaptor) GetModelList() []string { func (a *Adaptor) GetModelList() []string {
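A sketch of the input normalisation behind the new ConvertEmbeddingRequest: request.ParseInput() (not shown in this diff) accepts either a string or an array of strings, and zhipu's embedding API is given exactly one of them. parseInput below is an illustrative stand-in for that helper.

package main

import (
	"errors"
	"fmt"
)

// parseInput is a stand-in for request.ParseInput(): normalise the OpenAI
// "input" field, which may be a string or an array, into a []string.
func parseInput(input any) []string {
	switch v := input.(type) {
	case string:
		return []string{v}
	case []any:
		out := make([]string, 0, len(v))
		for _, item := range v {
			if s, ok := item.(string); ok {
				out = append(out, s)
			}
		}
		return out
	}
	return nil
}

func convertEmbeddingInput(input any) (string, error) {
	inputs := parseInput(input)
	if len(inputs) != 1 {
		return "", errors.New("invalid input length, zhipu only support one input")
	}
	return inputs[0], nil
}

func main() {
	fmt.Println(convertEmbeddingInput("hello"))         // hello <nil>
	fmt.Println(convertEmbeddingInput([]any{"hello"})) // hello <nil>
	fmt.Println(convertEmbeddingInput([]any{"a", "b"})) // error: only one input supported
}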

View File

@@ -13,6 +13,10 @@ const (
Gemini Gemini
Ollama Ollama
AwsClaude AwsClaude
Coze
Cohere
Cloudflare
DeepL
Dummy // this one is only for count, do not add any channel after this Dummy // this one is only for count, do not add any channel after this
) )

View File

@@ -2,8 +2,9 @@ package ratio
import ( import (
"encoding/json" "encoding/json"
"github.com/songquanpeng/one-api/common/logger"
"strings" "strings"
"github.com/songquanpeng/one-api/common/logger"
) )
const ( const (
@@ -31,6 +32,8 @@ var ModelRatio = map[string]float64{
"gpt-4-turbo-preview": 5, // $0.01 / 1K tokens "gpt-4-turbo-preview": 5, // $0.01 / 1K tokens
"gpt-4-turbo": 5, // $0.01 / 1K tokens "gpt-4-turbo": 5, // $0.01 / 1K tokens
"gpt-4-turbo-2024-04-09": 5, // $0.01 / 1K tokens "gpt-4-turbo-2024-04-09": 5, // $0.01 / 1K tokens
"gpt-4o": 2.5, // $0.005 / 1K tokens
"gpt-4o-2024-05-13": 2.5, // $0.005 / 1K tokens
"gpt-4-vision-preview": 5, // $0.01 / 1K tokens "gpt-4-vision-preview": 5, // $0.01 / 1K tokens
"gpt-3.5-turbo": 0.25, // $0.0005 / 1K tokens "gpt-3.5-turbo": 0.25, // $0.0005 / 1K tokens
"gpt-3.5-turbo-0301": 0.75, "gpt-3.5-turbo-0301": 0.75,
@@ -137,6 +140,8 @@ var ModelRatio = map[string]float64{
"Baichuan2-Turbo-192k": 0.016 * RMB, "Baichuan2-Turbo-192k": 0.016 * RMB,
"Baichuan2-53B": 0.02 * RMB, "Baichuan2-53B": 0.02 * RMB,
// https://api.minimax.chat/document/price // https://api.minimax.chat/document/price
"abab6.5-chat": 0.03 * RMB,
"abab6.5s-chat": 0.01 * RMB,
"abab6-chat": 0.1 * RMB, "abab6-chat": 0.1 * RMB,
"abab5.5-chat": 0.015 * RMB, "abab5.5-chat": 0.015 * RMB,
"abab5.5s-chat": 0.005 * RMB, "abab5.5s-chat": 0.005 * RMB,
@@ -162,6 +167,20 @@ var ModelRatio = map[string]float64{
"step-1v-32k": 0.024 * RMB, "step-1v-32k": 0.024 * RMB,
"step-1-32k": 0.024 * RMB, "step-1-32k": 0.024 * RMB,
"step-1-200k": 0.15 * RMB, "step-1-200k": 0.15 * RMB,
// https://cohere.com/pricing
"command": 0.5,
"command-nightly": 0.5,
"command-light": 0.5,
"command-light-nightly": 0.5,
"command-r": 0.5 / 1000 * USD,
"command-r-plus": 3.0 / 1000 * USD,
// https://platform.deepseek.com/api-docs/pricing/
"deepseek-chat": 1.0 / 1000 * RMB,
"deepseek-coder": 1.0 / 1000 * RMB,
// https://www.deepl.com/pro?cta=header-prices
"deepl-zh": 25.0 / 1000 * USD,
"deepl-en": 25.0 / 1000 * USD,
"deepl-ja": 25.0 / 1000 * USD,
} }
var CompletionRatio = map[string]float64{} var CompletionRatio = map[string]float64{}
@@ -217,6 +236,9 @@ func GetModelRatio(name string) float64 {
if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") { if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") {
name = strings.TrimSuffix(name, "-internet") name = strings.TrimSuffix(name, "-internet")
} }
if strings.HasPrefix(name, "command-") && strings.HasSuffix(name, "-internet") {
name = strings.TrimSuffix(name, "-internet")
}
ratio, ok := ModelRatio[name] ratio, ok := ModelRatio[name]
if !ok { if !ok {
ratio, ok = DefaultModelRatio[name] ratio, ok = DefaultModelRatio[name]
@@ -260,7 +282,9 @@ func GetCompletionRatio(name string) float64 {
return 4.0 / 3.0 return 4.0 / 3.0
} }
if strings.HasPrefix(name, "gpt-4") { if strings.HasPrefix(name, "gpt-4") {
-		if strings.HasPrefix(name, "gpt-4-turbo") {
+		if strings.HasPrefix(name, "gpt-4-turbo") ||
+			strings.HasPrefix(name, "gpt-4o") ||
+			strings.HasSuffix(name, "preview") {
return 3 return 3
} }
return 2 return 2
@@ -277,6 +301,9 @@ func GetCompletionRatio(name string) float64 {
if strings.HasPrefix(name, "gemini-") { if strings.HasPrefix(name, "gemini-") {
return 3 return 3
} }
if strings.HasPrefix(name, "deepseek-") {
return 2
}
switch name { switch name {
case "llama2-70b-4096": case "llama2-70b-4096":
return 0.8 / 0.64 return 0.8 / 0.64
@@ -284,6 +311,12 @@ func GetCompletionRatio(name string) float64 {
return 2 return 2
case "llama3-70b-8192": case "llama3-70b-8192":
return 0.79 / 0.59 return 0.79 / 0.59
case "command", "command-light", "command-nightly", "command-light-nightly":
return 2
case "command-r":
return 3
case "command-r-plus":
return 5
} }
return 1 return 1
} }
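For a sense of scale, a rough sketch of how a model ratio and completion ratio typically combine into a charge; the billing path itself is not part of this diff, so the formula and token counts below are illustrative assumptions.

package main

import "fmt"

// quota assumes the usual pattern: completion tokens are weighted by the
// completion ratio, then everything is scaled by the model ratio.
func quota(promptTokens, completionTokens int, modelRatio, completionRatio float64) float64 {
	return (float64(promptTokens) + float64(completionTokens)*completionRatio) * modelRatio
}

func main() {
	// e.g. gpt-4o above: model ratio 2.5, completion ratio 3
	fmt.Println(quota(1000, 500, 2.5, 3)) // 6250
}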

View File

@@ -35,6 +35,12 @@ const (
LingYiWanWu LingYiWanWu
StepFun StepFun
AwsClaude AwsClaude
Coze
Cohere
DeepSeek
Cloudflare
DeepL
TogetherAI
Dummy Dummy
) )

View File

@@ -27,6 +27,14 @@ func ToAPIType(channelType int) int {
apiType = apitype.Ollama apiType = apitype.Ollama
case AwsClaude: case AwsClaude:
apiType = apitype.AwsClaude apiType = apitype.AwsClaude
case Coze:
apiType = apitype.Coze
case Cohere:
apiType = apitype.Cohere
case Cloudflare:
apiType = apitype.Cloudflare
case DeepL:
apiType = apitype.DeepL
} }
return apiType return apiType

View File

@@ -35,6 +35,12 @@ var ChannelBaseURLs = []string{
"https://api.lingyiwanwu.com", // 31 "https://api.lingyiwanwu.com", // 31
"https://api.stepfun.com", // 32 "https://api.stepfun.com", // 32
"", // 33 "", // 33
"https://api.coze.com", // 34
"https://api.cohere.ai", // 35
"https://api.deepseek.com", // 36
"https://api.cloudflare.com", // 37
"https://api-free.deepl.com", // 38
"https://api.together.xyz", // 39
} }
func init() { func init() {

View File

@@ -1,3 +1,5 @@
package constant package constant
var StopFinishReason = "stop" var StopFinishReason = "stop"
var StreamObject = "chat.completion.chunk"
var NonStreamObject = "chat.completion"

View File

@@ -0,0 +1,5 @@
package finishreason
const (
Stop = "stop"
)

View File

@@ -0,0 +1,5 @@
package role
const (
Assistant = "assistant"
)

View File

@@ -10,14 +10,15 @@ import (
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common" "github.com/songquanpeng/one-api/common"
"github.com/songquanpeng/one-api/common/config" "github.com/songquanpeng/one-api/common/config"
"github.com/songquanpeng/one-api/common/ctxkey"
"github.com/songquanpeng/one-api/common/logger" "github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/model" "github.com/songquanpeng/one-api/model"
"github.com/songquanpeng/one-api/relay/adaptor/azure"
"github.com/songquanpeng/one-api/relay/adaptor/openai" "github.com/songquanpeng/one-api/relay/adaptor/openai"
"github.com/songquanpeng/one-api/relay/billing" "github.com/songquanpeng/one-api/relay/billing"
billingratio "github.com/songquanpeng/one-api/relay/billing/ratio" billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
"github.com/songquanpeng/one-api/relay/channeltype" "github.com/songquanpeng/one-api/relay/channeltype"
"github.com/songquanpeng/one-api/relay/client" "github.com/songquanpeng/one-api/relay/client"
"github.com/songquanpeng/one-api/relay/meta"
relaymodel "github.com/songquanpeng/one-api/relay/model" relaymodel "github.com/songquanpeng/one-api/relay/model"
"github.com/songquanpeng/one-api/relay/relaymode" "github.com/songquanpeng/one-api/relay/relaymode"
"io" "io"
@@ -27,14 +28,15 @@ import (
func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode { func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatusCode {
ctx := c.Request.Context() ctx := c.Request.Context()
meta := meta.GetByContext(c)
audioModel := "whisper-1" audioModel := "whisper-1"
-	tokenId := c.GetInt("token_id")
-	channelType := c.GetInt("channel")
-	channelId := c.GetInt("channel_id")
-	userId := c.GetInt("id")
-	group := c.GetString("group")
-	tokenName := c.GetString("token_name")
+	tokenId := c.GetInt(ctxkey.TokenId)
+	channelType := c.GetInt(ctxkey.Channel)
+	channelId := c.GetInt(ctxkey.ChannelId)
+	userId := c.GetInt(ctxkey.Id)
+	group := c.GetString(ctxkey.Group)
+	tokenName := c.GetString(ctxkey.TokenName)
var ttsRequest openai.TextToSpeechRequest var ttsRequest openai.TextToSpeechRequest
if relayMode == relaymode.AudioSpeech { if relayMode == relaymode.AudioSpeech {
@@ -107,7 +109,7 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
}() }()
// map model name // map model name
-	modelMapping := c.GetString("model_mapping")
+	modelMapping := c.GetString(ctxkey.ModelMapping)
if modelMapping != "" { if modelMapping != "" {
modelMap := make(map[string]string) modelMap := make(map[string]string)
err := json.Unmarshal([]byte(modelMapping), &modelMap) err := json.Unmarshal([]byte(modelMapping), &modelMap)
@@ -121,13 +123,13 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
baseURL := channeltype.ChannelBaseURLs[channelType] baseURL := channeltype.ChannelBaseURLs[channelType]
requestURL := c.Request.URL.String() requestURL := c.Request.URL.String()
-	if c.GetString("base_url") != "" {
-		baseURL = c.GetString("base_url")
+	if c.GetString(ctxkey.BaseURL) != "" {
+		baseURL = c.GetString(ctxkey.BaseURL)
} }
fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType) fullRequestURL := openai.GetFullRequestURL(baseURL, requestURL, channelType)
if channelType == channeltype.Azure { if channelType == channeltype.Azure {
-		apiVersion := azure.GetAPIVersion(c)
+		apiVersion := meta.Config.APIVersion
if relayMode == relaymode.AudioTranscription { if relayMode == relaymode.AudioTranscription {
// https://learn.microsoft.com/en-us/azure/ai-services/openai/whisper-quickstart?tabs=command-line#rest-api // https://learn.microsoft.com/en-us/azure/ai-services/openai/whisper-quickstart?tabs=command-line#rest-api
fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", baseURL, audioModel, apiVersion) fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/audio/transcriptions?api-version=%s", baseURL, audioModel, apiVersion)

View File

@@ -53,6 +53,16 @@ func (e GeneralErrorResponse) ToMessage() string {
} }
func RelayErrorHandler(resp *http.Response) (ErrorWithStatusCode *model.ErrorWithStatusCode) { func RelayErrorHandler(resp *http.Response) (ErrorWithStatusCode *model.ErrorWithStatusCode) {
if resp == nil {
return &model.ErrorWithStatusCode{
StatusCode: 500,
Error: model.Error{
Message: "resp is nil",
Type: "upstream_error",
Code: "bad_response",
},
}
}
ErrorWithStatusCode = &model.ErrorWithStatusCode{ ErrorWithStatusCode = &model.ErrorWithStatusCode{
StatusCode: resp.StatusCode, StatusCode: resp.StatusCode,
Error: model.Error{ Error: model.Error{

View File

@@ -18,6 +18,7 @@ import (
"github.com/songquanpeng/one-api/relay/relaymode" "github.com/songquanpeng/one-api/relay/relaymode"
"math" "math"
"net/http" "net/http"
"strings"
) )
func getAndValidateTextRequest(c *gin.Context, relayMode int) (*relaymodel.GeneralOpenAIRequest, error) { func getAndValidateTextRequest(c *gin.Context, relayMode int) (*relaymodel.GeneralOpenAIRequest, error) {
@@ -124,9 +125,9 @@ func getPromptTokens(textRequest *relaymodel.GeneralOpenAIRequest, relayMode int
} }
func getPreConsumedQuota(textRequest *relaymodel.GeneralOpenAIRequest, promptTokens int, ratio float64) int64 { func getPreConsumedQuota(textRequest *relaymodel.GeneralOpenAIRequest, promptTokens int, ratio float64) int64 {
-	preConsumedTokens := config.PreConsumedQuota
+	preConsumedTokens := config.PreConsumedQuota + int64(promptTokens)
 	if textRequest.MaxTokens != 0 {
-		preConsumedTokens = int64(promptTokens) + int64(textRequest.MaxTokens)
+		preConsumedTokens += int64(textRequest.MaxTokens)
} }
return int64(float64(preConsumedTokens) * ratio) return int64(float64(preConsumedTokens) * ratio)
} }
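The effect of the pre-consumption change is easier to see with numbers; the values below are made up, and the function simply restates the hunk above.

package main

import "fmt"

// getPreConsumedQuota: prompt tokens are now always counted up front, and
// max_tokens is added on top only when the client sets it. Previously prompt
// tokens were only included when max_tokens was set.
func getPreConsumedQuota(preConsumedQuota int64, promptTokens, maxTokens int, ratio float64) int64 {
	preConsumedTokens := preConsumedQuota + int64(promptTokens)
	if maxTokens != 0 {
		preConsumedTokens += int64(maxTokens)
	}
	return int64(float64(preConsumedTokens) * ratio)
}

func main() {
	fmt.Println(getPreConsumedQuota(500, 800, 0, 2.5))    // (500+800)*2.5 = 3250
	fmt.Println(getPreConsumedQuota(500, 800, 1024, 2.5)) // (500+800+1024)*2.5 = 5810
}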
@@ -204,3 +205,23 @@ func getMappedModelName(modelName string, mapping map[string]string) (string, bo
} }
return modelName, false return modelName, false
} }
func isErrorHappened(meta *meta.Meta, resp *http.Response) bool {
if resp == nil {
if meta.ChannelType == channeltype.AwsClaude {
return false
}
return true
}
if resp.StatusCode != http.StatusOK {
return true
}
if meta.ChannelType == channeltype.DeepL {
// skip stream check for deepl
return false
}
if meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json") {
return true
}
return false
}

View File

@@ -7,6 +7,7 @@ import (
"errors" "errors"
"fmt" "fmt"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common/ctxkey"
"github.com/songquanpeng/one-api/common/logger" "github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/model" "github.com/songquanpeng/one-api/model"
"github.com/songquanpeng/one-api/relay" "github.com/songquanpeng/one-api/relay"
@@ -69,6 +70,7 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
if adaptor == nil { if adaptor == nil {
return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest) return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
} }
adaptor.Init(meta)
switch meta.ChannelType { switch meta.ChannelType {
case channeltype.Ali: case channeltype.Ali:
@@ -119,11 +121,11 @@ func RelayImageHelper(c *gin.Context, relayMode int) *relaymodel.ErrorWithStatus
logger.SysError("error update user quota cache: " + err.Error()) logger.SysError("error update user quota cache: " + err.Error())
} }
if quota != 0 { if quota != 0 {
-			tokenName := c.GetString("token_name")
+			tokenName := c.GetString(ctxkey.TokenName)
logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio) logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio)
model.RecordConsumeLog(ctx, meta.UserId, meta.ChannelId, 0, 0, imageRequest.Model, tokenName, quota, logContent) model.RecordConsumeLog(ctx, meta.UserId, meta.ChannelId, 0, 0, imageRequest.Model, tokenName, quota, logContent)
model.UpdateUserUsedQuotaAndRequestCount(meta.UserId, quota) model.UpdateUserUsedQuotaAndRequestCount(meta.UserId, quota)
-			channelId := c.GetInt("channel_id")
+			channelId := c.GetInt(ctxkey.ChannelId)
model.UpdateChannelUsedQuota(channelId, quota) model.UpdateChannelUsedQuota(channelId, quota)
} }
}(c.Request.Context()) }(c.Request.Context())

View File

@@ -4,10 +4,6 @@ import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io"
"net/http"
"strings"
"github.com/gin-gonic/gin" "github.com/gin-gonic/gin"
"github.com/songquanpeng/one-api/common/logger" "github.com/songquanpeng/one-api/common/logger"
"github.com/songquanpeng/one-api/relay" "github.com/songquanpeng/one-api/relay"
@@ -18,6 +14,8 @@ import (
"github.com/songquanpeng/one-api/relay/channeltype" "github.com/songquanpeng/one-api/relay/channeltype"
"github.com/songquanpeng/one-api/relay/meta" "github.com/songquanpeng/one-api/relay/meta"
"github.com/songquanpeng/one-api/relay/model" "github.com/songquanpeng/one-api/relay/model"
"io"
"net/http"
) )
func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode { func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
@@ -53,6 +51,7 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
if adaptor == nil { if adaptor == nil {
return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest) return openai.ErrorWrapper(fmt.Errorf("invalid api type: %d", meta.APIType), "invalid_api_type", http.StatusBadRequest)
} }
adaptor.Init(meta)
// get request body // get request body
var requestBody io.Reader var requestBody io.Reader
@@ -87,12 +86,9 @@ func RelayTextHelper(c *gin.Context) *model.ErrorWithStatusCode {
logger.Errorf(ctx, "DoRequest failed: %s", err.Error()) logger.Errorf(ctx, "DoRequest failed: %s", err.Error())
return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError) return openai.ErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
} }
-	if resp != nil {
-		errorHappened := (resp.StatusCode != http.StatusOK) || (meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json"))
-		if errorHappened {
-			billing.ReturnPreConsumedQuota(ctx, preConsumedQuota, meta.TokenId)
-			return RelayErrorHandler(resp)
-		}
-	}
+	if isErrorHappened(meta, resp) {
+		billing.ReturnPreConsumedQuota(ctx, preConsumedQuota, meta.TokenId)
+		return RelayErrorHandler(resp)
+	}
// do response // do response

View File

@@ -2,8 +2,8 @@ package meta

 import (
 	"github.com/gin-gonic/gin"
-	"github.com/songquanpeng/one-api/common/config"
-	"github.com/songquanpeng/one-api/relay/adaptor/azure"
+	"github.com/songquanpeng/one-api/common/ctxkey"
+	"github.com/songquanpeng/one-api/model"
 	"github.com/songquanpeng/one-api/relay/channeltype"
 	"github.com/songquanpeng/one-api/relay/relaymode"
 	"strings"
@@ -19,10 +19,9 @@ type Meta struct {
 	Group           string
 	ModelMapping    map[string]string
 	BaseURL         string
-	APIVersion      string
 	APIKey          string
 	APIType         int
-	Config          map[string]string
+	Config          model.ChannelConfig
 	IsStream        bool
 	OriginModelName string
 	ActualModelName string
@@ -32,22 +31,22 @@ type Meta struct {
 func GetByContext(c *gin.Context) *Meta {
 	meta := Meta{
 		Mode:            relaymode.GetByPath(c.Request.URL.Path),
-		ChannelType:     c.GetInt("channel"),
-		ChannelId:       c.GetInt("channel_id"),
-		TokenId:         c.GetInt("token_id"),
-		TokenName:       c.GetString("token_name"),
-		UserId:          c.GetInt("id"),
-		Group:           c.GetString("group"),
-		ModelMapping:    c.GetStringMapString("model_mapping"),
-		BaseURL:         c.GetString("base_url"),
-		APIVersion:      c.GetString(config.KeyAPIVersion),
+		ChannelType:     c.GetInt(ctxkey.Channel),
+		ChannelId:       c.GetInt(ctxkey.ChannelId),
+		TokenId:         c.GetInt(ctxkey.TokenId),
+		TokenName:       c.GetString(ctxkey.TokenName),
+		UserId:          c.GetInt(ctxkey.Id),
+		Group:           c.GetString(ctxkey.Group),
+		ModelMapping:    c.GetStringMapString(ctxkey.ModelMapping),
+		OriginModelName: c.GetString(ctxkey.RequestModel),
+		BaseURL:         c.GetString(ctxkey.BaseURL),
 		APIKey:          strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "),
-		Config:          nil,
 		RequestURLPath:  c.Request.URL.String(),
 	}
-	if meta.ChannelType == channeltype.Azure {
-		meta.APIVersion = azure.GetAPIVersion(c)
+	cfg, ok := c.Get(ctxkey.Config)
+	if ok {
+		meta.Config = cfg.(model.ChannelConfig)
 	}
 	if meta.BaseURL == "" {
 		meta.BaseURL = channeltype.ChannelBaseURLs[meta.ChannelType]

View File

@@ -1,5 +1,5 @@
<!DOCTYPE html> <!DOCTYPE html>
-<html lang="en">
+<html lang="zh-CN">
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
<link rel="icon" href="logo.png" /> <link rel="icon" href="logo.png" />

View File

@@ -1,5 +1,5 @@
<!DOCTYPE html> <!DOCTYPE html>
-<html lang="en">
+<html lang="zh-CN">
<head> <head>
<title>One API</title> <title>One API</title>
<link rel="icon" href="/favicon.ico" /> <link rel="icon" href="/favicon.ico" />

View File

@@ -11,6 +11,12 @@ export const CHANNEL_OPTIONS = {
value: 14, value: 14,
color: 'primary' color: 'primary'
}, },
// 33: {
// key: 33,
// text: 'AWS Claude',
// value: 33,
// color: 'primary'
// },
3: { 3: {
key: 3, key: 3,
text: 'Azure OpenAI', text: 'Azure OpenAI',
@@ -113,6 +119,36 @@ export const CHANNEL_OPTIONS = {
value: 32, value: 32,
color: 'primary' color: 'primary'
}, },
// 34: {
// key: 34,
// text: 'Coze',
// value: 34,
// color: 'primary'
// },
35: {
key: 35,
text: 'Cohere',
value: 35,
color: 'primary'
},
36: {
key: 36,
text: 'DeepSeek',
value: 36,
color: 'primary'
},
38: {
key: 38,
text: 'DeepL',
value: 38,
color: 'primary'
},
39: {
key: 39,
text: 'together.ai',
value: 39,
color: 'primary'
},
8: { 8: {
key: 8, key: 8,
text: '自定义渠道', text: '自定义渠道',

View File

@@ -1,5 +1,5 @@
<!DOCTYPE html> <!DOCTYPE html>
-<html lang="en">
+<html lang="zh-CN">
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
<link rel="icon" href="logo.png" /> <link rel="icon" href="logo.png" />

View File

@@ -33,7 +33,7 @@ function renderType(type) {
} }
type2label[0] = { value: 0, text: '未知类型', color: 'grey' }; type2label[0] = { value: 0, text: '未知类型', color: 'grey' };
} }
-  return <Label basic color={type2label[type]?.color}>{type2label[type]?.text}</Label>;
+  return <Label basic color={type2label[type]?.color}>{type2label[type] ? type2label[type].text : type}</Label>;
} }
function renderBalance(type, balance) { function renderBalance(type, balance) {

View File

@@ -1,35 +1,41 @@
 export const CHANNEL_OPTIONS = [
-  { key: 1, text: 'OpenAI', value: 1, color: 'green' },
-  { key: 14, text: 'Anthropic Claude', value: 14, color: 'black' },
-  { key: 33, text: 'AWS Claude', value: 33, color: 'black' },
-  { key: 3, text: 'Azure OpenAI', value: 3, color: 'olive' },
-  { key: 11, text: 'Google PaLM2', value: 11, color: 'orange' },
-  { key: 24, text: 'Google Gemini', value: 24, color: 'orange' },
-  { key: 28, text: 'Mistral AI', value: 28, color: 'orange' },
-  { key: 15, text: '百度文心千帆', value: 15, color: 'blue' },
-  { key: 17, text: '阿里通义千问', value: 17, color: 'orange' },
-  { key: 18, text: '讯飞星火认知', value: 18, color: 'blue' },
-  { key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet' },
-  { key: 19, text: '360 智脑', value: 19, color: 'blue' },
-  { key: 25, text: 'Moonshot AI', value: 25, color: 'black' },
-  { key: 23, text: '腾讯混元', value: 23, color: 'teal' },
-  { key: 26, text: '百川大模型', value: 26, color: 'orange' },
-  { key: 27, text: 'MiniMax', value: 27, color: 'red' },
-  { key: 29, text: 'Groq', value: 29, color: 'orange' },
-  { key: 30, text: 'Ollama', value: 30, color: 'black' },
-  { key: 31, text: '零一万物', value: 31, color: 'green' },
-  { key: 32, text: '阶跃星辰', value: 32, color: 'blue' },
-  { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
-  { key: 22, text: '知识库FastGPT', value: 22, color: 'blue' },
-  { key: 21, text: '知识库AI Proxy', value: 21, color: 'purple' },
-  { key: 20, text: '代理OpenRouter', value: 20, color: 'black' },
-  { key: 2, text: '代理API2D', value: 2, color: 'blue' },
-  { key: 5, text: '代理OpenAI-SB', value: 5, color: 'brown' },
-  { key: 7, text: '代理OhMyGPT', value: 7, color: 'purple' },
-  { key: 10, text: '代理AI Proxy', value: 10, color: 'purple' },
-  { key: 4, text: '代理CloseAI', value: 4, color: 'teal' },
-  { key: 6, text: '代理OpenAI Max', value: 6, color: 'violet' },
-  { key: 9, text: '代理AI.LS', value: 9, color: 'yellow' },
-  { key: 12, text: '代理:API2GPT', value: 12, color: 'blue' },
-  { key: 13, text: '代理:AIGC2D', value: 13, color: 'purple' }
+  {key: 1, text: 'OpenAI', value: 1, color: 'green'},
+  {key: 14, text: 'Anthropic Claude', value: 14, color: 'black'},
+  {key: 33, text: 'AWS Claude', value: 33, color: 'black'},
+  {key: 3, text: 'Azure OpenAI', value: 3, color: 'olive'},
+  {key: 11, text: 'Google PaLM2', value: 11, color: 'orange'},
+  {key: 24, text: 'Google Gemini', value: 24, color: 'orange'},
+  {key: 28, text: 'Mistral AI', value: 28, color: 'orange'},
+  {key: 15, text: '百度文心千帆', value: 15, color: 'blue'},
+  {key: 17, text: '阿里通义千问', value: 17, color: 'orange'},
+  {key: 18, text: '讯飞星火认知', value: 18, color: 'blue'},
+  {key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet'},
+  {key: 19, text: '360 智脑', value: 19, color: 'blue'},
+  {key: 25, text: 'Moonshot AI', value: 25, color: 'black'},
+  {key: 23, text: '腾讯混元', value: 23, color: 'teal'},
+  {key: 26, text: '百川大模型', value: 26, color: 'orange'},
+  {key: 27, text: 'MiniMax', value: 27, color: 'red'},
+  {key: 29, text: 'Groq', value: 29, color: 'orange'},
+  {key: 30, text: 'Ollama', value: 30, color: 'black'},
+  {key: 31, text: '零一万物', value: 31, color: 'green'},
+  {key: 32, text: '阶跃星辰', value: 32, color: 'blue'},
+  {key: 34, text: 'Coze', value: 34, color: 'blue'},
+  {key: 35, text: 'Cohere', value: 35, color: 'blue'},
+  {key: 36, text: 'DeepSeek', value: 36, color: 'black'},
+  {key: 37, text: 'Cloudflare', value: 37, color: 'orange'},
+  {key: 38, text: 'DeepL', value: 38, color: 'black'},
+  {key: 39, text: 'together.ai', value: 39, color: 'blue'},
+  {key: 8, text: '自定义渠道', value: 8, color: 'pink'},
+  {key: 22, text: '知识库FastGPT', value: 22, color: 'blue'},
+  {key: 21, text: '知识库AI Proxy', value: 21, color: 'purple'},
+  {key: 20, text: '代理OpenRouter', value: 20, color: 'black'},
+  {key: 2, text: '代理API2D', value: 2, color: 'blue'},
+  {key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown'},
+  {key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple'},
+  {key: 10, text: '代理AI Proxy', value: 10, color: 'purple'},
+  {key: 4, text: '代理CloseAI', value: 4, color: 'teal'},
+  {key: 6, text: '代理OpenAI Max', value: 6, color: 'violet'},
+  {key: 9, text: '代理AI.LS', value: 9, color: 'yellow'},
+  {key: 12, text: '代理API2GPT', value: 12, color: 'blue'},
+  {key: 13, text: '代理AIGC2D', value: 13, color: 'purple'}
 ];

View File

@@ -57,7 +57,8 @@ const EditChannel = () => {
const [config, setConfig] = useState({ const [config, setConfig] = useState({
region: '', region: '',
sk: '', sk: '',
-    ak: ''
+    ak: '',
+    user_id: ''
}); });
const handleInputChange = (e, { name, value }) => { const handleInputChange = (e, { name, value }) => {
setInputs((inputs) => ({ ...inputs, [name]: value })); setInputs((inputs) => ({ ...inputs, [name]: value }));
@@ -156,8 +157,10 @@ const EditChannel = () => {
}, []); }, []);
const submit = async () => { const submit = async () => {
-    if (inputs.key === "") {
-      inputs.key = `${config.ak}|${config.sk}|${config.region}`;
+    if (inputs.key === '') {
+      if (config.ak !== '' && config.sk !== '' && config.region !== '') {
+        inputs.key = `${config.ak}|${config.sk}|${config.region}`;
+      }
     }
if (!isEdit && (inputs.name === '' || inputs.key === '')) { if (!isEdit && (inputs.name === '' || inputs.key === '')) {
showInfo('请填写渠道名称和渠道密钥!'); showInfo('请填写渠道名称和渠道密钥!');
@@ -171,7 +174,7 @@ const EditChannel = () => {
showInfo('模型映射必须是合法的 JSON 格式!'); showInfo('模型映射必须是合法的 JSON 格式!');
return; return;
} }
-    let localInputs = inputs;
+    let localInputs = {...inputs};
if (localInputs.base_url && localInputs.base_url.endsWith('/')) { if (localInputs.base_url && localInputs.base_url.endsWith('/')) {
localInputs.base_url = localInputs.base_url.slice(0, localInputs.base_url.length - 1); localInputs.base_url = localInputs.base_url.slice(0, localInputs.base_url.length - 1);
} }
@@ -352,6 +355,13 @@ const EditChannel = () => {
</Form.Field> </Form.Field>
) )
} }
{
inputs.type === 34 && (
<Message>
对于 Coze 而言模型名称即 Bot ID你可以添加一个前缀 `bot-`例如`bot-123456`
</Message>
)
}
<Form.Field> <Form.Field>
<Form.Dropdown <Form.Dropdown
label='模型' label='模型'
@@ -442,6 +452,18 @@ const EditChannel = () => {
</Form.Field> </Form.Field>
) )
} }
{
inputs.type === 34 && (
<Form.Input
label='User ID'
name='user_id'
required
placeholder={'生成该密钥的用户 ID'}
onChange={handleConfigChange}
value={config.user_id}
autoComplete=''
/>)
}
{ {
inputs.type !== 33 && (batch ? <Form.Field> inputs.type !== 33 && (batch ? <Form.Field>
<Form.TextArea <Form.TextArea
@@ -466,6 +488,21 @@ const EditChannel = () => {
/> />
</Form.Field>) </Form.Field>)
} }
{
inputs.type === 37 && (
<Form.Field>
<Form.Input
label='Account ID'
name='user_id'
required
placeholder={'请输入 Account ID例如d8d7c61dbc334c32d3ced580e4bf42b4'}
onChange={handleConfigChange}
value={config.user_id}
autoComplete=''
/>
</Form.Field>
)
}
{ {
inputs.type !== 33 && !isEdit && ( inputs.type !== 33 && !isEdit && (
<Form.Checkbox <Form.Checkbox