Compare commits

...

18 Commits

Author SHA1 Message Date
Martial BE
c5aa59e297 🐛 fix xunfei request error 2023-12-29 11:32:43 +08:00
Martial BE
211a862d54 🎨 Support qwen-vl-plus 2023-12-29 10:59:26 +08:00
Martial BE
c4c89e8e1b 🎨 change gemini safety settings 2023-12-28 16:49:31 +08:00
Martial BE
72983ac734 Model list return by group 2023-12-28 15:48:20 +08:00
Martial BE
4d43dce64b 🐛 fix base 64 encoded format support of gemini-pro-vision 2023-12-28 12:23:39 +08:00
Martial BE
0fa94d3c94 🐛 fix log channel is null 2023-12-28 11:27:52 +08:00
MartialBE
002dba5a75 🐛 fix top-up link is null 2023-12-26 23:32:27 +08:00
Martial BE
fb24d024a7 add channel proxy 2023-12-26 18:42:39 +08:00
Martial BE
eeb867da10 🎨 Change the method of getting channel parameters 2023-12-26 16:40:50 +08:00
Martial BE
47b72b850f Merge branch 'songquanpeng' into sync_upstream 2023-12-25 11:23:28 +08:00
JustSong
f44fbe3fe7 docs: update pr template 2023-12-24 19:24:59 +08:00
JustSong
1c8922153d feat: support gemini-vision-pro 2023-12-24 18:54:32 +08:00
Laisky.Cai
f3c07e1451 fix: openai response should contains model (#841)
* fix: openai response should contains `model`

- Update model attributes in `claudeHandler` for `relay-claude.go`
- Implement model type for fullTextResponse in `relay-gemini.go`
- Add new `Model` field to `OpenAITextResponse` struct in `relay.go`

* chore: set model name response for models

---------

Co-authored-by: JustSong <songquanpeng@foxmail.com>
2023-12-24 16:58:31 +08:00
Bryan
40ceb29e54 fix: fix SearchUsers not working if using PostgreSQL (#778)
* fix SearchUsers

* refactor: using UsingPostgreSQL as condition

---------

Co-authored-by: JustSong <songquanpeng@foxmail.com>
2023-12-24 16:42:00 +08:00
dependabot[bot]
0699ecd0af chore(deps): bump golang.org/x/crypto from 0.14.0 to 0.17.0 (#840)
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.14.0 to 0.17.0.
- [Commits](https://github.com/golang/crypto/compare/v0.14.0...v0.17.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-24 16:29:48 +08:00
moondie
ee9e746520 feat: update ali stream implementation & enable internet search (#856)
* Update relay-ali.go: 改进stream模式,添加联网搜索能力

通义千问支持stream的增量模式,不需要每次去掉上次的前缀;实测qwen-max联网模式效果不错,添加了联网模式。如果别的模型有问题可以改为单独给qwen-max开放

* 删除"stream参数"

刚发现原来阿里api没有这个参数,上次误加了。

* refactor: only enable search when specified

* fix: remove custom suffix when get model ratio

---------

Co-authored-by: JustSong <songquanpeng@foxmail.com>
2023-12-24 16:17:21 +08:00
Buer
a763681c2e fix: fix base64 image parse error (#858) 2023-12-24 15:35:56 +08:00
MartialBE
be613883a1 🐛 Fix statistics error in SQLite 2023-12-22 20:54:37 +08:00
77 changed files with 752 additions and 783 deletions

View File

@@ -6,23 +6,61 @@ import (
"fmt"
"io"
"net/http"
"net/url"
"one-api/types"
"strconv"
"sync"
"time"
"github.com/gin-gonic/gin"
"golang.org/x/net/proxy"
)
var HttpClient *http.Client
var clientPool = &sync.Pool{
New: func() interface{} {
return &http.Client{}
},
}
func init() {
if RelayTimeout == 0 {
HttpClient = &http.Client{}
} else {
HttpClient = &http.Client{
Timeout: time.Duration(RelayTimeout) * time.Second,
func GetHttpClient(proxyAddr string) *http.Client {
client := clientPool.Get().(*http.Client)
if RelayTimeout > 0 {
client.Timeout = time.Duration(RelayTimeout) * time.Second
}
if proxyAddr != "" {
proxyURL, err := url.Parse(proxyAddr)
if err != nil {
SysError("Error parsing proxy address: " + err.Error())
return client
}
switch proxyURL.Scheme {
case "http", "https":
client.Transport = &http.Transport{
Proxy: http.ProxyURL(proxyURL),
}
case "socks5":
dialer, err := proxy.SOCKS5("tcp", proxyURL.Host, nil, proxy.Direct)
if err != nil {
SysError("Error creating SOCKS5 dialer: " + err.Error())
return client
}
client.Transport = &http.Transport{
Dial: dialer.Dial,
}
default:
SysError("Unsupported proxy scheme: " + proxyURL.Scheme)
}
}
return client
}
// PutHttpClient returns a client obtained from GetHttpClient to the pool.
//
// The client is reset to a pristine state first: a previous caller may have
// installed a proxy Transport (http/https/socks5) or a Timeout, and without
// this reset a later GetHttpClient call with an empty proxy address would
// silently reuse that stale configuration, routing requests through the
// wrong proxy.
func PutHttpClient(c *http.Client) {
	c.Transport = nil
	c.Timeout = 0
	clientPool.Put(c)
}
type Client struct {
@@ -92,12 +130,14 @@ func (c *Client) NewRequest(method, url string, setters ...requestOption) (*http
return req, nil
}
func SendRequest(req *http.Request, response any, outputResp bool) (*http.Response, *types.OpenAIErrorWithStatusCode) {
func SendRequest(req *http.Request, response any, outputResp bool, proxyAddr string) (*http.Response, *types.OpenAIErrorWithStatusCode) {
// 发送请求
resp, err := HttpClient.Do(req)
client := GetHttpClient(proxyAddr)
resp, err := client.Do(req)
if err != nil {
return nil, ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError)
}
PutHttpClient(client)
if !outputResp {
defer resp.Body.Close()
@@ -210,8 +250,10 @@ func HandleErrorResp(resp *http.Response) (openAIErrorWithStatusCode *types.Open
return
}
func (c *Client) SendRequestRaw(req *http.Request) (body io.ReadCloser, err error) {
resp, err := HttpClient.Do(req)
func (c *Client) SendRequestRaw(req *http.Request, proxyAddr string) (body io.ReadCloser, err error) {
client := GetHttpClient(proxyAddr)
resp, err := client.Do(req)
PutHttpClient(client)
if err != nil {
return
}

View File

@@ -3,6 +3,7 @@ package image
import (
"bytes"
"encoding/base64"
"errors"
"image"
_ "image/gif"
_ "image/jpeg"
@@ -15,7 +16,22 @@ import (
_ "golang.org/x/image/webp"
)
// IsImageUrl reports whether the resource at url serves an image, by issuing
// a HEAD request and inspecting the Content-Type response header.
//
// It returns (false, err) when the HEAD request itself fails, and
// (false, nil) when the URL is reachable but does not serve an image.
func IsImageUrl(url string) (bool, error) {
	resp, err := http.Head(url)
	if err != nil {
		return false, err
	}
	// A HEAD response still carries a (normally empty) body that must be
	// closed, otherwise the underlying connection cannot be reused.
	defer resp.Body.Close()
	return strings.HasPrefix(resp.Header.Get("Content-Type"), "image/"), nil
}
func GetImageSizeFromUrl(url string) (width int, height int, err error) {
isImage, err := IsImageUrl(url)
if !isImage {
return
}
resp, err := http.Get(url)
if err != nil {
return
@@ -28,6 +44,44 @@ func GetImageSizeFromUrl(url string) (width int, height int, err error) {
return img.Width, img.Height, nil
}
func GetImageFromUrl(url string) (mimeType string, data string, err error) {
if strings.HasPrefix(url, "data:image/") {
dataURLPattern := regexp.MustCompile(`data:image/([^;]+);base64,(.*)`)
matches := dataURLPattern.FindStringSubmatch(url)
if len(matches) == 3 && matches[2] != "" {
mimeType = "image/" + matches[1]
data = matches[2]
return
}
err = errors.New("image base64 decode failed")
return
}
isImage, err := IsImageUrl(url)
if !isImage {
if err == nil {
err = errors.New("invalid image link")
}
return
}
resp, err := http.Get(url)
if err != nil {
return
}
defer resp.Body.Close()
buffer := bytes.NewBuffer(nil)
_, err = buffer.ReadFrom(resp.Body)
if err != nil {
return
}
mimeType = resp.Header.Get("Content-Type")
data = base64.StdEncoding.EncodeToString(buffer.Bytes())
return
}
var (
reg = regexp.MustCompile(`data:image/([^;]+);base64,`)
)

View File

@@ -152,3 +152,51 @@ func TestGetImageSize(t *testing.T) {
})
}
}
// TestGetImageSizeFromBase64 downloads each test image, base64-encodes it,
// and verifies that GetImageSizeFromBase64 reports the expected dimensions.
func TestGetImageSizeFromBase64(t *testing.T) {
	for idx, tc := range cases {
		tc := tc
		t.Run("Decode:"+strconv.Itoa(idx), func(t *testing.T) {
			resp, err := http.Get(tc.url)
			assert.NoError(t, err)
			defer resp.Body.Close()
			raw, err := io.ReadAll(resp.Body)
			assert.NoError(t, err)
			b64 := base64.StdEncoding.EncodeToString(raw)
			gotWidth, gotHeight, err := img.GetImageSizeFromBase64(b64)
			assert.NoError(t, err)
			assert.Equal(t, tc.width, gotWidth)
			assert.Equal(t, tc.height, gotHeight)
		})
	}
}
// TestGetImageFromUrl exercises both retrieval paths of GetImageFromUrl —
// plain http(s) URLs and base64 data URLs — and then two failure cases:
// a reachable non-image URL and a malformed data URL with an empty payload.
func TestGetImageFromUrl(t *testing.T) {
	for idx, tc := range cases {
		tc := tc
		t.Run("Decode:"+strconv.Itoa(idx), func(t *testing.T) {
			resp, err := http.Get(tc.url)
			assert.NoError(t, err)
			defer resp.Body.Close()
			raw, err := io.ReadAll(resp.Body)
			assert.NoError(t, err)
			want := base64.StdEncoding.EncodeToString(raw)

			// Fetch via the plain URL.
			mime, got, err := img.GetImageFromUrl(tc.url)
			assert.NoError(t, err)
			assert.Equal(t, want, got)
			assert.Equal(t, "image/"+tc.format, mime)

			// Fetch via an equivalent data URL.
			dataURL := "data:image/" + tc.format + ";base64," + want
			mime, got, err = img.GetImageFromUrl(dataURL)
			assert.NoError(t, err)
			assert.Equal(t, want, got)
			assert.Equal(t, "image/"+tc.format, mime)
		})
	}

	// A reachable URL that is not an image must be rejected.
	_, _, err := img.GetImageFromUrl("https://raw.githubusercontent.com/songquanpeng/one-api/main/README.md")
	assert.Error(t, err)

	// A data URL with an empty base64 payload must be rejected.
	_, _, err = img.GetImageFromUrl("data:image/text;base64,")
	assert.Error(t, err)
}

View File

@@ -84,6 +84,7 @@ var ModelRatio = map[string]float64{
"Embedding-V1": 0.1429, // ¥0.002 / 1k tokens
"PaLM-2": 1,
"gemini-pro": 1, // $0.00025 / 1k characters -> $0.001 / 1k tokens
"gemini-pro-vision": 1, // $0.00025 / 1k characters -> $0.001 / 1k tokens
"chatglm_turbo": 0.3572, // ¥0.005 / 1k tokens
"chatglm_pro": 0.7143, // ¥0.01 / 1k tokens
"chatglm_std": 0.3572, // ¥0.005 / 1k tokens
@@ -92,6 +93,7 @@ var ModelRatio = map[string]float64{
"qwen-plus": 1.4286, // ¥0.02 / 1k tokens
"qwen-max": 1.4286, // ¥0.02 / 1k tokens
"qwen-max-longcontext": 1.4286, // ¥0.02 / 1k tokens
"qwen-vl-plus": 0.5715, // ¥0.008 / 1k tokens
"text-embedding-v1": 0.05, // ¥0.0007 / 1k tokens
"SparkDesk": 1.2858, // ¥0.018 / 1k tokens
"360GPT_S2_V9": 0.8572, // ¥0.012 / 1k tokens
@@ -115,6 +117,9 @@ func UpdateModelRatioByJSONString(jsonStr string) error {
}
func GetModelRatio(name string) float64 {
if strings.HasPrefix(name, "qwen-") && strings.HasSuffix(name, "-internet") {
name = strings.TrimSuffix(name, "-internet")
}
ratio, ok := ModelRatio[name]
if !ok {
SysError("model ratio not found: " + name)

View File

@@ -55,10 +55,9 @@ func updateChannelBalance(channel *model.Channel) (float64, error) {
c, _ := gin.CreateTestContext(w)
c.Request = req
setChannelToContext(c, channel)
req.Header.Set("Content-Type", "application/json")
provider := providers.GetProvider(channel.Type, c)
provider := providers.GetProvider(channel, c)
if provider == nil {
return 0, errors.New("provider not found")
}
@@ -102,7 +101,6 @@ func UpdateChannelBalance(c *gin.Context) {
"message": "",
"balance": balance,
})
return
}
func updateAllChannelsBalance() error {
@@ -146,7 +144,6 @@ func UpdateAllChannelsBalance(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func AutomaticallyUpdateChannels(frequency int) {

View File

@@ -29,7 +29,6 @@ func testChannel(channel *model.Channel, request types.ChatCompletionRequest) (e
c, _ := gin.CreateTestContext(w)
c.Request = req
setChannelToContext(c, channel)
// 创建映射
channelTypeToModel := map[int]string{
common.ChannelTypePaLM: "PaLM-2",
@@ -50,7 +49,7 @@ func testChannel(channel *model.Channel, request types.ChatCompletionRequest) (e
}
request.Model = model
provider := providers.GetProvider(channel.Type, c)
provider := providers.GetProvider(channel, c)
if provider == nil {
return errors.New("channel not implemented"), nil
}
@@ -74,7 +73,7 @@ func testChannel(channel *model.Channel, request types.ChatCompletionRequest) (e
}
if Usage.CompletionTokens == 0 {
return errors.New(fmt.Sprintf("channel %s, message 补全 tokens 非预期返回 0", channel.Name)), nil
return fmt.Errorf("channel %s, message 补全 tokens 非预期返回 0", channel.Name), nil
}
return nil, nil
@@ -132,7 +131,6 @@ func TestChannel(c *gin.Context) {
"message": "",
"time": consumedTime,
})
return
}
var testAllChannelsLock sync.Mutex

View File

@@ -1,12 +1,13 @@
package controller
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"one-api/model"
"strconv"
"strings"
"github.com/gin-gonic/gin"
)
func GetAllChannels(c *gin.Context) {
@@ -27,7 +28,6 @@ func GetAllChannels(c *gin.Context) {
"message": "",
"data": channels,
})
return
}
func SearchChannels(c *gin.Context) {
@@ -45,7 +45,6 @@ func SearchChannels(c *gin.Context) {
"message": "",
"data": channels,
})
return
}
func GetChannel(c *gin.Context) {
@@ -70,7 +69,6 @@ func GetChannel(c *gin.Context) {
"message": "",
"data": channel,
})
return
}
func AddChannel(c *gin.Context) {
@@ -106,7 +104,6 @@ func AddChannel(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func DeleteChannel(c *gin.Context) {
@@ -124,7 +121,6 @@ func DeleteChannel(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func DeleteDisabledChannel(c *gin.Context) {
@@ -141,7 +137,6 @@ func DeleteDisabledChannel(c *gin.Context) {
"message": "",
"data": rows,
})
return
}
func UpdateChannel(c *gin.Context) {
@@ -167,5 +162,4 @@ func UpdateChannel(c *gin.Context) {
"message": "",
"data": channel,
})
return
}

View File

@@ -5,13 +5,14 @@ import (
"encoding/json"
"errors"
"fmt"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"one-api/model"
"strconv"
"time"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
)
type GitHubOAuthResponse struct {
@@ -211,7 +212,6 @@ func GitHubBind(c *gin.Context) {
"success": true,
"message": "bind",
})
return
}
func GenerateOAuthCode(c *gin.Context) {

View File

@@ -9,7 +9,7 @@ import (
func GetGroups(c *gin.Context) {
groupNames := make([]string, 0)
for groupName, _ := range common.GroupRatio {
for groupName := range common.GroupRatio {
groupNames = append(groupNames, groupName)
}
c.JSON(http.StatusOK, gin.H{

View File

@@ -1,11 +1,12 @@
package controller
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"one-api/model"
"strconv"
"github.com/gin-gonic/gin"
)
func GetAllLogs(c *gin.Context) {
@@ -33,7 +34,6 @@ func GetAllLogs(c *gin.Context) {
"message": "",
"data": logs,
})
return
}
func GetUserLogs(c *gin.Context) {
@@ -60,7 +60,6 @@ func GetUserLogs(c *gin.Context) {
"message": "",
"data": logs,
})
return
}
func SearchAllLogs(c *gin.Context) {
@@ -78,7 +77,6 @@ func SearchAllLogs(c *gin.Context) {
"message": "",
"data": logs,
})
return
}
func SearchUserLogs(c *gin.Context) {
@@ -97,7 +95,6 @@ func SearchUserLogs(c *gin.Context) {
"message": "",
"data": logs,
})
return
}
func GetLogsStat(c *gin.Context) {
@@ -118,7 +115,6 @@ func GetLogsStat(c *gin.Context) {
//"token": tokenNum,
},
})
return
}
func GetLogsSelfStat(c *gin.Context) {
@@ -139,7 +135,6 @@ func GetLogsSelfStat(c *gin.Context) {
//"token": tokenNum,
},
})
return
}
func DeleteHistoryLogs(c *gin.Context) {
@@ -164,5 +159,4 @@ func DeleteHistoryLogs(c *gin.Context) {
"message": "",
"data": count,
})
return
}

View File

@@ -35,7 +35,6 @@ func GetStatus(c *gin.Context) {
"display_in_currency": common.DisplayInCurrencyEnabled,
},
})
return
}
func GetNotice(c *gin.Context) {
@@ -46,7 +45,6 @@ func GetNotice(c *gin.Context) {
"message": "",
"data": common.OptionMap["Notice"],
})
return
}
func GetAbout(c *gin.Context) {
@@ -57,7 +55,6 @@ func GetAbout(c *gin.Context) {
"message": "",
"data": common.OptionMap["About"],
})
return
}
func GetHomePageContent(c *gin.Context) {
@@ -68,7 +65,6 @@ func GetHomePageContent(c *gin.Context) {
"message": "",
"data": common.OptionMap["HomePageContent"],
})
return
}
func SendEmailVerification(c *gin.Context) {
@@ -121,7 +117,6 @@ func SendEmailVerification(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func SendPasswordResetEmail(c *gin.Context) {
@@ -160,7 +155,6 @@ func SendPasswordResetEmail(c *gin.Context) {
"success": true,
"message": "",
})
return
}
type PasswordResetRequest struct {
@@ -200,5 +194,4 @@ func ResetPassword(c *gin.Context) {
"message": "",
"data": password,
})
return
}

View File

@@ -2,7 +2,11 @@ package controller
import (
"fmt"
"net/http"
"one-api/common"
"one-api/model"
"one-api/types"
"sort"
"github.com/gin-gonic/gin"
)
@@ -25,550 +29,38 @@ type OpenAIModelPermission struct {
}
type OpenAIModels struct {
Id string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
OwnedBy string `json:"owned_by"`
Permission []OpenAIModelPermission `json:"permission"`
Root string `json:"root"`
Parent *string `json:"parent"`
Id string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
OwnedBy *string `json:"owned_by"`
Permission *[]OpenAIModelPermission `json:"permission"`
Root *string `json:"root"`
Parent *string `json:"parent"`
}
var openAIModels []OpenAIModels
var openAIModelsMap map[string]OpenAIModels
func init() {
var permission []OpenAIModelPermission
permission = append(permission, OpenAIModelPermission{
Id: "modelperm-LwHkVFn8AcMItP432fKKDIKJ",
Object: "model_permission",
Created: 1626777600,
AllowCreateEngine: true,
AllowSampling: true,
AllowLogprobs: true,
AllowSearchIndices: false,
AllowView: true,
AllowFineTuning: false,
Organization: "*",
Group: nil,
IsBlocking: false,
})
// https://platform.openai.com/docs/models/model-endpoint-compatibility
openAIModels = []OpenAIModels{
{
Id: "dall-e-2",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "dall-e-2",
Parent: nil,
},
{
Id: "dall-e-3",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "dall-e-3",
Parent: nil,
},
{
Id: "whisper-1",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "whisper-1",
Parent: nil,
},
{
Id: "tts-1",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1",
Parent: nil,
},
{
Id: "tts-1-1106",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1-1106",
Parent: nil,
},
{
Id: "tts-1-hd",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1-hd",
Parent: nil,
},
{
Id: "tts-1-hd-1106",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "tts-1-hd-1106",
Parent: nil,
},
{
Id: "gpt-3.5-turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-0301",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-0301",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-0613",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-16k",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-16k",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-16k-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-16k-0613",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-1106",
Object: "model",
Created: 1699593571,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-1106",
Parent: nil,
},
{
Id: "gpt-3.5-turbo-instruct",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-3.5-turbo-instruct",
Parent: nil,
},
{
Id: "gpt-4",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4",
Parent: nil,
},
{
Id: "gpt-4-0314",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-0314",
Parent: nil,
},
{
Id: "gpt-4-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-0613",
Parent: nil,
},
{
Id: "gpt-4-32k",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-32k",
Parent: nil,
},
{
Id: "gpt-4-32k-0314",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-32k-0314",
Parent: nil,
},
{
Id: "gpt-4-32k-0613",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-32k-0613",
Parent: nil,
},
{
Id: "gpt-4-1106-preview",
Object: "model",
Created: 1699593571,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-1106-preview",
Parent: nil,
},
{
Id: "gpt-4-vision-preview",
Object: "model",
Created: 1699593571,
OwnedBy: "openai",
Permission: permission,
Root: "gpt-4-vision-preview",
Parent: nil,
},
{
Id: "text-embedding-ada-002",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-embedding-ada-002",
Parent: nil,
},
{
Id: "text-davinci-003",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-davinci-003",
Parent: nil,
},
{
Id: "text-davinci-002",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-davinci-002",
Parent: nil,
},
{
Id: "text-curie-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-curie-001",
Parent: nil,
},
{
Id: "text-babbage-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-babbage-001",
Parent: nil,
},
{
Id: "text-ada-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-ada-001",
Parent: nil,
},
{
Id: "text-moderation-latest",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-moderation-latest",
Parent: nil,
},
{
Id: "text-moderation-stable",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-moderation-stable",
Parent: nil,
},
{
Id: "text-davinci-edit-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "text-davinci-edit-001",
Parent: nil,
},
{
Id: "code-davinci-edit-001",
Object: "model",
Created: 1677649963,
OwnedBy: "openai",
Permission: permission,
Root: "code-davinci-edit-001",
Parent: nil,
},
{
Id: "claude-instant-1",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-instant-1",
Parent: nil,
},
{
Id: "claude-2",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-2",
Parent: nil,
},
{
Id: "claude-2.1",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-2.1",
Parent: nil,
},
{
Id: "claude-2.0",
Object: "model",
Created: 1677649963,
OwnedBy: "anthropic",
Permission: permission,
Root: "claude-2.0",
Parent: nil,
},
{
Id: "ERNIE-Bot",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "ERNIE-Bot",
Parent: nil,
},
{
Id: "ERNIE-Bot-turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "ERNIE-Bot-turbo",
Parent: nil,
},
{
Id: "ERNIE-Bot-4",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "ERNIE-Bot-4",
Parent: nil,
},
{
Id: "Embedding-V1",
Object: "model",
Created: 1677649963,
OwnedBy: "baidu",
Permission: permission,
Root: "Embedding-V1",
Parent: nil,
},
{
Id: "PaLM-2",
Object: "model",
Created: 1677649963,
OwnedBy: "google",
Permission: permission,
Root: "PaLM-2",
Parent: nil,
},
{
Id: "gemini-pro",
Object: "model",
Created: 1677649963,
OwnedBy: "google",
Permission: permission,
Root: "gemini-pro",
Parent: nil,
},
{
Id: "chatglm_turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_turbo",
Parent: nil,
},
{
Id: "chatglm_pro",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_pro",
Parent: nil,
},
{
Id: "chatglm_std",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_std",
Parent: nil,
},
{
Id: "chatglm_lite",
Object: "model",
Created: 1677649963,
OwnedBy: "zhipu",
Permission: permission,
Root: "chatglm_lite",
Parent: nil,
},
{
Id: "qwen-turbo",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-turbo",
Parent: nil,
},
{
Id: "qwen-plus",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-plus",
Parent: nil,
},
{
Id: "qwen-max",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-max",
Parent: nil,
},
{
Id: "qwen-max-longcontext",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "qwen-max-longcontext",
Parent: nil,
},
{
Id: "text-embedding-v1",
Object: "model",
Created: 1677649963,
OwnedBy: "ali",
Permission: permission,
Root: "text-embedding-v1",
Parent: nil,
},
{
Id: "SparkDesk",
Object: "model",
Created: 1677649963,
OwnedBy: "xunfei",
Permission: permission,
Root: "SparkDesk",
Parent: nil,
},
{
Id: "360GPT_S2_V9",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "360GPT_S2_V9",
Parent: nil,
},
{
Id: "embedding-bert-512-v1",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "embedding-bert-512-v1",
Parent: nil,
},
{
Id: "embedding_s1_v1",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "embedding_s1_v1",
Parent: nil,
},
{
Id: "semantic_similarity_s1_v1",
Object: "model",
Created: 1677649963,
OwnedBy: "360",
Permission: permission,
Root: "semantic_similarity_s1_v1",
Parent: nil,
},
{
Id: "hunyuan",
Object: "model",
Created: 1677649963,
OwnedBy: "tencent",
Permission: permission,
Root: "hunyuan",
Parent: nil,
},
keys := make([]string, 0, len(common.ModelRatio))
for k := range common.ModelRatio {
keys = append(keys, k)
}
sort.Strings(keys)
for _, modelId := range keys {
openAIModels = append(openAIModels, OpenAIModels{
Id: modelId,
Object: "model",
Created: 1677649963,
OwnedBy: nil,
Permission: nil,
Root: nil,
Parent: nil,
})
}
openAIModelsMap = make(map[string]OpenAIModels)
for _, model := range openAIModels {
openAIModelsMap[model.Id] = model
@@ -576,6 +68,35 @@ func init() {
}
// ListModels answers the user-facing model listing endpoint: it returns only
// the models available to the caller's group, in OpenAI's "list" format.
//
// Models are sorted by id so the response is stable across calls. The
// optional metadata fields (owned_by, permission, root, parent) are left nil
// for group-scoped entries, matching how the cached group model list is
// built elsewhere in this file.
func ListModels(c *gin.Context) {
	groupName := c.GetString("group")
	models, err := model.CacheGetGroupModels(groupName)
	if err != nil {
		common.AbortWithMessage(c, http.StatusServiceUnavailable, err.Error())
		return
	}
	sort.Strings(models)
	groupOpenAIModels := make([]OpenAIModels, 0, len(models))
	for _, modelId := range models {
		groupOpenAIModels = append(groupOpenAIModels, OpenAIModels{
			Id:         modelId,
			Object:     "model",
			Created:    1677649963,
			OwnedBy:    nil,
			Permission: nil,
			Root:       nil,
			Parent:     nil,
		})
	}
	// Use http.StatusOK rather than a bare 200, consistent with the status
	// constants already used in this function and the rest of the file.
	c.JSON(http.StatusOK, gin.H{
		"object": "list",
		"data":   groupOpenAIModels,
	})
}
func ListModelsForAdmin(c *gin.Context) {
c.JSON(200, gin.H{
"object": "list",
"data": openAIModels,

View File

@@ -43,7 +43,7 @@ func RelayChat(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeChatCompletions)
provider, pass := getProvider(c, channel, common.RelayModeChatCompletions)
if pass {
return
}

View File

@@ -43,7 +43,7 @@ func RelayCompletions(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeCompletions)
provider, pass := getProvider(c, channel, common.RelayModeCompletions)
if pass {
return
}

View File

@@ -42,7 +42,7 @@ func RelayEmbeddings(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeEmbeddings)
provider, pass := getProvider(c, channel, common.RelayModeEmbeddings)
if pass {
return
}

View File

@@ -51,7 +51,7 @@ func RelayImageEdits(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeImagesEdits)
provider, pass := getProvider(c, channel, common.RelayModeImagesEdits)
if pass {
return
}

View File

@@ -54,7 +54,7 @@ func RelayImageGenerations(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeImagesGenerations)
provider, pass := getProvider(c, channel, common.RelayModeImagesGenerations)
if pass {
return
}

View File

@@ -46,7 +46,7 @@ func RelayImageVariations(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeImagesVariations)
provider, pass := getProvider(c, channel, common.RelayModeImagesVariations)
if pass {
return
}

View File

@@ -42,7 +42,7 @@ func RelayModerations(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeModerations)
provider, pass := getProvider(c, channel, common.RelayModeModerations)
if pass {
return
}

View File

@@ -38,7 +38,7 @@ func RelaySpeech(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeAudioSpeech)
provider, pass := getProvider(c, channel, common.RelayModeAudioSpeech)
if pass {
return
}

View File

@@ -38,7 +38,7 @@ func RelayTranscriptions(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeAudioTranscription)
provider, pass := getProvider(c, channel, common.RelayModeAudioTranscription)
if pass {
return
}

View File

@@ -38,7 +38,7 @@ func RelayTranslations(c *gin.Context) {
}
// 获取供应商
provider, pass := getProvider(c, channel.Type, common.RelayModeAudioTranslation)
provider, pass := getProvider(c, channel, common.RelayModeAudioTranslation)
if pass {
return
}

View File

@@ -45,7 +45,8 @@ func fetchChannel(c *gin.Context, modelName string) (channel *model.Channel, pas
return
}
setChannelToContext(c, channel)
c.Set("channel_id", channel.Id)
return
}
@@ -84,8 +85,8 @@ func fetchChannelByModel(c *gin.Context, modelName string) (*model.Channel, bool
return channel, false
}
func getProvider(c *gin.Context, channelType int, relayMode int) (providersBase.ProviderInterface, bool) {
provider := providers.GetProvider(channelType, c)
func getProvider(c *gin.Context, channel *model.Channel, relayMode int) (providersBase.ProviderInterface, bool) {
provider := providers.GetProvider(channel, c)
if provider == nil {
common.AbortWithMessage(c, http.StatusNotImplemented, "channel not found")
return nil, true
@@ -99,27 +100,6 @@ func getProvider(c *gin.Context, channelType int, relayMode int) (providersBase.
return provider, false
}
func setChannelToContext(c *gin.Context, channel *model.Channel) {
// c.Set("channel", channel.Type)
c.Set("channel_id", channel.Id)
c.Set("channel_name", channel.Name)
c.Set("api_key", channel.Key)
c.Set("base_url", channel.GetBaseURL())
switch channel.Type {
case common.ChannelTypeAzure:
c.Set("api_version", channel.Other)
case common.ChannelTypeXunfei:
c.Set("api_version", channel.Other)
case common.ChannelTypeGemini:
c.Set("api_version", channel.Other)
case common.ChannelTypeAIProxyLibrary:
c.Set("library_id", channel.Other)
case common.ChannelTypeAli:
c.Set("plugin", channel.Other)
}
}
func shouldDisableChannel(err *types.OpenAIError, statusCode int) bool {
if !common.AutomaticDisableChannelEnabled {
return false

View File

@@ -1,11 +1,12 @@
package controller
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"one-api/model"
"strconv"
"github.com/gin-gonic/gin"
)
func GetAllTokens(c *gin.Context) {
@@ -27,7 +28,6 @@ func GetAllTokens(c *gin.Context) {
"message": "",
"data": tokens,
})
return
}
func SearchTokens(c *gin.Context) {
@@ -46,7 +46,6 @@ func SearchTokens(c *gin.Context) {
"message": "",
"data": tokens,
})
return
}
func GetToken(c *gin.Context) {
@@ -72,7 +71,6 @@ func GetToken(c *gin.Context) {
"message": "",
"data": token,
})
return
}
func GetTokenStatus(c *gin.Context) {
@@ -138,7 +136,6 @@ func AddToken(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func DeleteToken(c *gin.Context) {
@@ -156,7 +153,6 @@ func DeleteToken(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func UpdateToken(c *gin.Context) {
@@ -224,5 +220,4 @@ func UpdateToken(c *gin.Context) {
"message": "",
"data": cleanToken,
})
return
}

View File

@@ -174,7 +174,6 @@ func Register(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func GetAllUsers(c *gin.Context) {
@@ -195,7 +194,6 @@ func GetAllUsers(c *gin.Context) {
"message": "",
"data": users,
})
return
}
func SearchUsers(c *gin.Context) {
@@ -213,7 +211,6 @@ func SearchUsers(c *gin.Context) {
"message": "",
"data": users,
})
return
}
func GetUser(c *gin.Context) {
@@ -246,7 +243,6 @@ func GetUser(c *gin.Context) {
"message": "",
"data": user,
})
return
}
func GetUserDashboard(c *gin.Context) {
@@ -306,7 +302,6 @@ func GenerateAccessToken(c *gin.Context) {
"message": "",
"data": user.AccessToken,
})
return
}
func GetAffCode(c *gin.Context) {
@@ -334,7 +329,6 @@ func GetAffCode(c *gin.Context) {
"message": "",
"data": user.AffCode,
})
return
}
func GetSelf(c *gin.Context) {
@@ -352,7 +346,6 @@ func GetSelf(c *gin.Context) {
"message": "",
"data": user,
})
return
}
func UpdateUser(c *gin.Context) {
@@ -416,7 +409,6 @@ func UpdateUser(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func UpdateSelf(c *gin.Context) {
@@ -463,7 +455,6 @@ func UpdateSelf(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func DeleteUser(c *gin.Context) {
@@ -525,7 +516,6 @@ func DeleteSelf(c *gin.Context) {
"success": true,
"message": "",
})
return
}
func CreateUser(c *gin.Context) {
@@ -574,7 +564,6 @@ func CreateUser(c *gin.Context) {
"success": true,
"message": "",
})
return
}
type ManageRequest struct {
@@ -691,7 +680,6 @@ func ManageUser(c *gin.Context) {
"message": "",
"data": clearUser,
})
return
}
func EmailBind(c *gin.Context) {
@@ -733,7 +721,6 @@ func EmailBind(c *gin.Context) {
"success": true,
"message": "",
})
return
}
type topUpRequest struct {
@@ -764,5 +751,4 @@ func TopUp(c *gin.Context) {
"message": "",
"data": quota,
})
return
}

View File

@@ -4,12 +4,13 @@ import (
"encoding/json"
"errors"
"fmt"
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"one-api/model"
"strconv"
"time"
"github.com/gin-gonic/gin"
)
type wechatLoginResponse struct {
@@ -160,5 +161,4 @@ func WeChatBind(c *gin.Context) {
"success": true,
"message": "",
})
return
}

6
go.mod
View File

@@ -16,7 +16,7 @@ require (
github.com/gorilla/websocket v1.5.0
github.com/pkoukk/tiktoken-go v0.1.5
github.com/stretchr/testify v1.8.3
golang.org/x/crypto v0.14.0
golang.org/x/crypto v0.17.0
golang.org/x/image v0.14.0
gorm.io/driver/mysql v1.4.3
gorm.io/driver/postgres v1.5.2
@@ -58,8 +58,8 @@ require (
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.2.11 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/net v0.17.0 // indirect
golang.org/x/sys v0.13.0 // indirect
golang.org/x/net v0.19.0 // indirect
golang.org/x/sys v0.15.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/protobuf v1.30.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect

10
go.sum
View File

@@ -152,13 +152,15 @@ golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUu
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/image v0.14.0 h1:tNgSxAFe3jC4uYqvZdTr84SZoM1KfwdC9SKIFrLjFn4=
golang.org/x/image v0.14.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -166,8 +168,8 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=

View File

@@ -5,6 +5,7 @@ import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/common"
"runtime/debug"
)
func RelayPanicRecover() gin.HandlerFunc {
@@ -12,6 +13,7 @@ func RelayPanicRecover() gin.HandlerFunc {
defer func() {
if err := recover(); err != nil {
common.SysError(fmt.Sprintf("panic detected: %v", err))
common.SysError(fmt.Sprintf("stacktrace from panic: %s", string(debug.Stack())))
c.JSON(http.StatusInternalServerError, gin.H{
"error": gin.H{
"message": fmt.Sprintf("Panic detected, error: %v. Please submit a issue here: https://github.com/songquanpeng/one-api", err),

View File

@@ -39,6 +39,22 @@ func GetRandomSatisfiedChannel(group string, model string) (*Channel, error) {
return &channel, err
}
func GetGroupModels(group string) ([]string, error) {
var models []string
groupCol := "`group`"
trueVal := "1"
if common.UsingPostgreSQL {
groupCol = `"group"`
trueVal = "true"
}
err := DB.Model(&Ability{}).Where(groupCol+" = ? and enabled = ? ", group, trueVal).Distinct("model").Pluck("model", &models).Error
if err != nil {
return nil, err
}
return models, nil
}
func (channel *Channel) AddAbilities() error {
models_ := strings.Split(channel.Models, ",")
groups_ := strings.Split(channel.Group, ",")

View File

@@ -213,3 +213,22 @@ func CacheGetRandomSatisfiedChannel(group string, model string) (*Channel, error
idx := rand.Intn(endIdx)
return channels[idx], nil
}
func CacheGetGroupModels(group string) ([]string, error) {
if !common.MemoryCacheEnabled {
return GetGroupModels(group)
}
channelSyncLock.RLock()
defer channelSyncLock.RUnlock()
groupModels := group2model2channels[group]
if groupModels == nil {
return nil, errors.New("group not found")
}
models := make([]string, 0)
for model := range groupModels {
models = append(models, model)
}
return models, nil
}

View File

@@ -25,6 +25,7 @@ type Channel struct {
UsedQuota int64 `json:"used_quota" gorm:"bigint;default:0"`
ModelMapping *string `json:"model_mapping" gorm:"type:varchar(1024);default:''"`
Priority *int64 `json:"priority" gorm:"bigint;default:0"`
Proxy string `json:"proxy" gorm:"type:varchar(255);default:''"`
}
func GetAllChannels(startIdx int, num int, selectAll bool) ([]*Channel, error) {

View File

@@ -200,6 +200,10 @@ func SearchLogsByDayAndModel(user_id, start, end int) (LogStatistics []*LogStati
groupSelect = "TO_CHAR(date_trunc('day', to_timestamp(created_at)), 'YYYY-MM-DD') as day"
}
if common.UsingSQLite {
groupSelect = "strftime('%Y-%m-%d', datetime(created_at, 'unixepoch')) as day"
}
err = DB.Raw(`
SELECT `+groupSelect+`,
model_name, count(1) as request_count,

View File

@@ -19,7 +19,7 @@ func (p *Aigc2dProvider) Balance(channel *model.Channel) (float64, error) {
// 发送请求
var response base.BalanceResponse
_, errWithCode := common.SendRequest(req, &response, false)
_, errWithCode := common.SendRequest(req, &response, false, p.Channel.Proxy)
if errWithCode != nil {
return 0, errors.New(errWithCode.OpenAIError.Message)
}

View File

@@ -20,7 +20,7 @@ func (p *AIProxyProvider) Balance(channel *model.Channel) (float64, error) {
// 发送请求
var response AIProxyUserOverviewResponse
_, errWithCode := common.SendRequest(req, &response, false)
_, errWithCode := common.SendRequest(req, &response, false, p.Channel.Proxy)
if errWithCode != nil {
return 0, errors.New(errWithCode.OpenAIError.Message)
}

View File

@@ -2,6 +2,7 @@ package ali
import (
"fmt"
"strings"
"one-api/providers/base"
@@ -28,13 +29,23 @@ type AliProvider struct {
base.BaseProvider
}
func (p *AliProvider) GetFullRequestURL(requestURL string, modelName string) string {
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")
if modelName == "qwen-vl-plus" {
requestURL = "/api/v1/services/aigc/multimodal-generation/generation"
}
return fmt.Sprintf("%s%s", baseURL, requestURL)
}
// 获取请求头
func (p *AliProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
p.CommonRequestHeaders(headers)
headers["Authorization"] = fmt.Sprintf("Bearer %s", p.Context.GetString("api_key"))
if p.Context.GetString("plugin") != "" {
headers["X-DashScope-Plugin"] = p.Context.GetString("plugin")
headers["Authorization"] = fmt.Sprintf("Bearer %s", p.Channel.Key)
if p.Channel.Other != "" {
headers["X-DashScope-Plugin"] = p.Channel.Other
}
return headers

View File

@@ -26,20 +26,12 @@ func (aliResponse *AliChatResponse) ResponseHandler(resp *http.Response) (OpenAI
return
}
choice := types.ChatCompletionChoice{
Index: 0,
Message: types.ChatCompletionMessage{
Role: "assistant",
Content: aliResponse.Output.Text,
},
FinishReason: aliResponse.Output.FinishReason,
}
OpenAIResponse = types.ChatCompletionResponse{
ID: aliResponse.RequestId,
Object: "chat.completion",
Created: common.GetTimestamp(),
Choices: []types.ChatCompletionChoice{choice},
Model: aliResponse.Model,
Choices: aliResponse.Output.ToChatCompletionChoices(),
Usage: &types.Usage{
PromptTokens: aliResponse.Usage.InputTokens,
CompletionTokens: aliResponse.Usage.OutputTokens,
@@ -50,21 +42,57 @@ func (aliResponse *AliChatResponse) ResponseHandler(resp *http.Response) (OpenAI
return
}
const AliEnableSearchModelSuffix = "-internet"
// 获取聊天请求体
func (p *AliProvider) getChatRequestBody(request *types.ChatCompletionRequest) *AliChatRequest {
messages := make([]AliMessage, 0, len(request.Messages))
for i := 0; i < len(request.Messages); i++ {
message := request.Messages[i]
messages = append(messages, AliMessage{
Content: message.StringContent(),
Role: strings.ToLower(message.Role),
})
if request.Model != "qwen-vl-plus" {
messages = append(messages, AliMessage{
Content: message.StringContent(),
Role: strings.ToLower(message.Role),
})
} else {
openaiContent := message.ParseContent()
var parts []AliMessagePart
for _, part := range openaiContent {
if part.Type == types.ContentTypeText {
parts = append(parts, AliMessagePart{
Text: part.Text,
})
} else if part.Type == types.ContentTypeImageURL {
parts = append(parts, AliMessagePart{
Image: part.ImageURL.URL,
})
}
}
messages = append(messages, AliMessage{
Content: parts,
Role: strings.ToLower(message.Role),
})
}
}
enableSearch := false
aliModel := request.Model
if strings.HasSuffix(aliModel, AliEnableSearchModelSuffix) {
enableSearch = true
aliModel = strings.TrimSuffix(aliModel, AliEnableSearchModelSuffix)
}
return &AliChatRequest{
Model: request.Model,
Model: aliModel,
Input: AliInput{
Messages: messages,
},
Parameters: AliParameters{
ResultFormat: "message",
EnableSearch: enableSearch,
IncrementalOutput: request.Stream,
},
}
}
@@ -72,6 +100,7 @@ func (p *AliProvider) getChatRequestBody(request *types.ChatCompletionRequest) *
func (p *AliProvider) ChatAction(request *types.ChatCompletionRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
requestBody := p.getChatRequestBody(request)
fullRequestURL := p.GetFullRequestURL(p.ChatCompletions, request.Model)
headers := p.GetRequestHeaders()
if request.Stream {
@@ -86,7 +115,7 @@ func (p *AliProvider) ChatAction(request *types.ChatCompletionRequest, isModelMa
}
if request.Stream {
usage, errWithCode = p.sendStreamRequest(req)
usage, errWithCode = p.sendStreamRequest(req, request.Model)
if errWithCode != nil {
return
}
@@ -100,7 +129,9 @@ func (p *AliProvider) ChatAction(request *types.ChatCompletionRequest, isModelMa
}
} else {
aliResponse := &AliChatResponse{}
aliResponse := &AliChatResponse{
Model: request.Model,
}
errWithCode = p.SendRequest(req, aliResponse, false)
if errWithCode != nil {
return
@@ -117,10 +148,15 @@ func (p *AliProvider) ChatAction(request *types.ChatCompletionRequest, isModelMa
// 阿里云响应转OpenAI响应
func (p *AliProvider) streamResponseAli2OpenAI(aliResponse *AliChatResponse) *types.ChatCompletionStreamResponse {
// chatChoice := aliResponse.Output.ToChatCompletionChoices()
// jsonBody, _ := json.MarshalIndent(chatChoice, "", " ")
// fmt.Println("requestBody:", string(jsonBody))
var choice types.ChatCompletionStreamChoice
choice.Delta.Content = aliResponse.Output.Text
if aliResponse.Output.FinishReason != "null" {
finishReason := aliResponse.Output.FinishReason
choice.Index = aliResponse.Output.Choices[0].Index
choice.Delta.Content = aliResponse.Output.Choices[0].Message.StringContent()
// fmt.Println("choice.Delta.Content:", chatChoice[0].Message)
if aliResponse.Output.Choices[0].FinishReason != "null" {
finishReason := aliResponse.Output.Choices[0].FinishReason
choice.FinishReason = &finishReason
}
@@ -128,22 +164,24 @@ func (p *AliProvider) streamResponseAli2OpenAI(aliResponse *AliChatResponse) *ty
ID: aliResponse.RequestId,
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Model: "ernie-bot",
Model: aliResponse.Model,
Choices: []types.ChatCompletionStreamChoice{choice},
}
return &response
}
// 发送流请求
func (p *AliProvider) sendStreamRequest(req *http.Request) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
func (p *AliProvider) sendStreamRequest(req *http.Request, model string) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
defer req.Body.Close()
usage = &types.Usage{}
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return nil, common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError)
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return nil, common.HandleErrorResp(resp)
@@ -182,6 +220,7 @@ func (p *AliProvider) sendStreamRequest(req *http.Request) (usage *types.Usage,
}()
common.SetEventStreamHeaders(p.Context)
lastResponseText := ""
index := 0
p.Context.Stream(func(w io.Writer) bool {
select {
case data := <-dataChan:
@@ -196,9 +235,12 @@ func (p *AliProvider) sendStreamRequest(req *http.Request) (usage *types.Usage,
usage.CompletionTokens = aliResponse.Usage.OutputTokens
usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
}
aliResponse.Model = model
aliResponse.Output.Choices[0].Index = index
index++
response := p.streamResponseAli2OpenAI(&aliResponse)
response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
lastResponseText = aliResponse.Output.Text
lastResponseText = aliResponse.Output.Choices[0].Message.StringContent()
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())

View File

@@ -1,5 +1,9 @@
package ali
import (
"one-api/types"
)
type AliError struct {
Code string `json:"code"`
Message string `json:"message"`
@@ -13,20 +17,27 @@ type AliUsage struct {
}
type AliMessage struct {
Content string `json:"content"`
Content any `json:"content"`
Role string `json:"role"`
}
type AliMessagePart struct {
Text string `json:"text,omitempty"`
Image string `json:"image,omitempty"`
}
type AliInput struct {
// Prompt string `json:"prompt"`
Messages []AliMessage `json:"messages"`
}
type AliParameters struct {
TopP float64 `json:"top_p,omitempty"`
TopK int `json:"top_k,omitempty"`
Seed uint64 `json:"seed,omitempty"`
EnableSearch bool `json:"enable_search,omitempty"`
TopP float64 `json:"top_p,omitempty"`
TopK int `json:"top_k,omitempty"`
Seed uint64 `json:"seed,omitempty"`
EnableSearch bool `json:"enable_search,omitempty"`
IncrementalOutput bool `json:"incremental_output,omitempty"`
ResultFormat string `json:"result_format,omitempty"`
}
type AliChatRequest struct {
@@ -35,14 +46,31 @@ type AliChatRequest struct {
Parameters AliParameters `json:"parameters,omitempty"`
}
type AliChoice struct {
FinishReason string `json:"finish_reason"`
Message types.ChatCompletionMessage `json:"message"`
}
type AliOutput struct {
Text string `json:"text"`
FinishReason string `json:"finish_reason"`
Choices []types.ChatCompletionChoice `json:"choices"`
}
func (o *AliOutput) ToChatCompletionChoices() []types.ChatCompletionChoice {
for i := range o.Choices {
_, ok := o.Choices[i].Message.Content.(string)
if ok {
continue
}
o.Choices[i].Message.Content = o.Choices[i].Message.ParseContent()
}
return o.Choices
}
type AliChatResponse struct {
Output AliOutput `json:"output"`
Usage AliUsage `json:"usage"`
Model string `json:"model,omitempty"`
AliError
}

View File

@@ -19,7 +19,7 @@ func (p *Api2dProvider) Balance(channel *model.Channel) (float64, error) {
// 发送请求
var response base.BalanceResponse
_, errWithCode := common.SendRequest(req, &response, false)
_, errWithCode := common.SendRequest(req, &response, false, p.Channel.Proxy)
if errWithCode != nil {
return 0, errors.New(errWithCode.OpenAIError.Message)
}

View File

@@ -19,7 +19,7 @@ func (p *Api2gptProvider) Balance(channel *model.Channel) (float64, error) {
// 发送请求
var response base.BalanceResponse
_, errWithCode := common.SendRequest(req, &response, false)
_, errWithCode := common.SendRequest(req, &response, false, p.Channel.Proxy)
if errWithCode != nil {
return 0, errors.New(errWithCode.OpenAIError.Message)
}

View File

@@ -38,7 +38,7 @@ func (c *ImageAzureResponse) ResponseHandler(resp *http.Response) (OpenAIRespons
for i := 0; i < 3; i++ {
// 休眠 2 秒
time.Sleep(2 * time.Second)
_, errWithCode = common.SendRequest(req, &getImageAzureResponse, false)
_, errWithCode = common.SendRequest(req, &getImageAzureResponse, false, c.Proxy)
fmt.Println("getImageAzureResponse", getImageAzureResponse)
if errWithCode != nil {
return
@@ -81,6 +81,7 @@ func (p *AzureProvider) ImageGenerationsAction(request *types.ImageRequest, isMo
if request.Model == "dall-e-2" {
imageAzureResponse := &ImageAzureResponse{
Header: headers,
Proxy: p.Channel.Proxy,
}
errWithCode = p.SendRequest(req, imageAzureResponse, false)
} else {

View File

@@ -10,6 +10,7 @@ type ImageAzureResponse struct {
Status string `json:"status,omitempty"`
Error ImageAzureError `json:"error,omitempty"`
Header map[string]string `json:"header,omitempty"`
Proxy string `json:"proxy,omitempty"`
}
type ImageAzureError struct {

View File

@@ -27,7 +27,7 @@ type AzureSpeechProvider struct {
// 获取请求头
func (p *AzureSpeechProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
headers["Ocp-Apim-Subscription-Key"] = p.Context.GetString("api_key")
headers["Ocp-Apim-Subscription-Key"] = p.Channel.Key
headers["Content-Type"] = "application/ssml+xml"
headers["User-Agent"] = "OneAPI"
// headers["X-Microsoft-OutputFormat"] = "audio-16khz-128kbitrate-mono-mp3"

View File

@@ -63,7 +63,7 @@ func (p *BaiduProvider) GetRequestHeaders() (headers map[string]string) {
}
func (p *BaiduProvider) getBaiduAccessToken() (string, error) {
apiKey := p.Context.GetString("api_key")
apiKey := p.Channel.Key
if val, ok := baiduTokenStore.Load(apiKey); ok {
var accessToken BaiduAccessToken
if accessToken, ok = val.(BaiduAccessToken); ok {
@@ -105,10 +105,12 @@ func (p *BaiduProvider) getBaiduAccessTokenHelper(apiKey string) (*BaiduAccessTo
return nil, err
}
resp, err := common.HttpClient.Do(req)
httpClient := common.GetHttpClient(p.Channel.Proxy)
resp, err := httpClient.Do(req)
if err != nil {
return nil, err
}
common.PutHttpClient(httpClient)
defer resp.Body.Close()

View File

@@ -88,13 +88,15 @@ func (p *BaiduProvider) ChatAction(request *types.ChatCompletionRequest, isModel
}
if request.Stream {
usage, errWithCode = p.sendStreamRequest(req)
usage, errWithCode = p.sendStreamRequest(req, request.Model)
if errWithCode != nil {
return
}
} else {
baiduChatRequest := &BaiduChatResponse{}
baiduChatRequest := &BaiduChatResponse{
Model: request.Model,
}
errWithCode = p.SendRequest(req, baiduChatRequest, false)
if errWithCode != nil {
return
@@ -117,21 +119,23 @@ func (p *BaiduProvider) streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStrea
ID: baiduResponse.Id,
Object: "chat.completion.chunk",
Created: baiduResponse.Created,
Model: "ernie-bot",
Model: baiduResponse.Model,
Choices: []types.ChatCompletionStreamChoice{choice},
}
return &response
}
func (p *BaiduProvider) sendStreamRequest(req *http.Request) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
func (p *BaiduProvider) sendStreamRequest(req *http.Request, model string) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
defer req.Body.Close()
usage = &types.Usage{}
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return nil, common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError)
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return nil, common.HandleErrorResp(resp)
@@ -180,6 +184,7 @@ func (p *BaiduProvider) sendStreamRequest(req *http.Request) (usage *types.Usage
usage.PromptTokens = baiduResponse.Usage.PromptTokens
usage.CompletionTokens = baiduResponse.Usage.TotalTokens - baiduResponse.Usage.PromptTokens
}
baiduResponse.Model = model
response := p.streamResponseBaidu2OpenAI(&baiduResponse)
jsonResponse, err := json.Marshal(response)
if err != nil {

View File

@@ -32,6 +32,7 @@ type BaiduChatResponse struct {
IsTruncated bool `json:"is_truncated"`
NeedClearHistory bool `json:"need_clear_history"`
Usage *types.Usage `json:"usage"`
Model string `json:"model,omitempty"`
BaiduError
}

View File

@@ -6,6 +6,7 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/model"
"one-api/types"
"strings"
@@ -28,17 +29,22 @@ type BaseProvider struct {
ImagesVariations string
Proxy string
Context *gin.Context
Channel *model.Channel
}
// 获取基础URL
func (p *BaseProvider) GetBaseURL() string {
if p.Context.GetString("base_url") != "" {
return p.Context.GetString("base_url")
if p.Channel.GetBaseURL() != "" {
return p.Channel.GetBaseURL()
}
return p.BaseURL
}
func (p *BaseProvider) SetChannel(channel *model.Channel) {
p.Channel = channel
}
// 获取完整请求URL
func (p *BaseProvider) GetFullRequestURL(requestURL string, modelName string) string {
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")
@@ -59,7 +65,7 @@ func (p *BaseProvider) CommonRequestHeaders(headers map[string]string) {
func (p *BaseProvider) SendRequest(req *http.Request, response ProviderResponseHandler, rawOutput bool) (openAIErrorWithStatusCode *types.OpenAIErrorWithStatusCode) {
defer req.Body.Close()
resp, openAIErrorWithStatusCode := common.SendRequest(req, response, true)
resp, openAIErrorWithStatusCode := common.SendRequest(req, response, true, p.Channel.Proxy)
if openAIErrorWithStatusCode != nil {
return
}
@@ -102,10 +108,12 @@ func (p *BaseProvider) SendRequestRaw(req *http.Request) (openAIErrorWithStatusC
defer req.Body.Close()
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError)
}
common.PutHttpClient(client)
defer resp.Body.Close()

View File

@@ -12,6 +12,7 @@ type ProviderInterface interface {
GetFullRequestURL(requestURL string, modelName string) string
GetRequestHeaders() (headers map[string]string)
SupportAPI(relayMode int) bool
SetChannel(channel *model.Channel)
}
// 完成接口

View File

@@ -28,7 +28,7 @@ func (p *ClaudeProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
p.CommonRequestHeaders(headers)
headers["x-api-key"] = p.Context.GetString("api_key")
headers["x-api-key"] = p.Channel.Key
anthropicVersion := p.Context.Request.Header.Get("anthropic-version")
if anthropicVersion == "" {
anthropicVersion = "2023-06-01"

View File

@@ -38,6 +38,7 @@ func (claudeResponse *ClaudeResponse) ResponseHandler(resp *http.Response) (Open
Object: "chat.completion",
Created: common.GetTimestamp(),
Choices: []types.ChatCompletionChoice{choice},
Model: claudeResponse.Model,
}
completionTokens := common.CountTokenText(claudeResponse.Completion, claudeResponse.Model)
@@ -141,10 +142,12 @@ func (p *ClaudeProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErro
defer req.Body.Close()
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError), ""
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return common.HandleErrorResp(resp), ""

View File

@@ -18,7 +18,7 @@ func (p *CloseaiProxyProvider) Balance(channel *model.Channel) (float64, error)
// 发送请求
var response OpenAICreditGrants
_, errWithCode := common.SendRequest(req, &response, false)
_, errWithCode := common.SendRequest(req, &response, false, p.Channel.Proxy)
if errWithCode != nil {
return 0, errors.New(errWithCode.OpenAIError.Message)
}

View File

@@ -28,11 +28,11 @@ type GeminiProvider struct {
func (p *GeminiProvider) GetFullRequestURL(requestURL string, modelName string) string {
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")
version := "v1"
if p.Context.GetString("api_version") != "" {
version = p.Context.GetString("api_version")
if p.Channel.Other != "" {
version = p.Channel.Other
}
return fmt.Sprintf("%s/%s/models/%s:%s?key=%s", baseURL, version, modelName, requestURL, p.Context.GetString("api_key"))
return fmt.Sprintf("%s/%s/models/%s:%s", baseURL, version, modelName, requestURL)
}
@@ -40,6 +40,7 @@ func (p *GeminiProvider) GetFullRequestURL(requestURL string, modelName string)
func (p *GeminiProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
p.CommonRequestHeaders(headers)
headers["x-goog-api-key"] = p.Channel.Key
return headers
}

View File

@@ -7,11 +7,16 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/image"
"one-api/providers/base"
"one-api/types"
"strings"
)
const (
GeminiVisionMaxImageNum = 16
)
func (response *GeminiChatResponse) ResponseHandler(resp *http.Response) (OpenAIResponse any, errWithCode *types.OpenAIErrorWithStatusCode) {
if len(response.Candidates) == 0 {
return nil, &types.OpenAIErrorWithStatusCode{
@@ -29,6 +34,7 @@ func (response *GeminiChatResponse) ResponseHandler(resp *http.Response) (OpenAI
ID: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Object: "chat.completion",
Created: common.GetTimestamp(),
Model: response.Model,
Choices: make([]types.ChatCompletionChoice, 0, len(response.Candidates)),
}
for i, candidate := range response.Candidates {
@@ -46,7 +52,7 @@ func (response *GeminiChatResponse) ResponseHandler(resp *http.Response) (OpenAI
fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
}
completionTokens := common.CountTokenText(response.GetResponseText(), "gemini-pro")
completionTokens := common.CountTokenText(response.GetResponseText(), response.Model)
response.Usage.CompletionTokens = completionTokens
response.Usage.TotalTokens = response.Usage.PromptTokens + completionTokens
@@ -54,27 +60,27 @@ func (response *GeminiChatResponse) ResponseHandler(resp *http.Response) (OpenAI
}
// Setting safety to the lowest possible values since Gemini is already powerless enough
func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest) (requestBody *GeminiChatRequest) {
func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest) (requestBody *GeminiChatRequest, errWithCode *types.OpenAIErrorWithStatusCode) {
geminiRequest := GeminiChatRequest{
Contents: make([]GeminiChatContent, 0, len(request.Messages)),
//SafetySettings: []GeminiChatSafetySettings{
// {
// Category: "HARM_CATEGORY_HARASSMENT",
// Threshold: "BLOCK_ONLY_HIGH",
// },
// {
// Category: "HARM_CATEGORY_HATE_SPEECH",
// Threshold: "BLOCK_ONLY_HIGH",
// },
// {
// Category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
// Threshold: "BLOCK_ONLY_HIGH",
// },
// {
// Category: "HARM_CATEGORY_DANGEROUS_CONTENT",
// Threshold: "BLOCK_ONLY_HIGH",
// },
//},
SafetySettings: []GeminiChatSafetySettings{
{
Category: "HARM_CATEGORY_HARASSMENT",
Threshold: "BLOCK_NONE",
},
{
Category: "HARM_CATEGORY_HATE_SPEECH",
Threshold: "BLOCK_NONE",
},
{
Category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
Threshold: "BLOCK_NONE",
},
{
Category: "HARM_CATEGORY_DANGEROUS_CONTENT",
Threshold: "BLOCK_NONE",
},
},
GenerationConfig: GeminiChatGenerationConfig{
Temperature: request.Temperature,
TopP: request.TopP,
@@ -98,6 +104,34 @@ func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest
},
},
}
openaiContent := message.ParseContent()
var parts []GeminiPart
imageNum := 0
for _, part := range openaiContent {
if part.Type == types.ContentTypeText {
parts = append(parts, GeminiPart{
Text: part.Text,
})
} else if part.Type == types.ContentTypeImageURL {
imageNum += 1
if imageNum > GeminiVisionMaxImageNum {
continue
}
mimeType, data, err := image.GetImageFromUrl(part.ImageURL.URL)
if err != nil {
return nil, common.ErrorWrapper(err, "image_url_invalid", http.StatusBadRequest)
}
parts = append(parts, GeminiPart{
InlineData: &GeminiInlineData{
MimeType: mimeType,
Data: data,
},
})
}
}
content.Parts = parts
// there's no assistant role in gemini and API shall vomit if Role is not user or model
if content.Role == "assistant" {
content.Role = "model"
@@ -123,11 +157,14 @@ func (p *GeminiProvider) getChatRequestBody(request *types.ChatCompletionRequest
}
}
return &geminiRequest
return &geminiRequest, nil
}
func (p *GeminiProvider) ChatAction(request *types.ChatCompletionRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
requestBody := p.getChatRequestBody(request)
requestBody, errWithCode := p.getChatRequestBody(request)
if errWithCode != nil {
return
}
fullRequestURL := p.GetFullRequestURL("generateContent", request.Model)
headers := p.GetRequestHeaders()
if request.Stream {
@@ -142,7 +179,7 @@ func (p *GeminiProvider) ChatAction(request *types.ChatCompletionRequest, isMode
if request.Stream {
var responseText string
errWithCode, responseText = p.sendStreamRequest(req)
errWithCode, responseText = p.sendStreamRequest(req, request.Model)
if errWithCode != nil {
return
}
@@ -155,6 +192,7 @@ func (p *GeminiProvider) ChatAction(request *types.ChatCompletionRequest, isMode
} else {
var geminiResponse = &GeminiChatResponse{
Model: request.Model,
Usage: &types.Usage{
PromptTokens: promptTokens,
},
@@ -170,25 +208,27 @@ func (p *GeminiProvider) ChatAction(request *types.ChatCompletionRequest, isMode
}
func (p *GeminiProvider) streamResponseClaude2OpenAI(geminiResponse *GeminiChatResponse) *types.ChatCompletionStreamResponse {
var choice types.ChatCompletionStreamChoice
choice.Delta.Content = geminiResponse.GetResponseText()
choice.FinishReason = &base.StopFinishReason
var response types.ChatCompletionStreamResponse
response.Object = "chat.completion.chunk"
response.Model = "gemini"
response.Choices = []types.ChatCompletionStreamChoice{choice}
return &response
}
// func (p *GeminiProvider) streamResponseClaude2OpenAI(geminiResponse *GeminiChatResponse) *types.ChatCompletionStreamResponse {
// var choice types.ChatCompletionStreamChoice
// choice.Delta.Content = geminiResponse.GetResponseText()
// choice.FinishReason = &base.StopFinishReason
// var response types.ChatCompletionStreamResponse
// response.Object = "chat.completion.chunk"
// response.Model = "gemini"
// response.Choices = []types.ChatCompletionStreamChoice{choice}
// return &response
// }
func (p *GeminiProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErrorWithStatusCode, string) {
func (p *GeminiProvider) sendStreamRequest(req *http.Request, model string) (*types.OpenAIErrorWithStatusCode, string) {
defer req.Body.Close()
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError), ""
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return common.HandleErrorResp(resp), ""
@@ -235,7 +275,7 @@ func (p *GeminiProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErro
Content string `json:"content"`
}
var dummy dummyStruct
err := json.Unmarshal([]byte(data), &dummy)
json.Unmarshal([]byte(data), &dummy)
responseText += dummy.Content
var choice types.ChatCompletionStreamChoice
choice.Delta.Content = dummy.Content
@@ -243,7 +283,7 @@ func (p *GeminiProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErro
ID: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Model: "gemini-pro",
Model: model,
Choices: []types.ChatCompletionStreamChoice{choice},
}
jsonResponse, err := json.Marshal(response)

View File

@@ -46,6 +46,7 @@ type GeminiChatResponse struct {
Candidates []GeminiChatCandidate `json:"candidates"`
PromptFeedback GeminiChatPromptFeedback `json:"promptFeedback"`
Usage *types.Usage `json:"usage,omitempty"`
Model string `json:"model,omitempty"`
}
type GeminiChatCandidate struct {

View File

@@ -20,7 +20,7 @@ func (p *OpenAIProvider) Balance(channel *model.Channel) (float64, error) {
// 发送请求
var subscription OpenAISubscriptionResponse
_, errWithCode := common.SendRequest(req, &subscription, false)
_, errWithCode := common.SendRequest(req, &subscription, false, p.Channel.Proxy)
if errWithCode != nil {
return 0, errors.New(errWithCode.OpenAIError.Message)
}
@@ -38,7 +38,7 @@ func (p *OpenAIProvider) Balance(channel *model.Channel) (float64, error) {
return 0, err
}
usage := OpenAIUsageResponse{}
_, errWithCode = common.SendRequest(req, &usage, false)
_, errWithCode = common.SendRequest(req, &usage, false, p.Channel.Proxy)
balance := subscription.HardLimitUSD - usage.TotalUsage/100
channel.UpdateBalance(balance)

View File

@@ -59,7 +59,7 @@ func (p *OpenAIProvider) GetFullRequestURL(requestURL string, modelName string)
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")
if p.IsAzure {
apiVersion := p.Context.GetString("api_version")
apiVersion := p.Channel.Other
if modelName == "dall-e-2" {
// 因为dall-e-3需要api-version=2023-12-01-preview但是该版本
// 已经没有dall-e-2了所以暂时写死
@@ -85,9 +85,9 @@ func (p *OpenAIProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
p.CommonRequestHeaders(headers)
if p.IsAzure {
headers["api-key"] = p.Context.GetString("api_key")
headers["api-key"] = p.Channel.Key
} else {
headers["Authorization"] = fmt.Sprintf("Bearer %s", p.Context.GetString("api_key"))
headers["Authorization"] = fmt.Sprintf("Bearer %s", p.Channel.Key)
}
return headers
@@ -111,10 +111,12 @@ func (p *OpenAIProvider) GetRequestBody(request any, isModelMapped bool) (reques
func (p *OpenAIProvider) sendStreamRequest(req *http.Request, response OpenAIProviderStreamResponseHandler) (openAIErrorWithStatusCode *types.OpenAIErrorWithStatusCode, responseText string) {
defer req.Body.Close()
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError), ""
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return common.HandleErrorResp(resp), ""

View File

@@ -21,7 +21,7 @@ func (p *OpenaiSBProvider) Balance(channel *model.Channel) (float64, error) {
// 发送请求
var response OpenAISBUsageResponse
_, errWithCode := common.SendRequest(req, &response, false)
_, errWithCode := common.SendRequest(req, &response, false, p.Channel.Proxy)
if err != nil {
return 0, errors.New(errWithCode.OpenAIError.Message)
}

View File

@@ -29,6 +29,7 @@ type PalmProvider struct {
func (p *PalmProvider) GetRequestHeaders() (headers map[string]string) {
headers = make(map[string]string)
p.CommonRequestHeaders(headers)
headers["x-goog-api-key"] = p.Channel.Key
return headers
}
@@ -37,5 +38,5 @@ func (p *PalmProvider) GetRequestHeaders() (headers map[string]string) {
func (p *PalmProvider) GetFullRequestURL(requestURL string, modelName string) string {
baseURL := strings.TrimSuffix(p.GetBaseURL(), "/")
return fmt.Sprintf("%s%s?key=%s", baseURL, requestURL, p.Context.GetString("api_key"))
return fmt.Sprintf("%s%s", baseURL, requestURL)
}

View File

@@ -43,6 +43,7 @@ func (palmResponse *PaLMChatResponse) ResponseHandler(resp *http.Response) (Open
palmResponse.Usage.TotalTokens = palmResponse.Usage.PromptTokens + completionTokens
fullTextResponse.Usage = palmResponse.Usage
fullTextResponse.Model = palmResponse.Model
return fullTextResponse, nil
}
@@ -133,10 +134,12 @@ func (p *PalmProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErrorW
defer req.Body.Close()
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError), ""
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return common.HandleErrorResp(resp), ""

View File

@@ -2,6 +2,7 @@ package providers
import (
"one-api/common"
"one-api/model"
"one-api/providers/aigc2d"
"one-api/providers/aiproxy"
"one-api/providers/ali"
@@ -55,19 +56,23 @@ func init() {
}
// 获取供应商
func GetProvider(channelType int, c *gin.Context) base.ProviderInterface {
factory, ok := providerFactories[channelType]
func GetProvider(channel *model.Channel, c *gin.Context) base.ProviderInterface {
factory, ok := providerFactories[channel.Type]
var provider base.ProviderInterface
if !ok {
// 处理未找到的供应商工厂
baseURL := common.ChannelBaseURLs[channelType]
if c.GetString("base_url") != "" {
baseURL = c.GetString("base_url")
baseURL := common.ChannelBaseURLs[channel.Type]
if channel.GetBaseURL() != "" {
baseURL = channel.GetBaseURL()
}
if baseURL != "" {
return openai.CreateOpenAIProvider(c, baseURL)
if baseURL == "" {
return nil
}
return nil
provider = openai.CreateOpenAIProvider(c, baseURL)
}
return factory.Create(c)
provider = factory.Create(c)
provider.SetChannel(channel)
return provider
}

View File

@@ -52,7 +52,7 @@ func (p *TencentProvider) parseTencentConfig(config string) (appId int64, secret
}
func (p *TencentProvider) getTencentSign(req TencentChatRequest) string {
apiKey := p.Context.GetString("api_key")
apiKey := p.Channel.Key
appId, secretId, secretKey, err := p.parseTencentConfig(apiKey)
if err != nil {
return ""

View File

@@ -27,6 +27,7 @@ func (TencentResponse *TencentChatResponse) ResponseHandler(resp *http.Response)
Object: "chat.completion",
Created: common.GetTimestamp(),
Usage: TencentResponse.Usage,
Model: TencentResponse.Model,
}
if len(TencentResponse.Choices) > 0 {
choice := types.ChatCompletionChoice{
@@ -100,7 +101,7 @@ func (p *TencentProvider) ChatAction(request *types.ChatCompletionRequest, isMod
if request.Stream {
var responseText string
errWithCode, responseText = p.sendStreamRequest(req)
errWithCode, responseText = p.sendStreamRequest(req, request.Model)
if errWithCode != nil {
return
}
@@ -112,7 +113,9 @@ func (p *TencentProvider) ChatAction(request *types.ChatCompletionRequest, isMod
usage.TotalTokens = promptTokens + usage.CompletionTokens
} else {
tencentResponse := &TencentChatResponse{}
tencentResponse := &TencentChatResponse{
Model: request.Model,
}
errWithCode = p.SendRequest(req, tencentResponse, false)
if errWithCode != nil {
return
@@ -128,7 +131,7 @@ func (p *TencentProvider) streamResponseTencent2OpenAI(TencentResponse *TencentC
response := types.ChatCompletionStreamResponse{
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Model: "tencent-hunyuan",
Model: TencentResponse.Model,
}
if len(TencentResponse.Choices) > 0 {
var choice types.ChatCompletionStreamChoice
@@ -141,13 +144,15 @@ func (p *TencentProvider) streamResponseTencent2OpenAI(TencentResponse *TencentC
return &response
}
func (p *TencentProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErrorWithStatusCode, string) {
func (p *TencentProvider) sendStreamRequest(req *http.Request, model string) (*types.OpenAIErrorWithStatusCode, string) {
defer req.Body.Close()
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError), ""
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return common.HandleErrorResp(resp), ""
@@ -195,6 +200,7 @@ func (p *TencentProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErr
common.SysError("error unmarshalling stream response: " + err.Error())
return true
}
TencentResponse.Model = model
response := p.streamResponseTencent2OpenAI(&TencentResponse)
if len(response.Choices) != 0 {
responseText += response.Choices[0].Delta.Content

View File

@@ -58,4 +58,5 @@ type TencentChatResponse struct {
Error TencentError `json:"error,omitempty"` // 错误信息 注意:此字段可能返回 null表示取不到有效值
Note string `json:"note,omitempty"` // 注释
ReqID string `json:"req_id,omitempty"` // 唯一请求 Id每次请求都会返回。用于反馈接口入参
Model string `json:"model,omitempty"` // 模型名称
}

View File

@@ -42,7 +42,7 @@ func (p *XunfeiProvider) GetRequestHeaders() (headers map[string]string) {
// 获取完整请求 URL
func (p *XunfeiProvider) GetFullRequestURL(requestURL string, modelName string) string {
splits := strings.Split(p.Context.GetString("api_key"), "|")
splits := strings.Split(p.Channel.Key, "|")
if len(splits) != 3 {
return ""
}
@@ -58,7 +58,7 @@ func (p *XunfeiProvider) getXunfeiAuthUrl(apiKey string, apiSecret string) (stri
query := p.Context.Request.URL.Query()
apiVersion := query.Get("api-version")
if apiVersion == "" {
apiVersion = p.Context.GetString("api_version")
apiVersion = p.Channel.Other
}
if apiVersion == "" {
apiVersion = "v1.1"

View File

@@ -49,7 +49,7 @@ func (p *ZhipuProvider) GetFullRequestURL(requestURL string, modelName string) s
}
func (p *ZhipuProvider) getZhipuToken() string {
apikey := p.Context.GetString("api_key")
apikey := p.Channel.Key
data, ok := zhipuTokens.Load(apikey)
if ok {
tokenData := data.(zhipuTokenData)

View File

@@ -28,6 +28,7 @@ func (zhipuResponse *ZhipuResponse) ResponseHandler(resp *http.Response) (OpenAI
ID: zhipuResponse.Data.TaskId,
Object: "chat.completion",
Created: common.GetTimestamp(),
Model: zhipuResponse.Model,
Choices: make([]types.ChatCompletionChoice, 0, len(zhipuResponse.Data.Choices)),
Usage: &zhipuResponse.Data.Usage,
}
@@ -94,13 +95,15 @@ func (p *ZhipuProvider) ChatAction(request *types.ChatCompletionRequest, isModel
}
if request.Stream {
errWithCode, usage = p.sendStreamRequest(req)
errWithCode, usage = p.sendStreamRequest(req, request.Model)
if errWithCode != nil {
return
}
} else {
zhipuResponse := &ZhipuResponse{}
zhipuResponse := &ZhipuResponse{
Model: request.Model,
}
errWithCode = p.SendRequest(req, zhipuResponse, false)
if errWithCode != nil {
return
@@ -132,20 +135,22 @@ func (p *ZhipuProvider) streamMetaResponseZhipu2OpenAI(zhipuResponse *ZhipuStrea
ID: zhipuResponse.RequestId,
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Model: "chatglm",
Model: zhipuResponse.Model,
Choices: []types.ChatCompletionStreamChoice{choice},
}
return &response, &zhipuResponse.Usage
}
func (p *ZhipuProvider) sendStreamRequest(req *http.Request) (*types.OpenAIErrorWithStatusCode, *types.Usage) {
func (p *ZhipuProvider) sendStreamRequest(req *http.Request, model string) (*types.OpenAIErrorWithStatusCode, *types.Usage) {
defer req.Body.Close()
// 发送请求
resp, err := common.HttpClient.Do(req)
client := common.GetHttpClient(p.Channel.Proxy)
resp, err := client.Do(req)
if err != nil {
return common.ErrorWrapper(err, "http_request_failed", http.StatusInternalServerError), nil
}
common.PutHttpClient(client)
if common.IsFailureStatusCode(resp) {
return common.HandleErrorResp(resp), nil
@@ -159,7 +164,7 @@ func (p *ZhipuProvider) sendStreamRequest(req *http.Request) (*types.OpenAIError
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := strings.Index(string(data), "\n\n"); i >= 0 && strings.Index(string(data), ":") >= 0 {
if i := strings.Index(string(data), "\n\n"); i >= 0 && strings.Contains(string(data), ":") {
return i + 2, data[0:i], nil
}
if atEOF {
@@ -195,6 +200,7 @@ func (p *ZhipuProvider) sendStreamRequest(req *http.Request) (*types.OpenAIError
select {
case data := <-dataChan:
response := p.streamResponseZhipu2OpenAI(data)
response.Model = model
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
@@ -209,6 +215,7 @@ func (p *ZhipuProvider) sendStreamRequest(req *http.Request) (*types.OpenAIError
common.SysError("error unmarshalling stream response: " + err.Error())
return true
}
zhipuResponse.Model = model
response, zhipuUsage := p.streamMetaResponseZhipu2OpenAI(&zhipuResponse)
jsonResponse, err := json.Marshal(response)
if err != nil {

View File

@@ -31,6 +31,7 @@ type ZhipuResponse struct {
Msg string `json:"msg"`
Success bool `json:"success"`
Data ZhipuResponseData `json:"data"`
Model string `json:"model,omitempty"`
}
type ZhipuStreamMetaResponse struct {
@@ -38,6 +39,7 @@ type ZhipuStreamMetaResponse struct {
TaskId string `json:"task_id"`
TaskStatus string `json:"task_status"`
types.Usage `json:"usage"`
Model string `json:"model,omitempty"`
}
type zhipuTokenData struct {

View File

@@ -1,3 +1,9 @@
[//]: # (请按照以下格式关联 issue)
[//]: # (请在提交 PR 前确认所提交的功能可用,附上截图即可,这将有助于项目维护者 review & merge 该 PR谢谢)
[//]: # (项目维护者一般仅在周末处理 PR因此如若未能及时回复希望能理解)
[//]: # (开发者交流群910657413)
[//]: # (请在提交 PR 之前删除上面的注释)
close #issue_number
我已确认该 PR 已自测通过,相关截图如下:

View File

@@ -67,7 +67,7 @@ func SetApiRouter(router *gin.Engine) {
{
channelRoute.GET("/", controller.GetAllChannels)
channelRoute.GET("/search", controller.SearchChannels)
channelRoute.GET("/models", controller.ListModels)
channelRoute.GET("/models", controller.ListModelsForAdmin)
channelRoute.GET("/:id", controller.GetChannel)
channelRoute.GET("/test", controller.TestAllChannels)
channelRoute.GET("/test/:id", controller.TestChannel)

View File

@@ -11,7 +11,7 @@ func SetRelayRouter(router *gin.Engine) {
router.Use(middleware.CORS())
// https://platform.openai.com/docs/api-reference/introduction
modelsRouter := router.Group("/v1/models")
modelsRouter.Use(middleware.TokenAuth())
modelsRouter.Use(middleware.TokenAuth(), middleware.Distribute())
{
modelsRouter.GET("", controller.ListModels)
modelsRouter.GET("/:model", controller.RetrieveModel)

View File

@@ -1,5 +1,10 @@
package types
const (
ContentTypeText = "text"
ContentTypeImageURL = "image_url"
)
type ChatCompletionMessage struct {
Role string `json:"role"`
Content any `json:"content"`
@@ -22,17 +27,61 @@ func (m ChatCompletionMessage) StringContent() string {
if !ok {
continue
}
if contentMap["type"] == "text" {
if subStr, ok := contentMap["text"].(string); ok {
contentStr += subStr
}
if subStr, ok := contentMap["text"].(string); ok && subStr != "" {
contentStr += subStr
}
}
return contentStr
}
return ""
}
// ParseContent normalizes m.Content into a list of ChatMessagePart values.
//
// A plain string becomes a single text part. A []any (the decoded JSON form
// of the OpenAI multi-part content array) is mapped item by item: a non-empty
// "text" field becomes a text part, an "image_url" object or a bare "image"
// string becomes an image part. Items of any other shape are skipped.
// Returns nil when Content is neither a string nor a []any.
func (m ChatCompletionMessage) ParseContent() []ChatMessagePart {
	var contentList []ChatMessagePart
	content, ok := m.Content.(string)
	if ok {
		contentList = append(contentList, ChatMessagePart{
			Type: ContentTypeText,
			Text: content,
		})
		return contentList
	}
	anyList, ok := m.Content.([]any)
	if ok {
		for _, contentItem := range anyList {
			contentMap, ok := contentItem.(map[string]any)
			if !ok {
				continue
			}
			if subStr, ok := contentMap["text"].(string); ok && subStr != "" {
				contentList = append(contentList, ChatMessagePart{
					Type: ContentTypeText,
					Text: subStr,
				})
			} else if subObj, ok := contentMap["image_url"].(map[string]any); ok {
				// Guard the nested assertion: a malformed payload whose
				// "url" field is missing or not a string must not panic.
				if url, ok := subObj["url"].(string); ok {
					contentList = append(contentList, ChatMessagePart{
						Type: ContentTypeImageURL,
						ImageURL: &ChatMessageImageURL{
							URL: url,
						},
					})
				}
			} else if subStr, ok := contentMap["image"].(string); ok {
				contentList = append(contentList, ChatMessagePart{
					Type: ContentTypeImageURL,
					ImageURL: &ChatMessageImageURL{
						URL: subStr,
					},
				})
			}
		}
		return contentList
	}
	return nil
}
type ChatMessageImageURL struct {
URL string `json:"url,omitempty"`
Detail string `json:"detail,omitempty"`

View File

@@ -1,9 +1,9 @@
import { isAdmin } from 'utils/common';
import { useNavigate } from 'react-router-dom';
const navigate = useNavigate();
const useAuth = () => {
const userIsAdmin = isAdmin();
const navigate = useNavigate();
if (!userIsAdmin) {
navigate('/panel/404');

View File

@@ -35,6 +35,7 @@ const validationSchema = Yup.object().shape({
type: Yup.number().required('渠道 不能为空'),
key: Yup.string().when('is_edit', { is: false, then: Yup.string().required('密钥 不能为空') }),
other: Yup.string(),
proxy: Yup.string(),
models: Yup.array().min(1, '模型 不能为空'),
groups: Yup.array().min(1, '用户组 不能为空'),
base_url: Yup.string().when('type', {
@@ -442,6 +443,27 @@ const EditModal = ({ open, channelId, onCancel, onOk }) => {
<FormHelperText id="helper-tex-channel-model_mapping-label"> {inputPrompt.model_mapping} </FormHelperText>
)}
</FormControl>
<FormControl fullWidth error={Boolean(touched.proxy && errors.proxy)} sx={{ ...theme.typography.otherInput }}>
<InputLabel htmlFor="channel-proxy-label">{inputLabel.proxy}</InputLabel>
<OutlinedInput
id="channel-proxy-label"
label={inputLabel.proxy}
type="text"
value={values.proxy}
name="proxy"
onBlur={handleBlur}
onChange={handleChange}
inputProps={{}}
aria-describedby="helper-text-channel-proxy-label"
/>
{touched.proxy && errors.proxy ? (
<FormHelperText error id="helper-tex-channel-proxy-label">
{errors.proxy}
</FormHelperText>
) : (
<FormHelperText id="helper-tex-channel-proxy-label"> {inputPrompt.proxy} </FormHelperText>
)}
</FormControl>
<DialogActions>
<Button onClick={onCancel}>取消</Button>
<Button disableElevation disabled={isSubmitting} type="submit" variant="contained" color="primary">

View File

@@ -0,0 +1,53 @@
import PropTypes from 'prop-types';
import { Tooltip, Stack, Container } from '@mui/material';
import Label from 'ui-component/Label';
import { styled } from '@mui/material/styles';
import { showSuccess } from 'utils/common';
// Scrollable wrapper for the tooltip body: caps the model list at 250px
// and hides the WebKit scrollbar so overflow scrolls without visible chrome.
const TooltipContainer = styled(Container)({
  maxHeight: '250px',
  overflow: 'auto',
  '&::-webkit-scrollbar': {
    width: '0px' // Set the width to 0 to hide the scrollbar
  }
});
// NameLabel renders a channel name wrapped in a tooltip that lists the
// channel's models (comma-separated `models` string), sorted alphabetically.
// Clicking a model label copies its name to the clipboard.
const NameLabel = ({ name, models }) => {
  // Guard against an empty/missing models string so split/sort cannot throw.
  const modelMap = (models || '').split(',').filter(Boolean);
  modelMap.sort();

  return (
    <Tooltip
      title={
        <TooltipContainer>
          <Stack spacing={1}>
            {modelMap.map((item, index) => {
              return (
                <Label
                  variant="ghost"
                  key={index}
                  onClick={() => {
                    navigator.clipboard.writeText(item);
                    showSuccess('复制模型名称成功!');
                  }}
                >
                  {item}
                </Label>
              );
            })}
          </Stack>
        </TooltipContainer>
      }
      placement="top"
    >
      {name}
    </Tooltip>
  );
};

// propTypes previously declared an unused `group` prop; declare the props
// the component actually receives.
NameLabel.propTypes = {
  name: PropTypes.node,
  models: PropTypes.string
};

export default NameLabel;

View File

@@ -29,6 +29,7 @@ import TableSwitch from 'ui-component/Switch';
import ResponseTimeLabel from './ResponseTimeLabel';
import GroupLabel from './GroupLabel';
import NameLabel from './NameLabel';
import { IconDotsVertical, IconEdit, IconTrash, IconPencil } from '@tabler/icons-react';
@@ -102,7 +103,9 @@ export default function ChannelTableRow({ item, manageChannel, handleOpenModal,
<TableRow tabIndex={item.id}>
<TableCell>{item.id}</TableCell>
<TableCell>{item.name}</TableCell>
<TableCell>
<NameLabel name={item.name} models={item.models} />
</TableCell>
<TableCell>
<GroupLabel group={item.group} />

View File

@@ -5,6 +5,7 @@ const defaultConfig = {
key: '',
base_url: '',
other: '',
proxy: '',
model_mapping: '',
models: [],
groups: ['default']
@@ -15,6 +16,7 @@ const defaultConfig = {
base_url: '渠道API地址',
key: '密钥',
other: '其他参数',
proxy: '代理地址',
models: '模型',
model_mapping: '模型映射关系',
groups: '用户组'
@@ -25,6 +27,7 @@ const defaultConfig = {
base_url: '可空请输入中转API地址例如通过cloudflare中转',
key: '请输入渠道对应的鉴权密钥',
other: '',
proxy: '单独设置代理地址支持http和socks5例如http://127.0.0.1:1080',
models: '请选择该渠道所支持的模型',
model_mapping:
'请输入要修改的模型映射关系格式为api请求模型ID:实际转发给渠道的模型ID使用JSON数组表示例如{"gpt-3.5": "gpt-35"}',
@@ -71,7 +74,17 @@ const typeConfig = {
other: '插件参数'
},
input: {
models: ['qwen-turbo', 'qwen-plus', 'qwen-max', 'qwen-max-longcontext', 'text-embedding-v1']
models: [
'qwen-turbo',
'qwen-plus',
'qwen-max',
'qwen-max-longcontext',
'text-embedding-v1',
'qwen-turbo-internet',
'qwen-plus-internet',
'qwen-max-internet',
'qwen-max-longcontext-internet'
]
},
prompt: {
other: '请输入插件参数,即 X-DashScope-Plugin 请求头的取值'

View File

@@ -61,7 +61,7 @@ const TopupCard = () => {
};
useEffect(() => {
let status = localStorage.getItem('status');
let status = localStorage.getItem('siteInfo');
if (status) {
status = JSON.parse(status);
if (status.top_up_link) {