Compare commits

...

83 Commits

Author SHA1 Message Date
1808837298@qq.com
9fe1f35fd1 fix: third-party login for deleted accounts #500 2024-09-25 17:15:59 +08:00
1808837298@qq.com
972ac1ee0f fix: third-party login for deleted accounts #500 2024-09-25 17:13:28 +08:00
1808837298@qq.com
0f95502b04 feat: update token generation algorithm 2024-09-25 16:31:25 +08:00
1808837298@qq.com
b58b1dc0ec feat: update token generation algorithm 2024-09-25 16:31:25 +08:00
1808837298@qq.com
05d9aa61df feat: do not auto-generate the system access token 2024-09-25 16:31:25 +08:00
1808837298@qq.com
221894d972 fix: error user role 2024-09-24 17:49:57 +08:00
1808837298@qq.com
50eab6b4e4 chore: update token group description 2024-09-22 19:43:06 +08:00
1808837298@qq.com
ed972eef06 feat: pricing page support multi groups #487 2024-09-22 17:44:57 +08:00
CalciumIon
c6ff785a83 feat: 无可选分组时关闭令牌分组功能 #485 2024-09-19 03:01:33 +08:00
CalciumIon
2e734e0c37 chore: fix ambiguous token group description 2024-09-19 02:52:25 +08:00
CalciumIon
af33f36c7b feat: update gemini flash completion ratio #479 2024-09-18 20:39:06 +08:00
CalciumIon
3aa86a8cd9 feat: update gemini completion ratio #479 2024-09-18 20:37:22 +08:00
CalciumIon
af7fecbfa7 fix: "/v1/models" returns incorrect models when using token groups #481 2024-09-18 19:19:37 +08:00
CalciumIon
3fbdd502b6 fix: token group #477 2024-09-18 18:55:11 +08:00
CalciumIon
052bc2075b feat: token groups 2024-09-18 05:19:49 +08:00
Calcium-Ion
5f3798053f Create FUNDING.yml 2024-09-18 01:41:31 +08:00
CalciumIon
e31022c676 Update logo 2024-09-18 01:25:00 +08:00
Calcium-Ion
fff7609f06 Merge pull request #439 from guoruqiang/main
Improved the chat page and added an initial token so that users can use chat immediately after registering.
2024-09-17 23:14:19 +08:00
CalciumIon
9032b5cfbf fix: initial token 2024-09-17 23:07:16 +08:00
CalciumIon
131453dac8 Update README.md 2024-09-17 23:01:34 +08:00
CalciumIon
ed948c121a Merge branch 'main' into g-main
# Conflicts:
#	web/src/App.js
2024-09-17 22:50:59 +08:00
CalciumIon
a03cd15505 fix: '/v1/models' #474 2024-09-17 22:41:54 +08:00
CalciumIon
02f5137781 fix: '/v1/models' #474 2024-09-17 22:39:58 +08:00
CalciumIon
e6df0ed20c fix: '/vi/models' #474 2024-09-17 22:36:20 +08:00
CalciumIon
f505afdc10 feat: add token IP whitelist 2024-09-17 20:49:51 +08:00
CalciumIon
feb1d76942 feat: improve UI display 2024-09-17 19:55:18 +08:00
CalciumIon
6263616cd9 Update README.md 2024-09-17 03:18:12 +08:00
GuoRuqiang
6bbf1d4843 Merge branch 'Calcium-Ion:main' into main 2024-09-14 19:00:03 +08:00
1808837298@qq.com
13c993d87e feat: format o1 model max tokens param 2024-09-14 16:11:38 +08:00
CalciumIon
cb73889353 feat: support o1 channel test 2024-09-13 03:17:04 +08:00
CalciumIon
804aad3f37 feat: support o1 channel test 2024-09-13 03:15:32 +08:00
CalciumIon
3af62a3efa feat: support OpenAI o1-preview and o1-mini 2024-09-13 01:22:27 +08:00
CalciumIon
be54369c12 chore: update footer 2024-09-12 18:43:01 +08:00
CalciumIon
0cbf8e07e7 feat: support ollama multi-text embedding 2024-09-12 18:29:45 +08:00
Calcium-Ion
1675679be9 Merge pull request #464 from Yan-Zero/main
fix: tool use in claude and add gemini mapping
2024-09-12 05:04:19 +08:00
Yan
0b5f2a7089 add gemini exp 2024-09-11 19:37:03 +08:00
Yan Tau
b5bb708072 Merge branch 'Calcium-Ion:main' into main 2024-09-11 19:29:50 +08:00
CalciumIon
2650ec9b59 feat: claude response return model name 2024-09-11 19:12:55 +08:00
CalciumIon
d168a685c1 fix: cohere SafetyMode 2024-09-11 19:12:32 +08:00
GuoRuqiang
a0d20896b3 Merge branch 'Calcium-Ion:main' into main 2024-09-08 15:56:54 +08:00
Calcium-Ion
5cab06d1ce Merge pull request #459 from HynoR/main
chore: adapt the cohere safety parameter
2024-09-05 18:37:47 +08:00
CalciumIon
e3b3fdec48 feat: update chatgpt-4o token encoder 2024-09-05 18:35:34 +08:00
CalciumIon
5863aa8061 feat: remove lobe chat link #457 2024-09-05 18:34:04 +08:00
Yan
0ada2371b6 fix: tool use in claude 2024-09-05 00:53:00 +08:00
CalciumIon
8bc1e956cf fix: email 2024-09-04 19:44:29 +08:00
GuoRuqiang
a0673ef2b6 Merge branch 'Calcium-Ion:main' into main 2024-09-02 21:53:54 +08:00
HynoR
416f831a6c Merge remote-tracking branch 'origin/main' 2024-09-02 06:47:58 +07:00
HynoR
0b4317ce28 Update Cohere Safety Setting 2024-09-02 06:47:49 +07:00
Calcium-Ion
12e2481acb Merge pull request #451 from Nana7mi1/main
feat: support more zhipu models
2024-09-02 01:12:10 +08:00
Calcium-Ion
270709064d Merge pull request #455 from HynoR/feat/cohere-update
Feat: add new Cohere models and pricing
2024-09-02 01:11:55 +08:00
CalciumIon
0830ef3305 feat: support jina embedding 2024-09-02 01:11:19 +08:00
HynoR
722cc174b7 Cohere Update 2024-09-01 15:21:05 +07:00
Nanami
97c18d0c7f feat: support more zhipu models 2024-08-31 10:20:22 +08:00
GuoRuqiang
2223aeb022 Merge branch 'Calcium-Ion:main' into main 2024-08-29 19:42:03 +08:00
CalciumIon
4b1e83c42d feat: support siliconflow embedding #447 2024-08-29 00:19:30 +08:00
GuoRuqiang
ecf2f7f212 Merge branch 'Calcium-Ion:main' into main 2024-08-28 21:44:54 +08:00
CalciumIon
01fd8b53a6 feat: check that the vertex channel deployment region is set 2024-08-28 18:47:27 +08:00
CalciumIon
e60f200192 feat: support multiple deployment regions for vertex ai channels 2024-08-28 18:43:40 +08:00
GuoRuqiang
033359e93c Merge branch 'Calcium-Ion:main' into main 2024-08-28 10:44:14 +08:00
CalciumIon
c41820541d Update go.mod 2024-08-27 20:30:46 +08:00
CalciumIon
228f0c5ee5 Update README.md 2024-08-27 20:25:55 +08:00
Calcium-Ion
8a5e074f14 Merge pull request #448 from Calcium-Ion/vertex
feat: support vertex ai
2024-08-27 20:21:01 +08:00
CalciumIon
ac4262c542 feat: support vertex ai #377 2024-08-27 20:19:51 +08:00
GuoRuqiang
1379d7f184 Merge pull request #2 from j471782517/main
Added the GENERATE_DEFAULT_TOKEN environment variable; when set, an initial token is generated. Disabled by default.
2024-08-25 02:53:47 +08:00
Jin Weihan
716bf6f48a Added the GENERATE_DEFAULT_TOKEN environment variable; when set, an initial token is generated. Disabled by default. 2024-08-24 18:44:37 +00:00
GuoRuqiang
2422eb2820 Merge branch 'Calcium-Ion:main' into main 2024-08-25 01:55:23 +08:00
CalciumIon
46e03683ce fix: channel auto ban 2024-08-24 17:27:14 +08:00
CalciumIon
ff0985f06e fix: channel auto ban #443 2024-08-24 17:23:24 +08:00
CalciumIon
a8ac8a25d5 feat: format claude messages when first role is not user 2024-08-24 17:15:55 +08:00
Xyfacai
5b2082ba58 Merge branch 'main' of https://github.com/Calcium-Ion/new-api 2024-08-24 13:36:44 +08:00
Xyfacai
967ccabb56 fix: dall-e-2 request error 2024-08-24 13:36:41 +08:00
CalciumIon
144513f1d8 feat: rerank model mapping (close #444) 2024-08-23 23:21:37 +08:00
Calcium-Ion
e3087e9bea Merge pull request #445 from OswinWu/fix-outlook-ofb
fix: auth for multi-region outlook and ofb mailboxes
2024-08-23 23:16:37 +08:00
OswinWu
484a8595e4 fix: auth for multi-region outlook and ofb mailboxes 2024-08-23 17:16:09 +08:00
GuoRuqiang
c97e2875b4 Generate an initial token automatically on registration. 2024-08-18 15:12:59 +00:00
GuoRuqiang
64794630c8 Change the notification display time. 2024-08-17 16:59:31 +00:00
GuoRuqiang
fc5055c766 update App.js 2024-08-17 16:20:41 +00:00
GuoRuqiang
27eb358497 Reworked the chat page 2024-08-17 16:17:24 +00:00
GuoRuqiang
6810ee0a28 Update Chat
Modified the chat UI so that frontends such as nextChat can automatically receive the first enabled token.
2024-08-17 23:09:45 +08:00
CalciumIon
7c4d9d225e feat: support SiliconFlow (close #437, close #403) 2024-08-16 18:27:26 +08:00
CalciumIon
d0f76a5c61 feat: support gpt-4o-gizmo-* (close #436) 2024-08-16 17:25:03 +08:00
CalciumIon
a5ec11e463 fix: add email missing Message-ID 2024-08-16 16:16:38 +08:00
CalciumIon
b3d8e3e9ae fix: lobechat #430 2024-08-16 14:59:32 +08:00
90 changed files with 2167 additions and 629 deletions

12
.github/FUNDING.yml vendored Normal file
View File

@@ -0,0 +1,12 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ['https://afdian.com/a/new-api'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

View File

@@ -1,6 +1,13 @@
<div align="center">
![new-api](/web/public/logo.png)
# New API
<a href="https://trendshift.io/repositories/8227" target="_blank"><img src="https://trendshift.io/api/badge/repositories/8227" alt="Calcium-Ion%2Fnew-api | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
</div>
> [!NOTE]
> 本项目为开源项目,在[One API](https://github.com/songquanpeng/one-api)的基础上进行二次开发
@@ -54,10 +61,12 @@
8. [Suno API](https://github.com/Suno-API/Suno-API) 接口,[对接文档](Suno.md)
9. Rerank模型目前支持[Cohere](https://cohere.ai/)和[Jina](https://jina.ai/)[对接文档](Rerank.md)
10. Dify
11. Vertex AI,目前兼容Claude、Gemini、Llama3.1
您可以在渠道中添加自定义模型gpt-4-gizmo-*,此模型并非OpenAI官方模型,而是第三方模型,使用官方key无法调用。
## 比原版One API多出的配置
- `GENERATE_DEFAULT_TOKEN`:是否为新注册用户生成初始令牌,默认为 `false`
- `STREAMING_TIMEOUT`:设置流式一次回复的超时时间,默认为 30 秒。
- `DIFY_DEBUG`:设置 Dify 渠道是否输出工作流和节点信息到客户端,默认为 `true`
- `FORCE_STREAM_OPTION`是否覆盖客户端stream_options参数请求上游返回流模式usage默认为 `true`建议开启不影响客户端传入stream_options参数返回结果。
@@ -65,7 +74,7 @@
- `GET_MEDIA_TOKEN_NOT_STREAM`:是否在非流(`stream=false`情况下统计图片token默认为 `true`
- `UPDATE_TASK`是否更新异步任务Midjourney、Suno默认为 `true`,关闭后将不会更新任务进度。
- `GEMINI_MODEL_MAP`Gemini模型指定版本(v1/v1beta),使用“模型:版本”指定,","分隔,例如:-e GEMINI_MODEL_MAP="gemini-1.5-pro-latest:v1beta,gemini-1.5-pro-001:v1beta",为空则使用默认配置
- `COHERE_SAFETY_SETTING`Cohere模型[安全设置](https://docs.cohere.com/docs/safety-modes#overview),可选值为 `NONE`, `CONTEXTUAL``STRICT`,默认为 `NONE`
## 部署
### 部署要求
- 本地数据库默认SQLiteDocker 部署默认使用 SQLite必须挂载 `/data` 目录到宿主机)
@@ -114,24 +123,18 @@ docker run --name new-api -d --restart always -p 3000:3000 -e SQL_DSN="root:1234
## Suno接口设置文档
[对接文档](Suno.md)
## 交流群
<img src="https://github.com/Calcium-Ion/new-api/assets/61247483/de536a8a-0161-47a7-a0a2-66ef6de81266" width="300">
## 界面截图
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/ad0e7aae-0203-471c-9716-2d83768927d4)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/d1ac216e-0804-4105-9fdc-66b35022d861)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/3ca0b282-00ff-4c96-bf9d-e29ef615c605)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/f4f40ed4-8ccb-43d7-a580-90677827646d)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/3ca0b282-00ff-4c96-bf9d-e29ef615c605)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/90d7d763-6a77-4b36-9f76-2bb30f18583d)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/e414228a-3c35-429a-b298-6451d76d9032)
夜间模式
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/1c66b593-bb9e-4757-9720-ff2759539242)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/5b3228e8-2556-44f7-97d6-4f8d8ee6effa)
![image](https://github.com/Calcium-Ion/new-api/assets/61247483/af9a07ee-5101-4b3d-8bd9-ae21a4fd7e9e)
## 交流群
<img src="https://github.com/Calcium-Ion/new-api/assets/61247483/de536a8a-0161-47a7-a0a2-66ef6de81266" width="200">
## 相关项目
- [One API](https://github.com/songquanpeng/one-api):原版项目
- [Midjourney-Proxy](https://github.com/novicezk/midjourney-proxy)Midjourney接口支持

View File

@@ -112,6 +112,9 @@ var RelayTimeout = GetEnvOrDefault("RELAY_TIMEOUT", 0) // unit is second
var GeminiSafetySetting = GetEnvOrDefaultString("GEMINI_SAFETY_SETTING", "BLOCK_NONE")
// https://docs.cohere.com/docs/safety-modes Type; NONE/CONTEXTUAL/STRICT
var CohereSafetySetting = GetEnvOrDefaultString("COHERE_SAFETY_SETTING", "NONE")
const (
RequestIdKey = "X-Oneapi-Request-Id"
)
@@ -123,6 +126,10 @@ const (
RoleRootUser = 100
)
func IsValidateRole(role int) bool {
return role == RoleGuestUser || role == RoleCommonUser || role == RoleAdminUser || role == RoleRootUser
}
var (
FileUploadPermission = RoleGuestUser
FileDownloadPermission = RoleGuestUser
@@ -213,6 +220,8 @@ const (
ChannelTypeDify = 37
ChannelTypeJina = 38
ChannelCloudflare = 39
ChannelTypeSiliconFlow = 40
ChannelTypeVertexAi = 41
ChannelTypeDummy // this one is only for count, do not add any channel after this
@@ -259,4 +268,6 @@ var ChannelBaseURLs = []string{
"", //37
"https://api.jina.ai", //38
"https://api.cloudflare.com", //39
"https://api.siliconflow.cn", //40
"", //41
}

View File

@@ -3,6 +3,7 @@ package common
import (
"errors"
"net/smtp"
"strings"
)
type outlookAuth struct {
@@ -30,3 +31,10 @@ func (a *outlookAuth) Next(fromServer []byte, more bool) ([]byte, error) {
}
return nil, nil
}
func isOutlookServer(server string) bool {
// 兼容多地区的outlook邮箱和ofb邮箱
// 其实应该加一个Option来区分是否用LOGIN的方式登录
// 先临时兼容一下
return strings.Contains(server, "outlook") || strings.Contains(server, "onmicrosoft")
}

View File

@@ -9,17 +9,26 @@ import (
"time"
)
func generateMessageID() string {
domain := strings.Split(SMTPAccount, "@")[1]
return fmt.Sprintf("<%d.%s@%s>", time.Now().UnixNano(), GetRandomString(12), domain)
}
func SendEmail(subject string, receiver string, content string) error {
if SMTPFrom == "" { // for compatibility
SMTPFrom = SMTPAccount
}
if SMTPServer == "" && SMTPAccount == "" {
return fmt.Errorf("SMTP 服务器未配置")
}
encodedSubject := fmt.Sprintf("=?UTF-8?B?%s?=", base64.StdEncoding.EncodeToString([]byte(subject)))
mail := []byte(fmt.Sprintf("To: %s\r\n"+
"From: %s<%s>\r\n"+
"Subject: %s\r\n"+
"Date: %s\r\n"+
"Message-ID: %s\r\n"+ // 添加 Message-ID 头
"Content-Type: text/html; charset=UTF-8\r\n\r\n%s\r\n",
receiver, SystemName, SMTPFrom, encodedSubject, time.Now().Format(time.RFC1123Z), content))
receiver, SystemName, SMTPFrom, encodedSubject, time.Now().Format(time.RFC1123Z), generateMessageID(), content))
auth := smtp.PlainAuth("", SMTPAccount, SMTPToken, SMTPServer)
addr := fmt.Sprintf("%s:%d", SMTPServer, SMTPPort)
to := strings.Split(receiver, ";")
@@ -62,7 +71,7 @@ func SendEmail(subject string, receiver string, content string) error {
if err != nil {
return err
}
} else if strings.HasSuffix(SMTPAccount, "outlook.com") {
} else if isOutlookServer(SMTPAccount) {
auth = LoginAuth(SMTPAccount, SMTPToken)
err = smtp.SendMail(addr, auth, SMTPAccount, to, mail)
} else {
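
The hunk above adds a Message-ID header (many receiving servers score or reject mail that lacks one) and switches the LOGIN-auth branch from a hard-coded `outlook.com` suffix check to `isOutlookServer`. A minimal standalone sketch of the same Message-ID construction, assuming a hypothetical SMTP account `no-reply@example.com`; the real helper uses `common.GetRandomString(12)` and the configured `SMTPAccount`:

```go
package main

import (
	"fmt"
	"math/rand"
	"strings"
	"time"
)

// Standalone version of generateMessageID above, for illustration only.
func generateMessageID(smtpAccount string) string {
	domain := strings.Split(smtpAccount, "@")[1]
	random := fmt.Sprintf("%012x", rand.Int63())[:12] // 12 random hex chars
	return fmt.Sprintf("<%d.%s@%s>", time.Now().UnixNano(), random, domain)
}

func main() {
	// Prints something like: <1726480598000000000.1a2b3c4d5e6f@example.com>
	fmt.Println(generateMessageID("no-reply@example.com"))
}
```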

View File

@@ -23,10 +23,11 @@ const (
var defaultModelRatio = map[string]float64{
//"midjourney": 50,
"gpt-4-gizmo-*": 15,
"gpt-4-all": 15,
"gpt-4o-all": 15,
"gpt-4": 15,
"gpt-4-gizmo-*": 15,
"gpt-4o-gizmo-*": 2.5,
"gpt-4-all": 15,
"gpt-4o-all": 15,
"gpt-4": 15,
//"gpt-4-0314": 15, //deprecated
"gpt-4-0613": 15,
"gpt-4-32k": 30,
@@ -41,6 +42,10 @@ var defaultModelRatio = map[string]float64{
"gpt-4o": 2.5, // $0.01 / 1K tokens
"gpt-4o-2024-05-13": 2.5, // $0.01 / 1K tokens
"gpt-4o-2024-08-06": 1.25, // $0.01 / 1K tokens
"o1-preview": 7.5,
"o1-preview-2024-09-12": 7.5,
"o1-mini": 1.5,
"o1-mini-2024-09-12": 1.5,
"gpt-4o-mini": 0.075,
"gpt-4o-mini-2024-07-18": 0.075,
"gpt-4-turbo": 5, // $0.01 / 1K tokens
@@ -105,8 +110,10 @@ var defaultModelRatio = map[string]float64{
"gemini-pro-vision": 1, // $0.00025 / 1k characters -> $0.001 / 1k tokens
"gemini-1.0-pro-vision-001": 1,
"gemini-1.0-pro-001": 1,
"gemini-1.5-pro-latest": 1,
"gemini-1.5-pro-latest": 1.75, // $3.5 / 1M tokens
"gemini-1.5-pro-exp-0827": 1.75, // $3.5 / 1M tokens
"gemini-1.5-flash-latest": 1,
"gemini-1.5-flash-exp-0827": 1,
"gemini-1.0-pro-latest": 1,
"gemini-1.0-pro-vision-latest": 1,
"gemini-ultra": 1,
@@ -118,6 +125,13 @@ var defaultModelRatio = map[string]float64{
"glm-4v": 0.05 * RMB, // ¥0.05 / 1k tokens
"glm-4-alltools": 0.1 * RMB, // ¥0.1 / 1k tokens
"glm-3-turbo": 0.3572,
"glm-4-plus": 0.05 * RMB,
"glm-4-0520": 0.1 * RMB,
"glm-4-air": 0.001 * RMB,
"glm-4-airx": 0.01 * RMB,
"glm-4-long": 0.001 * RMB,
"glm-4-flash": 0,
"glm-4v-plus": 0.01 * RMB,
"qwen-turbo": 0.8572, // ¥0.012 / 1k tokens
"qwen-plus": 10, // ¥0.14 / 1k tokens
"text-embedding-v1": 0.05, // ¥0.0007 / 1k tokens
@@ -136,26 +150,28 @@ var defaultModelRatio = map[string]float64{
"hunyuan": 7.143, // ¥0.1 / 1k tokens // https://cloud.tencent.com/document/product/1729/97731#e0e6be58-60c8-469f-bdeb-6c264ce3b4d0
// https://platform.lingyiwanwu.com/docs#-计费单元
// 已经按照 7.2 来换算美元价格
"yi-34b-chat-0205": 0.18,
"yi-34b-chat-200k": 0.864,
"yi-vl-plus": 0.432,
"yi-large": 20.0 / 1000 * RMB,
"yi-medium": 2.5 / 1000 * RMB,
"yi-vision": 6.0 / 1000 * RMB,
"yi-medium-200k": 12.0 / 1000 * RMB,
"yi-spark": 1.0 / 1000 * RMB,
"yi-large-rag": 25.0 / 1000 * RMB,
"yi-large-turbo": 12.0 / 1000 * RMB,
"yi-large-preview": 20.0 / 1000 * RMB,
"yi-large-rag-preview": 25.0 / 1000 * RMB,
"command": 0.5,
"command-nightly": 0.5,
"command-light": 0.5,
"command-light-nightly": 0.5,
"command-r": 0.25,
"command-r-plus ": 1.5,
"deepseek-chat": 0.07,
"deepseek-coder": 0.07,
"yi-34b-chat-0205": 0.18,
"yi-34b-chat-200k": 0.864,
"yi-vl-plus": 0.432,
"yi-large": 20.0 / 1000 * RMB,
"yi-medium": 2.5 / 1000 * RMB,
"yi-vision": 6.0 / 1000 * RMB,
"yi-medium-200k": 12.0 / 1000 * RMB,
"yi-spark": 1.0 / 1000 * RMB,
"yi-large-rag": 25.0 / 1000 * RMB,
"yi-large-turbo": 12.0 / 1000 * RMB,
"yi-large-preview": 20.0 / 1000 * RMB,
"yi-large-rag-preview": 25.0 / 1000 * RMB,
"command": 0.5,
"command-nightly": 0.5,
"command-light": 0.5,
"command-light-nightly": 0.5,
"command-r": 0.25,
"command-r-plus": 1.5,
"command-r-08-2024": 0.075,
"command-r-plus-08-2024": 1.25,
"deepseek-chat": 0.07,
"deepseek-coder": 0.07,
// Perplexity online 模型对搜索额外收费,有需要应自行调整,此处不计入搜索费用
"llama-3-sonar-small-32k-chat": 0.2 / 1000 * USD,
"llama-3-sonar-small-32k-online": 0.2 / 1000 * USD,
@@ -187,8 +203,8 @@ var defaultModelPrice = map[string]float64{
}
var (
modelPriceMap = make(map[string]float64)
modelPriceMapMutex = sync.RWMutex{}
modelPriceMap map[string]float64 = nil
modelPriceMapMutex = sync.RWMutex{}
)
var (
modelRatioMap map[string]float64 = nil
@@ -197,8 +213,9 @@ var (
var CompletionRatio map[string]float64 = nil
var defaultCompletionRatio = map[string]float64{
"gpt-4-gizmo-*": 2,
"gpt-4-all": 2,
"gpt-4-gizmo-*": 2,
"gpt-4o-gizmo-*": 3,
"gpt-4-all": 2,
}
func GetModelPriceMap() map[string]float64 {
@@ -232,6 +249,9 @@ func GetModelPrice(name string, printErr bool) (float64, bool) {
if strings.HasPrefix(name, "gpt-4-gizmo") {
name = "gpt-4-gizmo-*"
}
if strings.HasPrefix(name, "gpt-4o-gizmo") {
name = "gpt-4o-gizmo-*"
}
price, ok := modelPriceMap[name]
if !ok {
if printErr {
@@ -312,16 +332,8 @@ func GetCompletionRatio(name string) float64 {
if strings.HasPrefix(name, "gpt-4-gizmo") {
name = "gpt-4-gizmo-*"
}
if strings.HasPrefix(name, "gpt-3.5") {
if name == "gpt-3.5-turbo" || strings.HasSuffix(name, "0125") {
// https://openai.com/blog/new-embedding-models-and-api-updates
// Updated GPT-3.5 Turbo model and lower pricing
return 3
}
if strings.HasSuffix(name, "1106") {
return 2
}
return 4.0 / 3.0
if strings.HasPrefix(name, "gpt-4o-gizmo") {
name = "gpt-4o-gizmo-*"
}
if strings.HasPrefix(name, "gpt-4") && !strings.HasSuffix(name, "-all") && !strings.HasSuffix(name, "-gizmo-*") {
if strings.HasPrefix(name, "gpt-4-turbo") || strings.HasSuffix(name, "preview") {
@@ -335,6 +347,9 @@ func GetCompletionRatio(name string) float64 {
}
return 2
}
if strings.HasPrefix(name, "o1-") {
return 4
}
if name == "chatgpt-4o-latest" {
return 3
}
@@ -345,10 +360,24 @@ func GetCompletionRatio(name string) float64 {
} else if strings.Contains(name, "claude-3") {
return 5
}
if strings.HasPrefix(name, "gpt-3.5") {
if name == "gpt-3.5-turbo" || strings.HasSuffix(name, "0125") {
// https://openai.com/blog/new-embedding-models-and-api-updates
// Updated GPT-3.5 Turbo model and lower pricing
return 3
}
if strings.HasSuffix(name, "1106") {
return 2
}
return 4.0 / 3.0
}
if strings.HasPrefix(name, "mistral-") {
return 3
}
if strings.HasPrefix(name, "gemini-") {
if strings.Contains(name, "flash") {
return 4
}
return 3
}
if strings.HasPrefix(name, "command") {
@@ -357,6 +386,10 @@ func GetCompletionRatio(name string) float64 {
return 3
case "command-r-plus":
return 5
case "command-r-08-2024":
return 4
case "command-r-plus-08-2024":
return 4
default:
return 2
}
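
The ratio changes above add o1, GLM-4.x, command-r-08-2024 and Gemini flash entries. As a sanity check of the o1 numbers, here is a worked example; the quota formula is the usual One API convention and is an assumption here, not a quote of new-api's billing code:

```go
package main

import "fmt"

// Assumed convention: modelRatio 1.0 ≈ $0.002 per 1K prompt tokens, 500,000 quota ≈ $1,
// and quota ≈ modelRatio * groupRatio * (promptTokens + completionTokens*completionRatio).
func main() {
	const quotaPerUSD = 500000.0

	modelRatio := 1.5      // "o1-mini" entry added to defaultModelRatio
	completionRatio := 4.0 // new "o1-" prefix branch in GetCompletionRatio
	groupRatio := 1.0      // assume the default group ratio

	promptTokens, completionTokens := 1000.0, 1000.0
	quota := modelRatio * groupRatio * (promptTokens + completionTokens*completionRatio)
	fmt.Printf("o1-mini, 1K in + 1K out: %.0f quota ≈ $%.4f\n", quota, quota/quotaPerUSD)
	// 7500 quota ≈ $0.0150, consistent with o1-mini's $3 / 1M input and $12 / 1M output pricing.
}
```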

View File

@@ -31,14 +31,6 @@ func MapToJsonStr(m map[string]interface{}) string {
return string(bytes)
}
func MapToJsonStrFloat(m map[string]float64) string {
bytes, err := json.Marshal(m)
if err != nil {
return ""
}
return string(bytes)
}
func StrToMap(str string) map[string]interface{} {
m := make(map[string]interface{})
err := json.Unmarshal([]byte(str), &m)
@@ -48,6 +40,11 @@ func StrToMap(str string) map[string]interface{} {
return m
}
func IsJsonStr(str string) bool {
var js map[string]interface{}
return json.Unmarshal([]byte(str), &js) == nil
}
func String2Int(str string) int {
num, err := strconv.Atoi(str)
if err != nil {

23
common/user_groups.go Normal file
View File

@@ -0,0 +1,23 @@
package common
import (
"encoding/json"
)
var UserUsableGroups = map[string]string{
"default": "默认分组",
"vip": "vip分组",
}
func UserUsableGroups2JSONString() string {
jsonBytes, err := json.Marshal(UserUsableGroups)
if err != nil {
SysError("error marshalling user groups: " + err.Error())
}
return string(jsonBytes)
}
func UpdateUserUsableGroupsByJSONString(jsonStr string) error {
UserUsableGroups = make(map[string]string)
return json.Unmarshal([]byte(jsonStr), &UserUsableGroups)
}
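
common/user_groups.go is new: it holds the `UserUsableGroups` option that the token-group feature (commit 052bc2075b) reads. A self-contained sketch of the JSON round-trip the two helpers perform:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors UserUsableGroups2JSONString / UpdateUserUsableGroupsByJSONString above.
func main() {
	userUsableGroups := map[string]string{
		"default": "默认分组",
		"vip":     "vip分组",
	}

	// Serialize for storage in the option table.
	jsonBytes, _ := json.Marshal(userUsableGroups)
	fmt.Println(string(jsonBytes)) // {"default":"默认分组","vip":"vip分组"}

	// An admin could later store an updated mapping, e.g. adding an "svip" group
	// (hypothetical group name); the update helper simply unmarshals it back.
	updated := `{"default":"默认分组","vip":"vip分组","svip":"svip分组"}`
	userUsableGroups = make(map[string]string)
	if err := json.Unmarshal([]byte(updated), &userUsableGroups); err != nil {
		panic(err)
	}
	fmt.Println(len(userUsableGroups)) // 3
}
```

The serialized string is what gets stored under the `UserUsableGroups` key in the option map (see the model/option.go hunk further down).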

View File

@@ -1,10 +1,13 @@
package common
import (
crand "crypto/rand"
"encoding/base64"
"fmt"
"github.com/google/uuid"
"html/template"
"log"
"math/big"
"math/rand"
"net"
"os/exec"
@@ -128,6 +131,11 @@ func IntMax(a int, b int) int {
}
}
func IsIP(s string) bool {
ip := net.ParseIP(s)
return ip != nil
}
func GetUUID() string {
code := uuid.New().String()
code = strings.Replace(code, "-", "", -1)
@@ -137,24 +145,35 @@ func GetUUID() string {
const keyChars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
func init() {
rand.Seed(time.Now().UnixNano())
rand.New(rand.NewSource(time.Now().UnixNano()))
}
func GenerateKey() string {
//rand.Seed(time.Now().UnixNano())
key := make([]byte, 48)
for i := 0; i < 16; i++ {
key[i] = keyChars[rand.Intn(len(keyChars))]
}
uuid_ := GetUUID()
for i := 0; i < 32; i++ {
c := uuid_[i]
if i%2 == 0 && c >= 'a' && c <= 'z' {
c = c - 'a' + 'A'
func GenerateRandomCharsKey(length int) (string, error) {
b := make([]byte, length)
maxI := big.NewInt(int64(len(keyChars)))
for i := range b {
n, err := crand.Int(crand.Reader, maxI)
if err != nil {
return "", err
}
key[i+16] = c
b[i] = keyChars[n.Int64()]
}
return string(key)
return string(b), nil
}
func GenerateRandomKey(length int) (string, error) {
bytes := make([]byte, length*3/4) // 对于48位的输出这里应该是36
if _, err := crand.Read(bytes); err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString(bytes), nil
}
func GenerateKey() (string, error) {
//rand.Seed(time.Now().UnixNano())
return GenerateRandomCharsKey(48)
}
func GetRandomInt(max int) int {
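
The diff above replaces the old UUID-flavoured `GenerateKey` with crypto/rand based helpers. The `length*3/4` sizing in `GenerateRandomKey` exists because base64 expands every 3 bytes into 4 characters, so 36 random bytes encode to exactly 48 characters. A standalone sketch:

```go
package main

import (
	crand "crypto/rand"
	"encoding/base64"
	"fmt"
)

// Standalone copy of the new base64 key generator, shown here to illustrate the
// length math; when length is a multiple of 4, the output has no '=' padding.
func generateRandomKey(length int) (string, error) {
	bytes := make([]byte, length*3/4)
	if _, err := crand.Read(bytes); err != nil {
		return "", err
	}
	return base64.StdEncoding.EncodeToString(bytes), nil
}

func main() {
	key, err := generateRandomKey(48)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(key), key) // 48 <random base64 string>
}
```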

View File

@@ -20,14 +20,16 @@ var GetMediaTokenNotStream = common.GetEnvOrDefaultBool("GET_MEDIA_TOKEN_NOT_STR
var UpdateTask = common.GetEnvOrDefaultBool("UPDATE_TASK", true)
var GeminiModelMap = map[string]string{
"gemini-1.5-pro-latest": "v1beta",
"gemini-1.5-pro-001": "v1beta",
"gemini-1.5-pro": "v1beta",
"gemini-1.5-pro-exp-0801": "v1beta",
"gemini-1.5-flash-latest": "v1beta",
"gemini-1.5-flash-001": "v1beta",
"gemini-1.5-flash": "v1beta",
"gemini-ultra": "v1beta",
"gemini-1.5-pro-latest": "v1beta",
"gemini-1.5-pro-001": "v1beta",
"gemini-1.5-pro": "v1beta",
"gemini-1.5-pro-exp-0801": "v1beta",
"gemini-1.5-pro-exp-0827": "v1beta",
"gemini-1.5-flash-latest": "v1beta",
"gemini-1.5-flash-exp-0827": "v1beta",
"gemini-1.5-flash-001": "v1beta",
"gemini-1.5-flash": "v1beta",
"gemini-ultra": "v1beta",
}
func InitEnv() {
@@ -44,3 +46,6 @@ func InitEnv() {
}
}
}
// 是否生成初始令牌,默认关闭。
var GenerateDefaultToken = common.GetEnvOrDefaultBool("GENERATE_DEFAULT_TOKEN", false)

View File

@@ -20,6 +20,7 @@ import (
"one-api/relay/constant"
"one-api/service"
"strconv"
"strings"
"sync"
"time"
@@ -81,8 +82,7 @@ func testChannel(channel *model.Channel, testModel string) (err error, openAIErr
return fmt.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
}
request := buildTestRequest()
request.Model = testModel
request := buildTestRequest(testModel)
meta.UpstreamModelName = testModel
common.SysLog(fmt.Sprintf("testing channel %d with model %s", channel.Id, testModel))
@@ -141,17 +141,22 @@ func testChannel(channel *model.Channel, testModel string) (err error, openAIErr
return nil, nil
}
func buildTestRequest() *dto.GeneralOpenAIRequest {
func buildTestRequest(model string) *dto.GeneralOpenAIRequest {
testRequest := &dto.GeneralOpenAIRequest{
Model: "", // this will be set later
MaxTokens: 1,
Stream: false,
Model: "", // this will be set later
Stream: false,
}
if strings.HasPrefix(model, "o1-") {
testRequest.MaxCompletionTokens = 1
} else {
testRequest.MaxTokens = 1
}
content, _ := json.Marshal("hi")
testMessage := dto.Message{
Role: "user",
Content: content,
}
testRequest.Model = model
testRequest.Messages = append(testRequest.Messages, testMessage)
return testRequest
}
@@ -226,26 +231,22 @@ func testAllChannels(notify bool) error {
tok := time.Now()
milliseconds := tok.Sub(tik).Milliseconds()
ban := false
if milliseconds > disableThreshold {
err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0))
ban = true
}
shouldBanChannel := false
// request error disables the channel
if openaiWithStatusErr != nil {
oaiErr := openaiWithStatusErr.Error
err = errors.New(fmt.Sprintf("type %s, httpCode %d, code %v, message %s", oaiErr.Type, openaiWithStatusErr.StatusCode, oaiErr.Code, oaiErr.Message))
ban = service.ShouldDisableChannel(channel.Type, openaiWithStatusErr)
shouldBanChannel = service.ShouldDisableChannel(channel.Type, openaiWithStatusErr)
}
// parse *int to bool
if !channel.GetAutoBan() {
ban = false
if milliseconds > disableThreshold {
err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0))
shouldBanChannel = true
}
// disable channel
if ban && isChannelEnabled {
if isChannelEnabled && shouldBanChannel && channel.GetAutoBan() {
service.DisableChannel(channel.Id, channel.Name, err.Error())
}
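
`buildTestRequest` now takes the model name so channel tests send `max_completion_tokens` for o1-* models (which reject `max_tokens`) while keeping `max_tokens: 1` for everything else. A self-contained sketch of the two request bodies, using a local struct with the same JSON tags as the relevant `dto.GeneralOpenAIRequest` fields:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Local stand-in for the dto fields used by buildTestRequest.
type testRequest struct {
	Model               string           `json:"model,omitempty"`
	Messages            []map[string]any `json:"messages,omitempty"`
	MaxTokens           uint             `json:"max_tokens,omitempty"`
	MaxCompletionTokens uint             `json:"max_completion_tokens,omitempty"`
}

func buildTestRequest(model string) testRequest {
	req := testRequest{Model: model, Messages: []map[string]any{{"role": "user", "content": "hi"}}}
	if strings.HasPrefix(model, "o1-") {
		req.MaxCompletionTokens = 1 // o1 models do not accept max_tokens
	} else {
		req.MaxTokens = 1
	}
	return req
}

func main() {
	for _, m := range []string{"gpt-4o-mini", "o1-mini"} {
		b, _ := json.Marshal(buildTestRequest(m))
		fmt.Println(string(b))
	}
	// {"model":"gpt-4o-mini","messages":[{"content":"hi","role":"user"}],"max_tokens":1}
	// {"model":"o1-mini","messages":[{"content":"hi","role":"user"}],"max_completion_tokens":1}
}
```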

View File

@@ -198,6 +198,28 @@ func AddChannel(c *gin.Context) {
}
channel.CreatedTime = common.GetTimestamp()
keys := strings.Split(channel.Key, "\n")
if channel.Type == common.ChannelTypeVertexAi {
if channel.Other == "" {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "部署地区不能为空",
})
return
} else {
if common.IsJsonStr(channel.Other) {
// must have default
regionMap := common.StrToMap(channel.Other)
if regionMap["default"] == nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "部署地区必须包含default字段",
})
return
}
}
}
keys = []string{channel.Key}
}
channels := make([]model.Channel, 0, len(keys))
for _, key := range keys {
if key == "" {
@@ -297,6 +319,27 @@ func UpdateChannel(c *gin.Context) {
})
return
}
if channel.Type == common.ChannelTypeVertexAi {
if channel.Other == "" {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "部署地区不能为空",
})
return
} else {
if common.IsJsonStr(channel.Other) {
// must have default
regionMap := common.StrToMap(channel.Other)
if regionMap["default"] == nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "部署地区必须包含default字段",
})
return
}
}
}
}
err = channel.Update()
if err != nil {
c.JSON(http.StatusOK, gin.H{
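
Both AddChannel and UpdateChannel now validate the Vertex AI channel's `Other` field: it may be a plain region string, or a JSON object that must contain a `default` key. A standalone sketch of that check; the per-model keys in the example are illustrative assumptions:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the validation added above; the error strings match the handler responses.
func validateVertexRegion(other string) error {
	if other == "" {
		return fmt.Errorf("部署地区不能为空")
	}
	var regionMap map[string]any
	if json.Unmarshal([]byte(other), &regionMap) == nil { // same test as common.IsJsonStr
		if regionMap["default"] == nil {
			return fmt.Errorf("部署地区必须包含default字段")
		}
	}
	return nil
}

func main() {
	fmt.Println(validateVertexRegion(`us-central1`))                                               // <nil> (plain region string)
	fmt.Println(validateVertexRegion(`{"default":"us-central1","gemini-1.5-pro":"europe-west1"}`)) // <nil>
	fmt.Println(validateVertexRegion(`{"gemini-1.5-pro":"europe-west1"}`))                         // error: missing default
}
```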

View File

@@ -112,7 +112,9 @@ func GitHubOAuth(c *gin.Context) {
user := model.User{
GitHubId: githubUser.Login,
}
// IsGitHubIdAlreadyTaken is unscoped
if model.IsGitHubIdAlreadyTaken(user.GitHubId) {
// FillUserByGitHubId is scoped
err := user.FillUserByGitHubId()
if err != nil {
c.JSON(http.StatusOK, gin.H{
@@ -121,6 +123,14 @@ func GitHubOAuth(c *gin.Context) {
})
return
}
// if user.Id == 0 , user has been deleted
if user.Id == 0 {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "用户已注销",
})
return
}
} else {
if common.RegisterEnabled {
user.Username = "github_" + strconv.Itoa(model.GetMaxUserId()+1)

View File

@@ -17,3 +17,18 @@ func GetGroups(c *gin.Context) {
"data": groupNames,
})
}
func GetUserGroups(c *gin.Context) {
usableGroups := make(map[string]string)
for groupName, _ := range common.GroupRatio {
// UserUsableGroups contains the groups that the user can use
if _, ok := common.UserUsableGroups[groupName]; ok {
usableGroups[groupName] = common.UserUsableGroups[groupName]
}
}
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "",
"data": usableGroups,
})
}

View File

@@ -137,31 +137,63 @@ func init() {
}
func ListModels(c *gin.Context) {
userId := c.GetInt("id")
user, err := model.GetUserById(userId, true)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": err.Error(),
})
return
}
models := model.GetGroupModels(user.Group)
userOpenAiModels := make([]dto.OpenAIModels, 0)
permission := getPermission()
for _, s := range models {
if _, ok := openAIModelsMap[s]; ok {
userOpenAiModels = append(userOpenAiModels, openAIModelsMap[s])
modelLimitEnable := c.GetBool("token_model_limit_enabled")
if modelLimitEnable {
s, ok := c.Get("token_model_limit")
var tokenModelLimit map[string]bool
if ok {
tokenModelLimit = s.(map[string]bool)
} else {
userOpenAiModels = append(userOpenAiModels, dto.OpenAIModels{
Id: s,
Object: "model",
Created: 1626777600,
OwnedBy: "custom",
Permission: permission,
Root: s,
Parent: nil,
tokenModelLimit = map[string]bool{}
}
for allowModel, _ := range tokenModelLimit {
if _, ok := openAIModelsMap[allowModel]; ok {
userOpenAiModels = append(userOpenAiModels, openAIModelsMap[allowModel])
} else {
userOpenAiModels = append(userOpenAiModels, dto.OpenAIModels{
Id: allowModel,
Object: "model",
Created: 1626777600,
OwnedBy: "custom",
Permission: permission,
Root: allowModel,
Parent: nil,
})
}
}
} else {
userId := c.GetInt("id")
userGroup, err := model.GetUserGroup(userId)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "get user group failed",
})
return
}
group := userGroup
tokenGroup := c.GetString("token_group")
if tokenGroup != "" {
group = tokenGroup
}
models := model.GetGroupModels(group)
for _, s := range models {
if _, ok := openAIModelsMap[s]; ok {
userOpenAiModels = append(userOpenAiModels, openAIModelsMap[s])
} else {
userOpenAiModels = append(userOpenAiModels, dto.OpenAIModels{
Id: s,
Object: "model",
Created: 1626777600,
OwnedBy: "custom",
Permission: permission,
Root: s,
Parent: nil,
})
}
}
}
c.JSON(200, gin.H{

View File

@@ -7,18 +7,11 @@ import (
)
func GetPricing(c *gin.Context) {
userId := c.GetInt("id")
// if no login, get default group ratio
groupRatio := common.GetGroupRatio("default")
group, err := model.CacheGetUserGroup(userId)
if err == nil {
groupRatio = common.GetGroupRatio(group)
}
pricing := model.GetPricing(group)
pricing := model.GetPricing()
c.JSON(200, gin.H{
"success": true,
"data": pricing,
"group_ratio": groupRatio,
"group_ratio": common.GroupRatio,
})
}

View File

@@ -121,6 +121,9 @@ func shouldRetry(c *gin.Context, openaiErr *dto.OpenAIErrorWithStatusCode, retry
if openaiErr == nil {
return false
}
if openaiErr.LocalError {
return false
}
if retryTimes <= 0 {
return false
}
@@ -151,9 +154,6 @@ func shouldRetry(c *gin.Context, openaiErr *dto.OpenAIErrorWithStatusCode, retry
// azure处理超时不重试
return false
}
if openaiErr.LocalError {
return false
}
if openaiErr.StatusCode/100 == 2 {
return false
}

View File

@@ -5,6 +5,7 @@ import (
"crypto/sha256"
"encoding/hex"
"io"
"net/http"
"one-api/common"
"one-api/model"
"sort"
@@ -48,6 +49,13 @@ func TelegramBind(c *gin.Context) {
})
return
}
if user.Id == 0 {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "用户已注销",
})
return
}
user.TelegramId = telegramId
if err := user.Update(false); err != nil {
c.JSON(200, gin.H{

View File

@@ -123,10 +123,19 @@ func AddToken(c *gin.Context) {
})
return
}
key, err := common.GenerateKey()
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "生成令牌失败",
})
common.SysError("failed to generate token key: " + err.Error())
return
}
cleanToken := model.Token{
UserId: c.GetInt("id"),
Name: token.Name,
Key: common.GenerateKey(),
Key: key,
CreatedTime: common.GetTimestamp(),
AccessedTime: common.GetTimestamp(),
ExpiredTime: token.ExpiredTime,
@@ -134,6 +143,8 @@ func AddToken(c *gin.Context) {
UnlimitedQuota: token.UnlimitedQuota,
ModelLimitsEnabled: token.ModelLimitsEnabled,
ModelLimits: token.ModelLimits,
AllowIps: token.AllowIps,
Group: token.Group,
}
err = cleanToken.Insert()
if err != nil {
@@ -221,6 +232,8 @@ func UpdateToken(c *gin.Context) {
cleanToken.UnlimitedQuota = token.UnlimitedQuota
cleanToken.ModelLimitsEnabled = token.ModelLimitsEnabled
cleanToken.ModelLimits = token.ModelLimits
cleanToken.AllowIps = token.AllowIps
cleanToken.Group = token.Group
}
err = cleanToken.Update()
if err != nil {

View File

@@ -7,10 +7,12 @@ import (
"one-api/common"
"one-api/model"
"strconv"
"strings"
"sync"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
"one-api/constant"
)
type LoginRequest struct {
@@ -157,8 +159,9 @@ func Register(c *gin.Context) {
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": err.Error(),
"message": "数据库错误,请稍后重试",
})
common.SysError(fmt.Sprintf("CheckUserExistOrDeleted error: %v", err))
return
}
if exist {
@@ -186,6 +189,48 @@ func Register(c *gin.Context) {
})
return
}
// 获取插入后的用户ID
var insertedUser model.User
if err := model.DB.Where("username = ?", cleanUser.Username).First(&insertedUser).Error; err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "用户注册失败或用户ID获取失败",
})
return
}
// 生成默认令牌
if constant.GenerateDefaultToken {
key, err := common.GenerateKey()
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "生成默认令牌失败",
})
common.SysError("failed to generate token key: " + err.Error())
return
}
// 生成默认令牌
token := model.Token{
UserId: insertedUser.Id, // 使用插入后的用户ID
Name: cleanUser.Username + "的初始令牌",
Key: key,
CreatedTime: common.GetTimestamp(),
AccessedTime: common.GetTimestamp(),
ExpiredTime: -1, // 永不过期
RemainQuota: 500000, // 示例额度
UnlimitedQuota: true,
ModelLimitsEnabled: false,
}
if err := token.Insert(); err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "创建默认令牌失败",
})
return
}
}
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "",
@@ -276,7 +321,18 @@ func GenerateAccessToken(c *gin.Context) {
})
return
}
user.AccessToken = common.GetUUID()
// get rand int 28-32
randI := common.GetRandomInt(4)
key, err := common.GenerateRandomKey(29 + randI)
if err != nil {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "生成失败",
})
common.SysError("failed to generate key: " + err.Error())
return
}
user.SetAccessToken(key)
if model.DB.Where("access_token = ?", user.AccessToken).First(user).RowsAffected != 0 {
c.JSON(http.StatusOK, gin.H{
@@ -582,6 +638,7 @@ func DeleteSelf(c *gin.Context) {
func CreateUser(c *gin.Context) {
var user model.User
err := json.NewDecoder(c.Request.Body).Decode(&user)
user.Username = strings.TrimSpace(user.Username)
if err != nil || user.Username == "" || user.Password == "" {
c.JSON(http.StatusOK, gin.H{
"success": false,
@@ -629,8 +686,8 @@ func CreateUser(c *gin.Context) {
}
type ManageRequest struct {
Username string `json:"username"`
Action string `json:"action"`
Id int `json:"id"`
Action string `json:"action"`
}
// ManageUser Only admin user can do this
@@ -646,7 +703,7 @@ func ManageUser(c *gin.Context) {
return
}
user := model.User{
Username: req.Username,
Id: req.Id,
}
// Fill attributes
model.DB.Unscoped().Where(&user).First(&user)

View File

@@ -78,6 +78,13 @@ func WeChatAuth(c *gin.Context) {
})
return
}
if user.Id == 0 {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "用户已注销",
})
return
}
} else {
if common.RegisterEnabled {
user.Username = "wechat_" + strconv.Itoa(model.GetMaxUserId()+1)

View File

@@ -1,14 +1,17 @@
package dto
type RerankRequest struct {
Documents []any `json:"documents"`
Query string `json:"query"`
Model string `json:"model"`
TopN int `json:"top_n"`
Documents []any `json:"documents"`
Query string `json:"query"`
Model string `json:"model"`
TopN int `json:"top_n"`
ReturnDocuments bool `json:"return_documents,omitempty"`
MaxChunkPerDoc int `json:"max_chunk_per_doc,omitempty"`
OverLapTokens int `json:"overlap_tokens,omitempty"`
}
type RerankResponseDocument struct {
Document any `json:"document"`
Document any `json:"document,omitempty"`
Index int `json:"index"`
RelevanceScore float64 `json:"relevance_score"`
}
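
`RerankRequest` gains the optional `return_documents`, `max_chunk_per_doc` and `overlap_tokens` fields, and `document` becomes omitempty in the response. An example payload using the new fields; the struct is copied locally so the snippet is self-contained, and the model name is only an example:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local copy of the JSON tags shown in dto.RerankRequest above.
type rerankRequest struct {
	Documents       []any  `json:"documents"`
	Query           string `json:"query"`
	Model           string `json:"model"`
	TopN            int    `json:"top_n"`
	ReturnDocuments bool   `json:"return_documents,omitempty"`
	MaxChunkPerDoc  int    `json:"max_chunk_per_doc,omitempty"`
	OverLapTokens   int    `json:"overlap_tokens,omitempty"`
}

func main() {
	req := rerankRequest{
		Documents:       []any{"doc one", "doc two"},
		Query:           "which doc mentions one?",
		Model:           "rerank-english-v3.0", // example Cohere rerank model
		TopN:            1,
		ReturnDocuments: true,
		MaxChunkPerDoc:  10,
		OverLapTokens:   2,
	}
	b, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(b))
}
```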

View File

@@ -7,31 +7,32 @@ type ResponseFormat struct {
}
type GeneralOpenAIRequest struct {
Model string `json:"model,omitempty"`
Messages []Message `json:"messages,omitempty"`
Prompt any `json:"prompt,omitempty"`
Stream bool `json:"stream,omitempty"`
StreamOptions *StreamOptions `json:"stream_options,omitempty"`
MaxTokens uint `json:"max_tokens,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
TopK int `json:"top_k,omitempty"`
Stop any `json:"stop,omitempty"`
N int `json:"n,omitempty"`
Input any `json:"input,omitempty"`
Instruction string `json:"instruction,omitempty"`
Size string `json:"size,omitempty"`
Functions any `json:"functions,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty"`
ResponseFormat any `json:"response_format,omitempty"`
Seed float64 `json:"seed,omitempty"`
Tools []ToolCall `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
User string `json:"user,omitempty"`
LogProbs bool `json:"logprobs,omitempty"`
TopLogProbs int `json:"top_logprobs,omitempty"`
Dimensions int `json:"dimensions,omitempty"`
Model string `json:"model,omitempty"`
Messages []Message `json:"messages,omitempty"`
Prompt any `json:"prompt,omitempty"`
Stream bool `json:"stream,omitempty"`
StreamOptions *StreamOptions `json:"stream_options,omitempty"`
MaxTokens uint `json:"max_tokens,omitempty"`
MaxCompletionTokens uint `json:"max_completion_tokens,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
TopK int `json:"top_k,omitempty"`
Stop any `json:"stop,omitempty"`
N int `json:"n,omitempty"`
Input any `json:"input,omitempty"`
Instruction string `json:"instruction,omitempty"`
Size string `json:"size,omitempty"`
Functions any `json:"functions,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty"`
ResponseFormat any `json:"response_format,omitempty"`
Seed float64 `json:"seed,omitempty"`
Tools []ToolCall `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
User string `json:"user,omitempty"`
LogProbs bool `json:"logprobs,omitempty"`
TopLogProbs int `json:"top_logprobs,omitempty"`
Dimensions int `json:"dimensions,omitempty"`
}
type OpenAITools struct {

View File

@@ -34,6 +34,7 @@ type OpenAITextResponseChoice struct {
type OpenAITextResponse struct {
Id string `json:"id"`
Model string `json:"model"`
Object string `json:"object"`
Created int64 `json:"created"`
Choices []OpenAITextResponseChoice `json:"choices"`

22
go.mod
View File

@@ -1,7 +1,9 @@
module one-api
// +heroku goVersion go1.18
go 1.18
go 1.21
toolchain go1.22.4
require (
github.com/Calcium-Ion/go-epay v0.0.2
@@ -9,6 +11,7 @@ require (
github.com/aws/aws-sdk-go-v2 v1.26.1
github.com/aws/aws-sdk-go-v2/credentials v1.17.11
github.com/aws/aws-sdk-go-v2/service/bedrockruntime v1.7.4
github.com/bytedance/gopkg v0.0.0-20220118071334-3db87571198b
github.com/gin-contrib/cors v1.4.0
github.com/gin-contrib/gzip v0.0.6
github.com/gin-contrib/sessions v0.0.5
@@ -24,7 +27,7 @@ require (
github.com/pkoukk/tiktoken-go v0.1.7
github.com/samber/lo v1.39.0
github.com/shirou/gopsutil v3.21.11+incompatible
golang.org/x/crypto v0.21.0
golang.org/x/crypto v0.26.0
golang.org/x/image v0.15.0
gorm.io/driver/mysql v1.4.3
gorm.io/driver/postgres v1.5.2
@@ -38,9 +41,8 @@ require (
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.5 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.5 // indirect
github.com/aws/smithy-go v1.20.2 // indirect
github.com/bytedance/gopkg v0.0.0-20220118071334-3db87571198b // indirect
github.com/bytedance/sonic v1.9.1 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dlclark/regexp2 v1.11.0 // indirect
@@ -51,6 +53,7 @@ require (
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-sql-driver/mysql v1.6.0 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/gorilla/context v1.1.1 // indirect
github.com/gorilla/securecookie v1.1.1 // indirect
github.com/gorilla/sessions v1.2.1 // indirect
@@ -69,6 +72,7 @@ require (
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.0.8 // indirect
github.com/stretchr/testify v1.9.0 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
@@ -76,10 +80,10 @@ require (
github.com/yusufpapurcu/wmi v1.2.3 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/exp v0.0.0-20240404231335-c0f41cb1a7a0 // indirect
golang.org/x/net v0.21.0 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.18.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/protobuf v1.30.0 // indirect
golang.org/x/net v0.28.0 // indirect
golang.org/x/sync v0.8.0 // indirect
golang.org/x/sys v0.24.0 // indirect
golang.org/x/text v0.17.0 // indirect
google.golang.org/protobuf v1.34.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

40
go.sum
View File

@@ -23,8 +23,8 @@ github.com/bytedance/gopkg v0.0.0-20220118071334-3db87571198b/go.mod h1:2ZlV9BaU
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
@@ -37,6 +37,7 @@ github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cu
github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/gin-contrib/cors v1.4.0 h1:oJ6gwtUl3lqV0WEIwM/LxPF1QZ5qe2lGWdY2+bz7y0g=
@@ -57,6 +58,7 @@ github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
@@ -81,7 +83,8 @@ github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzq
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -142,8 +145,11 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs=
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
@@ -172,7 +178,8 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
@@ -191,18 +198,18 @@ golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUu
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw=
golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54=
golang.org/x/exp v0.0.0-20240404231335-c0f41cb1a7a0 h1:985EYyeCOxTpcgOTJpflJUwOeEz0CQOdPt73OzpE9F8=
golang.org/x/exp v0.0.0-20240404231335-c0f41cb1a7a0/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI=
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -214,26 +221,27 @@ golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg=
golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=

View File

@@ -10,6 +10,17 @@ import (
"strings"
)
func validUserInfo(username string, role int) bool {
// check username is empty
if strings.TrimSpace(username) == "" {
return false
}
if !common.IsValidateRole(role) {
return false
}
return true
}
func authHelper(c *gin.Context, minRole int) {
session := sessions.Default(c)
username := session.Get("username")
@@ -30,6 +41,14 @@ func authHelper(c *gin.Context, minRole int) {
}
user := model.ValidateAccessToken(accessToken)
if user != nil && user.Username != "" {
if !validUserInfo(user.Username, user.Role) {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "无权进行此操作,用户信息无效",
})
c.Abort()
return
}
// Token is valid
username = user.Username
role = user.Role
@@ -91,6 +110,14 @@ func authHelper(c *gin.Context, minRole int) {
c.Abort()
return
}
if !validUserInfo(username.(string), role.(int)) {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "无权进行此操作,用户信息无效",
})
c.Abort()
return
}
c.Set("username", username)
c.Set("role", role)
c.Set("id", id)
@@ -175,6 +202,8 @@ func TokenAuth() func(c *gin.Context) {
} else {
c.Set("token_model_limit_enabled", false)
}
c.Set("allow_ips", token.GetIpLimitsMap())
c.Set("token_group", token.Group)
if len(parts) > 1 {
if model.IsAdmin(token.UserId) {
c.Set("specific_channel_id", parts[1])

View File

@@ -22,6 +22,14 @@ type ModelRequest struct {
func Distribute() func(c *gin.Context) {
return func(c *gin.Context) {
allowIpsMap := c.GetStringMap("allow_ips")
if len(allowIpsMap) != 0 {
clientIp := c.ClientIP()
if _, ok := allowIpsMap[clientIp]; !ok {
abortWithOpenAiMessage(c, http.StatusForbidden, "您的 IP 不在令牌允许访问的列表中")
return
}
}
userId := c.GetInt("id")
var channel *model.Channel
channelId, ok := c.Get("specific_channel_id")
@@ -31,6 +39,20 @@ func Distribute() func(c *gin.Context) {
return
}
userGroup, _ := model.CacheGetUserGroup(userId)
tokenGroup := c.GetString("token_group")
if tokenGroup != "" {
// check common.UserUsableGroups[userGroup]
if _, ok := common.UserUsableGroups[tokenGroup]; !ok {
abortWithOpenAiMessage(c, http.StatusForbidden, fmt.Sprintf("令牌分组 %s 已被禁用", tokenGroup))
return
}
// check group in common.GroupRatio
if _, ok := common.GroupRatio[tokenGroup]; !ok {
abortWithOpenAiMessage(c, http.StatusForbidden, fmt.Sprintf("分组 %s 已被弃用", tokenGroup))
return
}
userGroup = tokenGroup
}
c.Set("group", userGroup)
if ok {
id, err := strconv.Atoi(channelId.(string))
@@ -199,6 +221,8 @@ func SetupContextForSelectedChannel(c *gin.Context, channel *model.Channel, mode
switch channel.Type {
case common.ChannelTypeAzure:
c.Set("api_version", channel.Other)
case common.ChannelTypeVertexAi:
c.Set("region", channel.Other)
case common.ChannelTypeXunfei:
c.Set("api_version", channel.Other)
case common.ChannelTypeGemini:

View File

@@ -36,6 +36,12 @@ func GetEnabledModels() []string {
return models
}
func GetAllEnableAbilities() []Ability {
var abilities []Ability
DB.Find(&abilities, "enabled = ?", true)
return abilities
}
func getPriority(group string, model string, retry int) (int, error) {
groupCol := "`group`"
trueVal := "1"

View File

@@ -270,6 +270,9 @@ func CacheGetRandomSatisfiedChannel(group string, model string, retry int) (*Cha
if strings.HasPrefix(model, "gpt-4-gizmo") {
model = "gpt-4-gizmo-*"
}
if strings.HasPrefix(model, "gpt-4o-gizmo") {
model = "gpt-4o-gizmo-*"
}
// if memory cache is disabled, get channel directly from database
if !common.MemoryCacheEnabled {

View File

@@ -32,7 +32,7 @@ func createRootAccountIfNeed() error {
Role: common.RoleRootUser,
Status: common.UserStatusEnabled,
DisplayName: "Root User",
AccessToken: common.GetUUID(),
AccessToken: nil,
Quota: 100000000,
}
DB.Create(&rootUser)

View File

@@ -86,6 +86,7 @@ func InitOptionMap() {
common.OptionMap["ModelRatio"] = common.ModelRatio2JSONString()
common.OptionMap["ModelPrice"] = common.ModelPrice2JSONString()
common.OptionMap["GroupRatio"] = common.GroupRatio2JSONString()
common.OptionMap["UserUsableGroups"] = common.UserUsableGroups2JSONString()
common.OptionMap["CompletionRatio"] = common.CompletionRatio2JSONString()
common.OptionMap["TopUpLink"] = common.TopUpLink
common.OptionMap["ChatLink"] = common.ChatLink
@@ -303,6 +304,8 @@ func updateOptionMap(key string, value string) (err error) {
err = common.UpdateModelRatioByJSONString(value)
case "GroupRatio":
err = common.UpdateGroupRatioByJSONString(value)
case "UserUsableGroups":
err = common.UpdateUserUsableGroupsByJSONString(value)
case "CompletionRatio":
err = common.UpdateCompletionRatioByJSONString(value)
case "ModelPrice":

View File

@@ -7,14 +7,13 @@ import (
)
type Pricing struct {
Available bool `json:"available"`
ModelName string `json:"model_name"`
QuotaType int `json:"quota_type"`
ModelRatio float64 `json:"model_ratio"`
ModelPrice float64 `json:"model_price"`
OwnerBy string `json:"owner_by"`
CompletionRatio float64 `json:"completion_ratio"`
EnableGroup []string `json:"enable_group,omitempty"`
EnableGroup []string `json:"enable_groups,omitempty"`
}
var (
@@ -23,40 +22,47 @@ var (
updatePricingLock sync.Mutex
)
func GetPricing(group string) []Pricing {
func GetPricing() []Pricing {
updatePricingLock.Lock()
defer updatePricingLock.Unlock()
if time.Since(lastGetPricingTime) > time.Minute*1 || len(pricingMap) == 0 {
updatePricing()
}
if group != "" {
userPricingMap := make([]Pricing, 0)
models := GetGroupModels(group)
for _, pricing := range pricingMap {
if !common.StringsContains(models, pricing.ModelName) {
pricing.Available = false
}
userPricingMap = append(userPricingMap, pricing)
}
return userPricingMap
}
//if group != "" {
// userPricingMap := make([]Pricing, 0)
// models := GetGroupModels(group)
// for _, pricing := range pricingMap {
// if !common.StringsContains(models, pricing.ModelName) {
// pricing.Available = false
// }
// userPricingMap = append(userPricingMap, pricing)
// }
// return userPricingMap
//}
return pricingMap
}
func updatePricing() {
//modelRatios := common.GetModelRatios()
enabledModels := GetEnabledModels()
allModels := make(map[string]int)
for i, model := range enabledModels {
allModels[model] = i
enableAbilities := GetAllEnableAbilities()
modelGroupsMap := make(map[string][]string)
for _, ability := range enableAbilities {
groups := modelGroupsMap[ability.Model]
if groups == nil {
groups = make([]string, 0)
}
if !common.StringsContains(groups, ability.Group) {
groups = append(groups, ability.Group)
}
modelGroupsMap[ability.Model] = groups
}
pricingMap = make([]Pricing, 0)
for model, _ := range allModels {
for model, groups := range modelGroupsMap {
pricing := Pricing{
Available: true,
ModelName: model,
ModelName: model,
EnableGroup: groups,
}
modelPrice, findPrice := common.GetModelPrice(model, false)
if findPrice {
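The rewritten updatePricing derives each model's enable_groups from the enabled abilities instead of from GetEnabledModels. A compact sketch of that grouping step with made-up ability rows (the ability struct here is a stand-in for the project's Ability model):

package main

import "fmt"

type ability struct{ Group, Model string }

func main() {
	abilities := []ability{
		{"default", "gpt-4o"},
		{"vip", "gpt-4o"},
		{"default", "claude-3-5-sonnet-20240620"},
	}
	// Collect the distinct groups that can serve each model.
	modelGroups := make(map[string][]string)
	for _, a := range abilities {
		seen := false
		for _, g := range modelGroups[a.Model] {
			if g == a.Group {
				seen = true
				break
			}
		}
		if !seen {
			modelGroups[a.Model] = append(modelGroups[a.Model], a.Group)
		}
	}
	fmt.Println(modelGroups["gpt-4o"]) // [default vip]
}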

View File

@@ -23,10 +23,34 @@ type Token struct {
UnlimitedQuota bool `json:"unlimited_quota" gorm:"default:false"`
ModelLimitsEnabled bool `json:"model_limits_enabled" gorm:"default:false"`
ModelLimits string `json:"model_limits" gorm:"type:varchar(1024);default:''"`
AllowIps *string `json:"allow_ips" gorm:"default:''"`
UsedQuota int `json:"used_quota" gorm:"default:0"` // used quota
Group string `json:"group" gorm:"default:''"`
DeletedAt gorm.DeletedAt `gorm:"index"`
}
func (token *Token) GetIpLimitsMap() map[string]any {
// delete empty spaces
//split with \n
ipLimitsMap := make(map[string]any)
if token.AllowIps == nil {
return ipLimitsMap
}
cleanIps := strings.ReplaceAll(*token.AllowIps, " ", "")
if cleanIps == "" {
return ipLimitsMap
}
ips := strings.Split(cleanIps, "\n")
for _, ip := range ips {
ip = strings.TrimSpace(ip)
ip = strings.ReplaceAll(ip, ",", "")
if common.IsIP(ip) {
ipLimitsMap[ip] = true
}
}
return ipLimitsMap
}
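GetIpLimitsMap accepts a newline-separated allow list and tolerates stray spaces and commas. A quick illustration of the parsing behaviour, using a local copy of the same logic with net.ParseIP standing in for the project's common.IsIP:

package main

import (
	"fmt"
	"net"
	"strings"
)

// parseAllowIps mirrors Token.GetIpLimitsMap: strip spaces, split on newlines,
// drop commas, keep only valid IP literals.
func parseAllowIps(allowIps string) map[string]any {
	m := make(map[string]any)
	clean := strings.ReplaceAll(allowIps, " ", "")
	if clean == "" {
		return m
	}
	for _, ip := range strings.Split(clean, "\n") {
		ip = strings.ReplaceAll(strings.TrimSpace(ip), ",", "")
		if net.ParseIP(ip) != nil {
			m[ip] = true
		}
	}
	return m
}

func main() {
	fmt.Println(parseAllowIps("10.0.0.1,\n 192.168.1.5\nnot-an-ip"))
	// map[10.0.0.1:true 192.168.1.5:true]
}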
func GetAllUserTokens(userId int, startIdx int, num int) ([]*Token, error) {
var tokens []*Token
var err error
@@ -130,7 +154,8 @@ func (token *Token) Insert() error {
// Update Make sure your token's fields is completed, because this will update non-zero values
func (token *Token) Update() error {
var err error
err = DB.Model(token).Select("name", "status", "expired_time", "remain_quota", "unlimited_quota", "model_limits_enabled", "model_limits").Updates(token).Error
err = DB.Model(token).Select("name", "status", "expired_time", "remain_quota", "unlimited_quota",
"model_limits_enabled", "model_limits", "allow_ips", "group").Updates(token).Error
return err
}

View File

@@ -25,7 +25,7 @@ type User struct {
WeChatId string `json:"wechat_id" gorm:"column:wechat_id;index"`
TelegramId string `json:"telegram_id" gorm:"column:telegram_id;index"`
VerificationCode string `json:"verification_code" gorm:"-:all"` // this field is only for Email verification, don't save it to database!
AccessToken string `json:"access_token" gorm:"type:char(32);column:access_token;uniqueIndex"` // this token is for system management
AccessToken *string `json:"access_token" gorm:"type:char(32);column:access_token;uniqueIndex"` // this token is for system management
Quota int `json:"quota" gorm:"type:int;default:0"`
UsedQuota int `json:"used_quota" gorm:"type:int;default:0;column:used_quota"` // used quota
RequestCount int `json:"request_count" gorm:"type:int;default:0;"` // request number
@@ -38,6 +38,17 @@ type User struct {
DeletedAt gorm.DeletedAt `gorm:"index"`
}
func (user *User) GetAccessToken() string {
if user.AccessToken == nil {
return ""
}
return *user.AccessToken
}
func (user *User) SetAccessToken(token string) {
user.AccessToken = &token
}
// CheckUserExistOrDeleted check if user exist or deleted, if not exist, return false, nil, if deleted or exist, return true, nil
func CheckUserExistOrDeleted(username string, email string) (bool, error) {
var user User
@@ -201,7 +212,7 @@ func (user *User) Insert(inviterId int) error {
}
}
user.Quota = common.QuotaForNewUser
user.AccessToken = common.GetUUID()
//user.SetAccessToken(common.GetUUID())
user.AffCode = common.GetRandomString(4)
result := DB.Create(user)
if result.Error != nil {
@@ -295,11 +306,12 @@ func (user *User) ValidateAndFill() (err error) {
// that means if your fields value is 0, '', false or other zero values,
// it wont be used to build query conditions
password := user.Password
if user.Username == "" || password == "" {
username := strings.TrimSpace(user.Username)
if username == "" || password == "" {
return errors.New("用户名或密码为空")
}
// find buy username or email
DB.Where("username = ? OR email = ?", user.Username, user.Username).First(user)
DB.Where("username = ? OR email = ?", username, username).First(user)
okay := common.ValidatePasswordAndHash(password, user.Password)
if !okay || user.Status != common.UserStatusEnabled {
return errors.New("用户名或密码错误,或用户已被封禁")
@@ -339,14 +351,6 @@ func (user *User) FillUserByWeChatId() error {
return nil
}
func (user *User) FillUserByUsername() error {
if user.Username == "" {
return errors.New("username 为空!")
}
DB.Where(User{Username: user.Username}).First(user)
return nil
}
func (user *User) FillUserByTelegramId() error {
if user.TelegramId == "" {
return errors.New("Telegram id 为空!")
@@ -359,23 +363,19 @@ func (user *User) FillUserByTelegramId() error {
}
func IsEmailAlreadyTaken(email string) bool {
return DB.Where("email = ?", email).Find(&User{}).RowsAffected == 1
return DB.Unscoped().Where("email = ?", email).Find(&User{}).RowsAffected == 1
}
func IsWeChatIdAlreadyTaken(wechatId string) bool {
return DB.Where("wechat_id = ?", wechatId).Find(&User{}).RowsAffected == 1
return DB.Unscoped().Where("wechat_id = ?", wechatId).Find(&User{}).RowsAffected == 1
}
func IsGitHubIdAlreadyTaken(githubId string) bool {
return DB.Where("github_id = ?", githubId).Find(&User{}).RowsAffected == 1
}
func IsUsernameAlreadyTaken(username string) bool {
return DB.Where("username = ?", username).Find(&User{}).RowsAffected == 1
return DB.Unscoped().Where("github_id = ?", githubId).Find(&User{}).RowsAffected == 1
}
func IsTelegramIdAlreadyTaken(telegramId string) bool {
return DB.Where("telegram_id = ?", telegramId).Find(&User{}).RowsAffected == 1
return DB.Unscoped().Where("telegram_id = ?", telegramId).Find(&User{}).RowsAffected == 1
}
func ResetUserPasswordByEmail(email string, password string) error {
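Switching these existence checks to DB.Unscoped() makes them look past GORM's soft-delete filter, so an email, GitHub ID or Telegram ID that belongs to a soft-deleted account still counts as taken and cannot be re-registered. A library-style sketch of the pattern (gorm.io/gorm assumed; the User type is a stand-in, and a live *gorm.DB is needed to actually run it):

package model

import "gorm.io/gorm"

// User is a stand-in with just enough fields to show the soft-delete behaviour.
type User struct {
	ID        uint
	Email     string
	DeletedAt gorm.DeletedAt `gorm:"index"`
}

// isEmailTaken reports whether any row, including soft-deleted ones, uses the email.
// Without Unscoped(), GORM appends "deleted_at IS NULL" to the query and a deleted
// account's email would look available again.
func isEmailTaken(db *gorm.DB, email string) bool {
	return db.Unscoped().Where("email = ?", email).Find(&User{}).RowsAffected == 1
}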

View File

@@ -79,9 +79,9 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
err, usage = claudeStreamHandler(c, resp, info, a.RequestMode)
err, usage = ClaudeStreamHandler(c, resp, info, a.RequestMode)
} else {
err, usage = claudeHandler(a.RequestMode, c, resp, info.PromptTokens, info.UpstreamModelName)
err, usage = ClaudeHandler(c, resp, a.RequestMode, info)
}
return
}

View File

@@ -4,7 +4,6 @@ import (
"bufio"
"encoding/json"
"fmt"
"github.com/gin-gonic/gin"
"io"
"net/http"
"one-api/common"
@@ -12,6 +11,8 @@ import (
relaycommon "one-api/relay/common"
"one-api/service"
"strings"
"github.com/gin-gonic/gin"
)
func stopReasonClaude2OpenAI(reason string) string {
@@ -108,13 +109,10 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
}
}
formatMessages := make([]dto.Message, 0)
var lastMessage *dto.Message
lastMessage := dto.Message{
Role: "tool",
}
for i, message := range textRequest.Messages {
//if message.Role == "system" {
// if i != 0 {
// message.Role = "user"
// }
//}
if message.Role == "" {
textRequest.Messages[i].Role = "user"
}
@@ -122,7 +120,13 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
Role: message.Role,
Content: message.Content,
}
if lastMessage != nil && lastMessage.Role == message.Role {
if message.Role == "tool" {
fmtMessage.ToolCallId = message.ToolCallId
}
if message.Role == "assistant" && message.ToolCalls != nil {
fmtMessage.ToolCalls = message.ToolCalls
}
if lastMessage.Role == message.Role && lastMessage.Role != "tool" {
if lastMessage.IsStringContent() && message.IsStringContent() {
content, _ := json.Marshal(strings.Trim(fmt.Sprintf("%s %s", lastMessage.StringContent(), message.StringContent()), "\""))
fmtMessage.Content = content
@@ -135,10 +139,11 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
fmtMessage.Content = content
}
formatMessages = append(formatMessages, fmtMessage)
lastMessage = &textRequest.Messages[i]
lastMessage = fmtMessage
}
claudeMessages := make([]ClaudeMessage, 0)
isFirstMessage := true
for _, message := range formatMessages {
if message.Role == "system" {
if message.IsStringContent() {
@@ -154,10 +159,54 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
claudeRequest.System = content
}
} else {
if isFirstMessage {
isFirstMessage = false
if message.Role != "user" {
// fix: first message is assistant, add user message
claudeMessage := ClaudeMessage{
Role: "user",
Content: []ClaudeMediaMessage{
{
Type: "text",
Text: "...",
},
},
}
claudeMessages = append(claudeMessages, claudeMessage)
}
}
claudeMessage := ClaudeMessage{
Role: message.Role,
}
if message.IsStringContent() {
if message.Role == "tool" {
if len(claudeMessages) > 0 && claudeMessages[len(claudeMessages)-1].Role == "user" {
lastMessage := claudeMessages[len(claudeMessages)-1]
if content, ok := lastMessage.Content.(string); ok {
lastMessage.Content = []ClaudeMediaMessage{
{
Type: "text",
Text: content,
},
}
}
lastMessage.Content = append(lastMessage.Content.([]ClaudeMediaMessage), ClaudeMediaMessage{
Type: "tool_result",
ToolUseId: message.ToolCallId,
Content: message.StringContent(),
})
claudeMessages[len(claudeMessages)-1] = lastMessage
continue
} else {
claudeMessage.Role = "user"
claudeMessage.Content = []ClaudeMediaMessage{
{
Type: "tool_result",
ToolUseId: message.ToolCallId,
Content: message.StringContent(),
},
}
}
} else if message.IsStringContent() && message.ToolCalls == nil {
claudeMessage.Content = message.StringContent()
} else {
claudeMediaMessages := make([]ClaudeMediaMessage, 0)
@@ -190,6 +239,28 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
}
claudeMediaMessages = append(claudeMediaMessages, claudeMediaMessage)
}
if message.ToolCalls != nil {
for _, tc := range message.ToolCalls.([]interface{}) {
toolCallJSON, _ := json.Marshal(tc)
var toolCall dto.ToolCall
err := json.Unmarshal(toolCallJSON, &toolCall)
if err != nil {
common.SysError("tool call is not a dto.ToolCall: " + fmt.Sprintf("%v", tc))
continue
}
inputObj := make(map[string]any)
if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &inputObj); err != nil {
common.SysError("tool call function arguments is not a map[string]any: " + fmt.Sprintf("%v", toolCall.Function.Arguments))
continue
}
claudeMediaMessages = append(claudeMediaMessages, ClaudeMediaMessage{
Type: "tool_use",
Id: toolCall.ID,
Name: toolCall.Function.Name,
Input: inputObj,
})
}
}
claudeMessage.Content = claudeMediaMessages
}
claudeMessages = append(claudeMessages, claudeMessage)
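The net effect of the tool-handling branches above is that an OpenAI-style assistant tool call becomes a Claude tool_use content block, and the matching role "tool" message becomes a tool_result block inside the following user turn. A rough illustration of the target message shapes, using simplified local structs (the real ClaudeMessage and ClaudeMediaMessage carry more fields):

package main

import (
	"encoding/json"
	"fmt"
)

type mediaMessage struct {
	Type      string         `json:"type"`
	Text      string         `json:"text,omitempty"`
	Id        string         `json:"id,omitempty"`
	Name      string         `json:"name,omitempty"`
	Input     map[string]any `json:"input,omitempty"`
	ToolUseId string         `json:"tool_use_id,omitempty"`
	Content   string         `json:"content,omitempty"`
}

type message struct {
	Role    string `json:"role"`
	Content any    `json:"content"`
}

func main() {
	msgs := []message{
		// assistant turn carrying the tool call
		{Role: "assistant", Content: []mediaMessage{{
			Type: "tool_use", Id: "toolu_123", Name: "get_weather",
			Input: map[string]any{"city": "Paris"},
		}}},
		// following user turn carrying the tool result
		{Role: "user", Content: []mediaMessage{{
			Type: "tool_result", ToolUseId: "toolu_123", Content: "18°C, cloudy",
		}}},
	}
	out, _ := json.MarshalIndent(msgs, "", "  ")
	fmt.Println(string(out))
}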
@@ -324,12 +395,13 @@ func ResponseClaude2OpenAI(reqMode int, claudeResponse *ClaudeResponse) *dto.Ope
if len(tools) > 0 {
choice.Message.ToolCalls = tools
}
fullTextResponse.Model = claudeResponse.Model
choices = append(choices, choice)
fullTextResponse.Choices = choices
return &fullTextResponse
}
func claudeStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo, requestMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
func ClaudeStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo, requestMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
var usage *dto.Usage
usage = &dto.Usage{}
@@ -411,7 +483,7 @@ func claudeStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.
return nil, usage
}
func claudeHandler(requestMode int, c *gin.Context, resp *http.Response, promptTokens int, model string) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
func ClaudeHandler(c *gin.Context, resp *http.Response, requestMode int, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
@@ -437,15 +509,15 @@ func claudeHandler(requestMode int, c *gin.Context, resp *http.Response, promptT
}, nil
}
fullTextResponse := ResponseClaude2OpenAI(requestMode, &claudeResponse)
completionTokens, err := service.CountTokenText(claudeResponse.Completion, model)
completionTokens, err := service.CountTokenText(claudeResponse.Completion, info.OriginModelName)
if err != nil {
return service.OpenAIErrorWrapper(err, "count_token_text_failed", http.StatusInternalServerError), nil
}
usage := dto.Usage{}
if requestMode == RequestModeCompletion {
usage.PromptTokens = promptTokens
usage.PromptTokens = info.PromptTokens
usage.CompletionTokens = completionTokens
usage.TotalTokens = promptTokens + completionTokens
usage.TotalTokens = info.PromptTokens + completionTokens
} else {
usage.PromptTokens = claudeResponse.Usage.InputTokens
usage.CompletionTokens = claudeResponse.Usage.OutputTokens

View File

@@ -1,7 +1,10 @@
package cohere
var ModelList = []string{
"command-r", "command-r-plus", "command-light", "command-light-nightly", "command", "command-nightly",
"command-r", "command-r-plus",
"command-r-08-2024", "command-r-plus-08-2024",
"c4ai-aya-23-35b", "c4ai-aya-23-8b",
"command-light", "command-light-nightly", "command", "command-nightly",
"rerank-english-v3.0", "rerank-multilingual-v3.0", "rerank-english-v2.0", "rerank-multilingual-v2.0",
}

View File

@@ -8,6 +8,7 @@ type CohereRequest struct {
Message string `json:"message"`
Stream bool `json:"stream"`
MaxTokens int `json:"max_tokens"`
SafetyMode string `json:"safety_mode,omitempty"`
}
type ChatHistory struct {

View File

@@ -23,6 +23,9 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
Stream: textRequest.Stream,
MaxTokens: textRequest.GetMaxTokens(),
}
if common.CohereSafetySetting != "NONE" {
cohereReq.SafetyMode = common.CohereSafetySetting
}
if cohereReq.MaxTokens == 0 {
cohereReq.MaxTokens = 4000
}
@@ -44,6 +47,7 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
})
}
}
return &cohereReq
}
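With the change above, the converted Cohere request only carries safety_mode when the global setting is not NONE. A minimal look at the resulting JSON, using a trimmed-down request struct (the real CohereRequest also carries chat_history and other fields; CONTEXTUAL is just an example value):

package main

import (
	"encoding/json"
	"fmt"
)

type cohereRequest struct {
	Model      string `json:"model"`
	Message    string `json:"message"`
	MaxTokens  int    `json:"max_tokens"`
	SafetyMode string `json:"safety_mode,omitempty"`
}

func main() {
	safetySetting := "CONTEXTUAL" // hypothetical value of common.CohereSafetySetting
	req := cohereRequest{Model: "command-r-08-2024", Message: "hello", MaxTokens: 4000}
	if safetySetting != "NONE" {
		req.SafetyMode = safetySetting
	}
	b, _ := json.Marshal(req)
	fmt.Println(string(b))
	// {"model":"command-r-08-2024","message":"hello","max_tokens":4000,"safety_mode":"CONTEXTUAL"}
}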

View File

@@ -70,9 +70,9 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
err, usage = geminiChatStreamHandler(c, resp, info)
err, usage = GeminiChatStreamHandler(c, resp, info)
} else {
err, usage = geminiChatHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
err, usage = GeminiChatHandler(c, resp)
}
return
}

View File

@@ -6,7 +6,7 @@ const (
var ModelList = []string{
"gemini-1.0-pro-latest", "gemini-1.0-pro-001", "gemini-1.5-pro-latest", "gemini-1.5-flash-latest", "gemini-ultra",
"gemini-1.0-pro-vision-latest", "gemini-1.0-pro-vision-001",
"gemini-1.0-pro-vision-latest", "gemini-1.0-pro-vision-001", "gemini-1.5-pro-exp-0827", "gemini-1.5-flash-exp-0827",
}
var ChannelName = "google gemini"

View File

@@ -220,7 +220,7 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) *dto.Ch
return &response
}
func geminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
func GeminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
responseText := ""
id := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
createAt := common.GetTimestamp()
@@ -279,7 +279,7 @@ func geminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycom
return nil, usage
}
func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
func GeminiChatHandler(c *gin.Context, resp *http.Response) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil

View File

@@ -32,7 +32,7 @@ func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
if info.RelayMode == constant.RelayModeRerank {
return fmt.Sprintf("%s/v1/rerank", info.BaseUrl), nil
} else if info.RelayMode == constant.RelayModeEmbeddings {
return fmt.Sprintf("%s/v1/embeddings ", info.BaseUrl), nil
return fmt.Sprintf("%s/v1/embeddings", info.BaseUrl), nil
}
return "", errors.New("invalid relay mode")
}
@@ -58,6 +58,8 @@ func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dt
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.RelayMode == constant.RelayModeRerank {
err, usage = jinaRerankHandler(c, resp)
} else if info.RelayMode == constant.RelayModeEmbeddings {
err, usage = jinaEmbeddingHandler(c, resp)
}
return
}

View File

@@ -33,3 +33,28 @@ func jinaRerankHandler(c *gin.Context, resp *http.Response) (*dto.OpenAIErrorWit
_, err = c.Writer.Write(jsonResponse)
return nil, &jinaResp.Usage
}
func jinaEmbeddingHandler(c *gin.Context, resp *http.Response) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
var jinaResp dto.OpenAIEmbeddingResponse
err = json.Unmarshal(responseBody, &jinaResp)
if err != nil {
return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
jsonResponse, err := json.Marshal(jinaResp)
if err != nil {
return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
return nil, &jinaResp.Usage
}

View File

@@ -17,11 +17,25 @@ type OllamaRequest struct {
PresencePenalty float64 `json:"presence_penalty,omitempty"`
}
type Options struct {
Seed int `json:"seed,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
TopK int `json:"top_k,omitempty"`
TopP float64 `json:"top_p,omitempty"`
FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
PresencePenalty float64 `json:"presence_penalty,omitempty"`
NumPredict int `json:"num_predict,omitempty"`
NumCtx int `json:"num_ctx,omitempty"`
}
type OllamaEmbeddingRequest struct {
Model string `json:"model,omitempty"`
Prompt any `json:"prompt,omitempty"`
Model string `json:"model,omitempty"`
Input []string `json:"input"`
Options *Options `json:"options,omitempty"`
}
type OllamaEmbeddingResponse struct {
Error string `json:"error,omitempty"`
Model string `json:"model"`
Embedding []float64 `json:"embedding,omitempty"`
}

View File

@@ -9,7 +9,6 @@ import (
"net/http"
"one-api/dto"
"one-api/service"
"strings"
)
func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
@@ -45,8 +44,15 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
func requestOpenAI2Embeddings(request dto.GeneralOpenAIRequest) *OllamaEmbeddingRequest {
return &OllamaEmbeddingRequest{
Model: request.Model,
Prompt: strings.Join(request.ParseInput(), " "),
Model: request.Model,
Input: request.ParseInput(),
Options: &Options{
Seed: int(request.Seed),
Temperature: request.Temperature,
TopP: request.TopP,
FrequencyPenalty: request.FrequencyPenalty,
PresencePenalty: request.PresencePenalty,
},
}
}
@@ -64,6 +70,9 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
if err != nil {
return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
if ollamaEmbeddingResponse.Error != "" {
return service.OpenAIErrorWrapper(err, "ollama_error", resp.StatusCode), nil
}
data := make([]dto.OpenAIEmbeddingResponseItem, 0, 1)
data = append(data, dto.OpenAIEmbeddingResponseItem{
Embedding: ollamaEmbeddingResponse.Embedding,
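The embedding conversion now sends the whole input slice in one request instead of joining it into a single prompt string, which is what enables multi-text embedding. A sketch of the request body that ends up on the wire; field names mirror the structs above, option values are illustrative:

package main

import (
	"encoding/json"
	"fmt"
)

type options struct {
	Seed        int     `json:"seed,omitempty"`
	Temperature float64 `json:"temperature,omitempty"`
}

type ollamaEmbeddingRequest struct {
	Model   string   `json:"model,omitempty"`
	Input   []string `json:"input"`
	Options *options `json:"options,omitempty"`
}

func main() {
	req := ollamaEmbeddingRequest{
		Model:   "nomic-embed-text",
		Input:   []string{"first sentence", "second sentence"},
		Options: &options{Seed: 42},
	}
	b, _ := json.Marshal(req)
	fmt.Println(string(b))
	// {"model":"nomic-embed-text","input":["first sentence","second sentence"],"options":{"seed":42}}
}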

View File

@@ -78,6 +78,12 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, info *relaycommon.RelayInfo, re
if info.ChannelType != common.ChannelTypeOpenAI {
request.StreamOptions = nil
}
if strings.HasPrefix(request.Model, "o1-") {
if request.MaxCompletionTokens == 0 && request.MaxTokens != 0 {
request.MaxCompletionTokens = request.MaxTokens
request.MaxTokens = 0
}
}
return request, nil
}
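The o1- branch above accounts for the o1 models rejecting max_tokens in favour of max_completion_tokens. The same remap as a standalone helper sketch, with field names that mirror the DTO but are otherwise illustrative:

package main

import (
	"fmt"
	"strings"
)

type chatRequest struct {
	Model               string `json:"model"`
	MaxTokens           int    `json:"max_tokens,omitempty"`
	MaxCompletionTokens int    `json:"max_completion_tokens,omitempty"`
}

// remapO1Tokens moves a caller-supplied max_tokens into max_completion_tokens
// for o1-* models, leaving an explicit max_completion_tokens untouched.
func remapO1Tokens(req *chatRequest) {
	if strings.HasPrefix(req.Model, "o1-") {
		if req.MaxCompletionTokens == 0 && req.MaxTokens != 0 {
			req.MaxCompletionTokens = req.MaxTokens
			req.MaxTokens = 0
		}
	}
}

func main() {
	r := chatRequest{Model: "o1-mini", MaxTokens: 1024}
	remapO1Tokens(&r)
	fmt.Printf("%+v\n", r) // {Model:o1-mini MaxTokens:0 MaxCompletionTokens:1024}
}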

View File

@@ -11,6 +11,8 @@ var ModelList = []string{
"chatgpt-4o-latest",
"gpt-4o", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06",
"gpt-4o-mini", "gpt-4o-mini-2024-07-18",
"o1-preview", "o1-preview-2024-09-12",
"o1-mini", "o1-mini-2024-09-12",
"text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large",
"text-curie-001", "text-babbage-001", "text-ada-001",
"text-moderation-latest", "text-moderation-stable",

View File

@@ -0,0 +1,83 @@
package siliconflow
import (
"errors"
"fmt"
"github.com/gin-gonic/gin"
"io"
"net/http"
"one-api/dto"
"one-api/relay/channel"
"one-api/relay/channel/openai"
relaycommon "one-api/relay/common"
"one-api/relay/constant"
)
type Adaptor struct {
}
func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
//TODO implement me
return nil, errors.New("not implemented")
}
func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
//TODO implement me
return nil, errors.New("not implemented")
}
func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
}
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
if info.RelayMode == constant.RelayModeRerank {
return fmt.Sprintf("%s/v1/rerank", info.BaseUrl), nil
} else if info.RelayMode == constant.RelayModeEmbeddings {
return fmt.Sprintf("%s/v1/embeddings", info.BaseUrl), nil
} else if info.RelayMode == constant.RelayModeChatCompletions {
return fmt.Sprintf("%s/v1/chat/completions", info.BaseUrl), nil
}
return "", errors.New("invalid relay mode")
}
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
channel.SetupApiRequestHeader(info, c, req)
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", info.ApiKey))
return nil
}
func (a *Adaptor) ConvertRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
return request, nil
}
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error) {
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) {
return request, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
switch info.RelayMode {
case constant.RelayModeRerank:
err, usage = siliconflowRerankHandler(c, resp)
case constant.RelayModeChatCompletions:
if info.IsStream {
err, usage = openai.OaiStreamHandler(c, resp, info)
} else {
err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
}
case constant.RelayModeEmbeddings:
err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
}
return
}
func (a *Adaptor) GetModelList() []string {
return ModelList
}
func (a *Adaptor) GetChannelName() string {
return ChannelName
}

View File

@@ -0,0 +1,51 @@
package siliconflow
var ModelList = []string{
"THUDM/glm-4-9b-chat",
//"stabilityai/stable-diffusion-xl-base-1.0",
//"TencentARC/PhotoMaker",
"InstantX/InstantID",
//"stabilityai/stable-diffusion-2-1",
//"stabilityai/sd-turbo",
//"stabilityai/sdxl-turbo",
"ByteDance/SDXL-Lightning",
"deepseek-ai/deepseek-llm-67b-chat",
"Qwen/Qwen1.5-14B-Chat",
"Qwen/Qwen1.5-7B-Chat",
"Qwen/Qwen1.5-110B-Chat",
"Qwen/Qwen1.5-32B-Chat",
"01-ai/Yi-1.5-6B-Chat",
"01-ai/Yi-1.5-9B-Chat-16K",
"01-ai/Yi-1.5-34B-Chat-16K",
"THUDM/chatglm3-6b",
"deepseek-ai/DeepSeek-V2-Chat",
"Qwen/Qwen2-72B-Instruct",
"Qwen/Qwen2-7B-Instruct",
"Qwen/Qwen2-57B-A14B-Instruct",
//"stabilityai/stable-diffusion-3-medium",
"deepseek-ai/DeepSeek-Coder-V2-Instruct",
"Qwen/Qwen2-1.5B-Instruct",
"internlm/internlm2_5-7b-chat",
"BAAI/bge-large-en-v1.5",
"BAAI/bge-large-zh-v1.5",
"Pro/Qwen/Qwen2-7B-Instruct",
"Pro/Qwen/Qwen2-1.5B-Instruct",
"Pro/Qwen/Qwen1.5-7B-Chat",
"Pro/THUDM/glm-4-9b-chat",
"Pro/THUDM/chatglm3-6b",
"Pro/01-ai/Yi-1.5-9B-Chat-16K",
"Pro/01-ai/Yi-1.5-6B-Chat",
"Pro/google/gemma-2-9b-it",
"Pro/internlm/internlm2_5-7b-chat",
"Pro/meta-llama/Meta-Llama-3-8B-Instruct",
"Pro/mistralai/Mistral-7B-Instruct-v0.2",
"black-forest-labs/FLUX.1-schnell",
"iic/SenseVoiceSmall",
"netease-youdao/bce-embedding-base_v1",
"BAAI/bge-m3",
"internlm/internlm2_5-20b-chat",
"Qwen/Qwen2-Math-72B-Instruct",
"netease-youdao/bce-reranker-base_v1",
"BAAI/bge-reranker-v2-m3",
}
var ChannelName = "siliconflow"

View File

@@ -0,0 +1,17 @@
package siliconflow
import "one-api/dto"
type SFTokens struct {
InputTokens int `json:"input_tokens"`
OutputTokens int `json:"output_tokens"`
}
type SFMeta struct {
Tokens SFTokens `json:"tokens"`
}
type SFRerankResponse struct {
Results []dto.RerankResponseDocument `json:"results"`
Meta SFMeta `json:"meta"`
}

View File

@@ -0,0 +1,44 @@
package siliconflow
import (
"encoding/json"
"github.com/gin-gonic/gin"
"io"
"net/http"
"one-api/dto"
"one-api/service"
)
func siliconflowRerankHandler(c *gin.Context, resp *http.Response) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
var siliconflowResp SFRerankResponse
err = json.Unmarshal(responseBody, &siliconflowResp)
if err != nil {
return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
usage := &dto.Usage{
PromptTokens: siliconflowResp.Meta.Tokens.InputTokens,
CompletionTokens: siliconflowResp.Meta.Tokens.OutputTokens,
TotalTokens: siliconflowResp.Meta.Tokens.InputTokens + siliconflowResp.Meta.Tokens.OutputTokens,
}
rerankResp := &dto.RerankResponse{
Results: siliconflowResp.Results,
Usage: *usage,
}
jsonResponse, err := json.Marshal(rerankResp)
if err != nil {
return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
c.Writer.Header().Set("Content-Type", "application/json")
c.Writer.WriteHeader(resp.StatusCode)
_, err = c.Writer.Write(jsonResponse)
return nil, usage
}

View File

@@ -0,0 +1,184 @@
package vertex
import (
"encoding/json"
"errors"
"fmt"
"github.com/gin-gonic/gin"
"github.com/jinzhu/copier"
"io"
"net/http"
"one-api/dto"
"one-api/relay/channel"
"one-api/relay/channel/claude"
"one-api/relay/channel/gemini"
"one-api/relay/channel/openai"
relaycommon "one-api/relay/common"
"strings"
)
const (
RequestModeClaude = 1
RequestModeGemini = 2
RequestModeLlama = 3
)
var claudeModelMap = map[string]string{
"claude-3-sonnet-20240229": "claude-3-sonnet@20240229",
"claude-3-opus-20240229": "claude-3-opus@20240229",
"claude-3-haiku-20240307": "claude-3-haiku@20240307",
"claude-3-5-sonnet-20240620": "claude-3-5-sonnet@20240620",
}
const anthropicVersion = "vertex-2023-10-16"
type Adaptor struct {
RequestMode int
AccountCredentials Credentials
}
func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
//TODO implement me
return nil, errors.New("not implemented")
}
func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
//TODO implement me
return nil, errors.New("not implemented")
}
func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
if strings.HasPrefix(info.UpstreamModelName, "claude") {
a.RequestMode = RequestModeClaude
} else if strings.HasPrefix(info.UpstreamModelName, "gemini") {
a.RequestMode = RequestModeGemini
} else if strings.Contains(info.UpstreamModelName, "llama") {
a.RequestMode = RequestModeLlama
}
}
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
adc := &Credentials{}
if err := json.Unmarshal([]byte(info.ApiKey), adc); err != nil {
return "", fmt.Errorf("failed to decode credentials file: %w", err)
}
region := GetModelRegion(info.ApiVersion, info.OriginModelName)
a.AccountCredentials = *adc
suffix := ""
if a.RequestMode == RequestModeGemini {
if info.IsStream {
suffix = "streamGenerateContent?alt=sse"
} else {
suffix = "generateContent"
}
return fmt.Sprintf(
"https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:%s",
region,
adc.ProjectID,
region,
info.UpstreamModelName,
suffix,
), nil
} else if a.RequestMode == RequestModeClaude {
if info.IsStream {
suffix = "streamRawPredict?alt=sse"
} else {
suffix = "rawPredict"
}
if v, ok := claudeModelMap[info.UpstreamModelName]; ok {
info.UpstreamModelName = v
}
return fmt.Sprintf(
"https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/anthropic/models/%s:%s",
region,
adc.ProjectID,
region,
info.UpstreamModelName,
suffix,
), nil
} else if a.RequestMode == RequestModeLlama {
return fmt.Sprintf(
"https://%s-aiplatform.googleapis.com/v1beta1/projects/%s/locations/%s/endpoints/openapi/chat/completions",
region,
adc.ProjectID,
region,
), nil
}
return "", errors.New("unsupported request mode")
}
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
channel.SetupApiRequestHeader(info, c, req)
accessToken, err := getAccessToken(a, info)
if err != nil {
return err
}
req.Header.Set("Authorization", "Bearer "+accessToken)
return nil
}
func (a *Adaptor) ConvertRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
if request == nil {
return nil, errors.New("request is nil")
}
if a.RequestMode == RequestModeClaude {
claudeReq, err := claude.RequestOpenAI2ClaudeMessage(*request)
if err != nil {
return nil, err
}
vertexClaudeReq := &VertexAIClaudeRequest{
AnthropicVersion: anthropicVersion,
}
if err = copier.Copy(vertexClaudeReq, claudeReq); err != nil {
return nil, errors.New("failed to copy claude request")
}
c.Set("request_model", request.Model)
return vertexClaudeReq, nil
} else if a.RequestMode == RequestModeGemini {
geminiRequest := gemini.CovertGemini2OpenAI(*request)
c.Set("request_model", request.Model)
return geminiRequest, nil
} else if a.RequestMode == RequestModeLlama {
return request, nil
}
return nil, errors.New("unsupported request mode")
}
func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error) {
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
switch a.RequestMode {
case RequestModeClaude:
err, usage = claude.ClaudeStreamHandler(c, resp, info, claude.RequestModeMessage)
case RequestModeGemini:
err, usage = gemini.GeminiChatStreamHandler(c, resp, info)
case RequestModeLlama:
err, usage = openai.OaiStreamHandler(c, resp, info)
}
} else {
switch a.RequestMode {
case RequestModeClaude:
err, usage = claude.ClaudeHandler(c, resp, claude.RequestModeMessage, info)
case RequestModeGemini:
err, usage = gemini.GeminiChatHandler(c, resp)
case RequestModeLlama:
err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.OriginModelName)
}
}
return
}
func (a *Adaptor) GetModelList() []string {
return ModelList
}
func (a *Adaptor) GetChannelName() string {
return ChannelName
}
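For reference, the Gemini branch of GetRequestURL above produces endpoints of the following shape; the project and region values here are placeholders, not real deployments:

package main

import "fmt"

func main() {
	region := "us-central1"       // placeholder
	projectID := "my-gcp-project" // placeholder
	model := "gemini-1.5-pro-001"
	suffix := "streamGenerateContent?alt=sse" // streaming case
	fmt.Printf(
		"https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:%s\n",
		region, projectID, region, model, suffix,
	)
}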

View File

@@ -0,0 +1,15 @@
package vertex
var ModelList = []string{
"claude-3-sonnet-20240229",
"claude-3-opus-20240229",
"claude-3-haiku-20240307",
"claude-3-5-sonnet-20240620",
//"gemini-1.5-pro-latest", "gemini-1.5-flash-latest",
"gemini-1.5-pro-001", "gemini-1.5-flash-001", "gemini-pro", "gemini-pro-vision",
"meta/llama3-405b-instruct-maas",
}
var ChannelName = "vertex-ai"

View File

@@ -0,0 +1,17 @@
package vertex
import "one-api/relay/channel/claude"
type VertexAIClaudeRequest struct {
AnthropicVersion string `json:"anthropic_version"`
Messages []claude.ClaudeMessage `json:"messages"`
System string `json:"system,omitempty"`
MaxTokens int `json:"max_tokens,omitempty"`
StopSequences []string `json:"stop_sequences,omitempty"`
Stream bool `json:"stream,omitempty"`
Temperature float64 `json:"temperature,omitempty"`
TopP float64 `json:"top_p,omitempty"`
TopK int `json:"top_k,omitempty"`
Tools []claude.Tool `json:"tools,omitempty"`
ToolChoice any `json:"tool_choice,omitempty"`
}

View File

@@ -0,0 +1,16 @@
package vertex
import "one-api/common"
func GetModelRegion(other string, localModelName string) string {
// if other is json string
if common.IsJsonStr(other) {
m := common.StrToMap(other)
if m[localModelName] != nil {
return m[localModelName].(string)
} else {
return m["default"].(string)
}
}
return other
}
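GetModelRegion lets the channel's 'other' field be either a plain region string or a JSON object with per-model overrides plus a 'default' entry. A small demonstration of both forms (the model names and regions are examples only):

package main

import (
	"encoding/json"
	"fmt"
)

// pickRegion mirrors GetModelRegion: JSON objects get a per-model lookup with a
// "default" fallback; anything else is treated as the region itself.
func pickRegion(other, model string) string {
	m := make(map[string]string)
	if err := json.Unmarshal([]byte(other), &m); err != nil {
		return other
	}
	if r, ok := m[model]; ok {
		return r
	}
	return m["default"]
}

func main() {
	other := `{"default": "us-central1", "claude-3-5-sonnet-20240620": "us-east5"}`
	fmt.Println(pickRegion(other, "claude-3-5-sonnet-20240620")) // us-east5
	fmt.Println(pickRegion(other, "gemini-1.5-pro-001"))         // us-central1
	fmt.Println(pickRegion("europe-west1", "any-model"))         // europe-west1
}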

View File

@@ -0,0 +1,122 @@
package vertex
import (
"crypto/rsa"
"crypto/x509"
"encoding/json"
"encoding/pem"
"errors"
"github.com/bytedance/gopkg/cache/asynccache"
"github.com/golang-jwt/jwt"
"net/http"
"net/url"
relaycommon "one-api/relay/common"
"strings"
"fmt"
"time"
)
type Credentials struct {
ProjectID string `json:"project_id"`
PrivateKeyID string `json:"private_key_id"`
PrivateKey string `json:"private_key"`
ClientEmail string `json:"client_email"`
ClientID string `json:"client_id"`
}
var Cache = asynccache.NewAsyncCache(asynccache.Options{
RefreshDuration: time.Minute * 35,
EnableExpire: true,
ExpireDuration: time.Minute * 30,
Fetcher: func(key string) (interface{}, error) {
return nil, errors.New("not found")
},
})
func getAccessToken(a *Adaptor, info *relaycommon.RelayInfo) (string, error) {
cacheKey := fmt.Sprintf("access-token-%d", info.ChannelId)
val, err := Cache.Get(cacheKey)
if err == nil {
return val.(string), nil
}
signedJWT, err := createSignedJWT(a.AccountCredentials.ClientEmail, a.AccountCredentials.PrivateKey)
if err != nil {
return "", fmt.Errorf("failed to create signed JWT: %w", err)
}
newToken, err := exchangeJwtForAccessToken(signedJWT)
if err != nil {
return "", fmt.Errorf("failed to exchange JWT for access token: %w", err)
}
if err := Cache.SetDefault(cacheKey, newToken); err {
return newToken, nil
}
return newToken, nil
}
func createSignedJWT(email, privateKeyPEM string) (string, error) {
privateKeyPEM = strings.ReplaceAll(privateKeyPEM, "-----BEGIN PRIVATE KEY-----", "")
privateKeyPEM = strings.ReplaceAll(privateKeyPEM, "-----END PRIVATE KEY-----", "")
privateKeyPEM = strings.ReplaceAll(privateKeyPEM, "\r", "")
privateKeyPEM = strings.ReplaceAll(privateKeyPEM, "\n", "")
privateKeyPEM = strings.ReplaceAll(privateKeyPEM, "\\n", "")
block, _ := pem.Decode([]byte("-----BEGIN PRIVATE KEY-----\n" + privateKeyPEM + "\n-----END PRIVATE KEY-----"))
if block == nil {
return "", fmt.Errorf("failed to parse PEM block containing the private key")
}
privateKey, err := x509.ParsePKCS8PrivateKey(block.Bytes)
if err != nil {
return "", err
}
rsaPrivateKey, ok := privateKey.(*rsa.PrivateKey)
if !ok {
return "", fmt.Errorf("not an RSA private key")
}
now := time.Now()
claims := jwt.MapClaims{
"iss": email,
"scope": "https://www.googleapis.com/auth/cloud-platform",
"aud": "https://www.googleapis.com/oauth2/v4/token",
"exp": now.Add(time.Minute * 35).Unix(),
"iat": now.Unix(),
}
token := jwt.NewWithClaims(jwt.SigningMethodRS256, claims)
signedToken, err := token.SignedString(rsaPrivateKey)
if err != nil {
return "", err
}
return signedToken, nil
}
func exchangeJwtForAccessToken(signedJWT string) (string, error) {
authURL := "https://www.googleapis.com/oauth2/v4/token"
data := url.Values{}
data.Set("grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer")
data.Set("assertion", signedJWT)
resp, err := http.PostForm(authURL, data)
if err != nil {
return "", err
}
defer resp.Body.Close()
var result map[string]interface{}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return "", err
}
if accessToken, ok := result["access_token"].(string); ok {
return accessToken, nil
}
return "", fmt.Errorf("failed to get access token: %v", result)
}

View File

@@ -1,7 +1,7 @@
package zhipu_4v
var ModelList = []string{
"glm-4", "glm-4v", "glm-3-turbo", "glm-4-alltools",
"glm-4", "glm-4v", "glm-3-turbo", "glm-4-alltools", "glm-4-plus", "glm-4-0520", "glm-4-air", "glm-4-airx", "glm-4-long", "glm-4-flash", "glm-4v-plus",
}
var ChannelName = "zhipu_4v"

View File

@@ -22,6 +22,7 @@ type RelayInfo struct {
IsStream bool
RelayMode int
UpstreamModelName string
OriginModelName string
RequestURLPath string
ApiVersion string
PromptTokens int
@@ -57,6 +58,8 @@ func GenRelayInfo(c *gin.Context) *RelayInfo {
TokenUnlimited: tokenUnlimited,
StartTime: startTime,
FirstResponseTime: startTime.Add(-time.Second),
OriginModelName: c.GetString("original_model"),
UpstreamModelName: c.GetString("original_model"),
ApiType: apiType,
ApiVersion: c.GetString("api_version"),
ApiKey: strings.TrimPrefix(c.Request.Header.Get("Authorization"), "Bearer "),
@@ -68,6 +71,9 @@ func GenRelayInfo(c *gin.Context) *RelayInfo {
if info.ChannelType == common.ChannelTypeAzure {
info.ApiVersion = GetAPIVersion(c)
}
if info.ChannelType == common.ChannelTypeVertexAi {
info.ApiVersion = c.GetString("region")
}
if info.ChannelType == common.ChannelTypeOpenAI || info.ChannelType == common.ChannelTypeAnthropic ||
info.ChannelType == common.ChannelTypeAws || info.ChannelType == common.ChannelTypeGemini ||
info.ChannelType == common.ChannelCloudflare {

View File

@@ -23,6 +23,8 @@ const (
APITypeDify
APITypeJina
APITypeCloudflare
APITypeSiliconFlow
APITypeVertexAi
APITypeDummy // this one is only for count, do not add any channel after this
)
@@ -66,6 +68,10 @@ func ChannelType2APIType(channelType int) (int, bool) {
apiType = APITypeJina
case common.ChannelCloudflare:
apiType = APITypeCloudflare
case common.ChannelTypeSiliconFlow:
apiType = APITypeSiliconFlow
case common.ChannelTypeVertexAi:
apiType = APITypeVertexAi
}
if apiType == -1 {
return APITypeOpenAI, false

View File

@@ -38,9 +38,7 @@ func getAndValidImageRequest(c *gin.Context, info *relaycommon.RelayInfo) (*dto.
if imageRequest.Model == "" {
imageRequest.Model = "dall-e-2"
}
if imageRequest.Quality == "" {
imageRequest.Quality = "standard"
}
// Not "256x256", "512x512", or "1024x1024"
if imageRequest.Model == "dall-e-2" || imageRequest.Model == "dall-e" {
if imageRequest.Size != "" && imageRequest.Size != "256x256" && imageRequest.Size != "512x512" && imageRequest.Size != "1024x1024" {
@@ -50,6 +48,9 @@ func getAndValidImageRequest(c *gin.Context, info *relaycommon.RelayInfo) (*dto.
if imageRequest.Size != "" && imageRequest.Size != "1024x1024" && imageRequest.Size != "1024x1792" && imageRequest.Size != "1792x1024" {
return nil, errors.New("size must be one of 256x256, 512x512, or 1024x1024, dall-e-3 1024x1792 or 1792x1024")
}
if imageRequest.Quality == "" {
imageRequest.Quality = "standard"
}
//if imageRequest.N != 1 {
// return nil, errors.New("n must be 1")
//}

View File

@@ -52,7 +52,7 @@ func getAndValidateTextRequest(c *gin.Context, relayInfo *relaycommon.RelayInfo)
}
case relayconstant.RelayModeEmbeddings:
case relayconstant.RelayModeModerations:
if textRequest.Input == "" {
if textRequest.Input == "" || textRequest.Input == nil {
return nil, errors.New("field input is required")
}
case relayconstant.RelayModeEdits:
@@ -317,7 +317,7 @@ func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, modelN
totalTokens := promptTokens + completionTokens
var logContent string
if !usePrice {
logContent = fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f,补全倍率 %.2f", modelRatio, groupRatio, completionRatio)
logContent = fmt.Sprintf("模型倍率 %.2f,补全倍率 %.2f,分组倍率 %.2f", modelRatio, completionRatio, groupRatio)
} else {
logContent = fmt.Sprintf("模型价格 %.2f,分组倍率 %.2f", modelPrice, groupRatio)
}
@@ -354,6 +354,10 @@ func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, modelN
logModel = "gpt-4-gizmo-*"
logContent += fmt.Sprintf(",模型 %s", modelName)
}
if strings.HasPrefix(logModel, "gpt-4o-gizmo") {
logModel = "gpt-4o-gizmo-*"
logContent += fmt.Sprintf(",模型 %s", modelName)
}
if extraContent != "" {
logContent += ", " + extraContent
}

View File

@@ -16,8 +16,10 @@ import (
"one-api/relay/channel/openai"
"one-api/relay/channel/palm"
"one-api/relay/channel/perplexity"
"one-api/relay/channel/siliconflow"
"one-api/relay/channel/task/suno"
"one-api/relay/channel/tencent"
"one-api/relay/channel/vertex"
"one-api/relay/channel/xunfei"
"one-api/relay/channel/zhipu"
"one-api/relay/channel/zhipu_4v"
@@ -62,6 +64,10 @@ func GetAdaptor(apiType int) channel.Adaptor {
return &jina.Adaptor{}
case constant.APITypeCloudflare:
return &cloudflare.Adaptor{}
case constant.APITypeSiliconFlow:
return &siliconflow.Adaptor{}
case constant.APITypeVertexAi:
return &vertex.Adaptor{}
}
return nil
}

View File

@@ -38,6 +38,23 @@ func RerankHelper(c *gin.Context, relayMode int) *dto.OpenAIErrorWithStatusCode
if len(rerankRequest.Documents) == 0 {
return service.OpenAIErrorWrapperLocal(fmt.Errorf("documents is empty"), "invalid_documents", http.StatusBadRequest)
}
// map model name
modelMapping := c.GetString("model_mapping")
//isModelMapped := false
if modelMapping != "" && modelMapping != "{}" {
modelMap := make(map[string]string)
err := json.Unmarshal([]byte(modelMapping), &modelMap)
if err != nil {
return service.OpenAIErrorWrapperLocal(err, "unmarshal_model_mapping_failed", http.StatusInternalServerError)
}
if modelMap[rerankRequest.Model] != "" {
rerankRequest.Model = modelMap[rerankRequest.Model]
// set upstream model name
//isModelMapped = true
}
}
relayInfo.UpstreamModelName = rerankRequest.Model
modelPrice, success := common.GetModelPrice(rerankRequest.Model, false)
groupRatio := common.GetGroupRatio(relayInfo.Group)
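Rerank requests now honour the channel's model_mapping the same way chat requests do. The mapping is the usual alias-to-upstream JSON object; a tiny example of the remap step, with an invented alias:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	modelMapping := `{"rerank-v3": "BAAI/bge-reranker-v2-m3"}` // hypothetical channel config
	requestedModel := "rerank-v3"

	m := make(map[string]string)
	if err := json.Unmarshal([]byte(modelMapping), &m); err != nil {
		panic(err)
	}
	if mapped := m[requestedModel]; mapped != "" {
		requestedModel = mapped
	}
	fmt.Println(requestedModel) // BAAI/bge-reranker-v2-m3
}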

View File

@@ -39,6 +39,7 @@ func SetApiRouter(router *gin.Engine) {
//userRoute.POST("/tokenlog", middleware.CriticalRateLimit(), controller.TokenLog)
userRoute.GET("/logout", controller.Logout)
userRoute.GET("/epay/notify", controller.EpayNotify)
userRoute.GET("/groups", controller.GetUserGroups)
selfRoute := userRoute.Group("/")
selfRoute.Use(middleware.UserAuth())

View File

@@ -54,6 +54,8 @@ func ShouldDisableChannel(channelType int, err *relaymodel.OpenAIErrorWithStatus
switch err.Error.Type {
case "insufficient_quota":
return true
case "insufficient_user_quota":
return true
// https://docs.anthropic.com/claude/reference/errors
case "authentication_error":
return true

View File

@@ -52,7 +52,7 @@ func InitTokenEncoders() {
}
func getModelDefaultTokenEncoder(model string) *tiktoken.Tiktoken {
if strings.HasPrefix(model, "gpt-4o") {
if strings.HasPrefix(model, "gpt-4o") || strings.HasPrefix(model, "chatgpt-4o") {
return cl200kTokenEncoder
}
return defaultTokenEncoder

Two binary image files changed (not shown): one grew from 4.2 KiB to 21 KiB, the other shrank from 7.9 KiB to 7.6 KiB.

View File

@@ -20,11 +20,11 @@ import Redemption from './pages/Redemption';
import TopUp from './pages/TopUp';
import Log from './pages/Log';
import Chat from './pages/Chat';
import Chat2Link from './pages/Chat2Link';
import { Layout } from '@douyinfe/semi-ui';
import Midjourney from './pages/Midjourney';
import Pricing from './pages/Pricing/index.js';
import Task from "./pages/Task/index.js";
// import Detail from './pages/Detail';
const Home = lazy(() => import('./pages/Home'));
const Detail = lazy(() => import('./pages/Detail'));
@@ -58,207 +58,216 @@ function App() {
}, []);
return (
<Layout>
<Layout.Content>
<Routes>
<Route
path='/'
element={
<>
<Routes>
<Route
path='/'
element={
<Suspense fallback={<Loading></Loading>}>
<Home />
</Suspense>
}
/>
<Route
path='/channel'
element={
<PrivateRoute>
<Channel />
</PrivateRoute>
}
/>
<Route
path='/channel/edit/:id'
element={
<Suspense fallback={<Loading></Loading>}>
<EditChannel />
</Suspense>
}
/>
<Route
path='/channel/add'
element={
<Suspense fallback={<Loading></Loading>}>
<EditChannel />
</Suspense>
}
/>
<Route
path='/token'
element={
<PrivateRoute>
<Token />
</PrivateRoute>
}
/>
<Route
path='/redemption'
element={
<PrivateRoute>
<Redemption />
</PrivateRoute>
}
/>
<Route
path='/user'
element={
<PrivateRoute>
<User />
</PrivateRoute>
}
/>
<Route
path='/user/edit/:id'
element={
<Suspense fallback={<Loading></Loading>}>
<EditUser />
</Suspense>
}
/>
<Route
path='/user/edit'
element={
<Suspense fallback={<Loading></Loading>}>
<EditUser />
</Suspense>
}
/>
<Route
path='/user/reset'
element={
<Suspense fallback={<Loading></Loading>}>
<PasswordResetConfirm />
</Suspense>
}
/>
<Route
path='/login'
element={
<Suspense fallback={<Loading></Loading>}>
<LoginForm />
</Suspense>
}
/>
<Route
path='/register'
element={
<Suspense fallback={<Loading></Loading>}>
<RegisterForm />
</Suspense>
}
/>
<Route
path='/reset'
element={
<Suspense fallback={<Loading></Loading>}>
<PasswordResetForm />
</Suspense>
}
/>
<Route
path='/oauth/github'
element={
<Suspense fallback={<Loading></Loading>}>
<GitHubOAuth />
</Suspense>
}
/>
<Route
path='/setting'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<Home />
<Setting />
</Suspense>
}
/>
<Route
path='/channel'
element={
<PrivateRoute>
<Channel />
</PrivateRoute>
}
/>
<Route
path='/channel/edit/:id'
element={
</PrivateRoute>
}
/>
<Route
path='/topup'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<EditChannel />
<TopUp />
</Suspense>
}
/>
<Route
path='/channel/add'
element={
</PrivateRoute>
}
/>
<Route
path='/log'
element={
<PrivateRoute>
<Log />
</PrivateRoute>
}
/>
<Route
path='/detail'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<EditChannel />
<Detail />
</Suspense>
}
/>
<Route
path='/token'
element={
<PrivateRoute>
<Token />
</PrivateRoute>
}
/>
<Route
path='/redemption'
element={
<PrivateRoute>
<Redemption />
</PrivateRoute>
}
/>
<Route
path='/user'
element={
<PrivateRoute>
<User />
</PrivateRoute>
}
/>
<Route
path='/user/edit/:id'
element={
</PrivateRoute>
}
/>
<Route
path='/midjourney'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<EditUser />
<Midjourney />
</Suspense>
}
/>
<Route
path='/user/edit'
element={
</PrivateRoute>
}
/>
<Route
path='/task'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<EditUser />
<Task />
</Suspense>
}
/>
</PrivateRoute>
}
/>
<Route
path='/pricing'
element={
<Suspense fallback={<Loading></Loading>}>
<Pricing />
</Suspense>
}
/>
<Route
path='/about'
element={
<Suspense fallback={<Loading></Loading>}>
<About />
</Suspense>
}
/>
<Route
path='/chat'
element={
<Suspense fallback={<Loading></Loading>}>
<Chat />
</Suspense>
}
/>
{/* 方便使用chat2link直接跳转聊天... */}
<Route
path='/user/reset'
element={
<Suspense fallback={<Loading></Loading>}>
<PasswordResetConfirm />
</Suspense>
}
/>
<Route
path='/login'
element={
<Suspense fallback={<Loading></Loading>}>
<LoginForm />
</Suspense>
}
/>
<Route
path='/register'
element={
<Suspense fallback={<Loading></Loading>}>
<RegisterForm />
</Suspense>
}
/>
<Route
path='/reset'
element={
<Suspense fallback={<Loading></Loading>}>
<PasswordResetForm />
</Suspense>
}
/>
<Route
path='/oauth/github'
element={
<Suspense fallback={<Loading></Loading>}>
<GitHubOAuth />
</Suspense>
}
/>
<Route
path='/setting'
path='/chat2link'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<Setting />
<Chat2Link />
</Suspense>
</PrivateRoute>
}
/>
<Route
path='/topup'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<TopUp />
</Suspense>
</PrivateRoute>
}
/>
<Route
path='/log'
element={
<PrivateRoute>
<Log />
</PrivateRoute>
}
/>
<Route
path='/detail'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<Detail />
</Suspense>
</PrivateRoute>
}
/>
<Route
path='/midjourney'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<Midjourney />
</Suspense>
</PrivateRoute>
}
/>
<Route
path='/task'
element={
<PrivateRoute>
<Suspense fallback={<Loading></Loading>}>
<Task />
</Suspense>
</PrivateRoute>
}
/>
<Route
path='/pricing'
element={
<Suspense fallback={<Loading></Loading>}>
<Pricing />
</Suspense>
}
/>
<Route
path='/about'
element={
<Suspense fallback={<Loading></Loading>}>
<About />
</Suspense>
}
/>
<Route
path='/chat'
element={
<Suspense fallback={<Loading></Loading>}>
<Chat />
</Suspense>
}
/>
<Route path='*' element={<NotFound />} />
</Routes>
</Layout.Content>
</Layout>
</>
);
}

View File

@@ -3,7 +3,7 @@ import React, { useEffect, useState } from 'react';
import { getFooterHTML, getSystemName } from '../helpers';
import { Layout, Tooltip } from '@douyinfe/semi-ui';
const Footer = () => {
const FooterBar = () => {
const systemName = getSystemName();
const [footer, setFooter] = useState(getFooterHTML());
let remainCheckTimes = 5;
@@ -56,21 +56,17 @@ const Footer = () => {
}, []);
return (
<Layout>
<Layout.Content style={{ textAlign: 'center' }}>
{footer ? (
<Tooltip content={defaultFooter}>
<div
className='custom-footer'
dangerouslySetInnerHTML={{ __html: footer }}
></div>
</Tooltip>
) : (
defaultFooter
)}
</Layout.Content>
</Layout>
<div style={{ textAlign: 'center' }}>
{footer ? (
<div
className='custom-footer'
dangerouslySetInnerHTML={{ __html: footer }}
></div>
) : (
defaultFooter
)}
</div>
);
};
export default Footer;
export default FooterBar;

View File

@@ -1,8 +1,9 @@
import React, { useContext, useEffect, useState } from 'react';
import { Dimmer, Loader, Segment } from 'semantic-ui-react';
import { useNavigate, useSearchParams } from 'react-router-dom';
import { API, showError, showSuccess } from '../helpers';
import { API, showError, showSuccess, updateAPI } from '../helpers';
import { UserContext } from '../context/User';
import { setUserData } from '../helpers/data.js';
const GitHubOAuth = () => {
const [searchParams, setSearchParams] = useSearchParams();
@@ -23,8 +24,10 @@ const GitHubOAuth = () => {
} else {
userDispatch({ type: 'login', payload: data });
localStorage.setItem('user', JSON.stringify(data));
setUserData(data);
updateAPI()
showSuccess('登录成功!');
navigate('/');
navigate('/token');
}
} else {
showError(message);

View File

@@ -3,14 +3,23 @@ import { Link, useNavigate } from 'react-router-dom';
import { UserContext } from '../context/User';
import { useSetTheme, useTheme } from '../context/Theme';
import { API, getLogo, getSystemName, showSuccess } from '../helpers';
import { API, getLogo, getSystemName, isMobile, showSuccess } from '../helpers';
import '../index.css';
import fireworks from 'react-fireworks';
import { IconHelpCircle, IconKey, IconUser } from '@douyinfe/semi-icons';
import {
IconHelpCircle,
IconHome,
IconHomeStroked,
IconKey,
IconNoteMoneyStroked,
IconPriceTag,
IconUser
} from '@douyinfe/semi-icons';
import { Avatar, Dropdown, Layout, Nav, Switch } from '@douyinfe/semi-ui';
import { stringToColor } from '../helpers/render';
import Text from '@douyinfe/semi-ui/lib/es/typography/text';
// HeaderBar Buttons
let headerButtons = [
@@ -22,6 +31,21 @@ let headerButtons = [
},
];
let buttons = [
{
text: '首页',
itemKey: 'home',
to: '/',
// icon: <IconHomeStroked />,
},
// {
// text: '模型价格',
// itemKey: 'pricing',
// to: '/pricing',
// icon: <IconNoteMoneyStroked />,
// },
];
if (localStorage.getItem('chat_link')) {
headerButtons.splice(1, 0, {
name: '聊天',
@@ -90,6 +114,7 @@ const HeaderBar = () => {
about: '/about',
login: '/login',
register: '/register',
home: '/',
};
return (
<Link
@@ -103,6 +128,18 @@ const HeaderBar = () => {
selectedKeys={[]}
// items={headerButtons}
onSelect={(key) => {}}
header={isMobile()?{
logo: (
<img src={logo} alt='logo' style={{ marginRight: '0.75em' }} />
),
}:{
logo: (
<img src={logo} alt='logo' />
),
text: systemName,
}}
items={buttons}
footer={
<>
{isNewYear && (
@@ -121,15 +158,19 @@ const HeaderBar = () => {
</Dropdown>
)}
<Nav.Item itemKey={'about'} icon={<IconHelpCircle />} />
<Switch
checkedText='🌞'
size={'large'}
checked={theme === 'dark'}
uncheckedText='🌙'
onChange={(checked) => {
setTheme(checked);
}}
/>
<>
{!isMobile() && (
<Switch
checkedText='🌞'
size={'large'}
checked={theme === 'dark'}
uncheckedText='🌙'
onChange={(checked) => {
setTheme(checked);
}}
/>
)}
</>
{userState.user ? (
<>
<Dropdown
@@ -155,7 +196,7 @@ const HeaderBar = () => {
<Nav.Item
itemKey={'login'}
text={'登录'}
icon={<IconKey />}
// icon={<IconKey />}
/>
<Nav.Item
itemKey={'register'}

View File

@@ -71,6 +71,8 @@ const LoginForm = () => {
if (success) {
userDispatch({ type: 'login', payload: data });
localStorage.setItem('user', JSON.stringify(data));
setUserData(data);
updateAPI()
navigate('/');
showSuccess('登录成功!');
setShowWeChatLoginModal(false);
@@ -143,6 +145,8 @@ const LoginForm = () => {
userDispatch({ type: 'login', payload: data });
localStorage.setItem('user', JSON.stringify(data));
showSuccess('登录成功!');
setUserData(data);
updateAPI()
navigate('/');
} else {
showError(message);

View File

@@ -1,5 +1,5 @@
import React, { useContext, useEffect, useRef, useMemo, useState } from 'react';
import { API, copy, showError, showSuccess } from '../helpers';
import { API, copy, showError, showInfo, showSuccess } from '../helpers';
import {
Banner,
@@ -87,6 +87,7 @@ const ModelPricing = () => {
const [selectedRowKeys, setSelectedRowKeys] = useState([]);
const [modalImageUrl, setModalImageUrl] = useState('');
const [isModalOpenurl, setIsModalOpenurl] = useState(false);
const [selectedGroup, setSelectedGroup] = useState('default');
const rowSelection = useMemo(
() => ({
@@ -120,7 +121,8 @@ const ModelPricing = () => {
title: '可用性',
dataIndex: 'available',
render: (text, record, index) => {
return renderAvailable(text);
// if record.enable_groups contains selectedGroup, then available is true
return renderAvailable(record.enable_groups.includes(selectedGroup));
},
sorter: (a, b) => a.available - b.available,
},
@@ -166,6 +168,43 @@ const ModelPricing = () => {
},
sorter: (a, b) => a.quota_type - b.quota_type,
},
{
title: '可用分组',
dataIndex: 'enable_groups',
render: (text, record, index) => {
// enable_groups is a string array
return (
<Space>
{text.map((group) => {
if (group === selectedGroup) {
return (
<Tag
color='blue'
size='large'
prefixIcon={<IconVerify />}
>
{group}
</Tag>
);
} else {
return (
<Tag
color='blue'
size='large'
onClick={() => {
setSelectedGroup(group);
showInfo('当前查看的分组为:' + group + ',倍率为:' + groupRatio[group]);
}}
>
{group}
</Tag>
);
}
})}
</Space>
);
},
},
{
title: () => (
<span style={{'display':'flex','alignItems':'center'}}>
@@ -201,6 +240,8 @@ const ModelPricing = () => {
<Text>模型:{record.quota_type === 0 ? text : '无'}</Text>
<br />
<Text>补全:{record.quota_type === 0 ? completionRatio : '无'}</Text>
<br />
<Text>分组:{groupRatio[selectedGroup]}</Text>
</>
);
return <div>{content}</div>;
@@ -213,11 +254,11 @@ const ModelPricing = () => {
let content = text;
if (record.quota_type === 0) {
// 这里的 *2 是因为 1倍率=0.002刀,请勿删除
let inputRatioPrice = record.model_ratio * 2 * record.group_ratio;
let inputRatioPrice = record.model_ratio * 2 * groupRatio[selectedGroup];
let completionRatioPrice =
record.model_ratio *
record.completion_ratio * 2 *
record.group_ratio;
groupRatio[selectedGroup];
content = (
<>
<Text>提示 ${inputRatioPrice} / 1M tokens</Text>
@@ -226,7 +267,7 @@ const ModelPricing = () => {
</>
);
} else {
let price = parseFloat(text) * record.group_ratio;
let price = parseFloat(text) * groupRatio[selectedGroup];
content = <>模型价格${price}</>;
}
return <div>{content}</div>;
@@ -237,12 +278,12 @@ const ModelPricing = () => {
const [models, setModels] = useState([]);
const [loading, setLoading] = useState(true);
const [userState, userDispatch] = useContext(UserContext);
const [groupRatio, setGroupRatio] = useState(1);
const [groupRatio, setGroupRatio] = useState({});
const setModelsFormat = (models, groupRatio) => {
for (let i = 0; i < models.length; i++) {
models[i].key = models[i].model_name;
models[i].group_ratio = groupRatio;
models[i].group_ratio = groupRatio[models[i].model_name];
}
// sort by quota_type
models.sort((a, b) => {
@@ -275,6 +316,7 @@ const ModelPricing = () => {
const { success, message, data, group_ratio } = res.data;
if (success) {
setGroupRatio(group_ratio);
setSelectedGroup(userState.user ? userState.user.group : 'default')
setModelsFormat(data, group_ratio);
} else {
showError(message);
@@ -307,14 +349,14 @@ const ModelPricing = () => {
type="success"
fullMode={false}
closeIcon="null"
description={`您的分组为:${userState.user.group},分组倍率为:${groupRatio}`}
description={`您的默认分组为:${userState.user.group},分组倍率为:${groupRatio[userState.user.group]}`}
/>
) : (
<Banner
type='warning'
fullMode={false}
closeIcon="null"
description={`您还未登陆,显示的价格为默认分组倍率: ${groupRatio}`}
description={`您还未登陆,显示的价格为默认分组倍率: ${groupRatio['default']}`}
/>
)}
<br/>
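A quick check of the per-group price math above, as a standalone sketch (the *2 mirrors the in-code comment that a ratio of 1 equals $0.002, i.e. $2 per 1M tokens; the sample ratios are hypothetical, not taken from the table):

// Mirrors the calculation in the price column renderer above.
function ratioPrices(modelRatio, completionRatio, groupRatioValue) {
  const prompt = modelRatio * 2 * groupRatioValue;                        // $ per 1M prompt tokens
  const completion = modelRatio * completionRatio * 2 * groupRatioValue;  // $ per 1M completion tokens
  return { prompt, completion };
}

// Hypothetical ratios: modelRatio = 2.5, completionRatio = 3, group ratio = 1
console.log(ratioPrices(2.5, 3, 1)); // { prompt: 5, completion: 15 }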

View File

@@ -23,6 +23,7 @@ const OperationSetting = () => {
CompletionRatio: '',
ModelPrice: '',
GroupRatio: '',
UserUsableGroups: '',
TopUpLink: '',
ChatLink: '',
ChatLink2: '', // newly added state variable
@@ -62,6 +63,7 @@ const OperationSetting = () => {
if (
item.key === 'ModelRatio' ||
item.key === 'GroupRatio' ||
item.key === 'UserUsableGroups' ||
item.key === 'CompletionRatio' ||
item.key === 'ModelPrice'
) {

View File

@@ -17,7 +17,7 @@ import {
IconCalendarClock, IconChecklistStroked,
IconComment,
IconCreditCard,
IconGift,
IconGift, IconHelpCircle,
IconHistogram,
IconHome,
IconImage,
@@ -25,10 +25,12 @@ import {
IconLayers,
IconPriceTag,
IconSetting,
IconUser,
IconUser
} from '@douyinfe/semi-icons';
import { Layout, Nav } from '@douyinfe/semi-ui';
import { Avatar, Dropdown, Layout, Nav, Switch } from '@douyinfe/semi-ui';
import { setStatusData } from '../helpers/data.js';
import { stringToColor } from '../helpers/render.js';
import { useSetTheme, useTheme } from '../context/Theme/index.js';
// HeaderBar Buttons
@@ -43,6 +45,8 @@ const SiderBar = () => {
const systemName = getSystemName();
const logo = getLogo();
const [isCollapsed, setIsCollapsed] = useState(defaultIsCollapsed);
const theme = useTheme();
const setTheme = useSetTheme();
const routerMap = {
home: '/',
@@ -63,11 +67,17 @@ const SiderBar = () => {
const headerButtons = useMemo(
() => [
// {
// text: '首页',
// itemKey: 'home',
// to: '/',
// icon: <IconHome />,
// },
{
text: '首页',
itemKey: 'home',
to: '/',
icon: <IconHome />,
text: '模型价格',
itemKey: 'pricing',
to: '/pricing',
icon: <IconPriceTag />,
},
{
text: '渠道',
@@ -104,12 +114,6 @@ const SiderBar = () => {
to: '/topup',
icon: <IconCreditCard />,
},
{
text: '模型价格',
itemKey: 'pricing',
to: '/pricing',
icon: <IconPriceTag />,
},
{
text: '用户管理',
itemKey: 'user',
@@ -205,48 +209,58 @@ const SiderBar = () => {
return (
<>
<Layout>
<div style={{ height: '100%' }}>
<Nav
// bodyStyle={{ maxWidth: 200 }}
style={{ maxWidth: 200 }}
defaultIsCollapsed={
isMobile() ||
localStorage.getItem('default_collapse_sidebar') === 'true'
}
isCollapsed={isCollapsed}
onCollapseChange={(collapsed) => {
setIsCollapsed(collapsed);
}}
selectedKeys={selectedKeys}
renderWrapper={({ itemElement, isSubNav, isInSubNav, props }) => {
return (
<Link
style={{ textDecoration: 'none' }}
to={routerMap[props.itemKey]}
>
{itemElement}
</Link>
);
}}
items={headerButtons}
onSelect={(key) => {
setSelectedKeys([key.itemKey]);
}}
header={{
logo: (
<img src={logo} alt='logo' style={{ marginRight: '0.75em' }} />
),
text: systemName,
}}
// footer={{
// text: '© 2021 NekoAPI',
// }}
>
<Nav.Footer collapseButton={true}></Nav.Footer>
</Nav>
</div>
</Layout>
<Nav
style={{ maxWidth: 220, height: '100%' }}
defaultIsCollapsed={
isMobile() ||
localStorage.getItem('default_collapse_sidebar') === 'true'
}
isCollapsed={isCollapsed}
onCollapseChange={(collapsed) => {
setIsCollapsed(collapsed);
}}
selectedKeys={selectedKeys}
renderWrapper={({ itemElement, isSubNav, isInSubNav, props }) => {
return (
<Link
style={{ textDecoration: 'none' }}
to={routerMap[props.itemKey]}
>
{itemElement}
</Link>
);
}}
items={headerButtons}
onSelect={(key) => {
setSelectedKeys([key.itemKey]);
}}
// header={{
// logo: (
// <img src={logo} alt='logo' style={{ marginRight: '0.75em' }} />
// ),
// text: systemName,
// }}
// footer={{
// text: '© 2021 NekoAPI',
// }}
footer={
<>
{isMobile() && (
<Switch
checkedText='🌞'
size={'small'}
checked={theme === 'dark'}
uncheckedText='🌙'
onChange={(checked) => {
setTheme(checked);
}}
/>
)}
</>
}
>
<Nav.Footer collapseButton={true}></Nav.Footer>
</Nav>
</>
);
};

View File

@@ -8,14 +8,14 @@ import {
} from '../helpers';
import { ITEMS_PER_PAGE } from '../constants';
import { renderQuota } from '../helpers/render';
import {renderGroup, renderQuota} from '../helpers/render';
import {
Button,
Dropdown,
Form,
Modal,
Popconfirm,
Popover,
Popover, Space,
SplitButtonGroup,
Table,
Tag,
@@ -119,7 +119,12 @@ const TokensTable = () => {
dataIndex: 'status',
key: 'status',
render: (text, record, index) => {
return <div>{renderStatus(text, record.model_limits_enabled)}</div>;
return <div>
<Space>
{renderStatus(text, record.model_limits_enabled)}
{renderGroup(record.group)}
</Space>
</div>;
},
},
{
@@ -225,14 +230,14 @@ const TokensTable = () => {
onOpenLink('next-mj', record.key);
},
},
{
node: 'item',
key: 'lobe',
name: 'Lobe Chat',
onClick: () => {
onOpenLink('lobe', record.key);
},
},
// {
// node: 'item',
// key: 'lobe',
// name: 'Lobe Chat',
// onClick: () => {
// onOpenLink('lobe', record.key);
// },
// },
{
node: 'item',
key: 'ama',
@@ -425,7 +430,7 @@ const TokensTable = () => {
url = `opencat://team/join?domain=${encodedServerAddress}&token=sk-${key}`;
break;
case 'lobe':
url = `https://chat-preview.lobehub.com/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${encodedServerAddress}"}}}`;
url = `https://chat-preview.lobehub.com/?settings={"keyVaults":{"openai":{"apiKey":"sk-${key}","baseURL":"${encodedServerAddress}/v1"}}}`;
break;
case 'next-mj':
url =

View File

@@ -151,7 +151,7 @@ const UsersTable = () => {
title='确定?'
okType={'warning'}
onConfirm={() => {
manageUser(record.username, 'promote', record);
manageUser(record.id, 'promote', record);
}}
>
<Button theme='light' type='warning' style={{ marginRight: 1 }}>
@@ -162,7 +162,7 @@ const UsersTable = () => {
title='确定?'
okType={'warning'}
onConfirm={() => {
manageUser(record.username, 'demote', record);
manageUser(record.id, 'demote', record);
}}
>
<Button
@@ -179,7 +179,7 @@ const UsersTable = () => {
type='warning'
style={{ marginRight: 1 }}
onClick={async () => {
manageUser(record.username, 'disable', record);
manageUser(record.id, 'disable', record);
}}
>
禁用
@@ -190,7 +190,7 @@ const UsersTable = () => {
type='secondary'
style={{ marginRight: 1 }}
onClick={async () => {
manageUser(record.username, 'enable', record);
manageUser(record.id, 'enable', record);
}}
disabled={record.status === 3}
>
@@ -214,7 +214,7 @@ const UsersTable = () => {
okType={'danger'}
position={'left'}
onConfirm={() => {
manageUser(record.username, 'delete', record).then(() => {
manageUser(record.id, 'delete', record).then(() => {
removeRecord(record.id);
});
}}
@@ -303,9 +303,9 @@ const UsersTable = () => {
fetchGroups().then();
}, []);
const manageUser = async (username, action, record) => {
const manageUser = async (userId, action, record) => {
const res = await API.post('/api/user/manage', {
username,
id: userId,
action,
});
const { success, message } = res.data;
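The change above switches user management from username to the numeric record id. A minimal sketch of the resulting request, using the same API helper the table imports; only the payload differs from before:

import { API } from '../helpers';

// Promote a user by id; previously the body carried { username, action }.
async function promoteUser(record) {
  const res = await API.post('/api/user/manage', {
    id: record.id,      // was: username: record.username
    action: 'promote',
  });
  return res.data;      // { success, message }
}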

View File

@@ -0,0 +1,70 @@
// src/hooks/useTokenKeys.js
import { useEffect, useState } from 'react';
import { API, showError } from '../helpers';
async function fetchTokenKeys() {
try {
const response = await API.get('/api/token/?p=0&size=999');
const { success, data } = response.data;
if (success) {
const activeTokens = data.filter((token) => token.status === 1);
return activeTokens.map((token) => token.key);
} else {
throw new Error('Failed to fetch token keys');
}
} catch (error) {
console.error("Error fetching token keys:", error);
return [];
}
}
function getServerAddress() {
let status = localStorage.getItem('status');
let serverAddress = '';
if (status) {
try {
status = JSON.parse(status);
serverAddress = status.server_address || '';
} catch (error) {
console.error("Failed to parse status from localStorage:", error);
}
}
if (!serverAddress) {
serverAddress = window.location.origin;
}
return serverAddress;
}
export function useTokenKeys() {
const [keys, setKeys] = useState([]);
const [chatLink, setChatLink] = useState('');
const [serverAddress, setServerAddress] = useState('');
const [isLoading, setIsLoading] = useState(true);
useEffect(() => {
const loadAllData = async () => {
const fetchedKeys = await fetchTokenKeys();
if (fetchedKeys.length === 0) {
showError('当前没有可用的启用令牌,请确认是否有令牌处于启用状态!');
setTimeout(() => {
window.location.href = '/token';
}, 1500); // redirect after a 1.5-second delay
}
setKeys(fetchedKeys);
setIsLoading(false);
const link = localStorage.getItem('chat_link');
setChatLink(link);
const address = getServerAddress();
setServerAddress(address);
};
loadAllData();
}, []);
return { keys, chatLink, serverAddress, isLoading };
}
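For reference, a hedged sketch of the response shape fetchTokenKeys expects from GET /api/token/?p=0&size=999; only success, data, status and key are read by the hook, and every other field shown here is hypothetical:

// Assumed payload; status === 1 marks an enabled token and only its key is kept.
const exampleResponse = {
  success: true,
  data: [
    { id: 1, name: 'chat token', status: 1, key: 'abc123' },  // kept
    { id: 2, name: 'old token', status: 2, key: 'def456' },   // filtered out
  ],
};
// fetchTokenKeys() would resolve to ['abc123'], and useTokenKeys() exposes it as keys[0].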

View File

@@ -5,21 +5,21 @@ export const CHANNEL_OPTIONS = [
text: 'Midjourney Proxy',
value: 2,
color: 'light-blue',
label: 'Midjourney Proxy',
label: 'Midjourney Proxy'
},
{
key: 5,
text: 'Midjourney Proxy Plus',
value: 5,
color: 'blue',
label: 'Midjourney Proxy Plus',
label: 'Midjourney Proxy Plus'
},
{
key: 36,
text: 'Suno API',
value: 36,
color: 'purple',
label: 'Suno API',
label: 'Suno API'
},
{ key: 4, text: 'Ollama', value: 4, color: 'grey', label: 'Ollama' },
{
@@ -27,77 +27,78 @@ export const CHANNEL_OPTIONS = [
text: 'Anthropic Claude',
value: 14,
color: 'indigo',
label: 'Anthropic Claude',
label: 'Anthropic Claude'
},
{
key: 33,
text: 'AWS Claude',
value: 33,
color: 'indigo',
label: 'AWS Claude',
label: 'AWS Claude'
},
{ key: 41, text: 'Vertex AI', value: 41, color: 'blue', label: 'Vertex AI' },
{
key: 3,
text: 'Azure OpenAI',
value: 3,
color: 'teal',
label: 'Azure OpenAI',
label: 'Azure OpenAI'
},
{
key: 24,
text: 'Google Gemini',
value: 24,
color: 'orange',
label: 'Google Gemini',
label: 'Google Gemini'
},
{
key: 34,
text: 'Cohere',
value: 34,
color: 'purple',
label: 'Cohere',
label: 'Cohere'
},
{
key: 15,
text: '百度文心千帆',
value: 15,
color: 'blue',
label: '百度文心千帆',
label: '百度文心千帆'
},
{
key: 17,
text: '阿里通义千问',
value: 17,
color: 'orange',
label: '阿里通义千问',
label: '阿里通义千问'
},
{
key: 18,
text: '讯飞星火认知',
value: 18,
color: 'blue',
label: '讯飞星火认知',
label: '讯飞星火认知'
},
{
key: 16,
text: '智谱 ChatGLM',
value: 16,
color: 'violet',
label: '智谱 ChatGLM',
label: '智谱 ChatGLM'
},
{
key: 26,
text: '智谱 GLM-4V',
value: 26,
color: 'purple',
label: '智谱 GLM-4V',
label: '智谱 GLM-4V'
},
{
key: 11,
text: 'Google PaLM2',
value: 11,
color: 'orange',
label: 'Google PaLM2',
label: 'Google PaLM2'
},
{ key: 39, text: 'Cloudflare', value: 39, color: 'grey', label: 'Cloudflare' },
{ key: 25, text: 'Moonshot', value: 25, color: 'green', label: 'Moonshot' },
@@ -107,19 +108,20 @@ export const CHANNEL_OPTIONS = [
{ key: 35, text: 'MiniMax', value: 35, color: 'green', label: 'MiniMax' },
{ key: 37, text: 'Dify', value: 37, color: 'teal', label: 'Dify' },
{ key: 38, text: 'Jina', value: 38, color: 'blue', label: 'Jina' },
{ key: 40, text: 'SiliconCloud', value: 40, color: 'purple', label: 'SiliconCloud' },
{ key: 8, text: '自定义渠道', value: 8, color: 'pink', label: '自定义渠道' },
{
key: 22,
text: '知识库FastGPT',
value: 22,
color: 'blue',
label: '知识库FastGPT',
label: '知识库FastGPT'
},
{
key: 21,
text: '知识库AI Proxy',
value: 21,
color: 'purple',
label: '知识库AI Proxy',
},
label: '知识库AI Proxy'
}
];

View File

@@ -15,8 +15,8 @@ export function renderText(text, limit) {
export function renderGroup(group) {
if (group === '') {
return (
<Tag size='large' key='default'>
unknown
<Tag size='large' key='default' color='orange'>
用户分组
</Tag>
);
}

View File

@@ -9,11 +9,12 @@ body {
scrollbar-width: none;
color: var(--semi-color-text-0) !important;
background-color: var(--semi-color-bg-0) !important;
height: 100%;
height: 100vh;
}
#root {
height: 100%;
height: 100vh;
flex-direction: column;
}
@media only screen and (max-width: 767px) {
@@ -50,9 +51,9 @@ body {
}
}
.semi-layout {
height: 100%;
}
/*.semi-layout {*/
/* height: 100%;*/
/*}*/
.tableShow {
display: revert;

View File

@@ -3,7 +3,6 @@ import ReactDOM from 'react-dom/client';
import { BrowserRouter } from 'react-router-dom';
import App from './App';
import HeaderBar from './components/HeaderBar';
import Footer from './components/Footer';
import 'semantic-ui-offline/semantic.min.css';
import './index.css';
import { UserProvider } from './context/User';
@@ -13,35 +12,36 @@ import { StatusProvider } from './context/Status';
import { Layout } from '@douyinfe/semi-ui';
import SiderBar from './components/SiderBar';
import { ThemeProvider } from './context/Theme';
import FooterBar from './components/Footer';
// initialization
const root = ReactDOM.createRoot(document.getElementById('root'));
const { Sider, Content, Header } = Layout;
const { Sider, Content, Header, Footer } = Layout;
root.render(
<React.StrictMode>
<StatusProvider>
<UserProvider>
<BrowserRouter>
<ThemeProvider>
<Layout>
<Sider>
<SiderBar />
</Sider>
<Layout>
<Header>
<HeaderBar />
</Header>
<Content
style={{
padding: '24px',
}}
>
<App />
</Content>
<Layout.Footer>
<Footer></Footer>
</Layout.Footer>
<Layout style={{ height: '100vh', display: 'flex', flexDirection: 'column' }}>
<Header>
<HeaderBar />
</Header>
<Layout style={{ flex: 1, overflow: 'hidden' }}>
<Sider>
<SiderBar />
</Sider>
<Layout>
<Content
style={{ overflowY: 'auto', padding: '24px' }}
>
<App />
</Content>
<Layout.Footer>
<FooterBar></FooterBar>
</Layout.Footer>
</Layout>
</Layout>
<ToastContainer />
</Layout>

View File

@@ -37,6 +37,11 @@ const STATUS_CODE_MAPPING_EXAMPLE = {
400: '500',
};
const REGION_EXAMPLE = {
"default": "us-central1",
"claude-3-5-sonnet-20240620": "europe-west1"
}
const fetchButtonTips = "1. 新建渠道时请求通过当前浏览器发出2. 编辑已有渠道,请求通过后端服务器发出"
function type2secretPrompt(type) {
@@ -588,6 +593,44 @@ const EditChannel = (props) => {
/>
</>
)}
{inputs.type === 41 && (
<>
<div style={{ marginTop: 10 }}>
<Typography.Text strong>部署地区</Typography.Text>
</div>
<TextArea
name='other'
placeholder={
'请输入部署地区,例如:us-central1\n支持使用模型映射格式:\n' +
'{\n' +
' "default": "us-central1",\n' +
' "claude-3-5-sonnet-20240620": "europe-west1"\n' +
'}'
}
autosize={{ minRows: 2 }}
onChange={(value) => {
handleInputChange('other', value);
}}
value={inputs.other}
autoComplete='new-password'
/>
<Typography.Text
style={{
color: 'rgba(var(--semi-blue-5), 1)',
userSelect: 'none',
cursor: 'pointer',
}}
onClick={() => {
handleInputChange(
'other',
JSON.stringify(REGION_EXAMPLE, null, 2),
);
}}
>
填入模板
</Typography.Text>
</>
)}
{inputs.type === 21 && (
<>
<div style={{ marginTop: 10 }}>
@@ -734,17 +777,47 @@ const EditChannel = (props) => {
autoComplete='new-password'
/>
) : (
<Input
label='密钥'
name='key'
required
placeholder={type2secretPrompt(inputs.type)}
onChange={(value) => {
handleInputChange('key', value);
}}
value={inputs.key}
autoComplete='new-password'
/>
<>
{inputs.type === 41 ? (
<TextArea
label='鉴权json'
name='key'
required
placeholder={'{\n' +
' "type": "service_account",\n' +
' "project_id": "abc-bcd-123-456",\n' +
' "private_key_id": "123xxxxx456",\n' +
' "private_key": "-----BEGIN PRIVATE KEY-----xxxx\n' +
' "client_email": "xxx@developer.gserviceaccount.com",\n' +
' "client_id": "111222333",\n' +
' "auth_uri": "https://accounts.google.com/o/oauth2/auth",\n' +
' "token_uri": "https://oauth2.googleapis.com/token",\n' +
' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' +
' "client_x509_cert_url": "https://xxxxx.gserviceaccount.com",\n' +
' "universe_domain": "googleapis.com"\n' +
'}'}
onChange={(value) => {
handleInputChange('key', value);
}}
autosize={{ minRows: 10 }}
value={inputs.key}
autoComplete='new-password'
/>
) : (
<Input
label='密钥'
name='key'
required
placeholder={type2secretPrompt(inputs.type)}
onChange={(value) => {
handleInputChange('key', value);
}}
value={inputs.key}
autoComplete='new-password'
/>
)
}
</>
)}
{inputs.type === 1 && (
<>
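The 部署地区 (deployment region) field above accepts either a plain region string or a JSON mapping in the REGION_EXAMPLE format. The backend resolution is not part of this diff; a hedged sketch of the lookup it implies, with the model-specific entry taking precedence over "default":

// Hypothetical region resolution for a Vertex AI (type 41) channel.
function resolveRegion(other, modelName) {
  try {
    const mapping = JSON.parse(other);            // e.g. REGION_EXAMPLE
    return mapping[modelName] || mapping['default'];
  } catch (e) {
    return other;                                 // plain string such as 'us-central1'
  }
}

// resolveRegion(JSON.stringify(REGION_EXAMPLE), 'claude-3-5-sonnet-20240620') => 'europe-west1'
// resolveRegion('us-central1', 'gemini-1.5-pro') => 'us-central1'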

View File

@@ -1,14 +1,35 @@
import React from 'react';
import { useTokenKeys } from '../../components/fetchTokenKeys';
import { Layout } from '@douyinfe/semi-ui';
const Chat = () => {
const chatLink = localStorage.getItem('chat_link');
const ChatPage = () => {
const { keys, chatLink, serverAddress, isLoading } = useTokenKeys();
return (
const comLink = (key) => {
if (!chatLink || !serverAddress || !key) return '';
return `${chatLink}/#/?settings={"key":"sk-${key}","url":"${encodeURIComponent(serverAddress)}"}`;
};
const iframeSrc = keys.length > 0 ? comLink(keys[0]) : '';
return !isLoading && iframeSrc ? (
<iframe
src={chatLink}
src={iframeSrc}
style={{ width: '100%', height: '85vh', border: 'none' }}
title="Token Frame"
/>
) : (
<div>
<Layout>
<Layout.Header>
<h3 style={{ color: 'red'}}>
当前没有可用的已启用令牌,请确认是否有令牌处于启用状态<br />
正在跳转......
</h3>
</Layout.Header>
</Layout>
</div>
);
};
export default Chat;
export default ChatPage;
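To make the comLink template concrete, here is the link it produces for hypothetical values (the key gets an sk- prefix and the server address is URL-encoded before being embedded in the settings fragment):

// Hypothetical inputs, only to show the shape of the generated link.
const chatLink = 'https://chat.example.com';
const serverAddress = 'https://api.example.com';
const key = 'abc123';

const link = `${chatLink}/#/?settings={"key":"sk-${key}","url":"${encodeURIComponent(serverAddress)}"}`;
// => https://chat.example.com/#/?settings={"key":"sk-abc123","url":"https%3A%2F%2Fapi.example.com"}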

View File

@@ -0,0 +1,26 @@
import React from 'react';
import { useTokenKeys } from '../../components/fetchTokenKeys';
const chat2page = () => {
const { keys, chatLink, serverAddress, isLoading } = useTokenKeys();
const comLink = (key) => {
if (!chatLink || !serverAddress || !key) return '';
return `${chatLink}/#/?settings={"key":"sk-${key}","url":"${encodeURIComponent(serverAddress)}"}`;
};
if (keys.length > 0) {
const redirectLink = comLink(keys[0]);
if (redirectLink) {
window.location.href = redirectLink;
}
}
return (
<div>
<h3>正在加载,请稍候...</h3>
</div>
);
};
export default chat2page;

View File

@@ -16,7 +16,8 @@ export default function SettingsMagnification(props) {
ModelPrice: '',
ModelRatio: '',
CompletionRatio: '',
GroupRatio: ''
GroupRatio: '',
UserUsableGroups: ''
});
const refForm = useRef();
const [inputsRow, setInputsRow] = useState(inputs);
@@ -213,6 +214,33 @@ export default function SettingsMagnification(props) {
/>
</Col>
</Row>
<Row gutter={16}>
<Col span={16}>
<Form.TextArea
label={'用户可选分组'}
extraText={''}
placeholder={'为一个 JSON 文本,键为分组名称,值为倍率'}
field={'UserUsableGroups'}
autosize={{ minRows: 6, maxRows: 12 }}
trigger='blur'
stopValidateWithError
rules={[
{
validator: (rule, value) => {
return verifyJSON(value);
},
message: '不是合法的 JSON 字符串'
}
]}
onChange={(value) =>
setInputs({
...inputs,
UserUsableGroups: value
})
}
/>
</Col>
</Row>
</Form.Section>
</Form>
<Space>
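An illustrative value for the new UserUsableGroups option (group names and labels are hypothetical; the token editor later in this diff uses the values as the display labels of its group dropdown):

// Paste the stringified form into the 用户可选分组 textarea; it must pass verifyJSON.
const userUsableGroups = {
  default: '默认分组',
  vip: 'VIP 分组',
};
console.log(JSON.stringify(userUsableGroups, null, 2));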

View File

@@ -18,8 +18,8 @@ import {
Select,
SideSheet,
Space,
Spin,
Typography,
Spin, TextArea,
Typography
} from '@douyinfe/semi-ui';
import Title from '@douyinfe/semi-ui/lib/es/typography/title';
import { Divider } from 'semantic-ui-react';
@@ -34,6 +34,8 @@ const EditToken = (props) => {
unlimited_quota: false,
model_limits_enabled: false,
model_limits: [],
allow_ips: '',
group: '',
};
const [inputs, setInputs] = useState(originInputs);
const {
@@ -43,9 +45,12 @@ const EditToken = (props) => {
unlimited_quota,
model_limits_enabled,
model_limits,
allow_ips,
group
} = inputs;
// const [visible, setVisible] = useState(false);
const [models, setModels] = useState({});
const [groups, setGroups] = useState([]);
const navigate = useNavigate();
const handleInputChange = (name, value) => {
setInputs((inputs) => ({ ...inputs, [name]: value }));
@@ -86,6 +91,22 @@ const EditToken = (props) => {
}
};
const loadGroups = async () => {
let res = await API.get(`/api/user/groups`);
const { success, message, data } = res.data;
if (success) {
// return data is a map, key is group name, value is group description
// label is group description, value is group name
let localGroupOptions = Object.keys(data).map((group) => ({
label: data[group],
value: group,
}));
setGroups(localGroupOptions);
} else {
showError(message);
}
};
const loadToken = async () => {
setLoading(true);
let res = await API.get(`/api/token/${props.editingToken.id}`);
@@ -118,6 +139,7 @@ const EditToken = (props) => {
});
}
loadModels();
loadGroups();
}, [isEdit]);
// New state variable tokenCount records how many tokens the user wants to create; defaults to 1
@@ -374,6 +396,19 @@ const EditToken = (props) => {
</Button>
</div>
<Divider />
<div style={{ marginTop: 10 }}>
<Typography.Text>IP白名单(请勿过度信任此功能)</Typography.Text>
</div>
<TextArea
label='IP白名单'
name='allow_ips'
placeholder={'允许的IP,一行一个'}
onChange={(value) => {
handleInputChange('allow_ips', value);
}}
value={inputs.allow_ips}
style={{ fontFamily: 'JetBrains Mono, Consolas' }}
/>
<div style={{ marginTop: 10, display: 'flex' }}>
<Space>
<Checkbox
@@ -404,6 +439,30 @@ const EditToken = (props) => {
optionList={models}
disabled={!model_limits_enabled}
/>
<div style={{ marginTop: 10 }}>
<Typography.Text>令牌分组,默认为用户的分组</Typography.Text>
</div>
{groups.length > 0 ?
<Select
style={{ marginTop: 8 }}
placeholder={'令牌分组,默认为用户的分组'}
name='group'
required
selection
onChange={(value) => {
handleInputChange('group', value);
}}
value={inputs.group}
autoComplete='new-password'
optionList={groups}
/>:
<Select
style={{ marginTop: 8 }}
placeholder={'管理员未设置用户可选分组'}
name='group'
disabled={true}
/>
}
</Spin>
</SideSheet>
</>
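To round out the new token-group picker: loadGroups above turns the /api/user/groups map (group name to description) into Semi UI Select options. A hedged example with hypothetical groups:

// Hypothetical /api/user/groups payload and the optionList loadGroups() derives from it.
const data = { default: '默认分组', vip: 'VIP 分组' };

const localGroupOptions = Object.keys(data).map((group) => ({
  label: data[group],   // description shown to the user
  value: group,         // group name stored in inputs.group
}));
// => [ { label: '默认分组', value: 'default' }, { label: 'VIP 分组', value: 'vip' } ]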