mirror of
				https://github.com/songquanpeng/one-api.git
				synced 2025-10-31 13:53:41 +08:00 
			
		
		
		
	Compare commits
	
		
			91 Commits
		
	
	
		
			v0.4.7
			...
			v0.5.1-alp
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | b9f6461dd4 | ||
|  | 0a39521a3d | ||
|  | c134604cee | ||
|  | 929e43ef81 | ||
|  | dce8bbe1ca | ||
|  | bc2f48b1f2 | ||
|  | 889af8b2db | ||
|  | 4eea096654 | ||
|  | 4ab3211c0e | ||
|  | 3da119efba | ||
|  | dccd66b852 | ||
|  | 2fcd6852e0 | ||
|  | 9b4d1964d4 | ||
|  | 806bf8241c | ||
|  | ce93c9b6b2 | ||
|  | 4ec4289565 | ||
|  | 3dc5a0f91d | ||
|  | 80a846673a | ||
|  | 26c6719ea3 | ||
|  | c87e05bfc2 | ||
|  | e6938bd236 | ||
|  | 8f721d67a5 | ||
|  | fcc1e2d568 | ||
|  | 9a1db61675 | ||
|  | 3c940113ab | ||
|  | 0495b9a0d7 | ||
|  | 12a0e7105e | ||
|  | e628b643cd | ||
|  | 675847bf98 | ||
|  | 2ff15baf66 | ||
|  | 4139a7036f | ||
|  | 02da0b51f8 | ||
|  | 35cfebee12 | ||
|  | 0e088f7c3e | ||
|  | f61d326721 | ||
|  | 74b06b643a | ||
|  | ccf7709e23 | ||
|  | d592e2c8b8 | ||
|  | b520b54625 | ||
|  | 81c5901123 | ||
|  | abc53cb208 | ||
|  | 2b17bb8dd7 | ||
|  | ea73201b6f | ||
|  | 6215d2e71c | ||
|  | d17bdc40a7 | ||
|  | 280df27705 | ||
|  | 991f5bf4ee | ||
|  | 701aaba191 | ||
|  | 3bab5b48bf | ||
|  | f3bccee3b5 | ||
|  | d84b0b0f5d | ||
|  | d383302e8a | ||
|  | 04f40def2f | ||
|  | c48b7bc0f5 | ||
|  | b09daf5ec1 | ||
|  | c90c0ecef4 | ||
|  | 1ab5fb7d2d | ||
|  | f769711c19 | ||
|  | edc5156693 | ||
|  | 9ec6506c32 | ||
|  | f387cc5ead | ||
|  | 569b68c43b | ||
|  | f0c40a6cd0 | ||
|  | 0cea9e6a6f | ||
|  | b1b3651e84 | ||
|  | 8f6bd51f58 | ||
|  | bddbf57104 | ||
|  | 9a16b0f9e5 | ||
|  | 3530309a31 | ||
|  | 733ebc067b | ||
|  | 6a8567ac14 | ||
|  | aabc546691 | ||
|  | 1c82b06f35 | ||
|  | 9e4109672a | ||
|  | 64c35334e6 | ||
|  | 0ce572b405 | ||
|  | a326ac4b28 | ||
|  | 05b0e77839 | ||
|  | 51f19470bc | ||
|  | 737672fb0b | ||
|  | 0941e294bf | ||
|  | 431d505f79 | ||
|  | f0dc7f3f06 | ||
|  | 99fed1f850 | ||
|  | 4dc5388a80 | ||
|  | f81f4c60b2 | ||
|  | c613d8b6b2 | ||
|  | 7adac1c09c | ||
|  | 6f05128368 | ||
|  | 9b178a28a3 | ||
|  | 4a6a7f4635 | 
							
								
								
									
										4
									
								
								.github/ISSUE_TEMPLATE/bug_report.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										4
									
								
								.github/ISSUE_TEMPLATE/bug_report.md
									
									
									
									
										vendored
									
									
								
							| @@ -8,11 +8,13 @@ assignees: '' | |||||||
| --- | --- | ||||||
|  |  | ||||||
| **例行检查** | **例行检查** | ||||||
|  |  | ||||||
|  | [//]: # (方框内删除已有的空格,填 x 号) | ||||||
| + [ ] 我已确认目前没有类似 issue | + [ ] 我已确认目前没有类似 issue | ||||||
| + [ ] 我已确认我已升级到最新版本 | + [ ] 我已确认我已升级到最新版本 | ||||||
| + [ ] 我已完整查看过项目 README,尤其是常见问题部分 | + [ ] 我已完整查看过项目 README,尤其是常见问题部分 | ||||||
| + [ ] 我理解并愿意跟进此 issue,协助测试和提供反馈  | + [ ] 我理解并愿意跟进此 issue,协助测试和提供反馈  | ||||||
| + [ ] 我理解并认可上述内容,并理解项目维护者精力有限,不遵循规则的 issue 可能会被无视或直接关闭 | + [ ] 我理解并认可上述内容,并理解项目维护者精力有限,**不遵循规则的 issue 可能会被无视或直接关闭** | ||||||
|  |  | ||||||
| **问题描述** | **问题描述** | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										3
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										3
									
								
								.github/ISSUE_TEMPLATE/config.yml
									
									
									
									
										vendored
									
									
								
							| @@ -6,6 +6,3 @@ contact_links: | |||||||
|   - name: 赞赏支持 |   - name: 赞赏支持 | ||||||
|     url: https://iamazing.cn/page/reward |     url: https://iamazing.cn/page/reward | ||||||
|     about: 请作者喝杯咖啡,以激励作者持续开发 |     about: 请作者喝杯咖啡,以激励作者持续开发 | ||||||
|   - name: 付费部署或定制功能 |  | ||||||
|     url: https://openai.justsong.cn/ |  | ||||||
|     about: 加群后联系群主 |  | ||||||
|   | |||||||
							
								
								
									
										5
									
								
								.github/ISSUE_TEMPLATE/feature_request.md
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5
									
								
								.github/ISSUE_TEMPLATE/feature_request.md
									
									
									
									
										vendored
									
									
								
							| @@ -8,10 +8,13 @@ assignees: '' | |||||||
| --- | --- | ||||||
|  |  | ||||||
| **例行检查** | **例行检查** | ||||||
|  |  | ||||||
|  | [//]: # (方框内删除已有的空格,填 x 号) | ||||||
| + [ ] 我已确认目前没有类似 issue | + [ ] 我已确认目前没有类似 issue | ||||||
| + [ ] 我已确认我已升级到最新版本 | + [ ] 我已确认我已升级到最新版本 | ||||||
|  | + [ ] 我已完整查看过项目 README,已确定现有版本无法满足需求 | ||||||
| + [ ] 我理解并愿意跟进此 issue,协助测试和提供反馈 | + [ ] 我理解并愿意跟进此 issue,协助测试和提供反馈 | ||||||
| + [ ] 我理解并认可上述内容,并理解项目维护者精力有限,不遵循规则的 issue 可能会被无视或直接关闭 | + [ ] 我理解并认可上述内容,并理解项目维护者精力有限,**不遵循规则的 issue 可能会被无视或直接关闭** | ||||||
|  |  | ||||||
| **功能描述** | **功能描述** | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										26
									
								
								README.en.md
									
									
									
									
									
								
							
							
						
						
									
										26
									
								
								README.en.md
									
									
									
									
									
								
							| @@ -10,7 +10,7 @@ | |||||||
|  |  | ||||||
| # One API | # One API | ||||||
|  |  | ||||||
| _✨ The all-in-one OpenAI interface, integrates various API access methods, ready to use ✨_ | _✨ Access all LLM through the standard OpenAI API format, easy to deploy & use ✨_ | ||||||
|  |  | ||||||
| </div> | </div> | ||||||
|  |  | ||||||
| @@ -57,17 +57,14 @@ _✨ The all-in-one OpenAI interface, integrates various API access methods, rea | |||||||
| > **Note**: The latest image pulled from Docker may be an `alpha` release. Specify the version manually if you require stability. | > **Note**: The latest image pulled from Docker may be an `alpha` release. Specify the version manually if you require stability. | ||||||
|  |  | ||||||
| ## Features | ## Features | ||||||
| 1. Supports multiple API access channels. Welcome PRs or issue submissions for additional channels: | 1. Supports multiple API access channels: | ||||||
|     + [x] Official OpenAI channel (support proxy configuration) |     + [x] Official OpenAI channel (support proxy configuration) | ||||||
|     + [x] **Azure OpenAI API** |     + [x] **Azure OpenAI API** | ||||||
|  |     + [x] [API Distribute](https://api.gptjk.top/register?aff=QGxj) | ||||||
|     + [x] [OpenAI-SB](https://openai-sb.com) |     + [x] [OpenAI-SB](https://openai-sb.com) | ||||||
|     + [x] [API2D](https://api2d.com/r/197971) |     + [x] [API2D](https://api2d.com/r/197971) | ||||||
|     + [x] [OhMyGPT](https://aigptx.top?aff=uFpUl2Kf) |     + [x] [OhMyGPT](https://aigptx.top?aff=uFpUl2Kf) | ||||||
|     + [x] [AI Proxy](https://aiproxy.io/?i=OneAPI) (invitation code: `OneAPI`) |     + [x] [AI Proxy](https://aiproxy.io/?i=OneAPI) (invitation code: `OneAPI`) | ||||||
|     + [x] [API2GPT](http://console.api2gpt.com/m/00002S) |  | ||||||
|     + [x] [CloseAI](https://console.closeai-asia.com/r/2412) |  | ||||||
|     + [x] [AI.LS](https://ai.ls) |  | ||||||
|     + [x] [OpenAI Max](https://openaimax.com) |  | ||||||
|     + [x] Custom channel: Various third-party proxy services not included in the list |     + [x] Custom channel: Various third-party proxy services not included in the list | ||||||
| 2. Supports access to multiple channels through **load balancing**. | 2. Supports access to multiple channels through **load balancing**. | ||||||
| 3. Supports **stream mode** that enables typewriter-like effect through stream transmission. | 3. Supports **stream mode** that enables typewriter-like effect through stream transmission. | ||||||
| @@ -174,6 +171,15 @@ Refer to [#175](https://github.com/songquanpeng/one-api/issues/175) for detailed | |||||||
| If you encounter a blank page after deployment, refer to [#97](https://github.com/songquanpeng/one-api/issues/97) for possible solutions. | If you encounter a blank page after deployment, refer to [#97](https://github.com/songquanpeng/one-api/issues/97) for possible solutions. | ||||||
|  |  | ||||||
| ### Deployment on Third-Party Platforms | ### Deployment on Third-Party Platforms | ||||||
|  | <details> | ||||||
|  | <summary><strong>Deploy on Sealos</strong></summary> | ||||||
|  | <div> | ||||||
|  |  | ||||||
|  | Please refer to [this tutorial](https://github.com/c121914yu/FastGPT/blob/main/docs/deploy/one-api/sealos.md). | ||||||
|  |  | ||||||
|  | </div> | ||||||
|  | </details> | ||||||
|  |  | ||||||
| <details> | <details> | ||||||
| <summary><strong>Deployment on Zeabur</strong></summary> | <summary><strong>Deployment on Zeabur</strong></summary> | ||||||
| <div> | <div> | ||||||
| @@ -240,7 +246,7 @@ If the channel ID is not provided, load balancing will be used to distribute the | |||||||
|     + Example: `CHANNEL_UPDATE_FREQUENCY=1440` |     + Example: `CHANNEL_UPDATE_FREQUENCY=1440` | ||||||
| 8. `CHANNEL_TEST_FREQUENCY`: When set, it periodically tests the channels, with the unit in minutes. If not set, no test will happen. | 8. `CHANNEL_TEST_FREQUENCY`: When set, it periodically tests the channels, with the unit in minutes. If not set, no test will happen. | ||||||
|     + Example: `CHANNEL_TEST_FREQUENCY=1440` |     + Example: `CHANNEL_TEST_FREQUENCY=1440` | ||||||
| 9. `REQUEST_INTERVAL`: The time interval (in seconds) between requests when updating channel balances and testing channel availability. Default is no interval. | 9. `POLLING_INTERVAL`: The time interval (in seconds) between requests when updating channel balances and testing channel availability. Default is no interval. | ||||||
|     + Example: `POLLING_INTERVAL=5` |     + Example: `POLLING_INTERVAL=5` | ||||||
|  |  | ||||||
| ### Command Line Parameters | ### Command Line Parameters | ||||||
| @@ -279,6 +285,10 @@ If the channel ID is not provided, load balancing will be used to distribute the | |||||||
| ## Note | ## Note | ||||||
| This project is an open-source project. Please use it in compliance with OpenAI's [Terms of Use](https://openai.com/policies/terms-of-use) and **applicable laws and regulations**. It must not be used for illegal purposes. | This project is an open-source project. Please use it in compliance with OpenAI's [Terms of Use](https://openai.com/policies/terms-of-use) and **applicable laws and regulations**. It must not be used for illegal purposes. | ||||||
|  |  | ||||||
| This project is open-sourced under the MIT license. One must somehow retain the copyright information of One API. | This project is released under the MIT license. Based on this, attribution and a link to this project must be included at the bottom of the page. | ||||||
|  |  | ||||||
|  | The same applies to derivative projects based on this project. | ||||||
|  |  | ||||||
|  | If you do not wish to include attribution, prior authorization must be obtained. | ||||||
|  |  | ||||||
| According to the MIT license, users should bear the risk and responsibility of using this project, and the developer of this open-source project is not responsible for this. | According to the MIT license, users should bear the risk and responsibility of using this project, and the developer of this open-source project is not responsible for this. | ||||||
|   | |||||||
							
								
								
									
										92
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										92
									
								
								README.md
									
									
									
									
									
								
							| @@ -11,7 +11,7 @@ | |||||||
|  |  | ||||||
| # One API | # One API | ||||||
|  |  | ||||||
| _✨ All in one 的 OpenAI 接口,整合各种 API 访问方式,开箱即用✨_ | _✨ 通过标准的 OpenAI API 格式访问所有的大模型,开箱即用 ✨_ | ||||||
|  |  | ||||||
| </div> | </div> | ||||||
|  |  | ||||||
| @@ -51,50 +51,60 @@ _✨ All in one 的 OpenAI 接口,整合各种 API 访问方式,开箱即用 | |||||||
|   <a href="https://iamazing.cn/page/reward">赞赏支持</a> |   <a href="https://iamazing.cn/page/reward">赞赏支持</a> | ||||||
| </p> | </p> | ||||||
|  |  | ||||||
|  | > **Note**:本项目为开源项目,使用者必须在遵循 OpenAI 的[使用条款](https://openai.com/policies/terms-of-use)以及**法律法规**的情况下使用,不得用于非法用途。 | ||||||
|  |  | ||||||
| > **Note**:使用 Docker 拉取的最新镜像可能是 `alpha` 版本,如果追求稳定性请手动指定版本。 | > **Note**:使用 Docker 拉取的最新镜像可能是 `alpha` 版本,如果追求稳定性请手动指定版本。 | ||||||
|  |  | ||||||
| > **Warning**:从 `v0.3` 版本升级到 `v0.4` 版本需要手动迁移数据库,请手动执行[数据库迁移脚本](./bin/migration_v0.3-v0.4.sql)。 | > **Warning**:从 `v0.3` 版本升级到 `v0.4` 版本需要手动迁移数据库,请手动执行[数据库迁移脚本](./bin/migration_v0.3-v0.4.sql)。 | ||||||
|  |  | ||||||
| ## 功能 | ## 功能 | ||||||
| 1. 支持多种 API 访问渠道,欢迎 PR 或提 issue 添加更多渠道: | 1. 支持多种大模型: | ||||||
|    + [x] OpenAI 官方通道(支持配置代理) |    + [x] [OpenAI ChatGPT 系列模型](https://platform.openai.com/docs/guides/gpt/chat-completions-api)(支持 [Azure OpenAI API](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference)) | ||||||
|    + [x] **Azure OpenAI API** |    + [x] [Anthropic Claude 系列模型](https://anthropic.com) | ||||||
|  |    + [x] [Google PaLM2 系列模型](https://developers.generativeai.google) | ||||||
|  |    + [x] [百度文心一言系列模型](https://cloud.baidu.com/doc/WENXINWORKSHOP/index.html) | ||||||
|  |    + [x] [智谱 ChatGLM 系列模型](https://bigmodel.cn) | ||||||
|  | 2. 支持配置镜像以及众多第三方代理服务: | ||||||
|  |    + [x] [API Distribute](https://api.gptjk.top/register?aff=QGxj) | ||||||
|    + [x] [OpenAI-SB](https://openai-sb.com) |    + [x] [OpenAI-SB](https://openai-sb.com) | ||||||
|    + [x] [API2D](https://api2d.com/r/197971) |    + [x] [API2D](https://api2d.com/r/197971) | ||||||
|    + [x] [OhMyGPT](https://aigptx.top?aff=uFpUl2Kf) |    + [x] [OhMyGPT](https://aigptx.top?aff=uFpUl2Kf) | ||||||
|    + [x] [AI Proxy](https://aiproxy.io/?i=OneAPI) (邀请码:`OneAPI`) |    + [x] [AI Proxy](https://aiproxy.io/?i=OneAPI) (邀请码:`OneAPI`) | ||||||
|    + [x] [API2GPT](http://console.api2gpt.com/m/00002S) |  | ||||||
|    + [x] [CloseAI](https://console.closeai-asia.com/r/2412) |    + [x] [CloseAI](https://console.closeai-asia.com/r/2412) | ||||||
|    + [x] [AI.LS](https://ai.ls) |  | ||||||
|    + [x] [OpenAI Max](https://openaimax.com) |  | ||||||
|    + [x] 自定义渠道:例如各种未收录的第三方代理服务 |    + [x] 自定义渠道:例如各种未收录的第三方代理服务 | ||||||
| 2. 支持通过**负载均衡**的方式访问多个渠道。 | 3. 支持通过**负载均衡**的方式访问多个渠道。 | ||||||
| 3. 支持 **stream 模式**,可以通过流式传输实现打字机效果。 | 4. 支持 **stream 模式**,可以通过流式传输实现打字机效果。 | ||||||
| 4. 支持**多机部署**,[详见此处](#多机部署)。 | 5. 支持**多机部署**,[详见此处](#多机部署)。 | ||||||
| 5. 支持**令牌管理**,设置令牌的过期时间和使用次数。 | 6. 支持**令牌管理**,设置令牌的过期时间和额度。 | ||||||
| 6. 支持**兑换码管理**,支持批量生成和导出兑换码,可使用兑换码为账户进行充值。 | 7. 支持**兑换码管理**,支持批量生成和导出兑换码,可使用兑换码为账户进行充值。 | ||||||
| 7. 支持**通道管理**,批量创建通道。 | 8. 支持**通道管理**,批量创建通道。 | ||||||
| 8. 支持**用户分组**以及**渠道分组**,支持为不同分组设置不同的倍率。 | 9. 支持**用户分组**以及**渠道分组**,支持为不同分组设置不同的倍率。 | ||||||
| 9. 支持渠道**设置模型列表**。 | 10. 支持渠道**设置模型列表**。 | ||||||
| 10. 支持**查看额度明细**。 | 11. 支持**查看额度明细**。 | ||||||
| 11. 支持**用户邀请奖励**。 | 12. 支持**用户邀请奖励**。 | ||||||
| 12. 支持以美元为单位显示额度。 | 13. 支持以美元为单位显示额度。 | ||||||
| 13. 支持发布公告,设置充值链接,设置新用户初始额度。 | 14. 支持发布公告,设置充值链接,设置新用户初始额度。 | ||||||
| 14. 支持丰富的**自定义**设置, | 15. 支持模型映射,重定向用户的请求模型。 | ||||||
|  | 16. 支持失败自动重试。 | ||||||
|  | 17. 支持绘图接口。 | ||||||
|  | 18. 支持丰富的**自定义**设置, | ||||||
|     1. 支持自定义系统名称,logo 以及页脚。 |     1. 支持自定义系统名称,logo 以及页脚。 | ||||||
|     2. 支持自定义首页和关于页面,可以选择使用 HTML & Markdown 代码进行自定义,或者使用一个单独的网页通过 iframe 嵌入。 |     2. 支持自定义首页和关于页面,可以选择使用 HTML & Markdown 代码进行自定义,或者使用一个单独的网页通过 iframe 嵌入。 | ||||||
| 15. 支持通过系统访问令牌访问管理 API。 | 19. 支持通过系统访问令牌访问管理 API。 | ||||||
| 16. 支持 Cloudflare Turnstile 用户校验。 | 20. 支持 Cloudflare Turnstile 用户校验。 | ||||||
| 17. 支持用户管理,支持**多种用户登录注册方式**: | 21. 支持用户管理,支持**多种用户登录注册方式**: | ||||||
|     + 邮箱登录注册以及通过邮箱进行密码重置。 |     + 邮箱登录注册以及通过邮箱进行密码重置。 | ||||||
|     + [GitHub 开放授权](https://github.com/settings/applications/new)。 |     + [GitHub 开放授权](https://github.com/settings/applications/new)。 | ||||||
|     + 微信公众号授权(需要额外部署 [WeChat Server](https://github.com/songquanpeng/wechat-server))。 |     + 微信公众号授权(需要额外部署 [WeChat Server](https://github.com/songquanpeng/wechat-server))。 | ||||||
| 18. 未来其他大模型开放 API 后,将第一时间支持,并将其封装成同样的 API 访问方式。 |  | ||||||
|  |  | ||||||
| ## 部署 | ## 部署 | ||||||
| ### 基于 Docker 进行部署 | ### 基于 Docker 进行部署 | ||||||
| 部署命令:`docker run --name one-api -d --restart always -p 3000:3000 -e TZ=Asia/Shanghai -v /home/ubuntu/data/one-api:/data justsong/one-api` | 部署命令:`docker run --name one-api -d --restart always -p 3000:3000 -e TZ=Asia/Shanghai -v /home/ubuntu/data/one-api:/data justsong/one-api` | ||||||
|  |  | ||||||
|  | 如果上面的镜像无法拉取,可以尝试使用 GitHub 的 Docker 镜像,将上面的 `justsong/one-api` 替换为 `ghcr.io/songquanpeng/one-api` 即可。 | ||||||
|  |  | ||||||
|  | 如果你的并发量较大,推荐设置 `SQL_DSN`,详见下面[环境变量](#环境变量)一节。 | ||||||
|  |  | ||||||
| 更新命令:`docker run --rm -v /var/run/docker.sock:/var/run/docker.sock containrrr/watchtower -cR` | 更新命令:`docker run --rm -v /var/run/docker.sock:/var/run/docker.sock containrrr/watchtower -cR` | ||||||
|  |  | ||||||
| `-p 3000:3000` 中的第一个 `3000` 是宿主机的端口,可以根据需要进行修改。 | `-p 3000:3000` 中的第一个 `3000` 是宿主机的端口,可以根据需要进行修改。 | ||||||
| @@ -114,6 +124,7 @@ server{ | |||||||
|           proxy_set_header X-Forwarded-For $remote_addr; |           proxy_set_header X-Forwarded-For $remote_addr; | ||||||
|           proxy_cache_bypass $http_upgrade; |           proxy_cache_bypass $http_upgrade; | ||||||
|           proxy_set_header Accept-Encoding gzip; |           proxy_set_header Accept-Encoding gzip; | ||||||
|  |           proxy_read_timeout 300s;  # GPT-4 需要较长的超时时间,请自行调整 | ||||||
|    } |    } | ||||||
| } | } | ||||||
| ``` | ``` | ||||||
| @@ -159,8 +170,8 @@ sudo service nginx restart | |||||||
| ### 多机部署 | ### 多机部署 | ||||||
| 1. 所有服务器 `SESSION_SECRET` 设置一样的值。 | 1. 所有服务器 `SESSION_SECRET` 设置一样的值。 | ||||||
| 2. 必须设置 `SQL_DSN`,使用 MySQL 数据库而非 SQLite,所有服务器连接同一个数据库。 | 2. 必须设置 `SQL_DSN`,使用 MySQL 数据库而非 SQLite,所有服务器连接同一个数据库。 | ||||||
| 3. 所有从服务器必须设置 `NODE_TYPE` 为 `slave`。 | 3. 所有从服务器必须设置 `NODE_TYPE` 为 `slave`,不设置则默认为主服务器。 | ||||||
| 4. 设置 `SYNC_FREQUENCY` 后服务器将定期从数据库同步配置。 | 4. 设置 `SYNC_FREQUENCY` 后服务器将定期从数据库同步配置,在使用远程数据库的情况下,推荐设置该项并启用 Redis,无论主从。 | ||||||
| 5. 从服务器可以选择设置 `FRONTEND_BASE_URL`,以重定向页面请求到主服务器。 | 5. 从服务器可以选择设置 `FRONTEND_BASE_URL`,以重定向页面请求到主服务器。 | ||||||
| 6. 从服务器上**分别**装好 Redis,设置好 `REDIS_CONN_STRING`,这样可以做到在缓存未过期的情况下数据库零访问,可以减少延迟。 | 6. 从服务器上**分别**装好 Redis,设置好 `REDIS_CONN_STRING`,这样可以做到在缓存未过期的情况下数据库零访问,可以减少延迟。 | ||||||
| 7. 如果主服务器访问数据库延迟也比较高,则也需要启用 Redis,并设置 `SYNC_FREQUENCY`,以定期从数据库同步配置。 | 7. 如果主服务器访问数据库延迟也比较高,则也需要启用 Redis,并设置 `SYNC_FREQUENCY`,以定期从数据库同步配置。 | ||||||
| @@ -183,7 +194,7 @@ sudo service nginx restart | |||||||
| docker run --name chat-next-web -d -p 3001:3000 yidadaa/chatgpt-next-web | docker run --name chat-next-web -d -p 3001:3000 yidadaa/chatgpt-next-web | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| 注意修改端口号和 `BASE_URL`。 | 注意修改端口号,之后在页面上设置接口地址(例如:https://openai.justsong.cn/ )和 API Key 即可。 | ||||||
|  |  | ||||||
| #### ChatGPT Web | #### ChatGPT Web | ||||||
| 项目主页:https://github.com/Chanzhaoyu/chatgpt-web | 项目主页:https://github.com/Chanzhaoyu/chatgpt-web | ||||||
| @@ -195,6 +206,17 @@ docker run --name chatgpt-web -d -p 3002:3002 -e OPENAI_API_BASE_URL=https://ope | |||||||
| 注意修改端口号、`OPENAI_API_BASE_URL` 和 `OPENAI_API_KEY`。 | 注意修改端口号、`OPENAI_API_BASE_URL` 和 `OPENAI_API_KEY`。 | ||||||
|  |  | ||||||
| ### 部署到第三方平台 | ### 部署到第三方平台 | ||||||
|  | <details> | ||||||
|  | <summary><strong>部署到 Sealos </strong></summary> | ||||||
|  | <div> | ||||||
|  |  | ||||||
|  | > Sealos 可视化部署,仅需 1 分钟。 | ||||||
|  |  | ||||||
|  | 参考这个[教程](https://github.com/c121914yu/FastGPT/blob/main/docs/deploy/one-api/sealos.md)中 1~5 步。 | ||||||
|  |  | ||||||
|  | </div> | ||||||
|  | </details> | ||||||
|  |  | ||||||
| <details> | <details> | ||||||
| <summary><strong>部署到 Zeabur</strong></summary> | <summary><strong>部署到 Zeabur</strong></summary> | ||||||
| <div> | <div> | ||||||
| @@ -221,6 +243,8 @@ docker run --name chatgpt-web -d -p 3002:3002 -e OPENAI_API_BASE_URL=https://ope | |||||||
|  |  | ||||||
| 等到系统启动后,使用 `root` 用户登录系统并做进一步的配置。 | 等到系统启动后,使用 `root` 用户登录系统并做进一步的配置。 | ||||||
|  |  | ||||||
|  | **Note**:如果你不知道某个配置项的含义,可以临时删掉值以看到进一步的提示文字。 | ||||||
|  |  | ||||||
| ## 使用方法 | ## 使用方法 | ||||||
| 在`渠道`页面中添加你的 API Key,之后在`令牌`页面中新增访问令牌。 | 在`渠道`页面中添加你的 API Key,之后在`令牌`页面中新增访问令牌。 | ||||||
|  |  | ||||||
| @@ -251,7 +275,10 @@ graph LR | |||||||
|    + 例子:`SESSION_SECRET=random_string` |    + 例子:`SESSION_SECRET=random_string` | ||||||
| 3. `SQL_DSN`:设置之后将使用指定数据库而非 SQLite,请使用 MySQL 8.0 版本。 | 3. `SQL_DSN`:设置之后将使用指定数据库而非 SQLite,请使用 MySQL 8.0 版本。 | ||||||
|    + 例子:`SQL_DSN=root:123456@tcp(localhost:3306)/oneapi` |    + 例子:`SQL_DSN=root:123456@tcp(localhost:3306)/oneapi` | ||||||
| 4. `FRONTEND_BASE_URL`:设置之后将使用指定的前端地址,而非后端地址。 |    + 注意需要提前建立数据库 `oneapi`,无需手动建表,程序将自动建表。 | ||||||
|  |    + 如果使用本地数据库:部署命令可添加 `--network="host"` 以使得容器内的程序可以访问到宿主机上的 MySQL。 | ||||||
|  |    + 如果使用云数据库:如果云服务器需要验证身份,需要在连接参数中添加 `?tls=skip-verify`。 | ||||||
|  | 4. `FRONTEND_BASE_URL`:设置之后将重定向页面请求到指定的地址,仅限从服务器设置。 | ||||||
|    + 例子:`FRONTEND_BASE_URL=https://openai.justsong.cn` |    + 例子:`FRONTEND_BASE_URL=https://openai.justsong.cn` | ||||||
| 5. `SYNC_FREQUENCY`:设置之后将定期与数据库同步配置,单位为秒,未设置则不进行同步。 | 5. `SYNC_FREQUENCY`:设置之后将定期与数据库同步配置,单位为秒,未设置则不进行同步。 | ||||||
|    + 例子:`SYNC_FREQUENCY=60` |    + 例子:`SYNC_FREQUENCY=60` | ||||||
| @@ -261,7 +288,7 @@ graph LR | |||||||
|    + 例子:`CHANNEL_UPDATE_FREQUENCY=1440` |    + 例子:`CHANNEL_UPDATE_FREQUENCY=1440` | ||||||
| 8. `CHANNEL_TEST_FREQUENCY`:设置之后将定期检查渠道,单位为分钟,未设置则不进行检查。 | 8. `CHANNEL_TEST_FREQUENCY`:设置之后将定期检查渠道,单位为分钟,未设置则不进行检查。 | ||||||
|    + 例子:`CHANNEL_TEST_FREQUENCY=1440` |    + 例子:`CHANNEL_TEST_FREQUENCY=1440` | ||||||
| 9. `REQUEST_INTERVAL`:批量更新渠道余额以及测试可用性时的请求间隔,单位为秒,默认无间隔。 | 9. `POLLING_INTERVAL`:批量更新渠道余额以及测试可用性时的请求间隔,单位为秒,默认无间隔。 | ||||||
|    + 例子:`POLLING_INTERVAL=5` |    + 例子:`POLLING_INTERVAL=5` | ||||||
|  |  | ||||||
| ### 命令行参数 | ### 命令行参数 | ||||||
| @@ -298,13 +325,16 @@ https://openai.justsong.cn | |||||||
| 5. ChatGPT Next Web 报错:`Failed to fetch` | 5. ChatGPT Next Web 报错:`Failed to fetch` | ||||||
|    + 部署的时候不要设置 `BASE_URL`。 |    + 部署的时候不要设置 `BASE_URL`。 | ||||||
|    + 检查你的接口地址和 API Key 有没有填对。 |    + 检查你的接口地址和 API Key 有没有填对。 | ||||||
|  | 6. 报错:`当前分组负载已饱和,请稍后再试` | ||||||
|  |    + 上游通道 429 了。 | ||||||
|  |  | ||||||
| ## 相关项目 | ## 相关项目 | ||||||
| [FastGPT](https://github.com/c121914yu/FastGPT): 三分钟搭建 AI 知识库 | [FastGPT](https://github.com/c121914yu/FastGPT): 三分钟搭建 AI 知识库 | ||||||
|  |  | ||||||
| ## 注意 | ## 注意 | ||||||
| 本项目为开源项目,请在遵循 OpenAI 的[使用条款](https://openai.com/policies/terms-of-use)以及**法律法规**的情况下使用,不得用于非法用途。 |  | ||||||
|  |  | ||||||
| 本项目使用 MIT 协议进行开源,请以某种方式保留 One API 的版权信息。 | 本项目使用 MIT 协议进行开源,**在此基础上**,必须在页面底部保留署名以及指向本项目的链接。如果不想保留署名,必须首先获得授权。 | ||||||
|  |  | ||||||
|  | 同样适用于基于本项目的二开项目。 | ||||||
|  |  | ||||||
| 依据 MIT 协议,使用者需自行承担使用本项目的风险与责任,本开源项目开发者与此无关。 | 依据 MIT 协议,使用者需自行承担使用本项目的风险与责任,本开源项目开发者与此无关。 | ||||||
| @@ -1,25 +1,29 @@ | |||||||
| #!/bin/bash | #!/bin/bash | ||||||
|  |  | ||||||
| if [ $# -ne 3 ]; then | if [ $# -lt 3 ]; then | ||||||
|   echo "Usage: time_test.sh <domain> <key> <count>" |   echo "Usage: time_test.sh <domain> <key> <count> [<model>]" | ||||||
|   exit 1 |   exit 1 | ||||||
| fi | fi | ||||||
|  |  | ||||||
| domain=$1 | domain=$1 | ||||||
| key=$2 | key=$2 | ||||||
| count=$3 | count=$3 | ||||||
|  | model=${4:-"gpt-3.5-turbo"} # 设置默认模型为 gpt-3.5-turbo | ||||||
|  |  | ||||||
| total_time=0 | total_time=0 | ||||||
| times=() | times=() | ||||||
|  |  | ||||||
| for ((i=1; i<=count; i++)); do | for ((i=1; i<=count; i++)); do | ||||||
|   result=$(curl -o /dev/null -s -w %{time_total}\\n \ |   result=$(curl -o /dev/null -s -w "%{http_code} %{time_total}\\n" \ | ||||||
|            https://"$domain"/v1/chat/completions \ |            https://"$domain"/v1/chat/completions \ | ||||||
|            -H "Content-Type: application/json" \ |            -H "Content-Type: application/json" \ | ||||||
|            -H "Authorization: Bearer $key" \ |            -H "Authorization: Bearer $key" \ | ||||||
|            -d '{"messages": [{"content": "echo hi", "role": "user"}], "model": "gpt-3.5-turbo", "stream": false, "max_tokens": 1}') |            -d '{"messages": [{"content": "echo hi", "role": "user"}], "model": "'"$model"'", "stream": false, "max_tokens": 1}') | ||||||
|   echo "$result" |   http_code=$(echo "$result" | awk '{print $1}') | ||||||
|   total_time=$(bc <<< "$total_time + $result") |   time=$(echo "$result" | awk '{print $2}') | ||||||
|   times+=("$result") |   echo "HTTP status code: $http_code, Time taken: $time" | ||||||
|  |   total_time=$(bc <<< "$total_time + $time") | ||||||
|  |   times+=("$time") | ||||||
| done | done | ||||||
|  |  | ||||||
| average_time=$(echo "scale=4; $total_time / $count" | bc) | average_time=$(echo "scale=4; $total_time / $count" | bc) | ||||||
|   | |||||||
| @@ -67,14 +67,18 @@ var ChannelDisableThreshold = 5.0 | |||||||
| var AutomaticDisableChannelEnabled = false | var AutomaticDisableChannelEnabled = false | ||||||
| var QuotaRemindThreshold = 1000 | var QuotaRemindThreshold = 1000 | ||||||
| var PreConsumedQuota = 500 | var PreConsumedQuota = 500 | ||||||
|  | var ApproximateTokenEnabled = false | ||||||
|  | var RetryTimes = 0 | ||||||
|  |  | ||||||
| var RootUserEmail = "" | var RootUserEmail = "" | ||||||
|  |  | ||||||
| var IsMasterNode = os.Getenv("NODE_TYPE") != "slave" | var IsMasterNode = os.Getenv("NODE_TYPE") != "slave" | ||||||
|  |  | ||||||
| var requestInterval, _ = strconv.Atoi(os.Getenv("REQUEST_INTERVAL")) | var requestInterval, _ = strconv.Atoi(os.Getenv("POLLING_INTERVAL")) | ||||||
| var RequestInterval = time.Duration(requestInterval) * time.Second | var RequestInterval = time.Duration(requestInterval) * time.Second | ||||||
|  |  | ||||||
|  | var SyncFrequency = 10 * 60 // unit is second, will be overwritten by SYNC_FREQUENCY | ||||||
|  |  | ||||||
| const ( | const ( | ||||||
| 	RoleGuestUser  = 0 | 	RoleGuestUser  = 0 | ||||||
| 	RoleCommonUser = 1 | 	RoleCommonUser = 1 | ||||||
| @@ -148,6 +152,10 @@ const ( | |||||||
| 	ChannelTypeAIProxy   = 10 | 	ChannelTypeAIProxy   = 10 | ||||||
| 	ChannelTypePaLM      = 11 | 	ChannelTypePaLM      = 11 | ||||||
| 	ChannelTypeAPI2GPT   = 12 | 	ChannelTypeAPI2GPT   = 12 | ||||||
|  | 	ChannelTypeAIGC2D    = 13 | ||||||
|  | 	ChannelTypeAnthropic = 14 | ||||||
|  | 	ChannelTypeBaidu     = 15 | ||||||
|  | 	ChannelTypeZhipu     = 16 | ||||||
| ) | ) | ||||||
|  |  | ||||||
| var ChannelBaseURLs = []string{ | var ChannelBaseURLs = []string{ | ||||||
| @@ -155,7 +163,7 @@ var ChannelBaseURLs = []string{ | |||||||
| 	"https://api.openai.com",        // 1 | 	"https://api.openai.com",        // 1 | ||||||
| 	"https://oa.api2d.net",          // 2 | 	"https://oa.api2d.net",          // 2 | ||||||
| 	"",                              // 3 | 	"",                              // 3 | ||||||
| 	"https://api.openai-proxy.org", // 4 | 	"https://api.closeai-proxy.xyz", // 4 | ||||||
| 	"https://api.openai-sb.com",     // 5 | 	"https://api.openai-sb.com",     // 5 | ||||||
| 	"https://api.openaimax.com",     // 6 | 	"https://api.openaimax.com",     // 6 | ||||||
| 	"https://api.ohmygpt.com",       // 7 | 	"https://api.ohmygpt.com",       // 7 | ||||||
| @@ -164,4 +172,8 @@ var ChannelBaseURLs = []string{ | |||||||
| 	"https://api.aiproxy.io",        // 10 | 	"https://api.aiproxy.io",        // 10 | ||||||
| 	"",                              // 11 | 	"",                              // 11 | ||||||
| 	"https://api.api2gpt.com",       // 12 | 	"https://api.api2gpt.com",       // 12 | ||||||
|  | 	"https://api.aigc2d.com",        // 13 | ||||||
|  | 	"https://api.anthropic.com",     // 14 | ||||||
|  | 	"https://aip.baidubce.com",      // 15 | ||||||
|  | 	"https://open.bigmodel.cn",      // 16 | ||||||
| } | } | ||||||
|   | |||||||
| @@ -4,9 +4,11 @@ import "encoding/json" | |||||||
|  |  | ||||||
| // ModelRatio | // ModelRatio | ||||||
| // https://platform.openai.com/docs/models/model-endpoint-compatibility | // https://platform.openai.com/docs/models/model-endpoint-compatibility | ||||||
|  | // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Blfmc9dlf | ||||||
| // https://openai.com/pricing | // https://openai.com/pricing | ||||||
| // TODO: when a new api is enabled, check the pricing here | // TODO: when a new api is enabled, check the pricing here | ||||||
| // 1 === $0.002 / 1K tokens | // 1 === $0.002 / 1K tokens | ||||||
|  | // 1 === ¥0.014 / 1k tokens | ||||||
| var ModelRatio = map[string]float64{ | var ModelRatio = map[string]float64{ | ||||||
| 	"gpt-4":                   15, | 	"gpt-4":                   15, | ||||||
| 	"gpt-4-0314":              15, | 	"gpt-4-0314":              15, | ||||||
| @@ -31,10 +33,19 @@ var ModelRatio = map[string]float64{ | |||||||
| 	"curie":                   10, | 	"curie":                   10, | ||||||
| 	"babbage":                 10, | 	"babbage":                 10, | ||||||
| 	"ada":                     10, | 	"ada":                     10, | ||||||
| 	"text-embedding-ada-002":  0.2, | 	"text-embedding-ada-002":  0.05, | ||||||
| 	"text-search-ada-doc-001": 10, | 	"text-search-ada-doc-001": 10, | ||||||
| 	"text-moderation-stable":  0.1, | 	"text-moderation-stable":  0.1, | ||||||
| 	"text-moderation-latest":  0.1, | 	"text-moderation-latest":  0.1, | ||||||
|  | 	"dall-e":                  8, | ||||||
|  | 	"claude-instant-1":        0.75, | ||||||
|  | 	"claude-2":                30, | ||||||
|  | 	"ERNIE-Bot":               0.8572, // ¥0.012 / 1k tokens | ||||||
|  | 	"ERNIE-Bot-turbo":         0.5715, // ¥0.008 / 1k tokens | ||||||
|  | 	"PaLM-2":                  1, | ||||||
|  | 	"chatglm_pro":             0.7143, // ¥0.01 / 1k tokens | ||||||
|  | 	"chatglm_std":             0.3572, // ¥0.005 / 1k tokens | ||||||
|  | 	"chatglm_lite":            0.1429, // ¥0.002 / 1k tokens | ||||||
| } | } | ||||||
|  |  | ||||||
| func ModelRatio2JSONString() string { | func ModelRatio2JSONString() string { | ||||||
|   | |||||||
| @@ -7,16 +7,24 @@ import ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| func GetSubscription(c *gin.Context) { | func GetSubscription(c *gin.Context) { | ||||||
| 	var quota int | 	var remainQuota int | ||||||
|  | 	var usedQuota int | ||||||
| 	var err error | 	var err error | ||||||
| 	var token *model.Token | 	var token *model.Token | ||||||
|  | 	var expiredTime int64 | ||||||
| 	if common.DisplayTokenStatEnabled { | 	if common.DisplayTokenStatEnabled { | ||||||
| 		tokenId := c.GetInt("token_id") | 		tokenId := c.GetInt("token_id") | ||||||
| 		token, err = model.GetTokenById(tokenId) | 		token, err = model.GetTokenById(tokenId) | ||||||
| 		quota = token.RemainQuota | 		expiredTime = token.ExpiredTime | ||||||
|  | 		remainQuota = token.RemainQuota | ||||||
|  | 		usedQuota = token.UsedQuota | ||||||
| 	} else { | 	} else { | ||||||
| 		userId := c.GetInt("id") | 		userId := c.GetInt("id") | ||||||
| 		quota, err = model.GetUserQuota(userId) | 		remainQuota, err = model.GetUserQuota(userId) | ||||||
|  | 		usedQuota, err = model.GetUserUsedQuota(userId) | ||||||
|  | 	} | ||||||
|  | 	if expiredTime <= 0 { | ||||||
|  | 		expiredTime = 0 | ||||||
| 	} | 	} | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		openAIError := OpenAIError{ | 		openAIError := OpenAIError{ | ||||||
| @@ -28,12 +36,13 @@ func GetSubscription(c *gin.Context) { | |||||||
| 		}) | 		}) | ||||||
| 		return | 		return | ||||||
| 	} | 	} | ||||||
|  | 	quota := remainQuota + usedQuota | ||||||
| 	amount := float64(quota) | 	amount := float64(quota) | ||||||
| 	if common.DisplayInCurrencyEnabled { | 	if common.DisplayInCurrencyEnabled { | ||||||
| 		amount /= common.QuotaPerUnit | 		amount /= common.QuotaPerUnit | ||||||
| 	} | 	} | ||||||
| 	if token != nil && token.UnlimitedQuota { | 	if token != nil && token.UnlimitedQuota { | ||||||
| 		amount = 99999999.9999 | 		amount = 100000000 | ||||||
| 	} | 	} | ||||||
| 	subscription := OpenAISubscriptionResponse{ | 	subscription := OpenAISubscriptionResponse{ | ||||||
| 		Object:             "billing_subscription", | 		Object:             "billing_subscription", | ||||||
| @@ -41,6 +50,7 @@ func GetSubscription(c *gin.Context) { | |||||||
| 		SoftLimitUSD:       amount, | 		SoftLimitUSD:       amount, | ||||||
| 		HardLimitUSD:       amount, | 		HardLimitUSD:       amount, | ||||||
| 		SystemHardLimitUSD: amount, | 		SystemHardLimitUSD: amount, | ||||||
|  | 		AccessUntil:        expiredTime, | ||||||
| 	} | 	} | ||||||
| 	c.JSON(200, subscription) | 	c.JSON(200, subscription) | ||||||
| 	return | 	return | ||||||
|   | |||||||
| @@ -22,6 +22,7 @@ type OpenAISubscriptionResponse struct { | |||||||
| 	SoftLimitUSD       float64 `json:"soft_limit_usd"` | 	SoftLimitUSD       float64 `json:"soft_limit_usd"` | ||||||
| 	HardLimitUSD       float64 `json:"hard_limit_usd"` | 	HardLimitUSD       float64 `json:"hard_limit_usd"` | ||||||
| 	SystemHardLimitUSD float64 `json:"system_hard_limit_usd"` | 	SystemHardLimitUSD float64 `json:"system_hard_limit_usd"` | ||||||
|  | 	AccessUntil        int64   `json:"access_until"` | ||||||
| } | } | ||||||
|  |  | ||||||
| type OpenAIUsageDailyCost struct { | type OpenAIUsageDailyCost struct { | ||||||
| @@ -32,6 +33,13 @@ type OpenAIUsageDailyCost struct { | |||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type OpenAICreditGrants struct { | ||||||
|  | 	Object         string  `json:"object"` | ||||||
|  | 	TotalGranted   float64 `json:"total_granted"` | ||||||
|  | 	TotalUsed      float64 `json:"total_used"` | ||||||
|  | 	TotalAvailable float64 `json:"total_available"` | ||||||
|  | } | ||||||
|  |  | ||||||
| type OpenAIUsageResponse struct { | type OpenAIUsageResponse struct { | ||||||
| 	Object string `json:"object"` | 	Object string `json:"object"` | ||||||
| 	//DailyCosts []OpenAIUsageDailyCost `json:"daily_costs"` | 	//DailyCosts []OpenAIUsageDailyCost `json:"daily_costs"` | ||||||
| @@ -61,6 +69,14 @@ type API2GPTUsageResponse struct { | |||||||
| 	TotalRemaining float64 `json:"total_remaining"` | 	TotalRemaining float64 `json:"total_remaining"` | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type APGC2DGPTUsageResponse struct { | ||||||
|  | 	//Grants         interface{} `json:"grants"` | ||||||
|  | 	Object         string  `json:"object"` | ||||||
|  | 	TotalAvailable float64 `json:"total_available"` | ||||||
|  | 	TotalGranted   float64 `json:"total_granted"` | ||||||
|  | 	TotalUsed      float64 `json:"total_used"` | ||||||
|  | } | ||||||
|  |  | ||||||
| // GetAuthHeader get auth header | // GetAuthHeader get auth header | ||||||
| func GetAuthHeader(token string) http.Header { | func GetAuthHeader(token string) http.Header { | ||||||
| 	h := http.Header{} | 	h := http.Header{} | ||||||
| @@ -69,7 +85,6 @@ func GetAuthHeader(token string) http.Header { | |||||||
| } | } | ||||||
|  |  | ||||||
| func GetResponseBody(method, url string, channel *model.Channel, headers http.Header) ([]byte, error) { | func GetResponseBody(method, url string, channel *model.Channel, headers http.Header) ([]byte, error) { | ||||||
| 	client := &http.Client{} |  | ||||||
| 	req, err := http.NewRequest(method, url, nil) | 	req, err := http.NewRequest(method, url, nil) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, err | ||||||
| @@ -77,10 +92,13 @@ func GetResponseBody(method, url string, channel *model.Channel, headers http.He | |||||||
| 	for k := range headers { | 	for k := range headers { | ||||||
| 		req.Header.Add(k, headers.Get(k)) | 		req.Header.Add(k, headers.Get(k)) | ||||||
| 	} | 	} | ||||||
| 	res, err := client.Do(req) | 	res, err := httpClient.Do(req) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, err | ||||||
| 	} | 	} | ||||||
|  | 	if res.StatusCode != http.StatusOK { | ||||||
|  | 		return nil, fmt.Errorf("status code: %d", res.StatusCode) | ||||||
|  | 	} | ||||||
| 	body, err := io.ReadAll(res.Body) | 	body, err := io.ReadAll(res.Body) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return nil, err | 		return nil, err | ||||||
| @@ -92,6 +110,22 @@ func GetResponseBody(method, url string, channel *model.Channel, headers http.He | |||||||
| 	return body, nil | 	return body, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func updateChannelCloseAIBalance(channel *model.Channel) (float64, error) { | ||||||
|  | 	url := fmt.Sprintf("%s/dashboard/billing/credit_grants", channel.BaseURL) | ||||||
|  | 	body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) | ||||||
|  |  | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, err | ||||||
|  | 	} | ||||||
|  | 	response := OpenAICreditGrants{} | ||||||
|  | 	err = json.Unmarshal(body, &response) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, err | ||||||
|  | 	} | ||||||
|  | 	channel.UpdateBalance(response.TotalAvailable) | ||||||
|  | 	return response.TotalAvailable, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func updateChannelOpenAISBBalance(channel *model.Channel) (float64, error) { | func updateChannelOpenAISBBalance(channel *model.Channel) (float64, error) { | ||||||
| 	url := fmt.Sprintf("https://api.openai-sb.com/sb-api/user/status?api_key=%s", channel.Key) | 	url := fmt.Sprintf("https://api.openai-sb.com/sb-api/user/status?api_key=%s", channel.Key) | ||||||
| 	body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) | 	body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) | ||||||
| @@ -150,8 +184,26 @@ func updateChannelAPI2GPTBalance(channel *model.Channel) (float64, error) { | |||||||
| 	return response.TotalRemaining, nil | 	return response.TotalRemaining, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func updateChannelAIGC2DBalance(channel *model.Channel) (float64, error) { | ||||||
|  | 	url := "https://api.aigc2d.com/dashboard/billing/credit_grants" | ||||||
|  | 	body, err := GetResponseBody("GET", url, channel, GetAuthHeader(channel.Key)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, err | ||||||
|  | 	} | ||||||
|  | 	response := APGC2DGPTUsageResponse{} | ||||||
|  | 	err = json.Unmarshal(body, &response) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return 0, err | ||||||
|  | 	} | ||||||
|  | 	channel.UpdateBalance(response.TotalAvailable) | ||||||
|  | 	return response.TotalAvailable, nil | ||||||
|  | } | ||||||
|  |  | ||||||
| func updateChannelBalance(channel *model.Channel) (float64, error) { | func updateChannelBalance(channel *model.Channel) (float64, error) { | ||||||
| 	baseURL := common.ChannelBaseURLs[channel.Type] | 	baseURL := common.ChannelBaseURLs[channel.Type] | ||||||
|  | 	if channel.BaseURL == "" { | ||||||
|  | 		channel.BaseURL = baseURL | ||||||
|  | 	} | ||||||
| 	switch channel.Type { | 	switch channel.Type { | ||||||
| 	case common.ChannelTypeOpenAI: | 	case common.ChannelTypeOpenAI: | ||||||
| 		if channel.BaseURL != "" { | 		if channel.BaseURL != "" { | ||||||
| @@ -161,12 +213,16 @@ func updateChannelBalance(channel *model.Channel) (float64, error) { | |||||||
| 		return 0, errors.New("尚未实现") | 		return 0, errors.New("尚未实现") | ||||||
| 	case common.ChannelTypeCustom: | 	case common.ChannelTypeCustom: | ||||||
| 		baseURL = channel.BaseURL | 		baseURL = channel.BaseURL | ||||||
|  | 	case common.ChannelTypeCloseAI: | ||||||
|  | 		return updateChannelCloseAIBalance(channel) | ||||||
| 	case common.ChannelTypeOpenAISB: | 	case common.ChannelTypeOpenAISB: | ||||||
| 		return updateChannelOpenAISBBalance(channel) | 		return updateChannelOpenAISBBalance(channel) | ||||||
| 	case common.ChannelTypeAIProxy: | 	case common.ChannelTypeAIProxy: | ||||||
| 		return updateChannelAIProxyBalance(channel) | 		return updateChannelAIProxyBalance(channel) | ||||||
| 	case common.ChannelTypeAPI2GPT: | 	case common.ChannelTypeAPI2GPT: | ||||||
| 		return updateChannelAPI2GPTBalance(channel) | 		return updateChannelAPI2GPTBalance(channel) | ||||||
|  | 	case common.ChannelTypeAIGC2D: | ||||||
|  | 		return updateChannelAIGC2DBalance(channel) | ||||||
| 	default: | 	default: | ||||||
| 		return 0, errors.New("尚未实现") | 		return 0, errors.New("尚未实现") | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -14,8 +14,16 @@ import ( | |||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func testChannel(channel *model.Channel, request ChatRequest) error { | func testChannel(channel *model.Channel, request ChatRequest) (error, *OpenAIError) { | ||||||
| 	switch channel.Type { | 	switch channel.Type { | ||||||
|  | 	case common.ChannelTypePaLM: | ||||||
|  | 		fallthrough | ||||||
|  | 	case common.ChannelTypeAnthropic: | ||||||
|  | 		fallthrough | ||||||
|  | 	case common.ChannelTypeBaidu: | ||||||
|  | 		fallthrough | ||||||
|  | 	case common.ChannelTypeZhipu: | ||||||
|  | 		return errors.New("该渠道类型当前版本不支持测试,请手动测试"), nil | ||||||
| 	case common.ChannelTypeAzure: | 	case common.ChannelTypeAzure: | ||||||
| 		request.Model = "gpt-35-turbo" | 		request.Model = "gpt-35-turbo" | ||||||
| 	default: | 	default: | ||||||
| @@ -33,11 +41,11 @@ func testChannel(channel *model.Channel, request ChatRequest) error { | |||||||
|  |  | ||||||
| 	jsonData, err := json.Marshal(request) | 	jsonData, err := json.Marshal(request) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err, nil | ||||||
| 	} | 	} | ||||||
| 	req, err := http.NewRequest("POST", requestURL, bytes.NewBuffer(jsonData)) | 	req, err := http.NewRequest("POST", requestURL, bytes.NewBuffer(jsonData)) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err, nil | ||||||
| 	} | 	} | ||||||
| 	if channel.Type == common.ChannelTypeAzure { | 	if channel.Type == common.ChannelTypeAzure { | ||||||
| 		req.Header.Set("api-key", channel.Key) | 		req.Header.Set("api-key", channel.Key) | ||||||
| @@ -45,21 +53,20 @@ func testChannel(channel *model.Channel, request ChatRequest) error { | |||||||
| 		req.Header.Set("Authorization", "Bearer "+channel.Key) | 		req.Header.Set("Authorization", "Bearer "+channel.Key) | ||||||
| 	} | 	} | ||||||
| 	req.Header.Set("Content-Type", "application/json") | 	req.Header.Set("Content-Type", "application/json") | ||||||
| 	client := &http.Client{} | 	resp, err := httpClient.Do(req) | ||||||
| 	resp, err := client.Do(req) |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err, nil | ||||||
| 	} | 	} | ||||||
| 	defer resp.Body.Close() | 	defer resp.Body.Close() | ||||||
| 	var response TextResponse | 	var response TextResponse | ||||||
| 	err = json.NewDecoder(resp.Body).Decode(&response) | 	err = json.NewDecoder(resp.Body).Decode(&response) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return err | 		return err, nil | ||||||
| 	} | 	} | ||||||
| 	if response.Usage.CompletionTokens == 0 { | 	if response.Usage.CompletionTokens == 0 { | ||||||
| 		return errors.New(fmt.Sprintf("type %s, code %v, message %s", response.Error.Type, response.Error.Code, response.Error.Message)) | 		return errors.New(fmt.Sprintf("type %s, code %v, message %s", response.Error.Type, response.Error.Code, response.Error.Message)), &response.Error | ||||||
| 	} | 	} | ||||||
| 	return nil | 	return nil, nil | ||||||
| } | } | ||||||
|  |  | ||||||
| func buildTestRequest() *ChatRequest { | func buildTestRequest() *ChatRequest { | ||||||
| @@ -94,7 +101,7 @@ func TestChannel(c *gin.Context) { | |||||||
| 	} | 	} | ||||||
| 	testRequest := buildTestRequest() | 	testRequest := buildTestRequest() | ||||||
| 	tik := time.Now() | 	tik := time.Now() | ||||||
| 	err = testChannel(channel, *testRequest) | 	err, _ = testChannel(channel, *testRequest) | ||||||
| 	tok := time.Now() | 	tok := time.Now() | ||||||
| 	milliseconds := tok.Sub(tik).Milliseconds() | 	milliseconds := tok.Sub(tik).Milliseconds() | ||||||
| 	go channel.UpdateResponseTime(milliseconds) | 	go channel.UpdateResponseTime(milliseconds) | ||||||
| @@ -158,13 +165,14 @@ func testAllChannels(notify bool) error { | |||||||
| 				continue | 				continue | ||||||
| 			} | 			} | ||||||
| 			tik := time.Now() | 			tik := time.Now() | ||||||
| 			err := testChannel(channel, *testRequest) | 			err, openaiErr := testChannel(channel, *testRequest) | ||||||
| 			tok := time.Now() | 			tok := time.Now() | ||||||
| 			milliseconds := tok.Sub(tik).Milliseconds() | 			milliseconds := tok.Sub(tik).Milliseconds() | ||||||
| 			if err != nil || milliseconds > disableThreshold { |  | ||||||
| 			if milliseconds > disableThreshold { | 			if milliseconds > disableThreshold { | ||||||
| 				err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0)) | 				err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0)) | ||||||
|  | 				disableChannel(channel.Id, channel.Name, err.Error()) | ||||||
| 			} | 			} | ||||||
|  | 			if shouldDisableChannel(openaiErr) { | ||||||
| 				disableChannel(channel.Id, channel.Name, err.Error()) | 				disableChannel(channel.Id, channel.Name, err.Error()) | ||||||
| 			} | 			} | ||||||
| 			channel.UpdateResponseTime(milliseconds) | 			channel.UpdateResponseTime(milliseconds) | ||||||
|   | |||||||
| @@ -128,7 +128,8 @@ func SendPasswordResetEmail(c *gin.Context) { | |||||||
| 	subject := fmt.Sprintf("%s密码重置", common.SystemName) | 	subject := fmt.Sprintf("%s密码重置", common.SystemName) | ||||||
| 	content := fmt.Sprintf("<p>您好,你正在进行%s密码重置。</p>"+ | 	content := fmt.Sprintf("<p>您好,你正在进行%s密码重置。</p>"+ | ||||||
| 		"<p>点击 <a href='%s'>此处</a> 进行密码重置。</p>"+ | 		"<p>点击 <a href='%s'>此处</a> 进行密码重置。</p>"+ | ||||||
| 		"<p>重置链接 %d 分钟内有效,如果不是本人操作,请忽略。</p>", common.SystemName, link, common.VerificationValidMinutes) | 		"<p>如果链接无法点击,请尝试点击下面的链接或将其复制到浏览器中打开:<br> %s </p>"+ | ||||||
|  | 		"<p>重置链接 %d 分钟内有效,如果不是本人操作,请忽略。</p>", common.SystemName, link, link, common.VerificationValidMinutes) | ||||||
| 	err := common.SendEmail(subject, email, content) | 	err := common.SendEmail(subject, email, content) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		c.JSON(http.StatusOK, gin.H{ | 		c.JSON(http.StatusOK, gin.H{ | ||||||
|   | |||||||
| @@ -2,6 +2,7 @@ package controller | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  |  | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -53,6 +54,15 @@ func init() { | |||||||
| 	}) | 	}) | ||||||
| 	// https://platform.openai.com/docs/models/model-endpoint-compatibility | 	// https://platform.openai.com/docs/models/model-endpoint-compatibility | ||||||
| 	openAIModels = []OpenAIModels{ | 	openAIModels = []OpenAIModels{ | ||||||
|  | 		{ | ||||||
|  | 			Id:         "dall-e", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "openai", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "dall-e", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
| 		{ | 		{ | ||||||
| 			Id:         "gpt-3.5-turbo", | 			Id:         "gpt-3.5-turbo", | ||||||
| 			Object:     "model", | 			Object:     "model", | ||||||
| @@ -224,6 +234,96 @@ func init() { | |||||||
| 			Root:       "text-moderation-stable", | 			Root:       "text-moderation-stable", | ||||||
| 			Parent:     nil, | 			Parent:     nil, | ||||||
| 		}, | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "text-davinci-edit-001", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "openai", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "text-davinci-edit-001", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "code-davinci-edit-001", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "openai", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "code-davinci-edit-001", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "claude-instant-1", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "anturopic", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "claude-instant-1", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "claude-2", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "anturopic", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "claude-2", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "ERNIE-Bot", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "baidu", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "ERNIE-Bot", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "ERNIE-Bot-turbo", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "baidu", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "ERNIE-Bot-turbo", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "PaLM-2", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "google", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "PaLM-2", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "chatglm_pro", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "zhipu", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "chatglm_pro", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "chatglm_std", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "zhipu", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "chatglm_std", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
|  | 		{ | ||||||
|  | 			Id:         "chatglm_lite", | ||||||
|  | 			Object:     "model", | ||||||
|  | 			Created:    1677649963, | ||||||
|  | 			OwnedBy:    "zhipu", | ||||||
|  | 			Permission: permission, | ||||||
|  | 			Root:       "chatglm_lite", | ||||||
|  | 			Parent:     nil, | ||||||
|  | 		}, | ||||||
| 	} | 	} | ||||||
| 	openAIModelsMap = make(map[string]OpenAIModels) | 	openAIModelsMap = make(map[string]OpenAIModels) | ||||||
| 	for _, model := range openAIModels { | 	for _, model := range openAIModels { | ||||||
|   | |||||||
							
								
								
									
										214
									
								
								controller/relay-baidu.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										214
									
								
								controller/relay-baidu.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,214 @@ | |||||||
|  | package controller | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bufio" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"one-api/common" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2 | ||||||
|  |  | ||||||
// BaiduTokenResponse mirrors the response of Baidu's OAuth token endpoint
// used to obtain an access token for the ERNIE API.
type BaiduTokenResponse struct {
	RefreshToken  string `json:"refresh_token"`
	ExpiresIn     int    `json:"expires_in"` // presumably a lifetime in seconds — confirm against Baidu docs
	SessionKey    string `json:"session_key"`
	AccessToken   string `json:"access_token"`
	Scope         string `json:"scope"`
	SessionSecret string `json:"session_secret"`
}

// BaiduMessage is a single chat turn in Baidu's request format.
type BaiduMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// BaiduChatRequest is the payload sent to Baidu's chat completion endpoint.
type BaiduChatRequest struct {
	Messages []BaiduMessage `json:"messages"`
	Stream   bool           `json:"stream"`
	UserId   string         `json:"user_id,omitempty"`
}

// BaiduError carries Baidu's error fields; a zero ErrorMsg means success.
type BaiduError struct {
	ErrorCode int    `json:"error_code"`
	ErrorMsg  string `json:"error_msg"`
}

// BaiduChatResponse is a non-streaming chat completion response.
// BaiduError is embedded so error payloads decode into the same struct.
type BaiduChatResponse struct {
	Id               string `json:"id"`
	Object           string `json:"object"`
	Created          int64  `json:"created"`
	Result           string `json:"result"`
	IsTruncated      bool   `json:"is_truncated"`
	NeedClearHistory bool   `json:"need_clear_history"`
	Usage            Usage  `json:"usage"`
	BaiduError
}

// BaiduChatStreamResponse is one chunk of a streaming response;
// SentenceId and IsEnd are the stream-only additions.
type BaiduChatStreamResponse struct {
	BaiduChatResponse
	SentenceId int  `json:"sentence_id"`
	IsEnd      bool `json:"is_end"`
}
|  |  | ||||||
|  | func requestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduChatRequest { | ||||||
|  | 	messages := make([]BaiduMessage, 0, len(request.Messages)) | ||||||
|  | 	for _, message := range request.Messages { | ||||||
|  | 		if message.Role == "system" { | ||||||
|  | 			messages = append(messages, BaiduMessage{ | ||||||
|  | 				Role:    "user", | ||||||
|  | 				Content: message.Content, | ||||||
|  | 			}) | ||||||
|  | 			messages = append(messages, BaiduMessage{ | ||||||
|  | 				Role:    "assistant", | ||||||
|  | 				Content: "Okay", | ||||||
|  | 			}) | ||||||
|  | 		} else { | ||||||
|  | 			messages = append(messages, BaiduMessage{ | ||||||
|  | 				Role:    message.Role, | ||||||
|  | 				Content: message.Content, | ||||||
|  | 			}) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	return &BaiduChatRequest{ | ||||||
|  | 		Messages: messages, | ||||||
|  | 		Stream:   request.Stream, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func responseBaidu2OpenAI(response *BaiduChatResponse) *OpenAITextResponse { | ||||||
|  | 	choice := OpenAITextResponseChoice{ | ||||||
|  | 		Index: 0, | ||||||
|  | 		Message: Message{ | ||||||
|  | 			Role:    "assistant", | ||||||
|  | 			Content: response.Result, | ||||||
|  | 		}, | ||||||
|  | 		FinishReason: "stop", | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse := OpenAITextResponse{ | ||||||
|  | 		Id:      response.Id, | ||||||
|  | 		Object:  "chat.completion", | ||||||
|  | 		Created: response.Created, | ||||||
|  | 		Choices: []OpenAITextResponseChoice{choice}, | ||||||
|  | 		Usage:   response.Usage, | ||||||
|  | 	} | ||||||
|  | 	return &fullTextResponse | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStreamResponse) *ChatCompletionsStreamResponse { | ||||||
|  | 	var choice ChatCompletionsStreamResponseChoice | ||||||
|  | 	choice.Delta.Content = baiduResponse.Result | ||||||
|  | 	choice.FinishReason = "stop" | ||||||
|  | 	response := ChatCompletionsStreamResponse{ | ||||||
|  | 		Id:      baiduResponse.Id, | ||||||
|  | 		Object:  "chat.completion.chunk", | ||||||
|  | 		Created: baiduResponse.Created, | ||||||
|  | 		Model:   "ernie-bot", | ||||||
|  | 		Choices: []ChatCompletionsStreamResponseChoice{choice}, | ||||||
|  | 	} | ||||||
|  | 	return &response | ||||||
|  | } | ||||||
|  |  | ||||||
// baiduStreamHandler relays a streaming Baidu chat response to the client as
// OpenAI-style server-sent events, accumulating token usage across chunks.
// Returns a wrapped error only if closing the upstream body fails.
func baiduStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var usage Usage
	scanner := bufio.NewScanner(resp.Body)
	// Custom split function: tokens are newline-delimited lines; a trailing
	// partial line at EOF is emitted as-is.
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	// Reader goroutine: feed SSE payloads into dataChan, then signal EOF.
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 6 { // ignore blank line or wrong format
				continue
			}
			data = data[6:] // strip the "data: " SSE prefix
			dataChan <- data
		}
		stopChan <- true
	}()
	// Standard SSE response headers; X-Accel-Buffering disables nginx buffering.
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			var baiduResponse BaiduChatStreamResponse
			err := json.Unmarshal([]byte(data), &baiduResponse)
			if err != nil {
				// Malformed chunk: log and keep streaming rather than abort.
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			// NOTE(review): usage is summed across chunks; this assumes each
			// chunk reports incremental token counts — confirm against Baidu's API.
			usage.PromptTokens += baiduResponse.Usage.PromptTokens
			usage.CompletionTokens += baiduResponse.Usage.CompletionTokens
			usage.TotalTokens += baiduResponse.Usage.TotalTokens
			response := streamResponseBaidu2OpenAI(&baiduResponse)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			// Upstream finished: emit the OpenAI-style terminator and stop.
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, &usage
}
|  |  | ||||||
|  | func baiduHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) { | ||||||
|  | 	var baiduResponse BaiduChatResponse | ||||||
|  | 	responseBody, err := io.ReadAll(resp.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	err = resp.Body.Close() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	err = json.Unmarshal(responseBody, &baiduResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	if baiduResponse.ErrorMsg != "" { | ||||||
|  | 		return &OpenAIErrorWithStatusCode{ | ||||||
|  | 			OpenAIError: OpenAIError{ | ||||||
|  | 				Message: baiduResponse.ErrorMsg, | ||||||
|  | 				Type:    "baidu_error", | ||||||
|  | 				Param:   "", | ||||||
|  | 				Code:    baiduResponse.ErrorCode, | ||||||
|  | 			}, | ||||||
|  | 			StatusCode: resp.StatusCode, | ||||||
|  | 		}, nil | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse := responseBaidu2OpenAI(&baiduResponse) | ||||||
|  | 	jsonResponse, err := json.Marshal(fullTextResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	c.Writer.Header().Set("Content-Type", "application/json") | ||||||
|  | 	c.Writer.WriteHeader(resp.StatusCode) | ||||||
|  | 	_, err = c.Writer.Write(jsonResponse) | ||||||
|  | 	return nil, &fullTextResponse.Usage | ||||||
|  | } | ||||||
							
								
								
									
										221
									
								
								controller/relay-claude.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										221
									
								
								controller/relay-claude.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,221 @@ | |||||||
|  | package controller | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bufio" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"fmt" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"one-api/common" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
// ClaudeMetadata is the optional metadata object of Anthropic's API
// (currently unused; see the commented field in ClaudeRequest).
type ClaudeMetadata struct {
	UserId string `json:"user_id"`
}

// ClaudeRequest is the payload for Anthropic's text-completion endpoint.
type ClaudeRequest struct {
	Model             string   `json:"model"`
	Prompt            string   `json:"prompt"`
	MaxTokensToSample int      `json:"max_tokens_to_sample"`
	StopSequences     []string `json:"stop_sequences,omitempty"`
	Temperature       float64  `json:"temperature,omitempty"`
	TopP              float64  `json:"top_p,omitempty"`
	TopK              int      `json:"top_k,omitempty"`
	//ClaudeMetadata    `json:"metadata,omitempty"`
	Stream bool `json:"stream,omitempty"`
}

// ClaudeError is Anthropic's error object.
type ClaudeError struct {
	Type    string `json:"type"`
	Message string `json:"message"`
}

// ClaudeResponse is a (possibly partial, when streaming) completion response;
// Error is populated on failure.
type ClaudeResponse struct {
	Completion string      `json:"completion"`
	StopReason string      `json:"stop_reason"`
	Model      string      `json:"model"`
	Error      ClaudeError `json:"error"`
}
|  |  | ||||||
// stopReasonClaude2OpenAI maps Anthropic stop reasons onto OpenAI finish
// reasons; any unrecognized value is passed through unchanged.
func stopReasonClaude2OpenAI(reason string) string {
	mapping := map[string]string{
		"stop_sequence": "stop",
		"max_tokens":    "length",
	}
	if converted, ok := mapping[reason]; ok {
		return converted
	}
	return reason
}
|  |  | ||||||
|  | func requestOpenAI2Claude(textRequest GeneralOpenAIRequest) *ClaudeRequest { | ||||||
|  | 	claudeRequest := ClaudeRequest{ | ||||||
|  | 		Model:             textRequest.Model, | ||||||
|  | 		Prompt:            "", | ||||||
|  | 		MaxTokensToSample: textRequest.MaxTokens, | ||||||
|  | 		StopSequences:     nil, | ||||||
|  | 		Temperature:       textRequest.Temperature, | ||||||
|  | 		TopP:              textRequest.TopP, | ||||||
|  | 		Stream:            textRequest.Stream, | ||||||
|  | 	} | ||||||
|  | 	if claudeRequest.MaxTokensToSample == 0 { | ||||||
|  | 		claudeRequest.MaxTokensToSample = 1000000 | ||||||
|  | 	} | ||||||
|  | 	prompt := "" | ||||||
|  | 	for _, message := range textRequest.Messages { | ||||||
|  | 		if message.Role == "user" { | ||||||
|  | 			prompt += fmt.Sprintf("\n\nHuman: %s", message.Content) | ||||||
|  | 		} else if message.Role == "assistant" { | ||||||
|  | 			prompt += fmt.Sprintf("\n\nAssistant: %s", message.Content) | ||||||
|  | 		} else if message.Role == "system" { | ||||||
|  | 			prompt += fmt.Sprintf("\n\nSystem: %s", message.Content) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	prompt += "\n\nAssistant:" | ||||||
|  | 	claudeRequest.Prompt = prompt | ||||||
|  | 	return &claudeRequest | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func streamResponseClaude2OpenAI(claudeResponse *ClaudeResponse) *ChatCompletionsStreamResponse { | ||||||
|  | 	var choice ChatCompletionsStreamResponseChoice | ||||||
|  | 	choice.Delta.Content = claudeResponse.Completion | ||||||
|  | 	choice.FinishReason = stopReasonClaude2OpenAI(claudeResponse.StopReason) | ||||||
|  | 	var response ChatCompletionsStreamResponse | ||||||
|  | 	response.Object = "chat.completion.chunk" | ||||||
|  | 	response.Model = claudeResponse.Model | ||||||
|  | 	response.Choices = []ChatCompletionsStreamResponseChoice{choice} | ||||||
|  | 	return &response | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func responseClaude2OpenAI(claudeResponse *ClaudeResponse) *OpenAITextResponse { | ||||||
|  | 	choice := OpenAITextResponseChoice{ | ||||||
|  | 		Index: 0, | ||||||
|  | 		Message: Message{ | ||||||
|  | 			Role:    "assistant", | ||||||
|  | 			Content: strings.TrimPrefix(claudeResponse.Completion, " "), | ||||||
|  | 			Name:    nil, | ||||||
|  | 		}, | ||||||
|  | 		FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason), | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse := OpenAITextResponse{ | ||||||
|  | 		Id:      fmt.Sprintf("chatcmpl-%s", common.GetUUID()), | ||||||
|  | 		Object:  "chat.completion", | ||||||
|  | 		Created: common.GetTimestamp(), | ||||||
|  | 		Choices: []OpenAITextResponseChoice{choice}, | ||||||
|  | 	} | ||||||
|  | 	return &fullTextResponse | ||||||
|  | } | ||||||
|  |  | ||||||
// claudeStreamHandler relays a streaming Claude completion to the client as
// OpenAI-style SSE chunks. It returns any wrapper error plus the concatenated
// completion text (used by the caller for token accounting).
func claudeStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, string) {
	responseText := ""
	responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
	createdTime := common.GetTimestamp()
	scanner := bufio.NewScanner(resp.Body)
	// Split the upstream SSE stream on blank lines ("\r\n\r\n"): one token
	// per complete event rather than per line.
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\r\n\r\n"); i >= 0 {
			return i + 4, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	// Producer goroutine: forward only "completion" events, stripped down to
	// their JSON payload.
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if !strings.HasPrefix(data, "event: completion") {
				continue
			}
			// NOTE(review): assumes the exact framing
			// "event: completion\r\ndata: "; with other line endings the
			// prefix would survive and JSON parsing below would fail —
			// confirm against the upstream wire format.
			data = strings.TrimPrefix(data, "event: completion\r\ndata: ")
			dataChan <- data
		}
		stopChan <- true
	}()
	// Standard SSE response headers.
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	// Consumer: convert each Claude chunk into an OpenAI chunk and emit it;
	// a malformed chunk is logged and skipped, not fatal.
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			var claudeResponse ClaudeResponse
			err := json.Unmarshal([]byte(data), &claudeResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			responseText += claudeResponse.Completion
			response := streamResponseClaude2OpenAI(&claudeResponse)
			// All chunks share one id/created so they form a single stream.
			response.Id = responseId
			response.Created = createdTime
			jsonStr, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}
|  |  | ||||||
|  | func claudeHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*OpenAIErrorWithStatusCode, *Usage) { | ||||||
|  | 	responseBody, err := io.ReadAll(resp.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	err = resp.Body.Close() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	var claudeResponse ClaudeResponse | ||||||
|  | 	err = json.Unmarshal(responseBody, &claudeResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	if claudeResponse.Error.Type != "" { | ||||||
|  | 		return &OpenAIErrorWithStatusCode{ | ||||||
|  | 			OpenAIError: OpenAIError{ | ||||||
|  | 				Message: claudeResponse.Error.Message, | ||||||
|  | 				Type:    claudeResponse.Error.Type, | ||||||
|  | 				Param:   "", | ||||||
|  | 				Code:    claudeResponse.Error.Type, | ||||||
|  | 			}, | ||||||
|  | 			StatusCode: resp.StatusCode, | ||||||
|  | 		}, nil | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse := responseClaude2OpenAI(&claudeResponse) | ||||||
|  | 	completionTokens := countTokenText(claudeResponse.Completion, model) | ||||||
|  | 	usage := Usage{ | ||||||
|  | 		PromptTokens:     promptTokens, | ||||||
|  | 		CompletionTokens: completionTokens, | ||||||
|  | 		TotalTokens:      promptTokens + completionTokens, | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse.Usage = usage | ||||||
|  | 	jsonResponse, err := json.Marshal(fullTextResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	c.Writer.Header().Set("Content-Type", "application/json") | ||||||
|  | 	c.Writer.WriteHeader(resp.StatusCode) | ||||||
|  | 	_, err = c.Writer.Write(jsonResponse) | ||||||
|  | 	return nil, &usage | ||||||
|  | } | ||||||
| @@ -1,34 +1,180 @@ | |||||||
| package controller | package controller | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"github.com/gin-gonic/gin" | 	"bytes" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"errors" | ||||||
|  | 	"fmt" | ||||||
| 	"io" | 	"io" | ||||||
| 	"net/http" | 	"net/http" | ||||||
|  | 	"one-api/common" | ||||||
|  | 	"one-api/model" | ||||||
|  |  | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func relayImageHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | func relayImageHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | ||||||
| 	// TODO: this part is not finished | 	imageModel := "dall-e" | ||||||
| 	req, err := http.NewRequest(c.Request.Method, c.Request.RequestURI, c.Request.Body) |  | ||||||
| 	client := &http.Client{} | 	tokenId := c.GetInt("token_id") | ||||||
| 	resp, err := client.Do(req) | 	channelType := c.GetInt("channel") | ||||||
|  | 	userId := c.GetInt("id") | ||||||
|  | 	consumeQuota := c.GetBool("consume_quota") | ||||||
|  | 	group := c.GetString("group") | ||||||
|  |  | ||||||
|  | 	var imageRequest ImageRequest | ||||||
|  | 	if consumeQuota { | ||||||
|  | 		err := common.UnmarshalBodyReusable(c, &imageRequest) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 		return errorWrapper(err, "do_request_failed", http.StatusOK) | 			return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest) | ||||||
| 		} | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// Prompt validation | ||||||
|  | 	if imageRequest.Prompt == "" { | ||||||
|  | 		return errorWrapper(errors.New("prompt is required"), "required_field_missing", http.StatusBadRequest) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// Not "256x256", "512x512", or "1024x1024" | ||||||
|  | 	if imageRequest.Size != "" && imageRequest.Size != "256x256" && imageRequest.Size != "512x512" && imageRequest.Size != "1024x1024" { | ||||||
|  | 		return errorWrapper(errors.New("size must be one of 256x256, 512x512, or 1024x1024"), "invalid_field_value", http.StatusBadRequest) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// N should between 1 and 10 | ||||||
|  | 	if imageRequest.N != 0 && (imageRequest.N < 1 || imageRequest.N > 10) { | ||||||
|  | 		return errorWrapper(errors.New("n must be between 1 and 10"), "invalid_field_value", http.StatusBadRequest) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	// map model name | ||||||
|  | 	modelMapping := c.GetString("model_mapping") | ||||||
|  | 	isModelMapped := false | ||||||
|  | 	if modelMapping != "" { | ||||||
|  | 		modelMap := make(map[string]string) | ||||||
|  | 		err := json.Unmarshal([]byte(modelMapping), &modelMap) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "unmarshal_model_mapping_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		if modelMap[imageModel] != "" { | ||||||
|  | 			imageModel = modelMap[imageModel] | ||||||
|  | 			isModelMapped = true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	baseURL := common.ChannelBaseURLs[channelType] | ||||||
|  | 	requestURL := c.Request.URL.String() | ||||||
|  |  | ||||||
|  | 	if c.GetString("base_url") != "" { | ||||||
|  | 		baseURL = c.GetString("base_url") | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL) | ||||||
|  |  | ||||||
|  | 	var requestBody io.Reader | ||||||
|  | 	if isModelMapped { | ||||||
|  | 		jsonStr, err := json.Marshal(imageRequest) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		requestBody = bytes.NewBuffer(jsonStr) | ||||||
|  | 	} else { | ||||||
|  | 		requestBody = c.Request.Body | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	modelRatio := common.GetModelRatio(imageModel) | ||||||
|  | 	groupRatio := common.GetGroupRatio(group) | ||||||
|  | 	ratio := modelRatio * groupRatio | ||||||
|  | 	userQuota, err := model.CacheGetUserQuota(userId) | ||||||
|  |  | ||||||
|  | 	sizeRatio := 1.0 | ||||||
|  | 	// Size | ||||||
|  | 	if imageRequest.Size == "256x256" { | ||||||
|  | 		sizeRatio = 1 | ||||||
|  | 	} else if imageRequest.Size == "512x512" { | ||||||
|  | 		sizeRatio = 1.125 | ||||||
|  | 	} else if imageRequest.Size == "1024x1024" { | ||||||
|  | 		sizeRatio = 1.25 | ||||||
|  | 	} | ||||||
|  | 	quota := int(ratio*sizeRatio*1000) * imageRequest.N | ||||||
|  |  | ||||||
|  | 	if consumeQuota && userQuota-quota < 0 { | ||||||
|  | 		return errorWrapper(err, "insufficient_user_quota", http.StatusForbidden) | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "new_request_failed", http.StatusInternalServerError) | ||||||
|  | 	} | ||||||
|  | 	req.Header.Set("Authorization", c.Request.Header.Get("Authorization")) | ||||||
|  |  | ||||||
|  | 	req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type")) | ||||||
|  | 	req.Header.Set("Accept", c.Request.Header.Get("Accept")) | ||||||
|  |  | ||||||
|  | 	resp, err := httpClient.Do(req) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "do_request_failed", http.StatusInternalServerError) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	err = req.Body.Close() | 	err = req.Body.Close() | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return errorWrapper(err, "close_request_body_failed", http.StatusOK) | 		return errorWrapper(err, "close_request_body_failed", http.StatusInternalServerError) | ||||||
| 	} | 	} | ||||||
|  | 	err = c.Request.Body.Close() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "close_request_body_failed", http.StatusInternalServerError) | ||||||
|  | 	} | ||||||
|  | 	var textResponse ImageResponse | ||||||
|  |  | ||||||
|  | 	defer func() { | ||||||
|  | 		if consumeQuota { | ||||||
|  | 			err := model.PostConsumeTokenQuota(tokenId, quota) | ||||||
|  | 			if err != nil { | ||||||
|  | 				common.SysError("error consuming token remain quota: " + err.Error()) | ||||||
|  | 			} | ||||||
|  | 			err = model.CacheUpdateUserQuota(userId) | ||||||
|  | 			if err != nil { | ||||||
|  | 				common.SysError("error update user quota cache: " + err.Error()) | ||||||
|  | 			} | ||||||
|  | 			if quota != 0 { | ||||||
|  | 				tokenName := c.GetString("token_name") | ||||||
|  | 				logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio) | ||||||
|  | 				model.RecordConsumeLog(userId, 0, 0, imageModel, tokenName, quota, logContent) | ||||||
|  | 				model.UpdateUserUsedQuotaAndRequestCount(userId, quota) | ||||||
|  | 				channelId := c.GetInt("channel_id") | ||||||
|  | 				model.UpdateChannelUsedQuota(channelId, quota) | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 	}() | ||||||
|  |  | ||||||
|  | 	if consumeQuota { | ||||||
|  | 		responseBody, err := io.ReadAll(resp.Body) | ||||||
|  |  | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		err = resp.Body.Close() | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		err = json.Unmarshal(responseBody, &textResponse) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  |  | ||||||
|  | 		resp.Body = io.NopCloser(bytes.NewBuffer(responseBody)) | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	for k, v := range resp.Header { | 	for k, v := range resp.Header { | ||||||
| 		c.Writer.Header().Set(k, v[0]) | 		c.Writer.Header().Set(k, v[0]) | ||||||
| 	} | 	} | ||||||
| 	c.Writer.WriteHeader(resp.StatusCode) | 	c.Writer.WriteHeader(resp.StatusCode) | ||||||
|  |  | ||||||
| 	_, err = io.Copy(c.Writer, resp.Body) | 	_, err = io.Copy(c.Writer, resp.Body) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return errorWrapper(err, "copy_response_body_failed", http.StatusOK) | 		return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError) | ||||||
| 	} | 	} | ||||||
| 	err = resp.Body.Close() | 	err = resp.Body.Close() | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return errorWrapper(err, "close_response_body_failed", http.StatusOK) | 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) | ||||||
| 	} | 	} | ||||||
| 	return nil | 	return nil | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										133
									
								
								controller/relay-openai.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										133
									
								
								controller/relay-openai.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,133 @@ | |||||||
|  | package controller | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bufio" | ||||||
|  | 	"bytes" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"one-api/common" | ||||||
|  | 	"strings" | ||||||
|  | ) | ||||||
|  |  | ||||||
// openaiStreamHandler relays an OpenAI-compatible SSE stream to the client
// while accumulating the streamed text so the caller can account completion
// tokens. relayMode selects which stream-chunk schema to parse.
func openaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*OpenAIErrorWithStatusCode, string) {
	responseText := ""
	scanner := bufio.NewScanner(resp.Body)
	// Split on single newlines: one SSE line per token.
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	// Producer goroutine: forward each raw line to the client and, in
	// parallel, parse its payload to accumulate the generated text.
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 6 { // ignore blank line or wrong format
				continue
			}
			dataChan <- data
			// NOTE(review): assumes every forwarded line starts with the
			// 6-byte "data: " prefix — confirm other SSE field names
			// (e.g. "event:") cannot occur here.
			data = data[6:]
			if !strings.HasPrefix(data, "[DONE]") {
				switch relayMode {
				case RelayModeChatCompletions:
					var streamResponse ChatCompletionsStreamResponse
					err := json.Unmarshal([]byte(data), &streamResponse)
					if err != nil {
						common.SysError("error unmarshalling stream response: " + err.Error())
						// NOTE(review): returning here skips the stopChan
						// send below, which would leave the c.Stream select
						// blocked — confirm intended.
						return
					}
					for _, choice := range streamResponse.Choices {
						responseText += choice.Delta.Content
					}
				case RelayModeCompletions:
					var streamResponse CompletionsStreamResponse
					err := json.Unmarshal([]byte(data), &streamResponse)
					if err != nil {
						common.SysError("error unmarshalling stream response: " + err.Error())
						return
					}
					for _, choice := range streamResponse.Choices {
						responseText += choice.Text
					}
				}
			}
		}
		stopChan <- true
	}()
	// Standard SSE response headers.
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			// Truncate anything trailing the terminator line.
			if strings.HasPrefix(data, "data: [DONE]") {
				data = data[:12]
			}
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")
			c.Render(-1, common.CustomEvent{Data: data})
			return true
		case <-stopChan:
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}
|  |  | ||||||
|  | func openaiHandler(c *gin.Context, resp *http.Response, consumeQuota bool) (*OpenAIErrorWithStatusCode, *Usage) { | ||||||
|  | 	var textResponse TextResponse | ||||||
|  | 	if consumeQuota { | ||||||
|  | 		responseBody, err := io.ReadAll(resp.Body) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 		} | ||||||
|  | 		err = resp.Body.Close() | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 		} | ||||||
|  | 		err = json.Unmarshal(responseBody, &textResponse) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 		} | ||||||
|  | 		if textResponse.Error.Type != "" { | ||||||
|  | 			return &OpenAIErrorWithStatusCode{ | ||||||
|  | 				OpenAIError: textResponse.Error, | ||||||
|  | 				StatusCode:  resp.StatusCode, | ||||||
|  | 			}, nil | ||||||
|  | 		} | ||||||
|  | 		// Reset response body | ||||||
|  | 		resp.Body = io.NopCloser(bytes.NewBuffer(responseBody)) | ||||||
|  | 	} | ||||||
|  | 	// We shouldn't set the header before we parse the response body, because the parse part may fail. | ||||||
|  | 	// And then we will have to send an error response, but in this case, the header has already been set. | ||||||
|  | 	// So the httpClient will be confused by the response. | ||||||
|  | 	// For example, Postman will report error, and we cannot check the response at all. | ||||||
|  | 	for k, v := range resp.Header { | ||||||
|  | 		c.Writer.Header().Set(k, v[0]) | ||||||
|  | 	} | ||||||
|  | 	c.Writer.WriteHeader(resp.StatusCode) | ||||||
|  | 	_, err := io.Copy(c.Writer, resp.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	err = resp.Body.Close() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	return nil, &textResponse.Usage | ||||||
|  | } | ||||||
| @@ -1,10 +1,17 @@ | |||||||
| package controller | package controller | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
|  | 	"encoding/json" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"one-api/common" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | // https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#request-body | ||||||
|  | // https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#response-body | ||||||
|  |  | ||||||
| type PaLMChatMessage struct { | type PaLMChatMessage struct { | ||||||
| 	Author  string `json:"author"` | 	Author  string `json:"author"` | ||||||
| 	Content string `json:"content"` | 	Content string `json:"content"` | ||||||
| @@ -15,45 +22,188 @@ type PaLMFilter struct { | |||||||
| 	Message string `json:"message"` | 	Message string `json:"message"` | ||||||
| } | } | ||||||
|  |  | ||||||
// PaLMPrompt wraps the message list of a PaLM generateMessage request.
type PaLMPrompt struct {
	Messages []PaLMChatMessage `json:"messages"`
}

// PaLMChatRequest is the request body for PaLM generateMessage.
type PaLMChatRequest struct {
	Prompt         PaLMPrompt `json:"prompt"`
	Temperature    float64    `json:"temperature,omitempty"`
	CandidateCount int        `json:"candidateCount,omitempty"`
	TopP           float64    `json:"topP,omitempty"`
	TopK           int        `json:"topK,omitempty"`
}

// PaLMError is the error object embedded in a PaLM response.
type PaLMError struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
	Status  string `json:"status"`
}

// PaLMChatResponse is the response body of PaLM generateMessage.
// A non-zero Error.Code (or empty Candidates) marks a failed call.
type PaLMChatResponse struct {
	Candidates []PaLMChatMessage `json:"candidates"`
	Messages   []Message         `json:"messages"`
	Filters    []PaLMFilter      `json:"filters"`
	Error      PaLMError         `json:"error"`
}
|  |  | ||||||
// requestOpenAI2PaLM maps an OpenAI-style chat request onto the PaLM
// generateMessage request shape.
func requestOpenAI2PaLM(textRequest GeneralOpenAIRequest) *PaLMChatRequest {
	palmRequest := PaLMChatRequest{
		Prompt: PaLMPrompt{
			Messages: make([]PaLMChatMessage, 0, len(textRequest.Messages)),
		},
		Temperature:    textRequest.Temperature,
		CandidateCount: textRequest.N,
		TopP:           textRequest.TopP,
		// NOTE(review): TopK is filled from MaxTokens, but the two parameters
		// have different semantics — confirm this mapping is intentional.
		TopK: textRequest.MaxTokens,
	}
	// Chat roles are mapped to numeric author ids: "user" -> "0",
	// everything else (assistant/system) -> "1".
	for _, message := range textRequest.Messages {
		palmMessage := PaLMChatMessage{
			Content: message.Content,
		}
		if message.Role == "user" {
			palmMessage.Author = "0"
		} else {
			palmMessage.Author = "1"
		}
		palmRequest.Prompt.Messages = append(palmRequest.Prompt.Messages, palmMessage)
	}
	return &palmRequest
}
|  |  | ||||||
|  | func responsePaLM2OpenAI(response *PaLMChatResponse) *OpenAITextResponse { | ||||||
|  | 	fullTextResponse := OpenAITextResponse{ | ||||||
|  | 		Choices: make([]OpenAITextResponseChoice, 0, len(response.Candidates)), | ||||||
|  | 	} | ||||||
|  | 	for i, candidate := range response.Candidates { | ||||||
|  | 		choice := OpenAITextResponseChoice{ | ||||||
|  | 			Index: i, | ||||||
|  | 			Message: Message{ | ||||||
|  | 				Role:    "assistant", | ||||||
|  | 				Content: candidate.Content, | ||||||
|  | 			}, | ||||||
|  | 			FinishReason: "stop", | ||||||
|  | 		} | ||||||
|  | 		fullTextResponse.Choices = append(fullTextResponse.Choices, choice) | ||||||
|  | 	} | ||||||
|  | 	return &fullTextResponse | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func streamResponsePaLM2OpenAI(palmResponse *PaLMChatResponse) *ChatCompletionsStreamResponse { | ||||||
|  | 	var choice ChatCompletionsStreamResponseChoice | ||||||
|  | 	if len(palmResponse.Candidates) > 0 { | ||||||
|  | 		choice.Delta.Content = palmResponse.Candidates[0].Content | ||||||
|  | 	} | ||||||
|  | 	choice.FinishReason = "stop" | ||||||
|  | 	var response ChatCompletionsStreamResponse | ||||||
|  | 	response.Object = "chat.completion.chunk" | ||||||
|  | 	response.Model = "palm2" | ||||||
|  | 	response.Choices = []ChatCompletionsStreamResponseChoice{choice} | ||||||
|  | 	return &response | ||||||
|  | } | ||||||
|  |  | ||||||
// palmStreamHandler emulates streaming for PaLM: the whole upstream response
// is read at once, converted into a single OpenAI-style chunk, and emitted to
// the client followed by a [DONE] event. Returns any wrapper error plus the
// generated text for token accounting.
func palmStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, string) {
	responseText := ""
	responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
	createdTime := common.GetTimestamp()
	dataChan := make(chan string)
	stopChan := make(chan bool)
	// Producer goroutine: read, parse, convert, then hand the single payload
	// to the consumer. Failures are logged and end the stream via stopChan.
	go func() {
		responseBody, err := io.ReadAll(resp.Body)
		if err != nil {
			common.SysError("error reading stream response: " + err.Error())
			stopChan <- true
			return
		}
		err = resp.Body.Close()
		if err != nil {
			common.SysError("error closing stream response: " + err.Error())
			stopChan <- true
			return
		}
		var palmResponse PaLMChatResponse
		err = json.Unmarshal(responseBody, &palmResponse)
		if err != nil {
			common.SysError("error unmarshalling stream response: " + err.Error())
			stopChan <- true
			return
		}
		fullTextResponse := streamResponsePaLM2OpenAI(&palmResponse)
		fullTextResponse.Id = responseId
		fullTextResponse.Created = createdTime
		if len(palmResponse.Candidates) > 0 {
			responseText = palmResponse.Candidates[0].Content
		}
		jsonResponse, err := json.Marshal(fullTextResponse)
		if err != nil {
			common.SysError("error marshalling stream response: " + err.Error())
			stopChan <- true
			return
		}
		dataChan <- string(jsonResponse)
		stopChan <- true
	}()
	// Standard SSE response headers.
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			c.Render(-1, common.CustomEvent{Data: "data: " + data})
			return true
		case <-stopChan:
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	// NOTE(review): the goroutine already closed resp.Body on the success
	// path, so this is usually a second Close — typically harmless for
	// net/http bodies, but confirm.
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
	}
	return nil, responseText
}
| 	// TODO: forward request to PaLM & convert response |  | ||||||
| 	fmt.Print(request) | func palmHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*OpenAIErrorWithStatusCode, *Usage) { | ||||||
| 	return nil | 	responseBody, err := io.ReadAll(resp.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	err = resp.Body.Close() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	var palmResponse PaLMChatResponse | ||||||
|  | 	err = json.Unmarshal(responseBody, &palmResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	if palmResponse.Error.Code != 0 || len(palmResponse.Candidates) == 0 { | ||||||
|  | 		return &OpenAIErrorWithStatusCode{ | ||||||
|  | 			OpenAIError: OpenAIError{ | ||||||
|  | 				Message: palmResponse.Error.Message, | ||||||
|  | 				Type:    palmResponse.Error.Status, | ||||||
|  | 				Param:   "", | ||||||
|  | 				Code:    palmResponse.Error.Code, | ||||||
|  | 			}, | ||||||
|  | 			StatusCode: resp.StatusCode, | ||||||
|  | 		}, nil | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse := responsePaLM2OpenAI(&palmResponse) | ||||||
|  | 	completionTokens := countTokenText(palmResponse.Candidates[0].Content, model) | ||||||
|  | 	usage := Usage{ | ||||||
|  | 		PromptTokens:     promptTokens, | ||||||
|  | 		CompletionTokens: completionTokens, | ||||||
|  | 		TotalTokens:      promptTokens + completionTokens, | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse.Usage = usage | ||||||
|  | 	jsonResponse, err := json.Marshal(fullTextResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	c.Writer.Header().Set("Content-Type", "application/json") | ||||||
|  | 	c.Writer.WriteHeader(resp.StatusCode) | ||||||
|  | 	_, err = c.Writer.Write(jsonResponse) | ||||||
|  | 	return nil, &usage | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,19 +1,33 @@ | |||||||
| package controller | package controller | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"bufio" |  | ||||||
| 	"bytes" | 	"bytes" | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" |  | ||||||
| 	"io" | 	"io" | ||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"one-api/common" | 	"one-api/common" | ||||||
| 	"one-api/model" | 	"one-api/model" | ||||||
| 	"strings" | 	"strings" | ||||||
|  |  | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
|  | const ( | ||||||
|  | 	APITypeOpenAI = iota | ||||||
|  | 	APITypeClaude | ||||||
|  | 	APITypePaLM | ||||||
|  | 	APITypeBaidu | ||||||
|  | 	APITypeZhipu | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | var httpClient *http.Client | ||||||
|  |  | ||||||
|  | func init() { | ||||||
|  | 	httpClient = &http.Client{} | ||||||
|  | } | ||||||
|  |  | ||||||
| func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | ||||||
| 	channelType := c.GetInt("channel") | 	channelType := c.GetInt("channel") | ||||||
| 	tokenId := c.GetInt("token_id") | 	tokenId := c.GetInt("token_id") | ||||||
| @@ -27,9 +41,12 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 			return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest) | 			return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	if relayMode == RelayModeModeration && textRequest.Model == "" { | 	if relayMode == RelayModeModerations && textRequest.Model == "" { | ||||||
| 		textRequest.Model = "text-moderation-latest" | 		textRequest.Model = "text-moderation-latest" | ||||||
| 	} | 	} | ||||||
|  | 	if relayMode == RelayModeEmbeddings && textRequest.Model == "" { | ||||||
|  | 		textRequest.Model = c.Param("model") | ||||||
|  | 	} | ||||||
| 	// request validation | 	// request validation | ||||||
| 	if textRequest.Model == "" { | 	if textRequest.Model == "" { | ||||||
| 		return errorWrapper(errors.New("model is required"), "required_field_missing", http.StatusBadRequest) | 		return errorWrapper(errors.New("model is required"), "required_field_missing", http.StatusBadRequest) | ||||||
| @@ -37,17 +54,46 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 	switch relayMode { | 	switch relayMode { | ||||||
| 	case RelayModeCompletions: | 	case RelayModeCompletions: | ||||||
| 		if textRequest.Prompt == "" { | 		if textRequest.Prompt == "" { | ||||||
| 			return errorWrapper(errors.New("prompt is required"), "required_field_missing", http.StatusBadRequest) | 			return errorWrapper(errors.New("field prompt is required"), "required_field_missing", http.StatusBadRequest) | ||||||
| 		} | 		} | ||||||
| 	case RelayModeChatCompletions: | 	case RelayModeChatCompletions: | ||||||
| 		if len(textRequest.Messages) == 0 { | 		if textRequest.Messages == nil || len(textRequest.Messages) == 0 { | ||||||
| 			return errorWrapper(errors.New("messages is required"), "required_field_missing", http.StatusBadRequest) | 			return errorWrapper(errors.New("field messages is required"), "required_field_missing", http.StatusBadRequest) | ||||||
| 		} | 		} | ||||||
| 	case RelayModeEmbeddings: | 	case RelayModeEmbeddings: | ||||||
| 	case RelayModeModeration: | 	case RelayModeModerations: | ||||||
| 		if textRequest.Input == "" { | 		if textRequest.Input == "" { | ||||||
| 			return errorWrapper(errors.New("input is required"), "required_field_missing", http.StatusBadRequest) | 			return errorWrapper(errors.New("field input is required"), "required_field_missing", http.StatusBadRequest) | ||||||
| 		} | 		} | ||||||
|  | 	case RelayModeEdits: | ||||||
|  | 		if textRequest.Instruction == "" { | ||||||
|  | 			return errorWrapper(errors.New("field instruction is required"), "required_field_missing", http.StatusBadRequest) | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	// map model name | ||||||
|  | 	modelMapping := c.GetString("model_mapping") | ||||||
|  | 	isModelMapped := false | ||||||
|  | 	if modelMapping != "" { | ||||||
|  | 		modelMap := make(map[string]string) | ||||||
|  | 		err := json.Unmarshal([]byte(modelMapping), &modelMap) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "unmarshal_model_mapping_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		if modelMap[textRequest.Model] != "" { | ||||||
|  | 			textRequest.Model = modelMap[textRequest.Model] | ||||||
|  | 			isModelMapped = true | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  | 	apiType := APITypeOpenAI | ||||||
|  | 	switch channelType { | ||||||
|  | 	case common.ChannelTypeAnthropic: | ||||||
|  | 		apiType = APITypeClaude | ||||||
|  | 	case common.ChannelTypeBaidu: | ||||||
|  | 		apiType = APITypeBaidu | ||||||
|  | 	case common.ChannelTypePaLM: | ||||||
|  | 		apiType = APITypePaLM | ||||||
|  | 	case common.ChannelTypeZhipu: | ||||||
|  | 		apiType = APITypeZhipu | ||||||
| 	} | 	} | ||||||
| 	baseURL := common.ChannelBaseURLs[channelType] | 	baseURL := common.ChannelBaseURLs[channelType] | ||||||
| 	requestURL := c.Request.URL.String() | 	requestURL := c.Request.URL.String() | ||||||
| @@ -55,6 +101,8 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 		baseURL = c.GetString("base_url") | 		baseURL = c.GetString("base_url") | ||||||
| 	} | 	} | ||||||
| 	fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL) | 	fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL) | ||||||
|  | 	switch apiType { | ||||||
|  | 	case APITypeOpenAI: | ||||||
| 		if channelType == common.ChannelTypeAzure { | 		if channelType == common.ChannelTypeAzure { | ||||||
| 			// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api | 			// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api | ||||||
| 			query := c.Request.URL.Query() | 			query := c.Request.URL.Query() | ||||||
| @@ -73,9 +121,38 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 			model_ = strings.TrimSuffix(model_, "-0314") | 			model_ = strings.TrimSuffix(model_, "-0314") | ||||||
| 			model_ = strings.TrimSuffix(model_, "-0613") | 			model_ = strings.TrimSuffix(model_, "-0613") | ||||||
| 			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/%s", baseURL, model_, task) | 			fullRequestURL = fmt.Sprintf("%s/openai/deployments/%s/%s", baseURL, model_, task) | ||||||
| 	} else if channelType == common.ChannelTypePaLM { | 		} | ||||||
| 		err := relayPaLM(textRequest, c) | 	case APITypeClaude: | ||||||
| 		return err | 		fullRequestURL = "https://api.anthropic.com/v1/complete" | ||||||
|  | 		if baseURL != "" { | ||||||
|  | 			fullRequestURL = fmt.Sprintf("%s/v1/complete", baseURL) | ||||||
|  | 		} | ||||||
|  | 	case APITypeBaidu: | ||||||
|  | 		switch textRequest.Model { | ||||||
|  | 		case "ERNIE-Bot": | ||||||
|  | 			fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions" | ||||||
|  | 		case "ERNIE-Bot-turbo": | ||||||
|  | 			fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant" | ||||||
|  | 		case "BLOOMZ-7B": | ||||||
|  | 			fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1" | ||||||
|  | 		} | ||||||
|  | 		apiKey := c.Request.Header.Get("Authorization") | ||||||
|  | 		apiKey = strings.TrimPrefix(apiKey, "Bearer ") | ||||||
|  | 		fullRequestURL += "?access_token=" + apiKey // TODO: access token expire in 30 days | ||||||
|  | 	case APITypePaLM: | ||||||
|  | 		fullRequestURL = "https://generativelanguage.googleapis.com/v1beta2/models/chat-bison-001:generateMessage" | ||||||
|  | 		if baseURL != "" { | ||||||
|  | 			fullRequestURL = fmt.Sprintf("%s/v1beta2/models/chat-bison-001:generateMessage", baseURL) | ||||||
|  | 		} | ||||||
|  | 		apiKey := c.Request.Header.Get("Authorization") | ||||||
|  | 		apiKey = strings.TrimPrefix(apiKey, "Bearer ") | ||||||
|  | 		fullRequestURL += "?key=" + apiKey | ||||||
|  | 	case APITypeZhipu: | ||||||
|  | 		method := "invoke" | ||||||
|  | 		if textRequest.Stream { | ||||||
|  | 			method = "sse-invoke" | ||||||
|  | 		} | ||||||
|  | 		fullRequestURL = fmt.Sprintf("https://open.bigmodel.cn/api/paas/v3/model-api/%s/%s", textRequest.Model, method) | ||||||
| 	} | 	} | ||||||
| 	var promptTokens int | 	var promptTokens int | ||||||
| 	var completionTokens int | 	var completionTokens int | ||||||
| @@ -84,7 +161,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 		promptTokens = countTokenMessages(textRequest.Messages, textRequest.Model) | 		promptTokens = countTokenMessages(textRequest.Messages, textRequest.Model) | ||||||
| 	case RelayModeCompletions: | 	case RelayModeCompletions: | ||||||
| 		promptTokens = countTokenInput(textRequest.Prompt, textRequest.Model) | 		promptTokens = countTokenInput(textRequest.Prompt, textRequest.Model) | ||||||
| 	case RelayModeModeration: | 	case RelayModeModerations: | ||||||
| 		promptTokens = countTokenInput(textRequest.Input, textRequest.Model) | 		promptTokens = countTokenInput(textRequest.Input, textRequest.Model) | ||||||
| 	} | 	} | ||||||
| 	preConsumedTokens := common.PreConsumedQuota | 	preConsumedTokens := common.PreConsumedQuota | ||||||
| @@ -110,22 +187,74 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 			return errorWrapper(err, "pre_consume_token_quota_failed", http.StatusForbidden) | 			return errorWrapper(err, "pre_consume_token_quota_failed", http.StatusForbidden) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	req, err := http.NewRequest(c.Request.Method, fullRequestURL, c.Request.Body) | 	var requestBody io.Reader | ||||||
|  | 	if isModelMapped { | ||||||
|  | 		jsonStr, err := json.Marshal(textRequest) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		requestBody = bytes.NewBuffer(jsonStr) | ||||||
|  | 	} else { | ||||||
|  | 		requestBody = c.Request.Body | ||||||
|  | 	} | ||||||
|  | 	switch apiType { | ||||||
|  | 	case APITypeClaude: | ||||||
|  | 		claudeRequest := requestOpenAI2Claude(textRequest) | ||||||
|  | 		jsonStr, err := json.Marshal(claudeRequest) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		requestBody = bytes.NewBuffer(jsonStr) | ||||||
|  | 	case APITypeBaidu: | ||||||
|  | 		baiduRequest := requestOpenAI2Baidu(textRequest) | ||||||
|  | 		jsonStr, err := json.Marshal(baiduRequest) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		requestBody = bytes.NewBuffer(jsonStr) | ||||||
|  | 	case APITypePaLM: | ||||||
|  | 		palmRequest := requestOpenAI2PaLM(textRequest) | ||||||
|  | 		jsonStr, err := json.Marshal(palmRequest) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		requestBody = bytes.NewBuffer(jsonStr) | ||||||
|  | 	case APITypeZhipu: | ||||||
|  | 		zhipuRequest := requestOpenAI2Zhipu(textRequest) | ||||||
|  | 		jsonStr, err := json.Marshal(zhipuRequest) | ||||||
|  | 		if err != nil { | ||||||
|  | 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError) | ||||||
|  | 		} | ||||||
|  | 		requestBody = bytes.NewBuffer(jsonStr) | ||||||
|  | 	} | ||||||
|  | 	req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return errorWrapper(err, "new_request_failed", http.StatusInternalServerError) | 		return errorWrapper(err, "new_request_failed", http.StatusInternalServerError) | ||||||
| 	} | 	} | ||||||
|  | 	apiKey := c.Request.Header.Get("Authorization") | ||||||
|  | 	apiKey = strings.TrimPrefix(apiKey, "Bearer ") | ||||||
|  | 	switch apiType { | ||||||
|  | 	case APITypeOpenAI: | ||||||
| 		if channelType == common.ChannelTypeAzure { | 		if channelType == common.ChannelTypeAzure { | ||||||
| 		key := c.Request.Header.Get("Authorization") | 			req.Header.Set("api-key", apiKey) | ||||||
| 		key = strings.TrimPrefix(key, "Bearer ") |  | ||||||
| 		req.Header.Set("api-key", key) |  | ||||||
| 		} else { | 		} else { | ||||||
| 			req.Header.Set("Authorization", c.Request.Header.Get("Authorization")) | 			req.Header.Set("Authorization", c.Request.Header.Get("Authorization")) | ||||||
| 		} | 		} | ||||||
|  | 	case APITypeClaude: | ||||||
|  | 		req.Header.Set("x-api-key", apiKey) | ||||||
|  | 		anthropicVersion := c.Request.Header.Get("anthropic-version") | ||||||
|  | 		if anthropicVersion == "" { | ||||||
|  | 			anthropicVersion = "2023-06-01" | ||||||
|  | 		} | ||||||
|  | 		req.Header.Set("anthropic-version", anthropicVersion) | ||||||
|  | 	case APITypeZhipu: | ||||||
|  | 		token := getZhipuToken(apiKey) | ||||||
|  | 		req.Header.Set("Authorization", token) | ||||||
|  | 	} | ||||||
| 	req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type")) | 	req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type")) | ||||||
| 	req.Header.Set("Accept", c.Request.Header.Get("Accept")) | 	req.Header.Set("Accept", c.Request.Header.Get("Accept")) | ||||||
| 	req.Header.Set("Connection", c.Request.Header.Get("Connection")) | 	//req.Header.Set("Connection", c.Request.Header.Get("Connection")) | ||||||
| 	client := &http.Client{} | 	resp, err := httpClient.Do(req) | ||||||
| 	resp, err := client.Do(req) |  | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		return errorWrapper(err, "do_request_failed", http.StatusInternalServerError) | 		return errorWrapper(err, "do_request_failed", http.StatusInternalServerError) | ||||||
| 	} | 	} | ||||||
| @@ -144,15 +273,22 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 	defer func() { | 	defer func() { | ||||||
| 		if consumeQuota { | 		if consumeQuota { | ||||||
| 			quota := 0 | 			quota := 0 | ||||||
| 			completionRatio := 1.333333 // default for gpt-3 | 			completionRatio := 1.0 | ||||||
|  | 			if strings.HasPrefix(textRequest.Model, "gpt-3.5") { | ||||||
|  | 				completionRatio = 1.333333 | ||||||
|  | 			} | ||||||
| 			if strings.HasPrefix(textRequest.Model, "gpt-4") { | 			if strings.HasPrefix(textRequest.Model, "gpt-4") { | ||||||
| 				completionRatio = 2 | 				completionRatio = 2 | ||||||
| 			} | 			} | ||||||
| 			if isStream { | 			if isStream && apiType != APITypeBaidu && apiType != APITypeZhipu { | ||||||
| 				completionTokens = countTokenText(streamResponseText, textRequest.Model) | 				completionTokens = countTokenText(streamResponseText, textRequest.Model) | ||||||
| 			} else { | 			} else { | ||||||
| 				promptTokens = textResponse.Usage.PromptTokens | 				promptTokens = textResponse.Usage.PromptTokens | ||||||
| 				completionTokens = textResponse.Usage.CompletionTokens | 				completionTokens = textResponse.Usage.CompletionTokens | ||||||
|  | 				if apiType == APITypeZhipu { | ||||||
|  | 					// zhipu's API does not return prompt tokens & completion tokens | ||||||
|  | 					promptTokens = textResponse.Usage.TotalTokens | ||||||
|  | 				} | ||||||
| 			} | 			} | ||||||
| 			quota = promptTokens + int(float64(completionTokens)*completionRatio) | 			quota = promptTokens + int(float64(completionTokens)*completionRatio) | ||||||
| 			quota = int(float64(quota) * ratio) | 			quota = int(float64(quota) * ratio) | ||||||
| @@ -170,6 +306,10 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				common.SysError("error consuming token remain quota: " + err.Error()) | 				common.SysError("error consuming token remain quota: " + err.Error()) | ||||||
| 			} | 			} | ||||||
|  | 			err = model.CacheUpdateUserQuota(userId) | ||||||
|  | 			if err != nil { | ||||||
|  | 				common.SysError("error update user quota cache: " + err.Error()) | ||||||
|  | 			} | ||||||
| 			if quota != 0 { | 			if quota != 0 { | ||||||
| 				tokenName := c.GetString("token_name") | 				tokenName := c.GetString("token_name") | ||||||
| 				logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio) | 				logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio) | ||||||
| @@ -180,123 +320,102 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode { | |||||||
| 			} | 			} | ||||||
| 		} | 		} | ||||||
| 	}() | 	}() | ||||||
|  | 	switch apiType { | ||||||
|  | 	case APITypeOpenAI: | ||||||
| 		if isStream { | 		if isStream { | ||||||
| 		scanner := bufio.NewScanner(resp.Body) | 			err, responseText := openaiStreamHandler(c, resp, relayMode) | ||||||
| 		scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { |  | ||||||
| 			if atEOF && len(data) == 0 { |  | ||||||
| 				return 0, nil, nil |  | ||||||
| 			} |  | ||||||
|  |  | ||||||
| 			if i := strings.Index(string(data), "\n\n"); i >= 0 { |  | ||||||
| 				return i + 2, data[0:i], nil |  | ||||||
| 			} |  | ||||||
|  |  | ||||||
| 			if atEOF { |  | ||||||
| 				return len(data), data, nil |  | ||||||
| 			} |  | ||||||
|  |  | ||||||
| 			return 0, nil, nil |  | ||||||
| 		}) |  | ||||||
| 		dataChan := make(chan string) |  | ||||||
| 		stopChan := make(chan bool) |  | ||||||
| 		go func() { |  | ||||||
| 			for scanner.Scan() { |  | ||||||
| 				data := scanner.Text() |  | ||||||
| 				if len(data) < 6 { // must be something wrong! |  | ||||||
| 					common.SysError("invalid stream response: " + data) |  | ||||||
| 					continue |  | ||||||
| 				} |  | ||||||
| 				dataChan <- data |  | ||||||
| 				data = data[6:] |  | ||||||
| 				if !strings.HasPrefix(data, "[DONE]") { |  | ||||||
| 					switch relayMode { |  | ||||||
| 					case RelayModeChatCompletions: |  | ||||||
| 						var streamResponse ChatCompletionsStreamResponse |  | ||||||
| 						err = json.Unmarshal([]byte(data), &streamResponse) |  | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 							common.SysError("error unmarshalling stream response: " + err.Error()) | 				return err | ||||||
| 							return |  | ||||||
| 			} | 			} | ||||||
| 						for _, choice := range streamResponse.Choices { | 			streamResponseText = responseText | ||||||
| 							streamResponseText += choice.Delta.Content | 			return nil | ||||||
| 						} | 		} else { | ||||||
| 					case RelayModeCompletions: | 			err, usage := openaiHandler(c, resp, consumeQuota) | ||||||
| 						var streamResponse CompletionsStreamResponse |  | ||||||
| 						err = json.Unmarshal([]byte(data), &streamResponse) |  | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 							common.SysError("error unmarshalling stream response: " + err.Error()) | 				return err | ||||||
| 							return |  | ||||||
| 			} | 			} | ||||||
| 						for _, choice := range streamResponse.Choices { | 			if usage != nil { | ||||||
| 							streamResponseText += choice.Text | 				textResponse.Usage = *usage | ||||||
| 			} | 			} | ||||||
|  | 			return nil | ||||||
| 		} | 		} | ||||||
| 				} | 	case APITypeClaude: | ||||||
| 			} | 		if isStream { | ||||||
| 			stopChan <- true | 			err, responseText := claudeStreamHandler(c, resp) | ||||||
| 		}() |  | ||||||
| 		c.Writer.Header().Set("Content-Type", "text/event-stream") |  | ||||||
| 		c.Writer.Header().Set("Cache-Control", "no-cache") |  | ||||||
| 		c.Writer.Header().Set("Connection", "keep-alive") |  | ||||||
| 		c.Writer.Header().Set("Transfer-Encoding", "chunked") |  | ||||||
| 		c.Writer.Header().Set("X-Accel-Buffering", "no") |  | ||||||
| 		c.Stream(func(w io.Writer) bool { |  | ||||||
| 			select { |  | ||||||
| 			case data := <-dataChan: |  | ||||||
| 				if strings.HasPrefix(data, "data: [DONE]") { |  | ||||||
| 					data = data[:12] |  | ||||||
| 				} |  | ||||||
| 				c.Render(-1, common.CustomEvent{Data: data}) |  | ||||||
| 				return true |  | ||||||
| 			case <-stopChan: |  | ||||||
| 				return false |  | ||||||
| 			} |  | ||||||
| 		}) |  | ||||||
| 		err = resp.Body.Close() |  | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 			return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) | 				return err | ||||||
|  | 			} | ||||||
|  | 			streamResponseText = responseText | ||||||
|  | 			return nil | ||||||
|  | 		} else { | ||||||
|  | 			err, usage := claudeHandler(c, resp, promptTokens, textRequest.Model) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  | 			if usage != nil { | ||||||
|  | 				textResponse.Usage = *usage | ||||||
|  | 			} | ||||||
|  | 			return nil | ||||||
|  | 		} | ||||||
|  | 	case APITypeBaidu: | ||||||
|  | 		if isStream { | ||||||
|  | 			err, usage := baiduStreamHandler(c, resp) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  | 			if usage != nil { | ||||||
|  | 				textResponse.Usage = *usage | ||||||
| 			} | 			} | ||||||
| 			return nil | 			return nil | ||||||
| 		} else { | 		} else { | ||||||
| 		if consumeQuota { | 			err, usage := baiduHandler(c, resp) | ||||||
| 			responseBody, err := io.ReadAll(resp.Body) |  | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
| 				return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError) | 				return err | ||||||
| 			} | 			} | ||||||
| 			err = resp.Body.Close() | 			if usage != nil { | ||||||
| 			if err != nil { | 				textResponse.Usage = *usage | ||||||
| 				return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) |  | ||||||
| 			} |  | ||||||
| 			err = json.Unmarshal(responseBody, &textResponse) |  | ||||||
| 			if err != nil { |  | ||||||
| 				return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError) |  | ||||||
| 			} |  | ||||||
| 			if textResponse.Error.Type != "" { |  | ||||||
| 				return &OpenAIErrorWithStatusCode{ |  | ||||||
| 					OpenAIError: textResponse.Error, |  | ||||||
| 					StatusCode:  resp.StatusCode, |  | ||||||
| 				} |  | ||||||
| 			} |  | ||||||
| 			// Reset response body |  | ||||||
| 			resp.Body = io.NopCloser(bytes.NewBuffer(responseBody)) |  | ||||||
| 		} |  | ||||||
| 		// We shouldn't set the header before we parse the response body, because the parse part may fail. |  | ||||||
| 		// And then we will have to send an error response, but in this case, the header has already been set. |  | ||||||
| 		// So the client will be confused by the response. |  | ||||||
| 		// For example, Postman will report error, and we cannot check the response at all. |  | ||||||
| 		for k, v := range resp.Header { |  | ||||||
| 			c.Writer.Header().Set(k, v[0]) |  | ||||||
| 		} |  | ||||||
| 		c.Writer.WriteHeader(resp.StatusCode) |  | ||||||
| 		_, err = io.Copy(c.Writer, resp.Body) |  | ||||||
| 		if err != nil { |  | ||||||
| 			return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError) |  | ||||||
| 		} |  | ||||||
| 		err = resp.Body.Close() |  | ||||||
| 		if err != nil { |  | ||||||
| 			return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError) |  | ||||||
| 			} | 			} | ||||||
| 			return nil | 			return nil | ||||||
| 		} | 		} | ||||||
|  | 	case APITypePaLM: | ||||||
|  | 		if textRequest.Stream { // PaLM2 API does not support stream | ||||||
|  | 			err, responseText := palmStreamHandler(c, resp) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  | 			streamResponseText = responseText | ||||||
|  | 			return nil | ||||||
|  | 		} else { | ||||||
|  | 			err, usage := palmHandler(c, resp, promptTokens, textRequest.Model) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  | 			if usage != nil { | ||||||
|  | 				textResponse.Usage = *usage | ||||||
|  | 			} | ||||||
|  | 			return nil | ||||||
|  | 		} | ||||||
|  | 	case APITypeZhipu: | ||||||
|  | 		if isStream { | ||||||
|  | 			err, usage := zhipuStreamHandler(c, resp) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  | 			if usage != nil { | ||||||
|  | 				textResponse.Usage = *usage | ||||||
|  | 			} | ||||||
|  | 			return nil | ||||||
|  | 		} else { | ||||||
|  | 			err, usage := zhipuHandler(c, resp) | ||||||
|  | 			if err != nil { | ||||||
|  | 				return err | ||||||
|  | 			} | ||||||
|  | 			if usage != nil { | ||||||
|  | 				textResponse.Usage = *usage | ||||||
|  | 			} | ||||||
|  | 			return nil | ||||||
|  | 		} | ||||||
|  | 	default: | ||||||
|  | 		return errorWrapper(errors.New("unknown api type"), "unknown_api_type", http.StatusInternalServerError) | ||||||
|  | 	} | ||||||
| } | } | ||||||
|   | |||||||
| @@ -4,7 +4,6 @@ import ( | |||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/pkoukk/tiktoken-go" | 	"github.com/pkoukk/tiktoken-go" | ||||||
| 	"one-api/common" | 	"one-api/common" | ||||||
| 	"strings" |  | ||||||
| ) | ) | ||||||
|  |  | ||||||
| var tokenEncoderMap = map[string]*tiktoken.Tiktoken{} | var tokenEncoderMap = map[string]*tiktoken.Tiktoken{} | ||||||
| @@ -25,6 +24,13 @@ func getTokenEncoder(model string) *tiktoken.Tiktoken { | |||||||
| 	return tokenEncoder | 	return tokenEncoder | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func getTokenNum(tokenEncoder *tiktoken.Tiktoken, text string) int { | ||||||
|  | 	if common.ApproximateTokenEnabled { | ||||||
|  | 		return int(float64(len(text)) * 0.38) | ||||||
|  | 	} | ||||||
|  | 	return len(tokenEncoder.Encode(text, nil, nil)) | ||||||
|  | } | ||||||
|  |  | ||||||
| func countTokenMessages(messages []Message, model string) int { | func countTokenMessages(messages []Message, model string) int { | ||||||
| 	tokenEncoder := getTokenEncoder(model) | 	tokenEncoder := getTokenEncoder(model) | ||||||
| 	// Reference: | 	// Reference: | ||||||
| @@ -34,12 +40,9 @@ func countTokenMessages(messages []Message, model string) int { | |||||||
| 	// Every message follows <|start|>{role/name}\n{content}<|end|>\n | 	// Every message follows <|start|>{role/name}\n{content}<|end|>\n | ||||||
| 	var tokensPerMessage int | 	var tokensPerMessage int | ||||||
| 	var tokensPerName int | 	var tokensPerName int | ||||||
| 	if strings.HasPrefix(model, "gpt-3.5") { | 	if model == "gpt-3.5-turbo-0301" { | ||||||
| 		tokensPerMessage = 4 | 		tokensPerMessage = 4 | ||||||
| 		tokensPerName = -1 // If there's a name, the role is omitted | 		tokensPerName = -1 // If there's a name, the role is omitted | ||||||
| 	} else if strings.HasPrefix(model, "gpt-4") { |  | ||||||
| 		tokensPerMessage = 3 |  | ||||||
| 		tokensPerName = 1 |  | ||||||
| 	} else { | 	} else { | ||||||
| 		tokensPerMessage = 3 | 		tokensPerMessage = 3 | ||||||
| 		tokensPerName = 1 | 		tokensPerName = 1 | ||||||
| @@ -47,11 +50,11 @@ func countTokenMessages(messages []Message, model string) int { | |||||||
| 	tokenNum := 0 | 	tokenNum := 0 | ||||||
| 	for _, message := range messages { | 	for _, message := range messages { | ||||||
| 		tokenNum += tokensPerMessage | 		tokenNum += tokensPerMessage | ||||||
| 		tokenNum += len(tokenEncoder.Encode(message.Content, nil, nil)) | 		tokenNum += getTokenNum(tokenEncoder, message.Content) | ||||||
| 		tokenNum += len(tokenEncoder.Encode(message.Role, nil, nil)) | 		tokenNum += getTokenNum(tokenEncoder, message.Role) | ||||||
| 		if message.Name != nil { | 		if message.Name != nil { | ||||||
| 			tokenNum += tokensPerName | 			tokenNum += tokensPerName | ||||||
| 			tokenNum += len(tokenEncoder.Encode(*message.Name, nil, nil)) | 			tokenNum += getTokenNum(tokenEncoder, *message.Name) | ||||||
| 		} | 		} | ||||||
| 	} | 	} | ||||||
| 	tokenNum += 3 // Every reply is primed with <|start|>assistant<|message|> | 	tokenNum += 3 // Every reply is primed with <|start|>assistant<|message|> | ||||||
| @@ -74,8 +77,7 @@ func countTokenInput(input any, model string) int { | |||||||
|  |  | ||||||
| func countTokenText(text string, model string) int { | func countTokenText(text string, model string) int { | ||||||
| 	tokenEncoder := getTokenEncoder(model) | 	tokenEncoder := getTokenEncoder(model) | ||||||
| 	token := tokenEncoder.Encode(text, nil, nil) | 	return getTokenNum(tokenEncoder, text) | ||||||
| 	return len(token) |  | ||||||
| } | } | ||||||
|  |  | ||||||
| func errorWrapper(err error, code string, statusCode int) *OpenAIErrorWithStatusCode { | func errorWrapper(err error, code string, statusCode int) *OpenAIErrorWithStatusCode { | ||||||
| @@ -89,3 +91,16 @@ func errorWrapper(err error, code string, statusCode int) *OpenAIErrorWithStatus | |||||||
| 		StatusCode:  statusCode, | 		StatusCode:  statusCode, | ||||||
| 	} | 	} | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func shouldDisableChannel(err *OpenAIError) bool { | ||||||
|  | 	if !common.AutomaticDisableChannelEnabled { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  | 	if err == nil { | ||||||
|  | 		return false | ||||||
|  | 	} | ||||||
|  | 	if err.Type == "insufficient_quota" || err.Code == "invalid_api_key" || err.Code == "account_deactivated" { | ||||||
|  | 		return true | ||||||
|  | 	} | ||||||
|  | 	return false | ||||||
|  | } | ||||||
|   | |||||||
							
								
								
									
										290
									
								
								controller/relay-zhipu.go
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										290
									
								
								controller/relay-zhipu.go
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,290 @@ | |||||||
|  | package controller | ||||||
|  |  | ||||||
|  | import ( | ||||||
|  | 	"bufio" | ||||||
|  | 	"encoding/json" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
|  | 	"github.com/golang-jwt/jwt" | ||||||
|  | 	"io" | ||||||
|  | 	"net/http" | ||||||
|  | 	"one-api/common" | ||||||
|  | 	"strings" | ||||||
|  | 	"sync" | ||||||
|  | 	"time" | ||||||
|  | ) | ||||||
|  |  | ||||||
|  | // https://open.bigmodel.cn/doc/api#chatglm_std | ||||||
|  | // chatglm_std, chatglm_lite | ||||||
|  | // https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke | ||||||
|  | // https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke | ||||||
|  |  | ||||||
|  | type ZhipuMessage struct { | ||||||
|  | 	Role    string `json:"role"` | ||||||
|  | 	Content string `json:"content"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type ZhipuRequest struct { | ||||||
|  | 	Prompt      []ZhipuMessage `json:"prompt"` | ||||||
|  | 	Temperature float64        `json:"temperature,omitempty"` | ||||||
|  | 	TopP        float64        `json:"top_p,omitempty"` | ||||||
|  | 	RequestId   string         `json:"request_id,omitempty"` | ||||||
|  | 	Incremental bool           `json:"incremental,omitempty"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type ZhipuResponseData struct { | ||||||
|  | 	TaskId     string         `json:"task_id"` | ||||||
|  | 	RequestId  string         `json:"request_id"` | ||||||
|  | 	TaskStatus string         `json:"task_status"` | ||||||
|  | 	Choices    []ZhipuMessage `json:"choices"` | ||||||
|  | 	Usage      `json:"usage"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type ZhipuResponse struct { | ||||||
|  | 	Code    int               `json:"code"` | ||||||
|  | 	Msg     string            `json:"msg"` | ||||||
|  | 	Success bool              `json:"success"` | ||||||
|  | 	Data    ZhipuResponseData `json:"data"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type ZhipuStreamMetaResponse struct { | ||||||
|  | 	RequestId  string `json:"request_id"` | ||||||
|  | 	TaskId     string `json:"task_id"` | ||||||
|  | 	TaskStatus string `json:"task_status"` | ||||||
|  | 	Usage      `json:"usage"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type zhipuTokenData struct { | ||||||
|  | 	Token      string | ||||||
|  | 	ExpiryTime time.Time | ||||||
|  | } | ||||||
|  |  | ||||||
|  | var zhipuTokens sync.Map | ||||||
|  | var expSeconds int64 = 24 * 3600 | ||||||
|  |  | ||||||
|  | func getZhipuToken(apikey string) string { | ||||||
|  | 	data, ok := zhipuTokens.Load(apikey) | ||||||
|  | 	if ok { | ||||||
|  | 		tokenData := data.(zhipuTokenData) | ||||||
|  | 		if time.Now().Before(tokenData.ExpiryTime) { | ||||||
|  | 			return tokenData.Token | ||||||
|  | 		} | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	split := strings.Split(apikey, ".") | ||||||
|  | 	if len(split) != 2 { | ||||||
|  | 		common.SysError("invalid zhipu key: " + apikey) | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	id := split[0] | ||||||
|  | 	secret := split[1] | ||||||
|  |  | ||||||
|  | 	expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6 | ||||||
|  | 	expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second) | ||||||
|  |  | ||||||
|  | 	timestamp := time.Now().UnixNano() / 1e6 | ||||||
|  |  | ||||||
|  | 	payload := jwt.MapClaims{ | ||||||
|  | 		"api_key":   id, | ||||||
|  | 		"exp":       expMillis, | ||||||
|  | 		"timestamp": timestamp, | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload) | ||||||
|  |  | ||||||
|  | 	token.Header["alg"] = "HS256" | ||||||
|  | 	token.Header["sign_type"] = "SIGN" | ||||||
|  |  | ||||||
|  | 	tokenString, err := token.SignedString([]byte(secret)) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return "" | ||||||
|  | 	} | ||||||
|  |  | ||||||
|  | 	zhipuTokens.Store(apikey, zhipuTokenData{ | ||||||
|  | 		Token:      tokenString, | ||||||
|  | 		ExpiryTime: expiryTime, | ||||||
|  | 	}) | ||||||
|  |  | ||||||
|  | 	return tokenString | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func requestOpenAI2Zhipu(request GeneralOpenAIRequest) *ZhipuRequest { | ||||||
|  | 	messages := make([]ZhipuMessage, 0, len(request.Messages)) | ||||||
|  | 	for _, message := range request.Messages { | ||||||
|  | 		messages = append(messages, ZhipuMessage{ | ||||||
|  | 			Role:    message.Role, | ||||||
|  | 			Content: message.Content, | ||||||
|  | 		}) | ||||||
|  | 	} | ||||||
|  | 	return &ZhipuRequest{ | ||||||
|  | 		Prompt:      messages, | ||||||
|  | 		Temperature: request.Temperature, | ||||||
|  | 		TopP:        request.TopP, | ||||||
|  | 		Incremental: false, | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func responseZhipu2OpenAI(response *ZhipuResponse) *OpenAITextResponse { | ||||||
|  | 	fullTextResponse := OpenAITextResponse{ | ||||||
|  | 		Id:      response.Data.TaskId, | ||||||
|  | 		Object:  "chat.completion", | ||||||
|  | 		Created: common.GetTimestamp(), | ||||||
|  | 		Choices: make([]OpenAITextResponseChoice, 0, len(response.Data.Choices)), | ||||||
|  | 		Usage:   response.Data.Usage, | ||||||
|  | 	} | ||||||
|  | 	for i, choice := range response.Data.Choices { | ||||||
|  | 		openaiChoice := OpenAITextResponseChoice{ | ||||||
|  | 			Index: i, | ||||||
|  | 			Message: Message{ | ||||||
|  | 				Role:    choice.Role, | ||||||
|  | 				Content: strings.Trim(choice.Content, "\""), | ||||||
|  | 			}, | ||||||
|  | 			FinishReason: "", | ||||||
|  | 		} | ||||||
|  | 		if i == len(response.Data.Choices)-1 { | ||||||
|  | 			openaiChoice.FinishReason = "stop" | ||||||
|  | 		} | ||||||
|  | 		fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice) | ||||||
|  | 	} | ||||||
|  | 	return &fullTextResponse | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func streamResponseZhipu2OpenAI(zhipuResponse string) *ChatCompletionsStreamResponse { | ||||||
|  | 	var choice ChatCompletionsStreamResponseChoice | ||||||
|  | 	choice.Delta.Content = zhipuResponse | ||||||
|  | 	choice.FinishReason = "" | ||||||
|  | 	response := ChatCompletionsStreamResponse{ | ||||||
|  | 		Object:  "chat.completion.chunk", | ||||||
|  | 		Created: common.GetTimestamp(), | ||||||
|  | 		Model:   "chatglm", | ||||||
|  | 		Choices: []ChatCompletionsStreamResponseChoice{choice}, | ||||||
|  | 	} | ||||||
|  | 	return &response | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func streamMetaResponseZhipu2OpenAI(zhipuResponse *ZhipuStreamMetaResponse) (*ChatCompletionsStreamResponse, *Usage) { | ||||||
|  | 	var choice ChatCompletionsStreamResponseChoice | ||||||
|  | 	choice.Delta.Content = "" | ||||||
|  | 	choice.FinishReason = "stop" | ||||||
|  | 	response := ChatCompletionsStreamResponse{ | ||||||
|  | 		Id:      zhipuResponse.RequestId, | ||||||
|  | 		Object:  "chat.completion.chunk", | ||||||
|  | 		Created: common.GetTimestamp(), | ||||||
|  | 		Model:   "chatglm", | ||||||
|  | 		Choices: []ChatCompletionsStreamResponseChoice{choice}, | ||||||
|  | 	} | ||||||
|  | 	return &response, &zhipuResponse.Usage | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func zhipuStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) { | ||||||
|  | 	var usage *Usage | ||||||
|  | 	scanner := bufio.NewScanner(resp.Body) | ||||||
|  | 	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { | ||||||
|  | 		if atEOF && len(data) == 0 { | ||||||
|  | 			return 0, nil, nil | ||||||
|  | 		} | ||||||
|  | 		if i := strings.Index(string(data), "\n"); i >= 0 { | ||||||
|  | 			return i + 1, data[0:i], nil | ||||||
|  | 		} | ||||||
|  | 		if atEOF { | ||||||
|  | 			return len(data), data, nil | ||||||
|  | 		} | ||||||
|  | 		return 0, nil, nil | ||||||
|  | 	}) | ||||||
|  | 	dataChan := make(chan string) | ||||||
|  | 	metaChan := make(chan string) | ||||||
|  | 	stopChan := make(chan bool) | ||||||
|  | 	go func() { | ||||||
|  | 		for scanner.Scan() { | ||||||
|  | 			data := scanner.Text() | ||||||
|  | 			data = strings.Trim(data, "\"") | ||||||
|  | 			if len(data) < 5 { // ignore blank line or wrong format | ||||||
|  | 				continue | ||||||
|  | 			} | ||||||
|  | 			if data[:5] == "data:" { | ||||||
|  | 				dataChan <- data[5:] | ||||||
|  | 			} else if data[:5] == "meta:" { | ||||||
|  | 				metaChan <- data[5:] | ||||||
|  | 			} | ||||||
|  | 		} | ||||||
|  | 		stopChan <- true | ||||||
|  | 	}() | ||||||
|  | 	c.Writer.Header().Set("Content-Type", "text/event-stream") | ||||||
|  | 	c.Writer.Header().Set("Cache-Control", "no-cache") | ||||||
|  | 	c.Writer.Header().Set("Connection", "keep-alive") | ||||||
|  | 	c.Writer.Header().Set("Transfer-Encoding", "chunked") | ||||||
|  | 	c.Writer.Header().Set("X-Accel-Buffering", "no") | ||||||
|  | 	c.Stream(func(w io.Writer) bool { | ||||||
|  | 		select { | ||||||
|  | 		case data := <-dataChan: | ||||||
|  | 			response := streamResponseZhipu2OpenAI(data) | ||||||
|  | 			jsonResponse, err := json.Marshal(response) | ||||||
|  | 			if err != nil { | ||||||
|  | 				common.SysError("error marshalling stream response: " + err.Error()) | ||||||
|  | 				return true | ||||||
|  | 			} | ||||||
|  | 			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)}) | ||||||
|  | 			return true | ||||||
|  | 		case data := <-metaChan: | ||||||
|  | 			var zhipuResponse ZhipuStreamMetaResponse | ||||||
|  | 			err := json.Unmarshal([]byte(data), &zhipuResponse) | ||||||
|  | 			if err != nil { | ||||||
|  | 				common.SysError("error unmarshalling stream response: " + err.Error()) | ||||||
|  | 				return true | ||||||
|  | 			} | ||||||
|  | 			response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse) | ||||||
|  | 			jsonResponse, err := json.Marshal(response) | ||||||
|  | 			if err != nil { | ||||||
|  | 				common.SysError("error marshalling stream response: " + err.Error()) | ||||||
|  | 				return true | ||||||
|  | 			} | ||||||
|  | 			usage = zhipuUsage | ||||||
|  | 			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)}) | ||||||
|  | 			return true | ||||||
|  | 		case <-stopChan: | ||||||
|  | 			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"}) | ||||||
|  | 			return false | ||||||
|  | 		} | ||||||
|  | 	}) | ||||||
|  | 	err := resp.Body.Close() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	return nil, usage | ||||||
|  | } | ||||||
|  |  | ||||||
|  | func zhipuHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) { | ||||||
|  | 	var zhipuResponse ZhipuResponse | ||||||
|  | 	responseBody, err := io.ReadAll(resp.Body) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	err = resp.Body.Close() | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	err = json.Unmarshal(responseBody, &zhipuResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	if !zhipuResponse.Success { | ||||||
|  | 		return &OpenAIErrorWithStatusCode{ | ||||||
|  | 			OpenAIError: OpenAIError{ | ||||||
|  | 				Message: zhipuResponse.Msg, | ||||||
|  | 				Type:    "zhipu_error", | ||||||
|  | 				Param:   "", | ||||||
|  | 				Code:    zhipuResponse.Code, | ||||||
|  | 			}, | ||||||
|  | 			StatusCode: resp.StatusCode, | ||||||
|  | 		}, nil | ||||||
|  | 	} | ||||||
|  | 	fullTextResponse := responseZhipu2OpenAI(&zhipuResponse) | ||||||
|  | 	jsonResponse, err := json.Marshal(fullTextResponse) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil | ||||||
|  | 	} | ||||||
|  | 	c.Writer.Header().Set("Content-Type", "application/json") | ||||||
|  | 	c.Writer.WriteHeader(resp.StatusCode) | ||||||
|  | 	_, err = c.Writer.Write(jsonResponse) | ||||||
|  | 	return nil, &fullTextResponse.Usage | ||||||
|  | } | ||||||
| @@ -2,10 +2,12 @@ package controller | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" |  | ||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"one-api/common" | 	"one-api/common" | ||||||
|  | 	"strconv" | ||||||
| 	"strings" | 	"strings" | ||||||
|  |  | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type Message struct { | type Message struct { | ||||||
| @@ -19,22 +21,25 @@ const ( | |||||||
| 	RelayModeChatCompletions | 	RelayModeChatCompletions | ||||||
| 	RelayModeCompletions | 	RelayModeCompletions | ||||||
| 	RelayModeEmbeddings | 	RelayModeEmbeddings | ||||||
| 	RelayModeModeration | 	RelayModeModerations | ||||||
| 	RelayModeImagesGenerations | 	RelayModeImagesGenerations | ||||||
|  | 	RelayModeEdits | ||||||
| ) | ) | ||||||
|  |  | ||||||
| // https://platform.openai.com/docs/api-reference/chat | // https://platform.openai.com/docs/api-reference/chat | ||||||
|  |  | ||||||
| type GeneralOpenAIRequest struct { | type GeneralOpenAIRequest struct { | ||||||
| 	Model       string    `json:"model"` | 	Model       string    `json:"model,omitempty"` | ||||||
| 	Messages    []Message `json:"messages"` | 	Messages    []Message `json:"messages,omitempty"` | ||||||
| 	Prompt      any       `json:"prompt"` | 	Prompt      any       `json:"prompt,omitempty"` | ||||||
| 	Stream      bool      `json:"stream"` | 	Stream      bool      `json:"stream,omitempty"` | ||||||
| 	MaxTokens   int       `json:"max_tokens"` | 	MaxTokens   int       `json:"max_tokens,omitempty"` | ||||||
| 	Temperature float64   `json:"temperature"` | 	Temperature float64   `json:"temperature,omitempty"` | ||||||
| 	TopP        float64   `json:"top_p"` | 	TopP        float64   `json:"top_p,omitempty"` | ||||||
| 	N           int       `json:"n"` | 	N           int       `json:"n,omitempty"` | ||||||
| 	Input       any       `json:"input"` | 	Input       any       `json:"input,omitempty"` | ||||||
|  | 	Instruction string    `json:"instruction,omitempty"` | ||||||
|  | 	Size        string    `json:"size,omitempty"` | ||||||
| } | } | ||||||
|  |  | ||||||
| type ChatRequest struct { | type ChatRequest struct { | ||||||
| @@ -51,6 +56,12 @@ type TextRequest struct { | |||||||
| 	//Stream   bool      `json:"stream"` | 	//Stream   bool      `json:"stream"` | ||||||
| } | } | ||||||
|  |  | ||||||
|  | type ImageRequest struct { | ||||||
|  | 	Prompt string `json:"prompt"` | ||||||
|  | 	N      int    `json:"n"` | ||||||
|  | 	Size   string `json:"size"` | ||||||
|  | } | ||||||
|  |  | ||||||
| type Usage struct { | type Usage struct { | ||||||
| 	PromptTokens     int `json:"prompt_tokens"` | 	PromptTokens     int `json:"prompt_tokens"` | ||||||
| 	CompletionTokens int `json:"completion_tokens"` | 	CompletionTokens int `json:"completion_tokens"` | ||||||
| @@ -74,13 +85,40 @@ type TextResponse struct { | |||||||
| 	Error OpenAIError `json:"error"` | 	Error OpenAIError `json:"error"` | ||||||
| } | } | ||||||
|  |  | ||||||
| type ChatCompletionsStreamResponse struct { | type OpenAITextResponseChoice struct { | ||||||
| 	Choices []struct { | 	Index        int `json:"index"` | ||||||
|  | 	Message      `json:"message"` | ||||||
|  | 	FinishReason string `json:"finish_reason"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type OpenAITextResponse struct { | ||||||
|  | 	Id      string                     `json:"id"` | ||||||
|  | 	Object  string                     `json:"object"` | ||||||
|  | 	Created int64                      `json:"created"` | ||||||
|  | 	Choices []OpenAITextResponseChoice `json:"choices"` | ||||||
|  | 	Usage   `json:"usage"` | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type ImageResponse struct { | ||||||
|  | 	Created int `json:"created"` | ||||||
|  | 	Data    []struct { | ||||||
|  | 		Url string `json:"url"` | ||||||
|  | 	} | ||||||
|  | } | ||||||
|  |  | ||||||
|  | type ChatCompletionsStreamResponseChoice struct { | ||||||
| 	Delta struct { | 	Delta struct { | ||||||
| 		Content string `json:"content"` | 		Content string `json:"content"` | ||||||
| 	} `json:"delta"` | 	} `json:"delta"` | ||||||
| 		FinishReason string `json:"finish_reason"` | 	FinishReason string `json:"finish_reason,omitempty"` | ||||||
| 	} `json:"choices"` | } | ||||||
|  |  | ||||||
|  | type ChatCompletionsStreamResponse struct { | ||||||
|  | 	Id      string                                `json:"id"` | ||||||
|  | 	Object  string                                `json:"object"` | ||||||
|  | 	Created int64                                 `json:"created"` | ||||||
|  | 	Model   string                                `json:"model"` | ||||||
|  | 	Choices []ChatCompletionsStreamResponseChoice `json:"choices"` | ||||||
| } | } | ||||||
|  |  | ||||||
| type CompletionsStreamResponse struct { | type CompletionsStreamResponse struct { | ||||||
| @@ -98,10 +136,14 @@ func Relay(c *gin.Context) { | |||||||
| 		relayMode = RelayModeCompletions | 		relayMode = RelayModeCompletions | ||||||
| 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/embeddings") { | 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/embeddings") { | ||||||
| 		relayMode = RelayModeEmbeddings | 		relayMode = RelayModeEmbeddings | ||||||
|  | 	} else if strings.HasSuffix(c.Request.URL.Path, "embeddings") { | ||||||
|  | 		relayMode = RelayModeEmbeddings | ||||||
| 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") { | 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") { | ||||||
| 		relayMode = RelayModeModeration | 		relayMode = RelayModeModerations | ||||||
| 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") { | 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") { | ||||||
| 		relayMode = RelayModeImagesGenerations | 		relayMode = RelayModeImagesGenerations | ||||||
|  | 	} else if strings.HasPrefix(c.Request.URL.Path, "/v1/edits") { | ||||||
|  | 		relayMode = RelayModeEdits | ||||||
| 	} | 	} | ||||||
| 	var err *OpenAIErrorWithStatusCode | 	var err *OpenAIErrorWithStatusCode | ||||||
| 	switch relayMode { | 	switch relayMode { | ||||||
| @@ -111,16 +153,25 @@ func Relay(c *gin.Context) { | |||||||
| 		err = relayTextHelper(c, relayMode) | 		err = relayTextHelper(c, relayMode) | ||||||
| 	} | 	} | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
|  | 		retryTimesStr := c.Query("retry") | ||||||
|  | 		retryTimes, _ := strconv.Atoi(retryTimesStr) | ||||||
|  | 		if retryTimesStr == "" { | ||||||
|  | 			retryTimes = common.RetryTimes | ||||||
|  | 		} | ||||||
|  | 		if retryTimes > 0 { | ||||||
|  | 			c.Redirect(http.StatusTemporaryRedirect, fmt.Sprintf("%s?retry=%d", c.Request.URL.Path, retryTimes-1)) | ||||||
|  | 		} else { | ||||||
| 			if err.StatusCode == http.StatusTooManyRequests { | 			if err.StatusCode == http.StatusTooManyRequests { | ||||||
| 				err.OpenAIError.Message = "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。" | 				err.OpenAIError.Message = "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。" | ||||||
| 			} | 			} | ||||||
| 			c.JSON(err.StatusCode, gin.H{ | 			c.JSON(err.StatusCode, gin.H{ | ||||||
| 				"error": err.OpenAIError, | 				"error": err.OpenAIError, | ||||||
| 			}) | 			}) | ||||||
|  | 		} | ||||||
| 		channelId := c.GetInt("channel_id") | 		channelId := c.GetInt("channel_id") | ||||||
| 		common.SysError(fmt.Sprintf("relay error (channel #%d): %s", channelId, err.Message)) | 		common.SysError(fmt.Sprintf("relay error (channel #%d): %s", channelId, err.Message)) | ||||||
| 		// https://platform.openai.com/docs/guides/error-codes/api-errors | 		// https://platform.openai.com/docs/guides/error-codes/api-errors | ||||||
| 		if common.AutomaticDisableChannelEnabled && (err.Type == "insufficient_quota" || err.Code == "invalid_api_key") { | 		if shouldDisableChannel(&err.OpenAIError) { | ||||||
| 			channelId := c.GetInt("channel_id") | 			channelId := c.GetInt("channel_id") | ||||||
| 			channelName := c.GetString("channel_name") | 			channelName := c.GetString("channel_name") | ||||||
| 			disableChannel(channelId, channelName, err.Message) | 			disableChannel(channelId, channelName, err.Message) | ||||||
|   | |||||||
| @@ -180,10 +180,10 @@ func UpdateToken(c *gin.Context) { | |||||||
| 		return | 		return | ||||||
| 	} | 	} | ||||||
| 	if token.Status == common.TokenStatusEnabled { | 	if token.Status == common.TokenStatusEnabled { | ||||||
| 		if cleanToken.Status == common.TokenStatusExpired && cleanToken.ExpiredTime <= common.GetTimestamp() { | 		if cleanToken.Status == common.TokenStatusExpired && cleanToken.ExpiredTime <= common.GetTimestamp() && cleanToken.ExpiredTime != -1 { | ||||||
| 			c.JSON(http.StatusOK, gin.H{ | 			c.JSON(http.StatusOK, gin.H{ | ||||||
| 				"success": false, | 				"success": false, | ||||||
| 				"message": "令牌已过期,无法启用,请先修改令牌过期时间", | 				"message": "令牌已过期,无法启用,请先修改令牌过期时间,或者设置为永不过期", | ||||||
| 			}) | 			}) | ||||||
| 			return | 			return | ||||||
| 		} | 		} | ||||||
|   | |||||||
| @@ -3,12 +3,13 @@ package controller | |||||||
| import ( | import ( | ||||||
| 	"encoding/json" | 	"encoding/json" | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-contrib/sessions" |  | ||||||
| 	"github.com/gin-gonic/gin" |  | ||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"one-api/common" | 	"one-api/common" | ||||||
| 	"one-api/model" | 	"one-api/model" | ||||||
| 	"strconv" | 	"strconv" | ||||||
|  |  | ||||||
|  | 	"github.com/gin-contrib/sessions" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type LoginRequest struct { | type LoginRequest struct { | ||||||
| @@ -477,6 +478,16 @@ func DeleteUser(c *gin.Context) { | |||||||
|  |  | ||||||
| func DeleteSelf(c *gin.Context) { | func DeleteSelf(c *gin.Context) { | ||||||
| 	id := c.GetInt("id") | 	id := c.GetInt("id") | ||||||
|  | 	user, _ := model.GetUserById(id, false) | ||||||
|  |  | ||||||
|  | 	if user.Role == common.RoleRootUser { | ||||||
|  | 		c.JSON(http.StatusOK, gin.H{ | ||||||
|  | 			"success": false, | ||||||
|  | 			"message": "不能删除超级管理员账户", | ||||||
|  | 		}) | ||||||
|  | 		return | ||||||
|  | 	} | ||||||
|  |  | ||||||
| 	err := model.DeleteUserById(id) | 	err := model.DeleteUserById(id) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 		c.JSON(http.StatusOK, gin.H{ | 		c.JSON(http.StatusOK, gin.H{ | ||||||
|   | |||||||
							
								
								
									
										3
									
								
								go.mod
									
									
									
									
									
								
							
							
						
						
									
										3
									
								
								go.mod
									
									
									
									
									
								
							| @@ -11,6 +11,7 @@ require ( | |||||||
| 	github.com/gin-gonic/gin v1.9.1 | 	github.com/gin-gonic/gin v1.9.1 | ||||||
| 	github.com/go-playground/validator/v10 v10.14.0 | 	github.com/go-playground/validator/v10 v10.14.0 | ||||||
| 	github.com/go-redis/redis/v8 v8.11.5 | 	github.com/go-redis/redis/v8 v8.11.5 | ||||||
|  | 	github.com/golang-jwt/jwt v3.2.2+incompatible | ||||||
| 	github.com/google/uuid v1.3.0 | 	github.com/google/uuid v1.3.0 | ||||||
| 	github.com/pkoukk/tiktoken-go v0.1.1 | 	github.com/pkoukk/tiktoken-go v0.1.1 | ||||||
| 	golang.org/x/crypto v0.9.0 | 	golang.org/x/crypto v0.9.0 | ||||||
| @@ -20,7 +21,6 @@ require ( | |||||||
| ) | ) | ||||||
|  |  | ||||||
| require ( | require ( | ||||||
| 	github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff // indirect |  | ||||||
| 	github.com/bytedance/sonic v1.9.1 // indirect | 	github.com/bytedance/sonic v1.9.1 // indirect | ||||||
| 	github.com/cespare/xxhash/v2 v2.1.2 // indirect | 	github.com/cespare/xxhash/v2 v2.1.2 // indirect | ||||||
| 	github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect | 	github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect | ||||||
| @@ -32,7 +32,6 @@ require ( | |||||||
| 	github.com/go-playground/universal-translator v0.18.1 // indirect | 	github.com/go-playground/universal-translator v0.18.1 // indirect | ||||||
| 	github.com/go-sql-driver/mysql v1.6.0 // indirect | 	github.com/go-sql-driver/mysql v1.6.0 // indirect | ||||||
| 	github.com/goccy/go-json v0.10.2 // indirect | 	github.com/goccy/go-json v0.10.2 // indirect | ||||||
| 	github.com/gomodule/redigo v2.0.0+incompatible // indirect |  | ||||||
| 	github.com/gorilla/context v1.1.1 // indirect | 	github.com/gorilla/context v1.1.1 // indirect | ||||||
| 	github.com/gorilla/securecookie v1.1.1 // indirect | 	github.com/gorilla/securecookie v1.1.1 // indirect | ||||||
| 	github.com/gorilla/sessions v1.2.1 // indirect | 	github.com/gorilla/sessions v1.2.1 // indirect | ||||||
|   | |||||||
							
								
								
									
										7
									
								
								go.sum
									
									
									
									
									
								
							
							
						
						
									
										7
									
								
								go.sum
									
									
									
									
									
								
							| @@ -1,5 +1,3 @@ | |||||||
| github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff h1:RmdPFa+slIr4SCBg4st/l/vZWVe9QJKMXGO60Bxbe04= |  | ||||||
| github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff/go.mod h1:+RTT1BOk5P97fT2CiHkbFQwkK3mjsFAP6zCYV2aXtjw= |  | ||||||
| github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= | ||||||
| github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= | github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= | ||||||
| github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= | github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= | ||||||
| @@ -54,10 +52,10 @@ github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LB | |||||||
| github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | ||||||
| github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= | ||||||
| github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= | ||||||
|  | github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= | ||||||
|  | github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= | ||||||
| github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= | github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= | ||||||
| github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= | ||||||
| github.com/gomodule/redigo v2.0.0+incompatible h1:K/R+8tc58AaqLkqG2Ol3Qk+DR/TlNuhuh457pBFPtt0= |  | ||||||
| github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= |  | ||||||
| github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= | github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= | ||||||
| github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= | ||||||
| github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= | github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= | ||||||
| @@ -67,7 +65,6 @@ github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8 | |||||||
| github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= | github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= | ||||||
| github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= | github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= | ||||||
| github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= | github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= | ||||||
| github.com/gorilla/sessions v1.1.1/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w= |  | ||||||
| github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= | github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI= | ||||||
| github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= | github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM= | ||||||
| github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= | github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= | ||||||
|   | |||||||
							
								
								
									
										51
									
								
								i18n/en.json
									
									
									
									
									
								
							
							
						
						
									
										51
									
								
								i18n/en.json
									
									
									
									
									
								
							| @@ -36,7 +36,7 @@ | |||||||
|   "通过令牌「%s」使用模型 %s 消耗 %s(模型倍率 %.2f,分组倍率 %.2f)": "Using model %s with token %s consumes %s (model rate %.2f, group rate %.2f)", |   "通过令牌「%s」使用模型 %s 消耗 %s(模型倍率 %.2f,分组倍率 %.2f)": "Using model %s with token %s consumes %s (model rate %.2f, group rate %.2f)", | ||||||
|   "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。": "The current group load is saturated, please try again later, or upgrade your account to improve service quality.", |   "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。": "The current group load is saturated, please try again later, or upgrade your account to improve service quality.", | ||||||
|   "令牌名称长度必须在1-20之间": "The length of the token name must be between 1-20", |   "令牌名称长度必须在1-20之间": "The length of the token name must be between 1-20", | ||||||
|   "令牌已过期,无法启用,请先修改令牌过期时间": "The token has expired and cannot be enabled. Please modify the token expiration time first", |   "令牌已过期,无法启用,请先修改令牌过期时间,或者设置为永不过期": "The token has expired and cannot be enabled. Please modify the expiration time of the token, or set it to never expire.", | ||||||
|   "令牌可用额度已用尽,无法启用,请先修改令牌剩余额度,或者设置为无限额度": "The available quota of the token has been used up and cannot be enabled. Please modify the remaining quota of the token, or set it to unlimited quota", |   "令牌可用额度已用尽,无法启用,请先修改令牌剩余额度,或者设置为无限额度": "The available quota of the token has been used up and cannot be enabled. Please modify the remaining quota of the token, or set it to unlimited quota", | ||||||
|   "管理员关闭了密码登录": "The administrator has turned off password login", |   "管理员关闭了密码登录": "The administrator has turned off password login", | ||||||
|   "无法保存会话信息,请重试": "Unable to save session information, please try again", |   "无法保存会话信息,请重试": "Unable to save session information, please try again", | ||||||
| @@ -107,6 +107,11 @@ | |||||||
|   "已禁用": "Disabled", |   "已禁用": "Disabled", | ||||||
|   "未知状态": "Unknown status", |   "未知状态": "Unknown status", | ||||||
|   " 秒": "s", |   " 秒": "s", | ||||||
|  |   " 分钟 ": " m ", | ||||||
|  |   " 小时 ": " h ", | ||||||
|  |   " 天 ": " d ", | ||||||
|  |   " 个月 ": " M ", | ||||||
|  |   " 年 ": " y ", | ||||||
|   "未测试": "Not tested", |   "未测试": "Not tested", | ||||||
|   "通道 ${name} 测试成功,耗时 ${time.toFixed(2)} 秒。": "Channel ${name} test succeeded, time consumed ${time.toFixed(2)} s.", |   "通道 ${name} 测试成功,耗时 ${time.toFixed(2)} 秒。": "Channel ${name} test succeeded, time consumed ${time.toFixed(2)} s.", | ||||||
|   "已成功开始测试所有已启用通道,请刷新页面查看结果。": "All enabled channels have been successfully tested, please refresh the page to view the results.", |   "已成功开始测试所有已启用通道,请刷新页面查看结果。": "All enabled channels have been successfully tested, please refresh the page to view the results.", | ||||||
| @@ -456,5 +461,47 @@ | |||||||
|   "提示": "Prompt", |   "提示": "Prompt", | ||||||
|   "补全": "Completion", |   "补全": "Completion", | ||||||
|   "消耗额度": "Used Quota", |   "消耗额度": "Used Quota", | ||||||
|   "可选值": "Optional Values" |   "可选值": "Optional Values", | ||||||
|  |   "渠道不存在:%d": "Channel does not exist: %d", | ||||||
|  |   "数据库一致性已被破坏,请联系管理员": "Database consistency has been broken, please contact the administrator", | ||||||
|  |   "使用近似的方式估算 token 数以减少计算量": "Estimate the number of tokens in an approximate way to reduce computational load", | ||||||
|  |   "请填写ChannelName和ChannelKey!": "Please fill in the ChannelName and ChannelKey!", | ||||||
|  |   "请至少选择一个Model!": "Please select at least one Model!", | ||||||
|  |   "加载首页内容失败": "Failed to load the homepage content", | ||||||
|  |   "加载关于内容失败": "Failed to load the About content", | ||||||
|  |   "兑换码更新成功!": "Redemption code updated successfully!", | ||||||
|  |   "兑换码创建成功!": "Redemption code created successfully!", | ||||||
|  |   "用户账户创建成功!": "User account created successfully!", | ||||||
|  |   "生成数量": "Generate quantity", | ||||||
|  |   "请输入生成数量": "Please enter the quantity to generate", | ||||||
|  |   "创建新用户账户": "Create new user account", | ||||||
|  |   "渠道更新成功!": "Channel updated successfully!", | ||||||
|  |   "渠道创建成功!": "Channel created successfully!", | ||||||
|  |   "请选择分组": "Please select a group", | ||||||
|  |   "更新兑换码信息": "Update redemption code information", | ||||||
|  |   "创建新的兑换码": "Create a new redemption code", | ||||||
|  |   "请在系统设置页面编辑分组倍率以添加新的分组:": "Please edit the group ratio in the system settings page to add a new group:", | ||||||
|  |   "未找到所请求的页面": "The requested page was not found", | ||||||
|  |   "过期时间格式错误!": "Expiration time format error!", | ||||||
|  |   "请输入过期时间,格式为 yyyy-MM-dd HH:mm:ss,-1 表示无限制": "Please enter the expiration time, the format is yyyy-MM-dd HH:mm:ss, -1 means no limit", | ||||||
|  |   "此项可选,为一个 JSON 文本,键为用户请求的模型名称,值为要替换的模型名称,例如:": "This is optional, it's a JSON text, the key is the model name requested by the user, and the value is the model name to be replaced, for example:", | ||||||
|  |   "此项可选,输入镜像站地址,格式为:": "This is optional, enter the mirror site address, the format is:", | ||||||
|  |   "模型映射": "Model mapping", | ||||||
|  |   "请输入默认 API 版本,例如:2023-03-15-preview,该配置可以被实际的请求查询参数所覆盖": "Please enter the default API version, for example: 2023-03-15-preview, this configuration can be overridden by the actual request query parameters", | ||||||
|  |   "默认": "Default", | ||||||
|  |   "图片演示": "Image demo", | ||||||
|  |   "参数替换为你的部署名称(模型名称中的点会被剔除)": "Replace the parameter with your deployment name (dots in the model name will be removed)", | ||||||
|  |   "模型映射必须是合法的 JSON 格式!": "Model mapping must be in valid JSON format!", | ||||||
|  |   "取消无限额度": "Cancel unlimited quota", | ||||||
|  |   "请输入新的剩余额度": "Please enter the new remaining quota", | ||||||
|  |   "请输入单个兑换码中包含的额度": "Please enter the quota included in a single redemption code", | ||||||
|  |   "请输入用户名": "Please enter username", | ||||||
|  |   "请输入显示名称": "Please enter display name", | ||||||
|  |   "请输入密码": "Please enter password", | ||||||
|  |   "模型部署名称必须和模型名称保持一致": "The model deployment name must be consistent with the model name", | ||||||
|  |   ",因为 One API 会把请求体中的 model": ", because One API will take the model in the request body", | ||||||
|  |   "请输入 AZURE_OPENAI_ENDPOINT": "Please enter AZURE_OPENAI_ENDPOINT", | ||||||
|  |   "请输入自定义渠道的 Base URL": "Please enter the Base URL of the custom channel", | ||||||
|  |   "Homepage URL 填": "Fill in the Homepage URL", | ||||||
|  |   "Authorization callback URL 填": "Fill in the Authorization callback URL" | ||||||
| } | } | ||||||
|   | |||||||
							
								
								
									
										8
									
								
								main.go
									
									
									
									
									
								
							
							
						
						
									
										8
									
								
								main.go
									
									
									
									
									
								
							| @@ -4,7 +4,6 @@ import ( | |||||||
| 	"embed" | 	"embed" | ||||||
| 	"github.com/gin-contrib/sessions" | 	"github.com/gin-contrib/sessions" | ||||||
| 	"github.com/gin-contrib/sessions/cookie" | 	"github.com/gin-contrib/sessions/cookie" | ||||||
| 	"github.com/gin-contrib/sessions/redis" |  | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"one-api/common" | 	"one-api/common" | ||||||
| 	"one-api/controller" | 	"one-api/controller" | ||||||
| @@ -55,6 +54,7 @@ func main() { | |||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			common.FatalLog("failed to parse SYNC_FREQUENCY: " + err.Error()) | 			common.FatalLog("failed to parse SYNC_FREQUENCY: " + err.Error()) | ||||||
| 		} | 		} | ||||||
|  | 		common.SyncFrequency = frequency | ||||||
| 		go model.SyncOptions(frequency) | 		go model.SyncOptions(frequency) | ||||||
| 		if common.RedisEnabled { | 		if common.RedisEnabled { | ||||||
| 			go model.SyncChannelCache(frequency) | 			go model.SyncChannelCache(frequency) | ||||||
| @@ -82,14 +82,8 @@ func main() { | |||||||
| 	server.Use(middleware.CORS()) | 	server.Use(middleware.CORS()) | ||||||
|  |  | ||||||
| 	// Initialize session store | 	// Initialize session store | ||||||
| 	if common.RedisEnabled { |  | ||||||
| 		opt := common.ParseRedisOption() |  | ||||||
| 		store, _ := redis.NewStore(opt.MinIdleConns, opt.Network, opt.Addr, opt.Password, []byte(common.SessionSecret)) |  | ||||||
| 		server.Use(sessions.Sessions("session", store)) |  | ||||||
| 	} else { |  | ||||||
| 	store := cookie.NewStore([]byte(common.SessionSecret)) | 	store := cookie.NewStore([]byte(common.SessionSecret)) | ||||||
| 	server.Use(sessions.Sessions("session", store)) | 	server.Use(sessions.Sessions("session", store)) | ||||||
| 	} |  | ||||||
|  |  | ||||||
| 	router.SetRouter(server, buildFS, indexPage) | 	router.SetRouter(server, buildFS, indexPage) | ||||||
| 	var port = os.Getenv("PORT") | 	var port = os.Getenv("PORT") | ||||||
|   | |||||||
| @@ -2,12 +2,13 @@ package middleware | |||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" |  | ||||||
| 	"net/http" | 	"net/http" | ||||||
| 	"one-api/common" | 	"one-api/common" | ||||||
| 	"one-api/model" | 	"one-api/model" | ||||||
| 	"strconv" | 	"strconv" | ||||||
| 	"strings" | 	"strings" | ||||||
|  |  | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| type ModelRequest struct { | type ModelRequest struct { | ||||||
| @@ -73,11 +74,26 @@ func Distribute() func(c *gin.Context) { | |||||||
| 					modelRequest.Model = "text-moderation-stable" | 					modelRequest.Model = "text-moderation-stable" | ||||||
| 				} | 				} | ||||||
| 			} | 			} | ||||||
|  | 			if strings.HasSuffix(c.Request.URL.Path, "embeddings") { | ||||||
|  | 				if modelRequest.Model == "" { | ||||||
|  | 					modelRequest.Model = c.Param("model") | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
|  | 			if strings.HasPrefix(c.Request.URL.Path, "/v1/images/generations") { | ||||||
|  | 				if modelRequest.Model == "" { | ||||||
|  | 					modelRequest.Model = "dall-e" | ||||||
|  | 				} | ||||||
|  | 			} | ||||||
| 			channel, err = model.CacheGetRandomSatisfiedChannel(userGroup, modelRequest.Model) | 			channel, err = model.CacheGetRandomSatisfiedChannel(userGroup, modelRequest.Model) | ||||||
| 			if err != nil { | 			if err != nil { | ||||||
|  | 				message := fmt.Sprintf("当前分组 %s 下对于模型 %s 无可用渠道", userGroup, modelRequest.Model) | ||||||
|  | 				if channel != nil { | ||||||
|  | 					common.SysError(fmt.Sprintf("渠道不存在:%d", channel.Id)) | ||||||
|  | 					message = "数据库一致性已被破坏,请联系管理员" | ||||||
|  | 				} | ||||||
| 				c.JSON(http.StatusServiceUnavailable, gin.H{ | 				c.JSON(http.StatusServiceUnavailable, gin.H{ | ||||||
| 					"error": gin.H{ | 					"error": gin.H{ | ||||||
| 						"message": "无可用渠道", | 						"message": message, | ||||||
| 						"type":    "one_api_error", | 						"type":    "one_api_error", | ||||||
| 					}, | 					}, | ||||||
| 				}) | 				}) | ||||||
| @@ -88,6 +104,7 @@ func Distribute() func(c *gin.Context) { | |||||||
| 		c.Set("channel", channel.Type) | 		c.Set("channel", channel.Type) | ||||||
| 		c.Set("channel_id", channel.Id) | 		c.Set("channel_id", channel.Id) | ||||||
| 		c.Set("channel_name", channel.Name) | 		c.Set("channel_name", channel.Name) | ||||||
|  | 		c.Set("model_mapping", channel.ModelMapping) | ||||||
| 		c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key)) | 		c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key)) | ||||||
| 		c.Set("base_url", channel.BaseURL) | 		c.Set("base_url", channel.BaseURL) | ||||||
| 		if channel.Type == common.ChannelTypeAzure { | 		if channel.Type == common.ChannelTypeAzure { | ||||||
|   | |||||||
| @@ -24,6 +24,7 @@ func GetRandomSatisfiedChannel(group string, model string) (*Channel, error) { | |||||||
| 		return nil, err | 		return nil, err | ||||||
| 	} | 	} | ||||||
| 	channel := Channel{} | 	channel := Channel{} | ||||||
|  | 	channel.Id = ability.ChannelId | ||||||
| 	err = DB.First(&channel, "id = ?", ability.ChannelId).Error | 	err = DB.First(&channel, "id = ?", ability.ChannelId).Error | ||||||
| 	return &channel, err | 	return &channel, err | ||||||
| } | } | ||||||
|   | |||||||
| @@ -12,11 +12,11 @@ import ( | |||||||
| 	"time" | 	"time" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| const ( | var ( | ||||||
| 	TokenCacheSeconds         = 60 * 60 | 	TokenCacheSeconds         = common.SyncFrequency | ||||||
| 	UserId2GroupCacheSeconds  = 60 * 60 | 	UserId2GroupCacheSeconds  = common.SyncFrequency | ||||||
| 	UserId2QuotaCacheSeconds  = 10 * 60 | 	UserId2QuotaCacheSeconds  = common.SyncFrequency | ||||||
| 	UserId2StatusCacheSeconds = 60 * 60 | 	UserId2StatusCacheSeconds = common.SyncFrequency | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func CacheGetTokenByKey(key string) (*Token, error) { | func CacheGetTokenByKey(key string) (*Token, error) { | ||||||
| @@ -35,7 +35,7 @@ func CacheGetTokenByKey(key string) (*Token, error) { | |||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return nil, err | 			return nil, err | ||||||
| 		} | 		} | ||||||
| 		err = common.RedisSet(fmt.Sprintf("token:%s", key), string(jsonBytes), TokenCacheSeconds*time.Second) | 		err = common.RedisSet(fmt.Sprintf("token:%s", key), string(jsonBytes), time.Duration(TokenCacheSeconds)*time.Second) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			common.SysError("Redis set token error: " + err.Error()) | 			common.SysError("Redis set token error: " + err.Error()) | ||||||
| 		} | 		} | ||||||
| @@ -55,7 +55,7 @@ func CacheGetUserGroup(id int) (group string, err error) { | |||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return "", err | 			return "", err | ||||||
| 		} | 		} | ||||||
| 		err = common.RedisSet(fmt.Sprintf("user_group:%d", id), group, UserId2GroupCacheSeconds*time.Second) | 		err = common.RedisSet(fmt.Sprintf("user_group:%d", id), group, time.Duration(UserId2GroupCacheSeconds)*time.Second) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			common.SysError("Redis set user group error: " + err.Error()) | 			common.SysError("Redis set user group error: " + err.Error()) | ||||||
| 		} | 		} | ||||||
| @@ -73,7 +73,7 @@ func CacheGetUserQuota(id int) (quota int, err error) { | |||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			return 0, err | 			return 0, err | ||||||
| 		} | 		} | ||||||
| 		err = common.RedisSet(fmt.Sprintf("user_quota:%d", id), fmt.Sprintf("%d", quota), UserId2QuotaCacheSeconds*time.Second) | 		err = common.RedisSet(fmt.Sprintf("user_quota:%d", id), fmt.Sprintf("%d", quota), time.Duration(UserId2QuotaCacheSeconds)*time.Second) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			common.SysError("Redis set user quota error: " + err.Error()) | 			common.SysError("Redis set user quota error: " + err.Error()) | ||||||
| 		} | 		} | ||||||
| @@ -83,6 +83,18 @@ func CacheGetUserQuota(id int) (quota int, err error) { | |||||||
| 	return quota, err | 	return quota, err | ||||||
| } | } | ||||||
|  |  | ||||||
|  | func CacheUpdateUserQuota(id int) error { | ||||||
|  | 	if !common.RedisEnabled { | ||||||
|  | 		return nil | ||||||
|  | 	} | ||||||
|  | 	quota, err := GetUserQuota(id) | ||||||
|  | 	if err != nil { | ||||||
|  | 		return err | ||||||
|  | 	} | ||||||
|  | 	err = common.RedisSet(fmt.Sprintf("user_quota:%d", id), fmt.Sprintf("%d", quota), time.Duration(UserId2QuotaCacheSeconds)*time.Second) | ||||||
|  | 	return err | ||||||
|  | } | ||||||
|  |  | ||||||
| func CacheIsUserEnabled(userId int) bool { | func CacheIsUserEnabled(userId int) bool { | ||||||
| 	if !common.RedisEnabled { | 	if !common.RedisEnabled { | ||||||
| 		return IsUserEnabled(userId) | 		return IsUserEnabled(userId) | ||||||
| @@ -94,7 +106,7 @@ func CacheIsUserEnabled(userId int) bool { | |||||||
| 			status = common.UserStatusEnabled | 			status = common.UserStatusEnabled | ||||||
| 		} | 		} | ||||||
| 		enabled = fmt.Sprintf("%d", status) | 		enabled = fmt.Sprintf("%d", status) | ||||||
| 		err = common.RedisSet(fmt.Sprintf("user_enabled:%d", userId), enabled, UserId2StatusCacheSeconds*time.Second) | 		err = common.RedisSet(fmt.Sprintf("user_enabled:%d", userId), enabled, time.Duration(UserId2StatusCacheSeconds)*time.Second) | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 			common.SysError("Redis set user enabled error: " + err.Error()) | 			common.SysError("Redis set user enabled error: " + err.Error()) | ||||||
| 		} | 		} | ||||||
| @@ -108,7 +120,7 @@ var channelSyncLock sync.RWMutex | |||||||
| func InitChannelCache() { | func InitChannelCache() { | ||||||
| 	newChannelId2channel := make(map[int]*Channel) | 	newChannelId2channel := make(map[int]*Channel) | ||||||
| 	var channels []*Channel | 	var channels []*Channel | ||||||
| 	DB.Find(&channels) | 	DB.Where("status = ?", common.ChannelStatusEnabled).Find(&channels) | ||||||
| 	for _, channel := range channels { | 	for _, channel := range channels { | ||||||
| 		newChannelId2channel[channel.Id] = channel | 		newChannelId2channel[channel.Id] = channel | ||||||
| 	} | 	} | ||||||
|   | |||||||
| @@ -22,6 +22,7 @@ type Channel struct { | |||||||
| 	Models             string  `json:"models"` | 	Models             string  `json:"models"` | ||||||
| 	Group              string  `json:"group" gorm:"type:varchar(32);default:'default'"` | 	Group              string  `json:"group" gorm:"type:varchar(32);default:'default'"` | ||||||
| 	UsedQuota          int64   `json:"used_quota" gorm:"bigint;default:0"` | 	UsedQuota          int64   `json:"used_quota" gorm:"bigint;default:0"` | ||||||
|  | 	ModelMapping       string  `json:"model_mapping" gorm:"type:varchar(1024);default:''"` | ||||||
| } | } | ||||||
|  |  | ||||||
| func GetAllChannels(startIdx int, num int, selectAll bool) ([]*Channel, error) { | func GetAllChannels(startIdx int, num int, selectAll bool) ([]*Channel, error) { | ||||||
| @@ -36,7 +37,7 @@ func GetAllChannels(startIdx int, num int, selectAll bool) ([]*Channel, error) { | |||||||
| } | } | ||||||
|  |  | ||||||
| func SearchChannels(keyword string) (channels []*Channel, err error) { | func SearchChannels(keyword string) (channels []*Channel, err error) { | ||||||
| 	err = DB.Omit("key").Where("id = ? or name LIKE ? or key = ?", keyword, keyword+"%", keyword).Find(&channels).Error | 	err = DB.Omit("key").Where("id = ? or name LIKE ? or `key` = ?", keyword, keyword+"%", keyword).Find(&channels).Error | ||||||
| 	return channels, err | 	return channels, err | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -34,6 +34,7 @@ func InitOptionMap() { | |||||||
| 	common.OptionMap["TurnstileCheckEnabled"] = strconv.FormatBool(common.TurnstileCheckEnabled) | 	common.OptionMap["TurnstileCheckEnabled"] = strconv.FormatBool(common.TurnstileCheckEnabled) | ||||||
| 	common.OptionMap["RegisterEnabled"] = strconv.FormatBool(common.RegisterEnabled) | 	common.OptionMap["RegisterEnabled"] = strconv.FormatBool(common.RegisterEnabled) | ||||||
| 	common.OptionMap["AutomaticDisableChannelEnabled"] = strconv.FormatBool(common.AutomaticDisableChannelEnabled) | 	common.OptionMap["AutomaticDisableChannelEnabled"] = strconv.FormatBool(common.AutomaticDisableChannelEnabled) | ||||||
|  | 	common.OptionMap["ApproximateTokenEnabled"] = strconv.FormatBool(common.ApproximateTokenEnabled) | ||||||
| 	common.OptionMap["LogConsumeEnabled"] = strconv.FormatBool(common.LogConsumeEnabled) | 	common.OptionMap["LogConsumeEnabled"] = strconv.FormatBool(common.LogConsumeEnabled) | ||||||
| 	common.OptionMap["DisplayInCurrencyEnabled"] = strconv.FormatBool(common.DisplayInCurrencyEnabled) | 	common.OptionMap["DisplayInCurrencyEnabled"] = strconv.FormatBool(common.DisplayInCurrencyEnabled) | ||||||
| 	common.OptionMap["DisplayTokenStatEnabled"] = strconv.FormatBool(common.DisplayTokenStatEnabled) | 	common.OptionMap["DisplayTokenStatEnabled"] = strconv.FormatBool(common.DisplayTokenStatEnabled) | ||||||
| @@ -67,6 +68,7 @@ func InitOptionMap() { | |||||||
| 	common.OptionMap["TopUpLink"] = common.TopUpLink | 	common.OptionMap["TopUpLink"] = common.TopUpLink | ||||||
| 	common.OptionMap["ChatLink"] = common.ChatLink | 	common.OptionMap["ChatLink"] = common.ChatLink | ||||||
| 	common.OptionMap["QuotaPerUnit"] = strconv.FormatFloat(common.QuotaPerUnit, 'f', -1, 64) | 	common.OptionMap["QuotaPerUnit"] = strconv.FormatFloat(common.QuotaPerUnit, 'f', -1, 64) | ||||||
|  | 	common.OptionMap["RetryTimes"] = strconv.Itoa(common.RetryTimes) | ||||||
| 	common.OptionMapRWMutex.Unlock() | 	common.OptionMapRWMutex.Unlock() | ||||||
| 	loadOptionsFromDatabase() | 	loadOptionsFromDatabase() | ||||||
| } | } | ||||||
| @@ -141,6 +143,8 @@ func updateOptionMap(key string, value string) (err error) { | |||||||
| 			common.RegisterEnabled = boolValue | 			common.RegisterEnabled = boolValue | ||||||
| 		case "AutomaticDisableChannelEnabled": | 		case "AutomaticDisableChannelEnabled": | ||||||
| 			common.AutomaticDisableChannelEnabled = boolValue | 			common.AutomaticDisableChannelEnabled = boolValue | ||||||
|  | 		case "ApproximateTokenEnabled": | ||||||
|  | 			common.ApproximateTokenEnabled = boolValue | ||||||
| 		case "LogConsumeEnabled": | 		case "LogConsumeEnabled": | ||||||
| 			common.LogConsumeEnabled = boolValue | 			common.LogConsumeEnabled = boolValue | ||||||
| 		case "DisplayInCurrencyEnabled": | 		case "DisplayInCurrencyEnabled": | ||||||
| @@ -193,6 +197,8 @@ func updateOptionMap(key string, value string) (err error) { | |||||||
| 		common.QuotaRemindThreshold, _ = strconv.Atoi(value) | 		common.QuotaRemindThreshold, _ = strconv.Atoi(value) | ||||||
| 	case "PreConsumedQuota": | 	case "PreConsumedQuota": | ||||||
| 		common.PreConsumedQuota, _ = strconv.Atoi(value) | 		common.PreConsumedQuota, _ = strconv.Atoi(value) | ||||||
|  | 	case "RetryTimes": | ||||||
|  | 		common.RetryTimes, _ = strconv.Atoi(value) | ||||||
| 	case "ModelRatio": | 	case "ModelRatio": | ||||||
| 		err = common.UpdateModelRatioByJSONString(value) | 		err = common.UpdateModelRatioByJSONString(value) | ||||||
| 	case "GroupRatio": | 	case "GroupRatio": | ||||||
|   | |||||||
| @@ -3,6 +3,7 @@ package model | |||||||
| import ( | import ( | ||||||
| 	"errors" | 	"errors" | ||||||
| 	"fmt" | 	"fmt" | ||||||
|  | 	"gorm.io/gorm" | ||||||
| 	"one-api/common" | 	"one-api/common" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| @@ -48,26 +49,28 @@ func Redeem(key string, userId int) (quota int, err error) { | |||||||
| 		return 0, errors.New("无效的 user id") | 		return 0, errors.New("无效的 user id") | ||||||
| 	} | 	} | ||||||
| 	redemption := &Redemption{} | 	redemption := &Redemption{} | ||||||
| 	err = DB.Where("`key` = ?", key).First(redemption).Error |  | ||||||
|  | 	err = DB.Transaction(func(tx *gorm.DB) error { | ||||||
|  | 		err := tx.Set("gorm:query_option", "FOR UPDATE").Where("`key` = ?", key).First(redemption).Error | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 		return 0, errors.New("无效的兑换码") | 			return errors.New("无效的兑换码") | ||||||
| 		} | 		} | ||||||
| 		if redemption.Status != common.RedemptionCodeStatusEnabled { | 		if redemption.Status != common.RedemptionCodeStatusEnabled { | ||||||
| 		return 0, errors.New("该兑换码已被使用") | 			return errors.New("该兑换码已被使用") | ||||||
| 		} | 		} | ||||||
| 	err = IncreaseUserQuota(userId, redemption.Quota) | 		err = tx.Model(&User{}).Where("id = ?", userId).Update("quota", gorm.Expr("quota + ?", redemption.Quota)).Error | ||||||
| 		if err != nil { | 		if err != nil { | ||||||
| 		return 0, err | 			return err | ||||||
| 		} | 		} | ||||||
| 	go func() { |  | ||||||
| 		redemption.RedeemedTime = common.GetTimestamp() | 		redemption.RedeemedTime = common.GetTimestamp() | ||||||
| 		redemption.Status = common.RedemptionCodeStatusUsed | 		redemption.Status = common.RedemptionCodeStatusUsed | ||||||
| 		err := redemption.SelectUpdate() | 		err = tx.Save(redemption).Error | ||||||
|  | 		return err | ||||||
|  | 	}) | ||||||
| 	if err != nil { | 	if err != nil { | ||||||
| 			common.SysError("failed to update redemption status: " + err.Error()) | 		return 0, errors.New("兑换失败," + err.Error()) | ||||||
| 	} | 	} | ||||||
| 	RecordLog(userId, LogTypeTopup, fmt.Sprintf("通过兑换码充值 %s", common.LogQuota(redemption.Quota))) | 	RecordLog(userId, LogTypeTopup, fmt.Sprintf("通过兑换码充值 %s", common.LogQuota(redemption.Quota))) | ||||||
| 	}() |  | ||||||
| 	return redemption.Quota, nil | 	return redemption.Quota, nil | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
| @@ -1,10 +1,11 @@ | |||||||
| package router | package router | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"github.com/gin-contrib/gzip" |  | ||||||
| 	"github.com/gin-gonic/gin" |  | ||||||
| 	"one-api/controller" | 	"one-api/controller" | ||||||
| 	"one-api/middleware" | 	"one-api/middleware" | ||||||
|  |  | ||||||
|  | 	"github.com/gin-contrib/gzip" | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func SetApiRouter(router *gin.Engine) { | func SetApiRouter(router *gin.Engine) { | ||||||
| @@ -35,7 +36,7 @@ func SetApiRouter(router *gin.Engine) { | |||||||
| 			{ | 			{ | ||||||
| 				selfRoute.GET("/self", controller.GetSelf) | 				selfRoute.GET("/self", controller.GetSelf) | ||||||
| 				selfRoute.PUT("/self", controller.UpdateSelf) | 				selfRoute.PUT("/self", controller.UpdateSelf) | ||||||
| 				selfRoute.DELETE("/self", controller.DeleteSelf) | 				selfRoute.DELETE("/self", middleware.TurnstileCheck(), controller.DeleteSelf) | ||||||
| 				selfRoute.GET("/token", controller.GenerateAccessToken) | 				selfRoute.GET("/token", controller.GenerateAccessToken) | ||||||
| 				selfRoute.GET("/aff", controller.GetAffCode) | 				selfRoute.GET("/aff", controller.GetAffCode) | ||||||
| 				selfRoute.POST("/topup", controller.TopUp) | 				selfRoute.POST("/topup", controller.TopUp) | ||||||
|   | |||||||
| @@ -5,6 +5,7 @@ import ( | |||||||
| 	"fmt" | 	"fmt" | ||||||
| 	"github.com/gin-gonic/gin" | 	"github.com/gin-gonic/gin" | ||||||
| 	"net/http" | 	"net/http" | ||||||
|  | 	"one-api/common" | ||||||
| 	"os" | 	"os" | ||||||
| 	"strings" | 	"strings" | ||||||
| ) | ) | ||||||
| @@ -14,6 +15,10 @@ func SetRouter(router *gin.Engine, buildFS embed.FS, indexPage []byte) { | |||||||
| 	SetDashboardRouter(router) | 	SetDashboardRouter(router) | ||||||
| 	SetRelayRouter(router) | 	SetRelayRouter(router) | ||||||
| 	frontendBaseUrl := os.Getenv("FRONTEND_BASE_URL") | 	frontendBaseUrl := os.Getenv("FRONTEND_BASE_URL") | ||||||
|  | 	if common.IsMasterNode && frontendBaseUrl != "" { | ||||||
|  | 		frontendBaseUrl = "" | ||||||
|  | 		common.SysLog("FRONTEND_BASE_URL is ignored on master node") | ||||||
|  | 	} | ||||||
| 	if frontendBaseUrl == "" { | 	if frontendBaseUrl == "" { | ||||||
| 		SetWebRouter(router, buildFS, indexPage) | 		SetWebRouter(router, buildFS, indexPage) | ||||||
| 	} else { | 	} else { | ||||||
|   | |||||||
| @@ -1,9 +1,10 @@ | |||||||
| package router | package router | ||||||
|  |  | ||||||
| import ( | import ( | ||||||
| 	"github.com/gin-gonic/gin" |  | ||||||
| 	"one-api/controller" | 	"one-api/controller" | ||||||
| 	"one-api/middleware" | 	"one-api/middleware" | ||||||
|  |  | ||||||
|  | 	"github.com/gin-gonic/gin" | ||||||
| ) | ) | ||||||
|  |  | ||||||
| func SetRelayRouter(router *gin.Engine) { | func SetRelayRouter(router *gin.Engine) { | ||||||
| @@ -11,7 +12,7 @@ func SetRelayRouter(router *gin.Engine) { | |||||||
| 	modelsRouter := router.Group("/v1/models") | 	modelsRouter := router.Group("/v1/models") | ||||||
| 	modelsRouter.Use(middleware.TokenAuth()) | 	modelsRouter.Use(middleware.TokenAuth()) | ||||||
| 	{ | 	{ | ||||||
| 		modelsRouter.GET("/", controller.ListModels) | 		modelsRouter.GET("", controller.ListModels) | ||||||
| 		modelsRouter.GET("/:model", controller.RetrieveModel) | 		modelsRouter.GET("/:model", controller.RetrieveModel) | ||||||
| 	} | 	} | ||||||
| 	relayV1Router := router.Group("/v1") | 	relayV1Router := router.Group("/v1") | ||||||
| @@ -19,11 +20,12 @@ func SetRelayRouter(router *gin.Engine) { | |||||||
| 	{ | 	{ | ||||||
| 		relayV1Router.POST("/completions", controller.Relay) | 		relayV1Router.POST("/completions", controller.Relay) | ||||||
| 		relayV1Router.POST("/chat/completions", controller.Relay) | 		relayV1Router.POST("/chat/completions", controller.Relay) | ||||||
| 		relayV1Router.POST("/edits", controller.RelayNotImplemented) | 		relayV1Router.POST("/edits", controller.Relay) | ||||||
| 		relayV1Router.POST("/images/generations", controller.RelayNotImplemented) | 		relayV1Router.POST("/images/generations", controller.Relay) | ||||||
| 		relayV1Router.POST("/images/edits", controller.RelayNotImplemented) | 		relayV1Router.POST("/images/edits", controller.RelayNotImplemented) | ||||||
| 		relayV1Router.POST("/images/variations", controller.RelayNotImplemented) | 		relayV1Router.POST("/images/variations", controller.RelayNotImplemented) | ||||||
| 		relayV1Router.POST("/embeddings", controller.Relay) | 		relayV1Router.POST("/embeddings", controller.Relay) | ||||||
|  | 		relayV1Router.POST("/engines/:model/embeddings", controller.Relay) | ||||||
| 		relayV1Router.POST("/audio/transcriptions", controller.RelayNotImplemented) | 		relayV1Router.POST("/audio/transcriptions", controller.RelayNotImplemented) | ||||||
| 		relayV1Router.POST("/audio/translations", controller.RelayNotImplemented) | 		relayV1Router.POST("/audio/translations", controller.RelayNotImplemented) | ||||||
| 		relayV1Router.GET("/files", controller.RelayNotImplemented) | 		relayV1Router.GET("/files", controller.RelayNotImplemented) | ||||||
|   | |||||||
| @@ -30,6 +30,9 @@ function renderType(type) { | |||||||
| function renderBalance(type, balance) { | function renderBalance(type, balance) { | ||||||
|   switch (type) { |   switch (type) { | ||||||
|     case 1: // OpenAI |     case 1: // OpenAI | ||||||
|  |       return <span>${balance.toFixed(2)}</span>; | ||||||
|  |     case 4: // CloseAI | ||||||
|  |       return <span>¥{balance.toFixed(2)}</span>; | ||||||
|     case 8: // 自定义 |     case 8: // 自定义 | ||||||
|       return <span>${balance.toFixed(2)}</span>; |       return <span>${balance.toFixed(2)}</span>; | ||||||
|     case 5: // OpenAI-SB |     case 5: // OpenAI-SB | ||||||
| @@ -38,6 +41,8 @@ function renderBalance(type, balance) { | |||||||
|       return <span>{renderNumber(balance)}</span>; |       return <span>{renderNumber(balance)}</span>; | ||||||
|     case 12: // API2GPT |     case 12: // API2GPT | ||||||
|       return <span>¥{balance.toFixed(2)}</span>; |       return <span>¥{balance.toFixed(2)}</span>; | ||||||
|  |     case 13: // AIGC2D | ||||||
|  |       return <span>{renderNumber(balance)}</span>; | ||||||
|     default: |     default: | ||||||
|       return <span>不支持</span>; |       return <span>不支持</span>; | ||||||
|   } |   } | ||||||
| @@ -58,8 +63,8 @@ const ChannelsTable = () => { | |||||||
|       if (startIdx === 0) { |       if (startIdx === 0) { | ||||||
|         setChannels(data); |         setChannels(data); | ||||||
|       } else { |       } else { | ||||||
|         let newChannels = channels; |         let newChannels = [...channels]; | ||||||
|         newChannels.push(...data); |         newChannels.splice(startIdx * ITEMS_PER_PAGE, data.length, ...data); | ||||||
|         setChannels(newChannels); |         setChannels(newChannels); | ||||||
|       } |       } | ||||||
|     } else { |     } else { | ||||||
| @@ -80,7 +85,7 @@ const ChannelsTable = () => { | |||||||
|  |  | ||||||
|   const refresh = async () => { |   const refresh = async () => { | ||||||
|     setLoading(true); |     setLoading(true); | ||||||
|     await loadChannels(0); |     await loadChannels(activePage - 1); | ||||||
|   }; |   }; | ||||||
|  |  | ||||||
|   useEffect(() => { |   useEffect(() => { | ||||||
|   | |||||||
| @@ -1,15 +1,5 @@ | |||||||
| import React, { useContext, useEffect, useState } from 'react'; | import React, { useContext, useEffect, useState } from 'react'; | ||||||
| import { | import { Button, Divider, Form, Grid, Header, Image, Message, Modal, Segment } from 'semantic-ui-react'; | ||||||
|   Button, |  | ||||||
|   Divider, |  | ||||||
|   Form, |  | ||||||
|   Grid, |  | ||||||
|   Header, |  | ||||||
|   Image, |  | ||||||
|   Message, |  | ||||||
|   Modal, |  | ||||||
|   Segment, |  | ||||||
| } from 'semantic-ui-react'; |  | ||||||
| import { Link, useNavigate, useSearchParams } from 'react-router-dom'; | import { Link, useNavigate, useSearchParams } from 'react-router-dom'; | ||||||
| import { UserContext } from '../context/User'; | import { UserContext } from '../context/User'; | ||||||
| import { API, getLogo, showError, showSuccess } from '../helpers'; | import { API, getLogo, showError, showSuccess } from '../helpers'; | ||||||
| @@ -18,19 +8,18 @@ const LoginForm = () => { | |||||||
|   const [inputs, setInputs] = useState({ |   const [inputs, setInputs] = useState({ | ||||||
|     username: '', |     username: '', | ||||||
|     password: '', |     password: '', | ||||||
|     wechat_verification_code: '', |     wechat_verification_code: '' | ||||||
|   }); |   }); | ||||||
|   const [searchParams, setSearchParams] = useSearchParams(); |   const [searchParams, setSearchParams] = useSearchParams(); | ||||||
|   const [submitted, setSubmitted] = useState(false); |   const [submitted, setSubmitted] = useState(false); | ||||||
|   const { username, password } = inputs; |   const { username, password } = inputs; | ||||||
|   const [userState, userDispatch] = useContext(UserContext); |   const [userState, userDispatch] = useContext(UserContext); | ||||||
|   let navigate = useNavigate(); |   let navigate = useNavigate(); | ||||||
|  |  | ||||||
|   const [status, setStatus] = useState({}); |   const [status, setStatus] = useState({}); | ||||||
|   const logo = getLogo(); |   const logo = getLogo(); | ||||||
|  |  | ||||||
|   useEffect(() => { |   useEffect(() => { | ||||||
|     if (searchParams.get("expired")) { |     if (searchParams.get('expired')) { | ||||||
|       showError('未登录或登录已过期,请重新登录!'); |       showError('未登录或登录已过期,请重新登录!'); | ||||||
|     } |     } | ||||||
|     let status = localStorage.getItem('status'); |     let status = localStorage.getItem('status'); | ||||||
| @@ -76,9 +65,9 @@ const LoginForm = () => { | |||||||
|   async function handleSubmit(e) { |   async function handleSubmit(e) { | ||||||
|     setSubmitted(true); |     setSubmitted(true); | ||||||
|     if (username && password) { |     if (username && password) { | ||||||
|       const res = await API.post('/api/user/login', { |       const res = await API.post(`/api/user/login`, { | ||||||
|         username, |         username, | ||||||
|         password, |         password | ||||||
|       }); |       }); | ||||||
|       const { success, message, data } = res.data; |       const { success, message, data } = res.data; | ||||||
|       if (success) { |       if (success) { | ||||||
| @@ -93,44 +82,44 @@ const LoginForm = () => { | |||||||
|   } |   } | ||||||
|  |  | ||||||
|   return ( |   return ( | ||||||
|     <Grid textAlign="center" style={{ marginTop: '48px' }}> |     <Grid textAlign='center' style={{ marginTop: '48px' }}> | ||||||
|       <Grid.Column style={{ maxWidth: 450 }}> |       <Grid.Column style={{ maxWidth: 450 }}> | ||||||
|         <Header as="h2" color="" textAlign="center"> |         <Header as='h2' color='' textAlign='center'> | ||||||
|           <Image src={logo} /> 用户登录 |           <Image src={logo} /> 用户登录 | ||||||
|         </Header> |         </Header> | ||||||
|         <Form size="large"> |         <Form size='large'> | ||||||
|           <Segment> |           <Segment> | ||||||
|             <Form.Input |             <Form.Input | ||||||
|               fluid |               fluid | ||||||
|               icon="user" |               icon='user' | ||||||
|               iconPosition="left" |               iconPosition='left' | ||||||
|               placeholder="用户名" |               placeholder='用户名' | ||||||
|               name="username" |               name='username' | ||||||
|               value={username} |               value={username} | ||||||
|               onChange={handleChange} |               onChange={handleChange} | ||||||
|             /> |             /> | ||||||
|             <Form.Input |             <Form.Input | ||||||
|               fluid |               fluid | ||||||
|               icon="lock" |               icon='lock' | ||||||
|               iconPosition="left" |               iconPosition='left' | ||||||
|               placeholder="密码" |               placeholder='密码' | ||||||
|               name="password" |               name='password' | ||||||
|               type="password" |               type='password' | ||||||
|               value={password} |               value={password} | ||||||
|               onChange={handleChange} |               onChange={handleChange} | ||||||
|             /> |             /> | ||||||
|             <Button color="" fluid size="large" onClick={handleSubmit}> |             <Button color='green' fluid size='large' onClick={handleSubmit}> | ||||||
|               登录 |               登录 | ||||||
|             </Button> |             </Button> | ||||||
|           </Segment> |           </Segment> | ||||||
|         </Form> |         </Form> | ||||||
|         <Message> |         <Message> | ||||||
|           忘记密码? |           忘记密码? | ||||||
|           <Link to="/reset" className="btn btn-link"> |           <Link to='/reset' className='btn btn-link'> | ||||||
|             点击重置 |             点击重置 | ||||||
|           </Link> |           </Link> | ||||||
|           ; 没有账户? |           ; 没有账户? | ||||||
|           <Link to="/register" className="btn btn-link"> |           <Link to='/register' className='btn btn-link'> | ||||||
|             点击注册 |             点击注册 | ||||||
|           </Link> |           </Link> | ||||||
|         </Message> |         </Message> | ||||||
| @@ -140,8 +129,8 @@ const LoginForm = () => { | |||||||
|             {status.github_oauth ? ( |             {status.github_oauth ? ( | ||||||
|               <Button |               <Button | ||||||
|                 circular |                 circular | ||||||
|                 color="black" |                 color='black' | ||||||
|                 icon="github" |                 icon='github' | ||||||
|                 onClick={onGitHubOAuthClicked} |                 onClick={onGitHubOAuthClicked} | ||||||
|               /> |               /> | ||||||
|             ) : ( |             ) : ( | ||||||
| @@ -150,8 +139,8 @@ const LoginForm = () => { | |||||||
|             {status.wechat_login ? ( |             {status.wechat_login ? ( | ||||||
|               <Button |               <Button | ||||||
|                 circular |                 circular | ||||||
|                 color="green" |                 color='green' | ||||||
|                 icon="wechat" |                 icon='wechat' | ||||||
|                 onClick={onWeChatLoginClicked} |                 onClick={onWeChatLoginClicked} | ||||||
|               /> |               /> | ||||||
|             ) : ( |             ) : ( | ||||||
| @@ -175,18 +164,18 @@ const LoginForm = () => { | |||||||
|                   微信扫码关注公众号,输入「验证码」获取验证码(三分钟内有效) |                   微信扫码关注公众号,输入「验证码」获取验证码(三分钟内有效) | ||||||
|                 </p> |                 </p> | ||||||
|               </div> |               </div> | ||||||
|               <Form size="large"> |               <Form size='large'> | ||||||
|                 <Form.Input |                 <Form.Input | ||||||
|                   fluid |                   fluid | ||||||
|                   placeholder="验证码" |                   placeholder='验证码' | ||||||
|                   name="wechat_verification_code" |                   name='wechat_verification_code' | ||||||
|                   value={inputs.wechat_verification_code} |                   value={inputs.wechat_verification_code} | ||||||
|                   onChange={handleChange} |                   onChange={handleChange} | ||||||
|                 /> |                 /> | ||||||
|                 <Button |                 <Button | ||||||
|                   color="" |                   color='' | ||||||
|                   fluid |                   fluid | ||||||
|                   size="large" |                   size='large' | ||||||
|                   onClick={onSubmitWeChatVerificationCode} |                   onClick={onSubmitWeChatVerificationCode} | ||||||
|                 > |                 > | ||||||
|                   登录 |                   登录 | ||||||
|   | |||||||
| @@ -108,7 +108,7 @@ const LogsTable = () => { | |||||||
|         setLogs(data); |         setLogs(data); | ||||||
|       } else { |       } else { | ||||||
|         let newLogs = [...logs]; |         let newLogs = [...logs]; | ||||||
|         newLogs.push(...data); |         newLogs.splice(startIdx * ITEMS_PER_PAGE, data.length, ...data); | ||||||
|         setLogs(newLogs); |         setLogs(newLogs); | ||||||
|       } |       } | ||||||
|     } else { |     } else { | ||||||
|   | |||||||
| @@ -18,7 +18,9 @@ const OperationSetting = () => { | |||||||
|     ChannelDisableThreshold: 0, |     ChannelDisableThreshold: 0, | ||||||
|     LogConsumeEnabled: '', |     LogConsumeEnabled: '', | ||||||
|     DisplayInCurrencyEnabled: '', |     DisplayInCurrencyEnabled: '', | ||||||
|     DisplayTokenStatEnabled: '' |     DisplayTokenStatEnabled: '', | ||||||
|  |     ApproximateTokenEnabled: '', | ||||||
|  |     RetryTimes: 0, | ||||||
|   }); |   }); | ||||||
|   const [originInputs, setOriginInputs] = useState({}); |   const [originInputs, setOriginInputs] = useState({}); | ||||||
|   let [loading, setLoading] = useState(false); |   let [loading, setLoading] = useState(false); | ||||||
| @@ -74,9 +76,6 @@ const OperationSetting = () => { | |||||||
|   const submitConfig = async (group) => { |   const submitConfig = async (group) => { | ||||||
|     switch (group) { |     switch (group) { | ||||||
|       case 'monitor': |       case 'monitor': | ||||||
|         if (originInputs['AutomaticDisableChannelEnabled'] !== inputs.AutomaticDisableChannelEnabled) { |  | ||||||
|           await updateOption('AutomaticDisableChannelEnabled', inputs.AutomaticDisableChannelEnabled); |  | ||||||
|         } |  | ||||||
|         if (originInputs['ChannelDisableThreshold'] !== inputs.ChannelDisableThreshold) { |         if (originInputs['ChannelDisableThreshold'] !== inputs.ChannelDisableThreshold) { | ||||||
|           await updateOption('ChannelDisableThreshold', inputs.ChannelDisableThreshold); |           await updateOption('ChannelDisableThreshold', inputs.ChannelDisableThreshold); | ||||||
|         } |         } | ||||||
| @@ -124,6 +123,9 @@ const OperationSetting = () => { | |||||||
|         if (originInputs['QuotaPerUnit'] !== inputs.QuotaPerUnit) { |         if (originInputs['QuotaPerUnit'] !== inputs.QuotaPerUnit) { | ||||||
|           await updateOption('QuotaPerUnit', inputs.QuotaPerUnit); |           await updateOption('QuotaPerUnit', inputs.QuotaPerUnit); | ||||||
|         } |         } | ||||||
|  |         if (originInputs['RetryTimes'] !== inputs.RetryTimes) { | ||||||
|  |           await updateOption('RetryTimes', inputs.RetryTimes); | ||||||
|  |         } | ||||||
|         break; |         break; | ||||||
|     } |     } | ||||||
|   }; |   }; | ||||||
| @@ -135,7 +137,7 @@ const OperationSetting = () => { | |||||||
|           <Header as='h3'> |           <Header as='h3'> | ||||||
|             通用设置 |             通用设置 | ||||||
|           </Header> |           </Header> | ||||||
|           <Form.Group widths={3}> |           <Form.Group widths={4}> | ||||||
|             <Form.Input |             <Form.Input | ||||||
|               label='充值链接' |               label='充值链接' | ||||||
|               name='TopUpLink' |               name='TopUpLink' | ||||||
| @@ -164,6 +166,17 @@ const OperationSetting = () => { | |||||||
|               step='0.01' |               step='0.01' | ||||||
|               placeholder='一单位货币能兑换的额度' |               placeholder='一单位货币能兑换的额度' | ||||||
|             /> |             /> | ||||||
|  |             <Form.Input | ||||||
|  |               label='失败重试次数' | ||||||
|  |               name='RetryTimes' | ||||||
|  |               type={'number'} | ||||||
|  |               step='1' | ||||||
|  |               min='0' | ||||||
|  |               onChange={handleInputChange} | ||||||
|  |               autoComplete='new-password' | ||||||
|  |               value={inputs.RetryTimes} | ||||||
|  |               placeholder='失败重试次数' | ||||||
|  |             /> | ||||||
|           </Form.Group> |           </Form.Group> | ||||||
|           <Form.Group inline> |           <Form.Group inline> | ||||||
|             <Form.Checkbox |             <Form.Checkbox | ||||||
| @@ -184,6 +197,12 @@ const OperationSetting = () => { | |||||||
|               name='DisplayTokenStatEnabled' |               name='DisplayTokenStatEnabled' | ||||||
|               onChange={handleInputChange} |               onChange={handleInputChange} | ||||||
|             /> |             /> | ||||||
|  |             <Form.Checkbox | ||||||
|  |               checked={inputs.ApproximateTokenEnabled === 'true'} | ||||||
|  |               label='使用近似的方式估算 token 数以减少计算量' | ||||||
|  |               name='ApproximateTokenEnabled' | ||||||
|  |               onChange={handleInputChange} | ||||||
|  |             /> | ||||||
|           </Form.Group> |           </Form.Group> | ||||||
|           <Form.Button onClick={() => { |           <Form.Button onClick={() => { | ||||||
|             submitConfig('general').then(); |             submitConfig('general').then(); | ||||||
|   | |||||||
| @@ -12,6 +12,11 @@ const PasswordResetConfirm = () => { | |||||||
|  |  | ||||||
|   const [loading, setLoading] = useState(false); |   const [loading, setLoading] = useState(false); | ||||||
|  |  | ||||||
|  |   const [disableButton, setDisableButton] = useState(false); | ||||||
|  |   const [countdown, setCountdown] = useState(30); | ||||||
|  |  | ||||||
|  |   const [newPassword, setNewPassword] = useState(''); | ||||||
|  |  | ||||||
|   const [searchParams, setSearchParams] = useSearchParams(); |   const [searchParams, setSearchParams] = useSearchParams(); | ||||||
|   useEffect(() => { |   useEffect(() => { | ||||||
|     let token = searchParams.get('token'); |     let token = searchParams.get('token'); | ||||||
| @@ -22,7 +27,21 @@ const PasswordResetConfirm = () => { | |||||||
|     }); |     }); | ||||||
|   }, []); |   }, []); | ||||||
|  |  | ||||||
|  |   useEffect(() => { | ||||||
|  |     let countdownInterval = null; | ||||||
|  |     if (disableButton && countdown > 0) { | ||||||
|  |       countdownInterval = setInterval(() => { | ||||||
|  |         setCountdown(countdown - 1); | ||||||
|  |       }, 1000); | ||||||
|  |     } else if (countdown === 0) { | ||||||
|  |       setDisableButton(false); | ||||||
|  |       setCountdown(30); | ||||||
|  |     } | ||||||
|  |     return () => clearInterval(countdownInterval);  | ||||||
|  |   }, [disableButton, countdown]); | ||||||
|  |  | ||||||
|   async function handleSubmit(e) { |   async function handleSubmit(e) { | ||||||
|  |     setDisableButton(true); | ||||||
|     if (!email) return; |     if (!email) return; | ||||||
|     setLoading(true); |     setLoading(true); | ||||||
|     const res = await API.post(`/api/user/reset`, { |     const res = await API.post(`/api/user/reset`, { | ||||||
| @@ -32,8 +51,9 @@ const PasswordResetConfirm = () => { | |||||||
|     const { success, message } = res.data; |     const { success, message } = res.data; | ||||||
|     if (success) { |     if (success) { | ||||||
|       let password = res.data.data; |       let password = res.data.data; | ||||||
|  |       setNewPassword(password); | ||||||
|       await copy(password); |       await copy(password); | ||||||
|       showNotice(`密码已重置并已复制到剪贴板:${password}`); |       showNotice(`新密码已复制到剪贴板:${password}`); | ||||||
|     } else { |     } else { | ||||||
|       showError(message); |       showError(message); | ||||||
|     } |     } | ||||||
| @@ -57,14 +77,31 @@ const PasswordResetConfirm = () => { | |||||||
|               value={email} |               value={email} | ||||||
|               readOnly |               readOnly | ||||||
|             /> |             /> | ||||||
|  |             {newPassword && ( | ||||||
|  |               <Form.Input | ||||||
|  |               fluid | ||||||
|  |               icon='lock' | ||||||
|  |               iconPosition='left' | ||||||
|  |               placeholder='新密码' | ||||||
|  |               name='newPassword' | ||||||
|  |               value={newPassword} | ||||||
|  |               readOnly | ||||||
|  |               onClick={(e) => { | ||||||
|  |                 e.target.select(); | ||||||
|  |                 navigator.clipboard.writeText(newPassword); | ||||||
|  |                 showNotice(`密码已复制到剪贴板:${newPassword}`); | ||||||
|  |               }} | ||||||
|  |             />             | ||||||
|  |             )} | ||||||
|             <Button |             <Button | ||||||
|               color='' |               color='green' | ||||||
|               fluid |               fluid | ||||||
|               size='large' |               size='large' | ||||||
|               onClick={handleSubmit} |               onClick={handleSubmit} | ||||||
|               loading={loading} |               loading={loading} | ||||||
|  |               disabled={disableButton} | ||||||
|             > |             > | ||||||
|               提交 |               {disableButton ? `密码重置完成` : '提交'} | ||||||
|             </Button> |             </Button> | ||||||
|           </Segment> |           </Segment> | ||||||
|         </Form> |         </Form> | ||||||
|   | |||||||
| @@ -5,7 +5,7 @@ import Turnstile from 'react-turnstile'; | |||||||
|  |  | ||||||
| const PasswordResetForm = () => { | const PasswordResetForm = () => { | ||||||
|   const [inputs, setInputs] = useState({ |   const [inputs, setInputs] = useState({ | ||||||
|     email: '', |     email: '' | ||||||
|   }); |   }); | ||||||
|   const { email } = inputs; |   const { email } = inputs; | ||||||
|  |  | ||||||
| @@ -13,24 +13,29 @@ const PasswordResetForm = () => { | |||||||
|   const [turnstileEnabled, setTurnstileEnabled] = useState(false); |   const [turnstileEnabled, setTurnstileEnabled] = useState(false); | ||||||
|   const [turnstileSiteKey, setTurnstileSiteKey] = useState(''); |   const [turnstileSiteKey, setTurnstileSiteKey] = useState(''); | ||||||
|   const [turnstileToken, setTurnstileToken] = useState(''); |   const [turnstileToken, setTurnstileToken] = useState(''); | ||||||
|  |   const [disableButton, setDisableButton] = useState(false); | ||||||
|  |   const [countdown, setCountdown] = useState(30); | ||||||
|  |  | ||||||
|   useEffect(() => { |   useEffect(() => { | ||||||
|     let status = localStorage.getItem('status'); |     let countdownInterval = null; | ||||||
|     if (status) { |     if (disableButton && countdown > 0) { | ||||||
|       status = JSON.parse(status); |       countdownInterval = setInterval(() => { | ||||||
|       if (status.turnstile_check) { |         setCountdown(countdown - 1); | ||||||
|         setTurnstileEnabled(true); |       }, 1000); | ||||||
|         setTurnstileSiteKey(status.turnstile_site_key); |     } else if (countdown === 0) { | ||||||
|  |       setDisableButton(false); | ||||||
|  |       setCountdown(30); | ||||||
|     } |     } | ||||||
|     } |     return () => clearInterval(countdownInterval); | ||||||
|   }, []); |   }, [disableButton, countdown]); | ||||||
|  |  | ||||||
|   function handleChange(e) { |   function handleChange(e) { | ||||||
|     const { name, value } = e.target; |     const { name, value } = e.target; | ||||||
|     setInputs((inputs) => ({ ...inputs, [name]: value })); |     setInputs(inputs => ({ ...inputs, [name]: value })); | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   async function handleSubmit(e) { |   async function handleSubmit(e) { | ||||||
|  |     setDisableButton(true); | ||||||
|     if (!email) return; |     if (!email) return; | ||||||
|     if (turnstileEnabled && turnstileToken === '') { |     if (turnstileEnabled && turnstileToken === '') { | ||||||
|       showInfo('请稍后几秒重试,Turnstile 正在检查用户环境!'); |       showInfo('请稍后几秒重试,Turnstile 正在检查用户环境!'); | ||||||
| @@ -78,13 +83,14 @@ const PasswordResetForm = () => { | |||||||
|               <></> |               <></> | ||||||
|             )} |             )} | ||||||
|             <Button |             <Button | ||||||
|               color='' |               color='green' | ||||||
|               fluid |               fluid | ||||||
|               size='large' |               size='large' | ||||||
|               onClick={handleSubmit} |               onClick={handleSubmit} | ||||||
|               loading={loading} |               loading={loading} | ||||||
|  |               disabled={disableButton} | ||||||
|             > |             > | ||||||
|               提交 |               {disableButton ? `重试 (${countdown})` : '提交'} | ||||||
|             </Button> |             </Button> | ||||||
|           </Segment> |           </Segment> | ||||||
|         </Form> |         </Form> | ||||||
|   | |||||||
| @@ -1,22 +1,30 @@ | |||||||
| import React, { useEffect, useState } from 'react'; | import React, { useContext, useEffect, useState } from 'react'; | ||||||
| import { Button, Divider, Form, Header, Image, Message, Modal } from 'semantic-ui-react'; | import { Button, Divider, Form, Header, Image, Message, Modal } from 'semantic-ui-react'; | ||||||
| import { Link } from 'react-router-dom'; | import { Link, useNavigate } from 'react-router-dom'; | ||||||
| import { API, copy, showError, showInfo, showNotice, showSuccess } from '../helpers'; | import { API, copy, showError, showInfo, showNotice, showSuccess } from '../helpers'; | ||||||
| import Turnstile from 'react-turnstile'; | import Turnstile from 'react-turnstile'; | ||||||
|  | import { UserContext } from '../context/User'; | ||||||
|  |  | ||||||
| const PersonalSetting = () => { | const PersonalSetting = () => { | ||||||
|  |   const [userState, userDispatch] = useContext(UserContext); | ||||||
|  |   let navigate = useNavigate(); | ||||||
|  |  | ||||||
|   const [inputs, setInputs] = useState({ |   const [inputs, setInputs] = useState({ | ||||||
|     wechat_verification_code: '', |     wechat_verification_code: '', | ||||||
|     email_verification_code: '', |     email_verification_code: '', | ||||||
|     email: '', |     email: '', | ||||||
|  |     self_account_deletion_confirmation: '' | ||||||
|   }); |   }); | ||||||
|   const [status, setStatus] = useState({}); |   const [status, setStatus] = useState({}); | ||||||
|   const [showWeChatBindModal, setShowWeChatBindModal] = useState(false); |   const [showWeChatBindModal, setShowWeChatBindModal] = useState(false); | ||||||
|   const [showEmailBindModal, setShowEmailBindModal] = useState(false); |   const [showEmailBindModal, setShowEmailBindModal] = useState(false); | ||||||
|  |   const [showAccountDeleteModal, setShowAccountDeleteModal] = useState(false); | ||||||
|   const [turnstileEnabled, setTurnstileEnabled] = useState(false); |   const [turnstileEnabled, setTurnstileEnabled] = useState(false); | ||||||
|   const [turnstileSiteKey, setTurnstileSiteKey] = useState(''); |   const [turnstileSiteKey, setTurnstileSiteKey] = useState(''); | ||||||
|   const [turnstileToken, setTurnstileToken] = useState(''); |   const [turnstileToken, setTurnstileToken] = useState(''); | ||||||
|   const [loading, setLoading] = useState(false); |   const [loading, setLoading] = useState(false); | ||||||
|  |   const [disableButton, setDisableButton] = useState(false); | ||||||
|  |   const [countdown, setCountdown] = useState(30); | ||||||
|  |  | ||||||
|   useEffect(() => { |   useEffect(() => { | ||||||
|     let status = localStorage.getItem('status'); |     let status = localStorage.getItem('status'); | ||||||
| @@ -30,6 +38,19 @@ const PersonalSetting = () => { | |||||||
|     } |     } | ||||||
|   }, []); |   }, []); | ||||||
|  |  | ||||||
|  |   useEffect(() => { | ||||||
|  |     let countdownInterval = null; | ||||||
|  |     if (disableButton && countdown > 0) { | ||||||
|  |       countdownInterval = setInterval(() => { | ||||||
|  |         setCountdown(countdown - 1); | ||||||
|  |       }, 1000); | ||||||
|  |     } else if (countdown === 0) { | ||||||
|  |       setDisableButton(false); | ||||||
|  |       setCountdown(30); | ||||||
|  |     } | ||||||
|  |     return () => clearInterval(countdownInterval); // Clean up on unmount | ||||||
|  |   }, [disableButton, countdown]); | ||||||
|  |  | ||||||
|   const handleInputChange = (e, { name, value }) => { |   const handleInputChange = (e, { name, value }) => { | ||||||
|     setInputs((inputs) => ({ ...inputs, [name]: value })); |     setInputs((inputs) => ({ ...inputs, [name]: value })); | ||||||
|   }; |   }; | ||||||
| @@ -57,6 +78,26 @@ const PersonalSetting = () => { | |||||||
|     } |     } | ||||||
|   }; |   }; | ||||||
|  |  | ||||||
|  |   const deleteAccount = async () => { | ||||||
|  |     if (inputs.self_account_deletion_confirmation !== userState.user.username) { | ||||||
|  |       showError('请输入你的账户名以确认删除!'); | ||||||
|  |       return; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     const res = await API.delete('/api/user/self'); | ||||||
|  |     const { success, message } = res.data; | ||||||
|  |  | ||||||
|  |     if (success) { | ||||||
|  |       showSuccess('账户已删除!'); | ||||||
|  |       await API.get('/api/user/logout'); | ||||||
|  |       userDispatch({ type: 'logout' }); | ||||||
|  |       localStorage.removeItem('user'); | ||||||
|  |       navigate('/login'); | ||||||
|  |     } else { | ||||||
|  |       showError(message); | ||||||
|  |     } | ||||||
|  |   }; | ||||||
|  |  | ||||||
|   const bindWeChat = async () => { |   const bindWeChat = async () => { | ||||||
|     if (inputs.wechat_verification_code === '') return; |     if (inputs.wechat_verification_code === '') return; | ||||||
|     const res = await API.get( |     const res = await API.get( | ||||||
| @@ -78,6 +119,7 @@ const PersonalSetting = () => { | |||||||
|   }; |   }; | ||||||
|  |  | ||||||
|   const sendVerificationCode = async () => { |   const sendVerificationCode = async () => { | ||||||
|  |     setDisableButton(true); | ||||||
|     if (inputs.email === '') return; |     if (inputs.email === '') return; | ||||||
|     if (turnstileEnabled && turnstileToken === '') { |     if (turnstileEnabled && turnstileToken === '') { | ||||||
|       showInfo('请稍后几秒重试,Turnstile 正在检查用户环境!'); |       showInfo('请稍后几秒重试,Turnstile 正在检查用户环境!'); | ||||||
| @@ -123,6 +165,9 @@ const PersonalSetting = () => { | |||||||
|       </Button> |       </Button> | ||||||
|       <Button onClick={generateAccessToken}>生成系统访问令牌</Button> |       <Button onClick={generateAccessToken}>生成系统访问令牌</Button> | ||||||
|       <Button onClick={getAffLink}>复制邀请链接</Button> |       <Button onClick={getAffLink}>复制邀请链接</Button> | ||||||
|  |       <Button onClick={() => { | ||||||
|  |         setShowAccountDeleteModal(true); | ||||||
|  |       }}>删除个人账户</Button> | ||||||
|       <Divider /> |       <Divider /> | ||||||
|       <Header as='h3'>账号绑定</Header> |       <Header as='h3'>账号绑定</Header> | ||||||
|       { |       { | ||||||
| @@ -195,8 +240,8 @@ const PersonalSetting = () => { | |||||||
|                 name='email' |                 name='email' | ||||||
|                 type='email' |                 type='email' | ||||||
|                 action={ |                 action={ | ||||||
|                   <Button onClick={sendVerificationCode} disabled={loading}> |                   <Button onClick={sendVerificationCode} disabled={disableButton || loading}> | ||||||
|                     获取验证码 |                     {disableButton ? `重新发送(${countdown})` : '获取验证码'} | ||||||
|                   </Button> |                   </Button> | ||||||
|                 } |                 } | ||||||
|               /> |               /> | ||||||
| @@ -230,6 +275,47 @@ const PersonalSetting = () => { | |||||||
|           </Modal.Description> |           </Modal.Description> | ||||||
|         </Modal.Content> |         </Modal.Content> | ||||||
|       </Modal> |       </Modal> | ||||||
|  |       <Modal | ||||||
|  |         onClose={() => setShowAccountDeleteModal(false)} | ||||||
|  |         onOpen={() => setShowAccountDeleteModal(true)} | ||||||
|  |         open={showAccountDeleteModal} | ||||||
|  |         size={'tiny'} | ||||||
|  |         style={{ maxWidth: '450px' }} | ||||||
|  |       > | ||||||
|  |         <Modal.Header>确认删除自己的帐户</Modal.Header> | ||||||
|  |         <Modal.Content> | ||||||
|  |           <Modal.Description> | ||||||
|  |             <Form size='large'> | ||||||
|  |               <Form.Input | ||||||
|  |                 fluid | ||||||
|  |                 placeholder={`输入你的账户名 ${userState?.user?.username} 以确认删除`} | ||||||
|  |                 name='self_account_deletion_confirmation' | ||||||
|  |                 value={inputs.self_account_deletion_confirmation} | ||||||
|  |                 onChange={handleInputChange} | ||||||
|  |               /> | ||||||
|  |               {turnstileEnabled ? ( | ||||||
|  |                 <Turnstile | ||||||
|  |                   sitekey={turnstileSiteKey} | ||||||
|  |                   onVerify={(token) => { | ||||||
|  |                     setTurnstileToken(token); | ||||||
|  |                   }} | ||||||
|  |                 /> | ||||||
|  |               ) : ( | ||||||
|  |                 <></> | ||||||
|  |               )} | ||||||
|  |               <Button | ||||||
|  |                 color='red' | ||||||
|  |                 fluid | ||||||
|  |                 size='large' | ||||||
|  |                 onClick={deleteAccount} | ||||||
|  |                 loading={loading} | ||||||
|  |               > | ||||||
|  |                 删除 | ||||||
|  |               </Button> | ||||||
|  |             </Form> | ||||||
|  |           </Modal.Description> | ||||||
|  |         </Modal.Content> | ||||||
|  |       </Modal> | ||||||
|     </div> |     </div> | ||||||
|   ); |   ); | ||||||
| }; | }; | ||||||
|   | |||||||
| @@ -1,13 +1,5 @@ | |||||||
| import React, { useEffect, useState } from 'react'; | import React, { useEffect, useState } from 'react'; | ||||||
| import { | import { Button, Form, Grid, Header, Image, Message, Segment } from 'semantic-ui-react'; | ||||||
|   Button, |  | ||||||
|   Form, |  | ||||||
|   Grid, |  | ||||||
|   Header, |  | ||||||
|   Image, |  | ||||||
|   Message, |  | ||||||
|   Segment, |  | ||||||
| } from 'semantic-ui-react'; |  | ||||||
| import { Link, useNavigate } from 'react-router-dom'; | import { Link, useNavigate } from 'react-router-dom'; | ||||||
| import { API, getLogo, showError, showInfo, showSuccess } from '../helpers'; | import { API, getLogo, showError, showInfo, showSuccess } from '../helpers'; | ||||||
| import Turnstile from 'react-turnstile'; | import Turnstile from 'react-turnstile'; | ||||||
| @@ -18,7 +10,7 @@ const RegisterForm = () => { | |||||||
|     password: '', |     password: '', | ||||||
|     password2: '', |     password2: '', | ||||||
|     email: '', |     email: '', | ||||||
|     verification_code: '', |     verification_code: '' | ||||||
|   }); |   }); | ||||||
|   const { username, password, password2 } = inputs; |   const { username, password, password2 } = inputs; | ||||||
|   const [showEmailVerification, setShowEmailVerification] = useState(false); |   const [showEmailVerification, setShowEmailVerification] = useState(false); | ||||||
| @@ -178,7 +170,7 @@ const RegisterForm = () => { | |||||||
|               <></> |               <></> | ||||||
|             )} |             )} | ||||||
|             <Button |             <Button | ||||||
|               color='' |               color='green' | ||||||
|               fluid |               fluid | ||||||
|               size='large' |               size='large' | ||||||
|               onClick={handleSubmit} |               onClick={handleSubmit} | ||||||
|   | |||||||
| @@ -45,8 +45,8 @@ const TokensTable = () => { | |||||||
|       if (startIdx === 0) { |       if (startIdx === 0) { | ||||||
|         setTokens(data); |         setTokens(data); | ||||||
|       } else { |       } else { | ||||||
|         let newTokens = tokens; |         let newTokens = [...tokens]; | ||||||
|         newTokens.push(...data); |         newTokens.splice(startIdx * ITEMS_PER_PAGE, data.length, ...data); | ||||||
|         setTokens(newTokens); |         setTokens(newTokens); | ||||||
|       } |       } | ||||||
|     } else { |     } else { | ||||||
| @@ -67,7 +67,7 @@ const TokensTable = () => { | |||||||
|  |  | ||||||
|   const refresh = async () => { |   const refresh = async () => { | ||||||
|     setLoading(true); |     setLoading(true); | ||||||
|     await loadTokens(0); |     await loadTokens(activePage - 1); | ||||||
|   } |   } | ||||||
|  |  | ||||||
|   useEffect(() => { |   useEffect(() => { | ||||||
|   | |||||||
| @@ -183,14 +183,6 @@ const UsersTable = () => { | |||||||
|             > |             > | ||||||
|               分组 |               分组 | ||||||
|             </Table.HeaderCell> |             </Table.HeaderCell> | ||||||
|             <Table.HeaderCell |  | ||||||
|               style={{ cursor: 'pointer' }} |  | ||||||
|               onClick={() => { |  | ||||||
|                 sortUser('email'); |  | ||||||
|               }} |  | ||||||
|             > |  | ||||||
|               邮箱地址 |  | ||||||
|             </Table.HeaderCell> |  | ||||||
|             <Table.HeaderCell |             <Table.HeaderCell | ||||||
|               style={{ cursor: 'pointer' }} |               style={{ cursor: 'pointer' }} | ||||||
|               onClick={() => { |               onClick={() => { | ||||||
| @@ -233,20 +225,20 @@ const UsersTable = () => { | |||||||
|                   <Table.Cell> |                   <Table.Cell> | ||||||
|                     <Popup |                     <Popup | ||||||
|                       content={user.email ? user.email : '未绑定邮箱地址'} |                       content={user.email ? user.email : '未绑定邮箱地址'} | ||||||
|                       key={user.display_name} |                       key={user.username} | ||||||
|                       header={user.display_name ? user.display_name : user.username} |                       header={user.display_name ? user.display_name : user.username} | ||||||
|                       trigger={<span>{renderText(user.username, 10)}</span>} |                       trigger={<span>{renderText(user.username, 10)}</span>} | ||||||
|                       hoverable |                       hoverable | ||||||
|                     /> |                     /> | ||||||
|                   </Table.Cell> |                   </Table.Cell> | ||||||
|                   <Table.Cell>{renderGroup(user.group)}</Table.Cell> |                   <Table.Cell>{renderGroup(user.group)}</Table.Cell> | ||||||
|  |                   {/*<Table.Cell>*/} | ||||||
|  |                   {/*  {user.email ? <Popup hoverable content={user.email} trigger={<span>{renderText(user.email, 24)}</span>} /> : '无'}*/} | ||||||
|  |                   {/*</Table.Cell>*/} | ||||||
|                   <Table.Cell> |                   <Table.Cell> | ||||||
|                     {user.email ? <Popup hoverable content={user.email} trigger={<span>{renderText(user.email, 24)}</span>} /> : '无'} |                     <Popup content='剩余额度' trigger={<Label basic>{renderQuota(user.quota)}</Label>} /> | ||||||
|                   </Table.Cell> |                     <Popup content='已用额度' trigger={<Label basic>{renderQuota(user.used_quota)}</Label>} /> | ||||||
|                   <Table.Cell> |                     <Popup content='请求次数' trigger={<Label basic>{renderNumber(user.request_count)}</Label>} /> | ||||||
|                     <Popup content='剩余额度' trigger={<Label>{renderQuota(user.quota)}</Label>} /> |  | ||||||
|                     <Popup content='已用额度' trigger={<Label>{renderQuota(user.used_quota)}</Label>} /> |  | ||||||
|                     <Popup content='请求次数' trigger={<Label>{renderNumber(user.request_count)}</Label>} /> |  | ||||||
|                   </Table.Cell> |                   </Table.Cell> | ||||||
|                   <Table.Cell>{renderRole(user.role)}</Table.Cell> |                   <Table.Cell>{renderRole(user.role)}</Table.Cell> | ||||||
|                   <Table.Cell>{renderStatus(user.status)}</Table.Cell> |                   <Table.Cell>{renderStatus(user.status)}</Table.Cell> | ||||||
| @@ -320,7 +312,7 @@ const UsersTable = () => { | |||||||
|  |  | ||||||
|         <Table.Footer> |         <Table.Footer> | ||||||
|           <Table.Row> |           <Table.Row> | ||||||
|             <Table.HeaderCell colSpan='8'> |             <Table.HeaderCell colSpan='7'> | ||||||
|               <Button size='small' as={Link} to='/user/add' loading={loading}> |               <Button size='small' as={Link} to='/user/add' loading={loading}> | ||||||
|                 添加新的用户 |                 添加新的用户 | ||||||
|               </Button> |               </Button> | ||||||
|   | |||||||
| @@ -1,13 +1,18 @@ | |||||||
| export const CHANNEL_OPTIONS = [ | export const CHANNEL_OPTIONS = [ | ||||||
|   { key: 1, text: 'OpenAI', value: 1, color: 'green' }, |   { key: 1, text: 'OpenAI', value: 1, color: 'green' }, | ||||||
|   { key: 8, text: '自定义', value: 8, color: 'pink' }, |   { key: 14, text: 'Anthropic Claude', value: 14, color: 'black' }, | ||||||
|   { key: 3, text: 'Azure', value: 3, color: 'olive' }, |   { key: 3, text: 'Azure OpenAI', value: 3, color: 'olive' }, | ||||||
|   { key: 2, text: 'API2D', value: 2, color: 'blue' }, |   { key: 11, text: 'Google PaLM2', value: 11, color: 'orange' }, | ||||||
|   { key: 4, text: 'CloseAI', value: 4, color: 'teal' }, |   { key: 15, text: '百度文心千帆', value: 15, color: 'blue' }, | ||||||
|   { key: 5, text: 'OpenAI-SB', value: 5, color: 'brown' }, |   { key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet' }, | ||||||
|   { key: 6, text: 'OpenAI Max', value: 6, color: 'violet' }, |   { key: 8, text: '自定义渠道', value: 8, color: 'pink' }, | ||||||
|   { key: 7, text: 'OhMyGPT', value: 7, color: 'purple' }, |   { key: 2, text: '代理:API2D', value: 2, color: 'blue' }, | ||||||
|   { key: 9, text: 'AI.LS', value: 9, color: 'yellow' }, |   { key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown' }, | ||||||
|   { key: 10, text: 'AI Proxy', value: 10, color: 'purple' }, |   { key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple' }, | ||||||
|   { key: 12, text: 'API2GPT', value: 12, color: 'blue' } |   { key: 10, text: '代理:AI Proxy', value: 10, color: 'purple' }, | ||||||
|  |   { key: 4, text: '代理:CloseAI', value: 4, color: 'teal' }, | ||||||
|  |   { key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet' }, | ||||||
|  |   { key: 9, text: '代理:AI.LS', value: 9, color: 'yellow' }, | ||||||
|  |   { key: 12, text: '代理:API2GPT', value: 12, color: 'blue' }, | ||||||
|  |   { key: 13, text: '代理:AIGC2D', value: 13, color: 'purple' } | ||||||
| ]; | ]; | ||||||
| @@ -46,9 +46,7 @@ const About = () => { | |||||||
|             about.startsWith('https://') ? <iframe |             about.startsWith('https://') ? <iframe | ||||||
|               src={about} |               src={about} | ||||||
|               style={{ width: '100%', height: '100vh', border: 'none' }} |               style={{ width: '100%', height: '100vh', border: 'none' }} | ||||||
|             /> : <Segment> |             /> : <div style={{ fontSize: 'larger' }} dangerouslySetInnerHTML={{ __html: about }}></div> | ||||||
|               <div style={{ fontSize: 'larger' }} dangerouslySetInnerHTML={{ __html: about }}></div> |  | ||||||
|             </Segment> |  | ||||||
|           } |           } | ||||||
|         </> |         </> | ||||||
|       } |       } | ||||||
|   | |||||||
| @@ -1,9 +1,15 @@ | |||||||
| import React, { useEffect, useState } from 'react'; | import React, { useEffect, useState } from 'react'; | ||||||
| import { Button, Form, Header, Message, Segment } from 'semantic-ui-react'; | import { Button, Form, Header, Input, Message, Segment } from 'semantic-ui-react'; | ||||||
| import { useParams } from 'react-router-dom'; | import { useParams } from 'react-router-dom'; | ||||||
| import { API, showError, showInfo, showSuccess } from '../../helpers'; | import { API, showError, showInfo, showSuccess, verifyJSON } from '../../helpers'; | ||||||
| import { CHANNEL_OPTIONS } from '../../constants'; | import { CHANNEL_OPTIONS } from '../../constants'; | ||||||
|  |  | ||||||
|  | const MODEL_MAPPING_EXAMPLE = { | ||||||
|  |   'gpt-3.5-turbo-0301': 'gpt-3.5-turbo', | ||||||
|  |   'gpt-4-0314': 'gpt-4', | ||||||
|  |   'gpt-4-32k-0314': 'gpt-4-32k' | ||||||
|  | }; | ||||||
|  |  | ||||||
| const EditChannel = () => { | const EditChannel = () => { | ||||||
|   const params = useParams(); |   const params = useParams(); | ||||||
|   const channelId = params.id; |   const channelId = params.id; | ||||||
| @@ -15,15 +21,18 @@ const EditChannel = () => { | |||||||
|     key: '', |     key: '', | ||||||
|     base_url: '', |     base_url: '', | ||||||
|     other: '', |     other: '', | ||||||
|  |     model_mapping: '', | ||||||
|     models: [], |     models: [], | ||||||
|     groups: ['default'] |     groups: ['default'] | ||||||
|   }; |   }; | ||||||
|   const [batch, setBatch] = useState(false); |   const [batch, setBatch] = useState(false); | ||||||
|   const [inputs, setInputs] = useState(originInputs); |   const [inputs, setInputs] = useState(originInputs); | ||||||
|  |   const [originModelOptions, setOriginModelOptions] = useState([]); | ||||||
|   const [modelOptions, setModelOptions] = useState([]); |   const [modelOptions, setModelOptions] = useState([]); | ||||||
|   const [groupOptions, setGroupOptions] = useState([]); |   const [groupOptions, setGroupOptions] = useState([]); | ||||||
|   const [basicModels, setBasicModels] = useState([]); |   const [basicModels, setBasicModels] = useState([]); | ||||||
|   const [fullModels, setFullModels] = useState([]); |   const [fullModels, setFullModels] = useState([]); | ||||||
|  |   const [customModel, setCustomModel] = useState(''); | ||||||
|   const handleInputChange = (e, { name, value }) => { |   const handleInputChange = (e, { name, value }) => { | ||||||
|     setInputs((inputs) => ({ ...inputs, [name]: value })); |     setInputs((inputs) => ({ ...inputs, [name]: value })); | ||||||
|   }; |   }; | ||||||
| @@ -42,6 +51,9 @@ const EditChannel = () => { | |||||||
|       } else { |       } else { | ||||||
|         data.groups = data.group.split(','); |         data.groups = data.group.split(','); | ||||||
|       } |       } | ||||||
|  |       if (data.model_mapping !== '') { | ||||||
|  |         data.model_mapping = JSON.stringify(JSON.parse(data.model_mapping), null, 2); | ||||||
|  |       } | ||||||
|       setInputs(data); |       setInputs(data); | ||||||
|     } else { |     } else { | ||||||
|       showError(message); |       showError(message); | ||||||
| @@ -52,13 +64,16 @@ const EditChannel = () => { | |||||||
|   const fetchModels = async () => { |   const fetchModels = async () => { | ||||||
|     try { |     try { | ||||||
|       let res = await API.get(`/api/channel/models`); |       let res = await API.get(`/api/channel/models`); | ||||||
|       setModelOptions(res.data.data.map((model) => ({ |       let localModelOptions = res.data.data.map((model) => ({ | ||||||
|         key: model.id, |         key: model.id, | ||||||
|         text: model.id, |         text: model.id, | ||||||
|         value: model.id |         value: model.id | ||||||
|       }))); |       })); | ||||||
|  |       setOriginModelOptions(localModelOptions); | ||||||
|       setFullModels(res.data.data.map((model) => model.id)); |       setFullModels(res.data.data.map((model) => model.id)); | ||||||
|       setBasicModels(res.data.data.filter((model) => !model.id.startsWith('gpt-4')).map((model) => model.id)); |       setBasicModels(res.data.data.filter((model) => { | ||||||
|  |         return model.id.startsWith('gpt-3') || model.id.startsWith('text-'); | ||||||
|  |       }).map((model) => model.id)); | ||||||
|     } catch (error) { |     } catch (error) { | ||||||
|       showError(error.message); |       showError(error.message); | ||||||
|     } |     } | ||||||
| @@ -77,6 +92,20 @@ const EditChannel = () => { | |||||||
|     } |     } | ||||||
|   }; |   }; | ||||||
|  |  | ||||||
|  |   useEffect(() => { | ||||||
|  |     let localModelOptions = [...originModelOptions]; | ||||||
|  |     inputs.models.forEach((model) => { | ||||||
|  |       if (!localModelOptions.find((option) => option.key === model)) { | ||||||
|  |         localModelOptions.push({ | ||||||
|  |           key: model, | ||||||
|  |           text: model, | ||||||
|  |           value: model | ||||||
|  |         }); | ||||||
|  |       } | ||||||
|  |     }); | ||||||
|  |     setModelOptions(localModelOptions); | ||||||
|  |   }, [originModelOptions, inputs.models]); | ||||||
|  |  | ||||||
|   useEffect(() => { |   useEffect(() => { | ||||||
|     if (isEdit) { |     if (isEdit) { | ||||||
|       loadChannel().then(); |       loadChannel().then(); | ||||||
| @@ -94,6 +123,10 @@ const EditChannel = () => { | |||||||
|       showInfo('请至少选择一个模型!'); |       showInfo('请至少选择一个模型!'); | ||||||
|       return; |       return; | ||||||
|     } |     } | ||||||
|  |     if (inputs.model_mapping !== '' && !verifyJSON(inputs.model_mapping)) { | ||||||
|  |       showInfo('模型映射必须是合法的 JSON 格式!'); | ||||||
|  |       return; | ||||||
|  |     } | ||||||
|     let localInputs = inputs; |     let localInputs = inputs; | ||||||
|     if (localInputs.base_url.endsWith('/')) { |     if (localInputs.base_url.endsWith('/')) { | ||||||
|       localInputs.base_url = localInputs.base_url.slice(0, localInputs.base_url.length - 1); |       localInputs.base_url = localInputs.base_url.slice(0, localInputs.base_url.length - 1); | ||||||
| @@ -131,6 +164,7 @@ const EditChannel = () => { | |||||||
|             <Form.Select |             <Form.Select | ||||||
|               label='类型' |               label='类型' | ||||||
|               name='type' |               name='type' | ||||||
|  |               required | ||||||
|               options={CHANNEL_OPTIONS} |               options={CHANNEL_OPTIONS} | ||||||
|               value={inputs.type} |               value={inputs.type} | ||||||
|               onChange={handleInputChange} |               onChange={handleInputChange} | ||||||
| @@ -181,25 +215,12 @@ const EditChannel = () => { | |||||||
|               </Form.Field> |               </Form.Field> | ||||||
|             ) |             ) | ||||||
|           } |           } | ||||||
|           { |  | ||||||
|             inputs.type !== 3 && inputs.type !== 8 && ( |  | ||||||
|               <Form.Field> |  | ||||||
|                 <Form.Input |  | ||||||
|                   label='镜像' |  | ||||||
|                   name='base_url' |  | ||||||
|                   placeholder={'请输入镜像站地址,格式为:https://domain.com,可不填,不填则使用渠道默认值'} |  | ||||||
|                   onChange={handleInputChange} |  | ||||||
|                   value={inputs.base_url} |  | ||||||
|                   autoComplete='new-password' |  | ||||||
|                 /> |  | ||||||
|               </Form.Field> |  | ||||||
|             ) |  | ||||||
|           } |  | ||||||
|           <Form.Field> |           <Form.Field> | ||||||
|             <Form.Input |             <Form.Input | ||||||
|               label='名称' |               label='名称' | ||||||
|  |               required | ||||||
|               name='name' |               name='name' | ||||||
|               placeholder={'请输入名称'} |               placeholder={'请为渠道命名'} | ||||||
|               onChange={handleInputChange} |               onChange={handleInputChange} | ||||||
|               value={inputs.name} |               value={inputs.name} | ||||||
|               autoComplete='new-password' |               autoComplete='new-password' | ||||||
| @@ -208,8 +229,9 @@ const EditChannel = () => { | |||||||
|           <Form.Field> |           <Form.Field> | ||||||
|             <Form.Dropdown |             <Form.Dropdown | ||||||
|               label='分组' |               label='分组' | ||||||
|               placeholder={'请选择分组'} |               placeholder={'请选择可以使用该渠道的分组'} | ||||||
|               name='groups' |               name='groups' | ||||||
|  |               required | ||||||
|               fluid |               fluid | ||||||
|               multiple |               multiple | ||||||
|               selection |               selection | ||||||
| @@ -224,8 +246,9 @@ const EditChannel = () => { | |||||||
|           <Form.Field> |           <Form.Field> | ||||||
|             <Form.Dropdown |             <Form.Dropdown | ||||||
|               label='模型' |               label='模型' | ||||||
|               placeholder={'请选择该通道所支持的模型'} |               placeholder={'请选择该渠道所支持的模型'} | ||||||
|               name='models' |               name='models' | ||||||
|  |               required | ||||||
|               fluid |               fluid | ||||||
|               multiple |               multiple | ||||||
|               selection |               selection | ||||||
| @@ -245,12 +268,50 @@ const EditChannel = () => { | |||||||
|             <Button type={'button'} onClick={() => { |             <Button type={'button'} onClick={() => { | ||||||
|               handleInputChange(null, { name: 'models', value: [] }); |               handleInputChange(null, { name: 'models', value: [] }); | ||||||
|             }}>清除所有模型</Button> |             }}>清除所有模型</Button> | ||||||
|  |             <Input | ||||||
|  |               action={ | ||||||
|  |                 <Button type={'button'} onClick={()=>{ | ||||||
|  |                   if (customModel.trim() === "") return; | ||||||
|  |                   if (inputs.models.includes(customModel)) return; | ||||||
|  |                   let localModels = [...inputs.models]; | ||||||
|  |                   localModels.push(customModel); | ||||||
|  |                   let localModelOptions = []; | ||||||
|  |                   localModelOptions.push({ | ||||||
|  |                     key: customModel, | ||||||
|  |                     text: customModel, | ||||||
|  |                     value: customModel, | ||||||
|  |                   }); | ||||||
|  |                   setModelOptions(modelOptions=>{ | ||||||
|  |                     return [...modelOptions, ...localModelOptions]; | ||||||
|  |                   }); | ||||||
|  |                   setCustomModel(''); | ||||||
|  |                   handleInputChange(null, { name: 'models', value: localModels }); | ||||||
|  |                 }}>填入</Button> | ||||||
|  |               } | ||||||
|  |               placeholder='输入自定义模型名称' | ||||||
|  |               value={customModel} | ||||||
|  |               onChange={(e, { value }) => { | ||||||
|  |                 setCustomModel(value); | ||||||
|  |               }} | ||||||
|  |             /> | ||||||
|           </div> |           </div> | ||||||
|  |           <Form.Field> | ||||||
|  |             <Form.TextArea | ||||||
|  |               label='模型映射' | ||||||
|  |               placeholder={`此项可选,用于修改请求体中的模型名称,为一个 JSON 字符串,键为请求中模型名称,值为要替换的模型名称,例如:\n${JSON.stringify(MODEL_MAPPING_EXAMPLE, null, 2)}`} | ||||||
|  |               name='model_mapping' | ||||||
|  |               onChange={handleInputChange} | ||||||
|  |               value={inputs.model_mapping} | ||||||
|  |               style={{ minHeight: 150, fontFamily: 'JetBrains Mono, Consolas' }} | ||||||
|  |               autoComplete='new-password' | ||||||
|  |             /> | ||||||
|  |           </Form.Field> | ||||||
|           { |           { | ||||||
|             batch ? <Form.Field> |             batch ? <Form.Field> | ||||||
|               <Form.TextArea |               <Form.TextArea | ||||||
|                 label='密钥' |                 label='密钥' | ||||||
|                 name='key' |                 name='key' | ||||||
|  |                 required | ||||||
|                 placeholder={'请输入密钥,一行一个'} |                 placeholder={'请输入密钥,一行一个'} | ||||||
|                 onChange={handleInputChange} |                 onChange={handleInputChange} | ||||||
|                 value={inputs.key} |                 value={inputs.key} | ||||||
| @@ -261,7 +322,8 @@ const EditChannel = () => { | |||||||
|               <Form.Input |               <Form.Input | ||||||
|                 label='密钥' |                 label='密钥' | ||||||
|                 name='key' |                 name='key' | ||||||
|                 placeholder={'请输入密钥'} |                 required | ||||||
|  |                 placeholder={inputs.type === 15 ? "请输入 access token,当前版本暂不支持自动刷新,请每 30 天更新一次" : '请输入渠道对应的鉴权密钥'} | ||||||
|                 onChange={handleInputChange} |                 onChange={handleInputChange} | ||||||
|                 value={inputs.key} |                 value={inputs.key} | ||||||
|                 autoComplete='new-password' |                 autoComplete='new-password' | ||||||
| @@ -278,7 +340,21 @@ const EditChannel = () => { | |||||||
|               /> |               /> | ||||||
|             ) |             ) | ||||||
|           } |           } | ||||||
|           <Button positive onClick={submit}>提交</Button> |           { | ||||||
|  |             inputs.type !== 3 && inputs.type !== 8 && ( | ||||||
|  |               <Form.Field> | ||||||
|  |                 <Form.Input | ||||||
|  |                   label='镜像' | ||||||
|  |                   name='base_url' | ||||||
|  |                   placeholder={'此项可选,用于通过镜像站来进行 API 调用,请输入镜像站地址,格式为:https://domain.com'} | ||||||
|  |                   onChange={handleInputChange} | ||||||
|  |                   value={inputs.base_url} | ||||||
|  |                   autoComplete='new-password' | ||||||
|  |                 /> | ||||||
|  |               </Form.Field> | ||||||
|  |             ) | ||||||
|  |           } | ||||||
|  |           <Button type={isEdit ? "button" : "submit"} positive onClick={submit}>提交</Button> | ||||||
|         </Form> |         </Form> | ||||||
|       </Segment> |       </Segment> | ||||||
|     </> |     </> | ||||||
|   | |||||||
| @@ -11,7 +11,7 @@ const EditToken = () => { | |||||||
|   const [loading, setLoading] = useState(isEdit); |   const [loading, setLoading] = useState(isEdit); | ||||||
|   const originInputs = { |   const originInputs = { | ||||||
|     name: '', |     name: '', | ||||||
|     remain_quota: 0, |     remain_quota: isEdit ? 0 : 500000, | ||||||
|     expired_time: -1, |     expired_time: -1, | ||||||
|     unlimited_quota: false |     unlimited_quota: false | ||||||
|   }; |   }; | ||||||
|   | |||||||
| @@ -7,12 +7,15 @@ const TopUp = () => { | |||||||
|   const [redemptionCode, setRedemptionCode] = useState(''); |   const [redemptionCode, setRedemptionCode] = useState(''); | ||||||
|   const [topUpLink, setTopUpLink] = useState(''); |   const [topUpLink, setTopUpLink] = useState(''); | ||||||
|   const [userQuota, setUserQuota] = useState(0); |   const [userQuota, setUserQuota] = useState(0); | ||||||
|  |   const [isSubmitting, setIsSubmitting] = useState(false); | ||||||
|  |  | ||||||
|   const topUp = async () => { |   const topUp = async () => { | ||||||
|     if (redemptionCode === '') { |     if (redemptionCode === '') { | ||||||
|       showInfo('请输入充值码!') |       showInfo('请输入充值码!') | ||||||
|       return; |       return; | ||||||
|     } |     } | ||||||
|  |     setIsSubmitting(true); | ||||||
|  |     try { | ||||||
|       const res = await API.post('/api/user/topup', { |       const res = await API.post('/api/user/topup', { | ||||||
|         key: redemptionCode |         key: redemptionCode | ||||||
|       }); |       }); | ||||||
| @@ -26,6 +29,11 @@ const TopUp = () => { | |||||||
|       } else { |       } else { | ||||||
|         showError(message); |         showError(message); | ||||||
|       } |       } | ||||||
|  |     } catch (err) { | ||||||
|  |       showError('请求失败'); | ||||||
|  |     } finally { | ||||||
|  |       setIsSubmitting(false);  | ||||||
|  |     } | ||||||
|   }; |   }; | ||||||
|  |  | ||||||
|   const openTopUpLink = () => { |   const openTopUpLink = () => { | ||||||
| @@ -74,8 +82,8 @@ const TopUp = () => { | |||||||
|             <Button color='green' onClick={openTopUpLink}> |             <Button color='green' onClick={openTopUpLink}> | ||||||
|               获取兑换码 |               获取兑换码 | ||||||
|             </Button> |             </Button> | ||||||
|             <Button color='yellow' onClick={topUp}> |             <Button color='yellow' onClick={topUp} disabled={isSubmitting}> | ||||||
|               充值 |                 {isSubmitting ? '兑换中...' : '兑换'} | ||||||
|             </Button> |             </Button> | ||||||
|           </Form> |           </Form> | ||||||
|         </Grid.Column> |         </Grid.Column> | ||||||
| @@ -92,5 +100,4 @@ const TopUp = () => { | |||||||
|   ); |   ); | ||||||
| }; | }; | ||||||
|  |  | ||||||
|  |  | ||||||
| export default TopUp; | export default TopUp; | ||||||
| @@ -2,6 +2,7 @@ import React, { useEffect, useState } from 'react'; | |||||||
| import { Button, Form, Header, Segment } from 'semantic-ui-react'; | import { Button, Form, Header, Segment } from 'semantic-ui-react'; | ||||||
| import { useParams } from 'react-router-dom'; | import { useParams } from 'react-router-dom'; | ||||||
| import { API, showError, showSuccess } from '../../helpers'; | import { API, showError, showSuccess } from '../../helpers'; | ||||||
|  | import { renderQuota, renderQuotaWithPrompt } from '../../helpers/render'; | ||||||
|  |  | ||||||
| const EditUser = () => { | const EditUser = () => { | ||||||
|   const params = useParams(); |   const params = useParams(); | ||||||
| @@ -134,7 +135,7 @@ const EditUser = () => { | |||||||
|               </Form.Field> |               </Form.Field> | ||||||
|               <Form.Field> |               <Form.Field> | ||||||
|                 <Form.Input |                 <Form.Input | ||||||
|                   label='剩余额度' |                   label={`剩余额度${renderQuotaWithPrompt(quota)}`} | ||||||
|                   name='quota' |                   name='quota' | ||||||
|                   placeholder={'请输入新的剩余额度'} |                   placeholder={'请输入新的剩余额度'} | ||||||
|                   onChange={handleInputChange} |                   onChange={handleInputChange} | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user