Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-10-26 11:23:43 +08:00)

Compare commits: v0.4.0-alp ... v0.4.2-alp (16 commits)
Commits compared (SHA1):

9d0bec83df
f97a9ce597
4339f45f74
e398e0756b
64db39320a
0b4bf30908
d29c273073
74f508e847
145bb14cb2
8901f03864
813bf0bd66
45e9fd66e7
e0d0674f81
4b6adaec0b
9301b3fed3
c6edb78ac9
README.md (20)

@@ -44,12 +44,13 @@ _✨ All in one 的 OpenAI 接口,整合各种 API 访问方式,开箱即用
   <a href="https://iamazing.cn/page/reward">赞赏支持</a>
 </p>
 
-> **Warning**:从 `v0.2` 版本升级到 `v0.3` 版本需要手动迁移数据库,请手动执行[数据库迁移脚本](./bin/migration_v0.2-v0.3.sql)。
+> **Note**:使用 Docker 拉取的最新镜像可能是 `alpha` 版本,如果追求稳定性请手动指定版本。
 
+> **Warning**:从 `v0.3` 版本升级到 `v0.4` 版本需要手动迁移数据库,请手动执行[数据库迁移脚本](./bin/migration_v0.3-v0.4.sql)。
 
 ## 功能
 1. 支持多种 API 访问渠道,欢迎 PR 或提 issue 添加更多渠道:
-   + [x] OpenAI 官方通道
+   + [x] OpenAI 官方通道(支持配置代理)
    + [x] **Azure OpenAI API**
    + [x] [API2D](https://api2d.com/r/197971)
    + [x] [OhMyGPT](https://aigptx.top?aff=uFpUl2Kf)
@@ -58,23 +59,26 @@ _✨ All in one 的 OpenAI 接口,整合各种 API 访问方式,开箱即用
    + [x] [OpenAI Max](https://openaimax.com)
    + [x] [OpenAI-SB](https://openai-sb.com)
    + [x] [CloseAI](https://console.openai-asia.com/r/2412)
-   + [x] 自定义渠道:例如使用自行搭建的 OpenAI 代理
+   + [x] 自定义渠道:例如各种未收录的第三方代理服务
 2. 支持通过**负载均衡**的方式访问多个渠道。
 3. 支持 **stream 模式**,可以通过流式传输实现打字机效果。
 4. 支持**多机部署**,[详见此处](#多机部署)。
 5. 支持**令牌管理**,设置令牌的过期时间和使用次数。
 6. 支持**兑换码管理**,支持批量生成和导出兑换码,可使用兑换码为账户进行充值。
 7. 支持**通道管理**,批量创建通道。
-8. 支持发布公告,设置充值链接,设置新用户初始额度。
-9. 支持丰富的**自定义**设置,
+8. 支持**用户分组**以及**渠道分组**。
+9. 支持渠道**设置模型列表**。
+10. 支持**查看额度明细**。
+11. 支持发布公告,设置充值链接,设置新用户初始额度。
+12. 支持丰富的**自定义**设置,
    1. 支持自定义系统名称,logo 以及页脚。
    2. 支持自定义首页和关于页面,可以选择使用 HTML & Markdown 代码进行自定义,或者使用一个单独的网页通过 iframe 嵌入。
-10. 支持通过系统访问令牌访问管理 API。
-11. 支持用户管理,支持**多种用户登录注册方式**:
+13. 支持通过系统访问令牌访问管理 API。
+14. 支持用户管理,支持**多种用户登录注册方式**:
    + 邮箱登录注册以及通过邮箱进行密码重置。
    + [GitHub 开放授权](https://github.com/settings/applications/new)。
    + 微信公众号授权(需要额外部署 [WeChat Server](https://github.com/songquanpeng/wechat-server))。
-12. 未来其他大模型开放 API 后,将第一时间支持,并将其封装成同样的 API 访问方式。
+15. 未来其他大模型开放 API 后,将第一时间支持,并将其封装成同样的 API 访问方式。
 
 ## 部署
 ### 基于 Docker 进行部署
bin/migration_v0.3-v0.4.sql (new file, 17 lines)

@@ -0,0 +1,17 @@
INSERT INTO abilities (`group`, model, channel_id, enabled)
SELECT c.`group`, m.model, c.id, 1
FROM channels c
CROSS JOIN (
    SELECT 'gpt-3.5-turbo' AS model UNION ALL
    SELECT 'gpt-3.5-turbo-0301' AS model UNION ALL
    SELECT 'gpt-4' AS model UNION ALL
    SELECT 'gpt-4-0314' AS model
) AS m
WHERE c.status = 1
  AND NOT EXISTS (
    SELECT 1
    FROM abilities a
    WHERE a.`group` = c.`group`
      AND a.model = m.model
      AND a.channel_id = c.id
);
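The script above backfills the abilities table so that every enabled channel gets a row for each of the four default models, skipping pairs that already exist. For readers who prefer to do the same from application code, here is a rough Go equivalent using any gorm handle (such as the project's model.DB); the table and column names come from the SQL above, while the function name, package name, and map-based insert are purely illustrative and not part of the repository.

```go
package migration

import "gorm.io/gorm"

// ensureAbility is an illustrative sketch of what the migration does for one
// (group, model, channel) triple: insert an abilities row only if it is missing.
func ensureAbility(db *gorm.DB, group string, modelName string, channelId int) error {
	var count int64
	err := db.Table("abilities").
		Where("`group` = ? AND model = ? AND channel_id = ?", group, modelName, channelId).
		Count(&count).Error
	if err != nil {
		return err
	}
	if count > 0 {
		return nil // row already present; mirrors the NOT EXISTS guard in the SQL
	}
	return db.Table("abilities").Create(map[string]interface{}{
		"group":      group,
		"model":      modelName,
		"channel_id": channelId,
		"enabled":    true,
	}).Error
}
```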
@@ -25,6 +25,7 @@ var OptionMap map[string]string
 var OptionMapRWMutex sync.RWMutex
 
 var ItemsPerPage = 10
+var MaxRecentItems = 100
 
 var PasswordLoginEnabled = true
 var PasswordRegisterEnabled = true
@@ -10,7 +10,7 @@ var ModelRatio = map[string]float64{
     "gpt-4-0314": 15,
     "gpt-4-32k": 30,
     "gpt-4-32k-0314": 30,
-    "gpt-3.5-turbo": 1,
+    "gpt-3.5-turbo": 1, // $0.002 / 1K tokens
     "gpt-3.5-turbo-0301": 1,
     "text-ada-001": 0.2,
     "text-babbage-001": 0.25,
@@ -26,8 +26,8 @@ var ModelRatio = map[string]float64{
     "ada": 10,
     "text-embedding-ada-002": 0.2,
     "text-search-ada-doc-001": 10,
-    "text-moderation-stable": 10,
-    "text-moderation-latest": 10,
+    "text-moderation-stable": 0.1,
+    "text-moderation-latest": 0.1,
 }
 
 func ModelRatio2JSONString() string {
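These ratios feed the quota arithmetic visible in the relay hunks further down: the token usage is combined as promptTokens + completionTokens*completionRatio and then scaled by the model's ratio. A minimal, self-contained sketch of that arithmetic, with made-up numbers for illustration only:

```go
package main

import "fmt"

func main() {
	modelRatio := 1.0    // e.g. ModelRatio["gpt-3.5-turbo"]
	completionRatio := 1 // completion tokens weighted like prompt tokens in this version
	promptTokens := 120
	completionTokens := 80

	// Same shape as the quota computation in the relay hunk below.
	quota := promptTokens + completionTokens*completionRatio
	quota = int(float64(quota) * modelRatio)
	fmt.Println(quota) // 200
}
```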
@@ -41,7 +41,9 @@ func updateChannelBalance(channel *model.Channel) (float64, error) {
     baseURL := common.ChannelBaseURLs[channel.Type]
     switch channel.Type {
     case common.ChannelTypeOpenAI:
-        // do nothing
+        if channel.BaseURL != "" {
+            baseURL = channel.BaseURL
+        }
     case common.ChannelTypeAzure:
         return 0, errors.New("尚未实现")
     case common.ChannelTypeCustom:
@@ -27,6 +27,8 @@ func testChannel(channel *model.Channel, request *ChatRequest) error {
     } else {
         if channel.Type == common.ChannelTypeCustom {
             requestURL = channel.BaseURL
+        } else if channel.Type == common.ChannelTypeOpenAI && channel.BaseURL != "" {
+            requestURL = channel.BaseURL
         }
         requestURL += "/v1/chat/completions"
     }
@@ -56,8 +58,8 @@ func testChannel(channel *model.Channel, request *ChatRequest) error {
     if err != nil {
         return err
     }
-    if response.Error.Message != "" || response.Error.Code != "" {
-        return errors.New(fmt.Sprintf("type %s, code %s, message %s", response.Error.Type, response.Error.Code, response.Error.Message))
+    if response.Usage.CompletionTokens == 0 {
+        return errors.New(fmt.Sprintf("type %s, code %v, message %s", response.Error.Type, response.Error.Code, response.Error.Message))
     }
     return nil
 }
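Both hunks above implement the same idea: an OpenAI-type channel may now carry its own BaseURL (a proxy), which replaces the default base URL before the API path is appended. A tiny standalone sketch of that URL selection logic; the function name and example hosts are invented for illustration:

```go
package main

import "fmt"

// requestURLFor mirrors the override logic from the hunks above: a non-empty
// per-channel base URL wins over the default upstream.
func requestURLFor(defaultBaseURL, channelBaseURL string) string {
	baseURL := defaultBaseURL
	if channelBaseURL != "" {
		baseURL = channelBaseURL
	}
	return baseURL + "/v1/chat/completions"
}

func main() {
	fmt.Println(requestURLFor("https://api.openai.com", ""))                             // default upstream
	fmt.Println(requestURLFor("https://api.openai.com", "https://my-proxy.example.com")) // proxy override
}
```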
controller/log.go (new file, 86 lines)

@@ -0,0 +1,86 @@
package controller

import (
    "github.com/gin-gonic/gin"
    "one-api/common"
    "one-api/model"
    "strconv"
)

func GetAllLogs(c *gin.Context) {
    p, _ := strconv.Atoi(c.Query("p"))
    if p < 0 {
        p = 0
    }
    logType, _ := strconv.Atoi(c.Query("type"))
    logs, err := model.GetAllLogs(logType, p*common.ItemsPerPage, common.ItemsPerPage)
    if err != nil {
        c.JSON(200, gin.H{
            "success": false,
            "message": err.Error(),
        })
        return
    }
    c.JSON(200, gin.H{
        "success": true,
        "message": "",
        "data":    logs,
    })
}

func GetUserLogs(c *gin.Context) {
    p, _ := strconv.Atoi(c.Query("p"))
    if p < 0 {
        p = 0
    }
    userId := c.GetInt("id")
    logType, _ := strconv.Atoi(c.Query("type"))
    logs, err := model.GetUserLogs(userId, logType, p*common.ItemsPerPage, common.ItemsPerPage)
    if err != nil {
        c.JSON(200, gin.H{
            "success": false,
            "message": err.Error(),
        })
        return
    }
    c.JSON(200, gin.H{
        "success": true,
        "message": "",
        "data":    logs,
    })
}

func SearchAllLogs(c *gin.Context) {
    keyword := c.Query("keyword")
    logs, err := model.SearchAllLogs(keyword)
    if err != nil {
        c.JSON(200, gin.H{
            "success": false,
            "message": err.Error(),
        })
        return
    }
    c.JSON(200, gin.H{
        "success": true,
        "message": "",
        "data":    logs,
    })
}

func SearchUserLogs(c *gin.Context) {
    keyword := c.Query("keyword")
    userId := c.GetInt("id")
    logs, err := model.SearchUserLogs(userId, keyword)
    if err != nil {
        c.JSON(200, gin.H{
            "success": false,
            "message": err.Error(),
        })
        return
    }
    c.JSON(200, gin.H{
        "success": true,
        "message": "",
        "data":    logs,
    })
}
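The four handlers above are wired to /api/log routes in the router hunk later in this diff (admin auth for the all-user variants, user auth for the /self variants). A hedged sketch of how a client might page through its own consumption logs; the host, port, and bearer token are placeholders, and the exact credential header accepted by the user-auth middleware is an assumption:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Page 0 of the current user's consume logs (type=2, matching LogTypeConsume).
	req, err := http.NewRequest("GET", "http://localhost:3000/api/log/self/?p=0&type=2", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer <access-token>") // placeholder credential
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // expected shape: {"success":true,"message":"","data":[...]}
}
```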
@@ -116,6 +116,69 @@ func init() {
             Root:       "text-embedding-ada-002",
             Parent:     nil,
         },
+        {
+            Id:         "text-davinci-003",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "openai",
+            Permission: permission,
+            Root:       "text-davinci-003",
+            Parent:     nil,
+        },
+        {
+            Id:         "text-davinci-002",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "openai",
+            Permission: permission,
+            Root:       "text-davinci-002",
+            Parent:     nil,
+        },
+        {
+            Id:         "text-curie-001",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "openai",
+            Permission: permission,
+            Root:       "text-curie-001",
+            Parent:     nil,
+        },
+        {
+            Id:         "text-babbage-001",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "openai",
+            Permission: permission,
+            Root:       "text-babbage-001",
+            Parent:     nil,
+        },
+        {
+            Id:         "text-ada-001",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "openai",
+            Permission: permission,
+            Root:       "text-ada-001",
+            Parent:     nil,
+        },
+        {
+            Id:         "text-moderation-latest",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "openai",
+            Permission: permission,
+            Root:       "text-moderation-latest",
+            Parent:     nil,
+        },
+        {
+            Id:         "text-moderation-stable",
+            Object:     "model",
+            Created:    1677649963,
+            OwnedBy:    "openai",
+            Permission: permission,
+            Root:       "text-moderation-stable",
+            Parent:     nil,
+        },
     }
     openAIModelsMap = make(map[string]OpenAIModels)
     for _, model := range openAIModels {
@@ -19,6 +19,14 @@ type Message struct {
     Name *string `json:"name,omitempty"`
 }
 
+const (
+    RelayModeUnknown = iota
+    RelayModeChatCompletions
+    RelayModeCompletions
+    RelayModeEmbeddings
+    RelayModeModeration
+)
+
 // https://platform.openai.com/docs/api-reference/chat
 
 type GeneralOpenAIRequest struct {
@@ -30,6 +38,7 @@ type GeneralOpenAIRequest struct {
     Temperature float64 `json:"temperature"`
     TopP        float64 `json:"top_p"`
     N           int     `json:"n"`
+    Input       string  `json:"input"`
 }
 
 type ChatRequest struct {
@@ -56,7 +65,7 @@ type OpenAIError struct {
     Message string `json:"message"`
     Type    string `json:"type"`
     Param   string `json:"param"`
-    Code    string `json:"code"`
+    Code    any    `json:"code"`
 }
 
 type OpenAIErrorWithStatusCode struct {
@@ -69,7 +78,7 @@ type TextResponse struct {
     Error OpenAIError `json:"error"`
 }
 
-type StreamResponse struct {
+type ChatCompletionsStreamResponse struct {
     Choices []struct {
         Delta struct {
             Content string `json:"content"`
@@ -78,11 +87,28 @@ type StreamResponse struct {
     } `json:"choices"`
 }
 
+type CompletionsStreamResponse struct {
+    Choices []struct {
+        Text         string `json:"text"`
+        FinishReason string `json:"finish_reason"`
+    } `json:"choices"`
+}
+
 func Relay(c *gin.Context) {
-    err := relayHelper(c)
+    relayMode := RelayModeUnknown
+    if strings.HasPrefix(c.Request.URL.Path, "/v1/chat/completions") {
+        relayMode = RelayModeChatCompletions
+    } else if strings.HasPrefix(c.Request.URL.Path, "/v1/completions") {
+        relayMode = RelayModeCompletions
+    } else if strings.HasPrefix(c.Request.URL.Path, "/v1/embeddings") {
+        relayMode = RelayModeEmbeddings
+    } else if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
+        relayMode = RelayModeModeration
+    }
+    err := relayHelper(c, relayMode)
     if err != nil {
         if err.StatusCode == http.StatusTooManyRequests {
-            err.OpenAIError.Message = "负载已满,请稍后再试,或升级账户以提升服务质量。"
+            err.OpenAIError.Message = "当前分组负载已饱和,请稍后再试,或升级账户以提升服务质量。"
         }
         c.JSON(err.StatusCode, gin.H{
             "error": err.OpenAIError,
@@ -110,7 +136,7 @@ func errorWrapper(err error, code string, statusCode int) *OpenAIErrorWithStatus
     }
 }
 
-func relayHelper(c *gin.Context) *OpenAIErrorWithStatusCode {
+func relayHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
     channelType := c.GetInt("channel")
     tokenId := c.GetInt("token_id")
     consumeQuota := c.GetBool("consume_quota")
@@ -121,10 +147,17 @@ func relayHelper(c *gin.Context) *OpenAIErrorWithStatusCode {
             return errorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
         }
     }
+    if relayMode == RelayModeModeration && textRequest.Model == "" {
+        textRequest.Model = "text-moderation-latest"
+    }
     baseURL := common.ChannelBaseURLs[channelType]
     requestURL := c.Request.URL.String()
     if channelType == common.ChannelTypeCustom {
         baseURL = c.GetString("base_url")
+    } else if channelType == common.ChannelTypeOpenAI {
+        if c.GetString("base_url") != "" {
+            baseURL = c.GetString("base_url")
+        }
     }
     fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL)
     if channelType == common.ChannelTypeAzure {
@@ -148,8 +181,15 @@ func relayHelper(c *gin.Context) *OpenAIErrorWithStatusCode {
         err := relayPaLM(textRequest, c)
         return err
     }
-    promptTokens := countTokenMessages(textRequest.Messages, textRequest.Model)
+    var promptTokens int
+    switch relayMode {
+    case RelayModeChatCompletions:
+        promptTokens = countTokenMessages(textRequest.Messages, textRequest.Model)
+    case RelayModeCompletions:
+        promptTokens = countTokenText(textRequest.Prompt, textRequest.Model)
+    case RelayModeModeration:
+        promptTokens = countTokenText(textRequest.Input, textRequest.Model)
+    }
     preConsumedTokens := common.PreConsumedQuota
     if textRequest.MaxTokens != 0 {
         preConsumedTokens = promptTokens + textRequest.MaxTokens
@@ -208,11 +248,16 @@ func relayHelper(c *gin.Context) *OpenAIErrorWithStatusCode {
                 quota = textResponse.Usage.PromptTokens + textResponse.Usage.CompletionTokens*completionRatio
             }
             quota = int(float64(quota) * ratio)
+            if ratio != 0 && quota <= 0 {
+                quota = 1
+            }
             quotaDelta := quota - preConsumedQuota
             err := model.PostConsumeTokenQuota(tokenId, quotaDelta)
             if err != nil {
                 common.SysError("Error consuming token remain quota: " + err.Error())
             }
+            userId := c.GetInt("id")
+            model.RecordLog(userId, model.LogTypeConsume, fmt.Sprintf("使用模型 %s 消耗 %d 点额度", textRequest.Model, quota))
         }
     }()
@@ -245,7 +290,9 @@ func relayHelper(c *gin.Context) *OpenAIErrorWithStatusCode {
                 dataChan <- data
                 data = data[6:]
                 if !strings.HasPrefix(data, "[DONE]") {
-                    var streamResponse StreamResponse
+                    switch relayMode {
+                    case RelayModeChatCompletions:
+                        var streamResponse ChatCompletionsStreamResponse
                     err = json.Unmarshal([]byte(data), &streamResponse)
                     if err != nil {
                         common.SysError("Error unmarshalling stream response: " + err.Error())
@@ -254,6 +301,17 @@ func relayHelper(c *gin.Context) *OpenAIErrorWithStatusCode {
                     for _, choice := range streamResponse.Choices {
                         streamResponseText += choice.Delta.Content
                     }
+                    case RelayModeCompletions:
+                        var streamResponse CompletionsStreamResponse
+                        err = json.Unmarshal([]byte(data), &streamResponse)
+                        if err != nil {
+                            common.SysError("Error unmarshalling stream response: " + err.Error())
+                            return
+                        }
+                        for _, choice := range streamResponse.Choices {
+                            streamResponseText += choice.Text
+                        }
+                    }
                 }
             }
             stopChan <- true
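The switch added to the streaming loop exists because the two relay modes carry their text in different places: chat completion chunks put it in choices[].delta.content, plain completion chunks in choices[].text. A small standalone sketch of the two payload shapes; the sample JSON strings are illustrative, not captured from a real stream:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// chatChunk mirrors ChatCompletionsStreamResponse from the diff above.
type chatChunk struct {
	Choices []struct {
		Delta struct {
			Content string `json:"content"`
		} `json:"delta"`
	} `json:"choices"`
}

// completionChunk mirrors CompletionsStreamResponse from the diff above.
type completionChunk struct {
	Choices []struct {
		Text string `json:"text"`
	} `json:"choices"`
}

func main() {
	chatData := `{"choices":[{"delta":{"content":"Hello"}}]}`
	textData := `{"choices":[{"text":"Hello"}]}`

	var c chatChunk
	_ = json.Unmarshal([]byte(chatData), &c)
	fmt.Println(c.Choices[0].Delta.Content) // Hello

	var t completionChunk
	_ = json.Unmarshal([]byte(textData), &t)
	fmt.Println(t.Choices[0].Text) // Hello
}
```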
@@ -2,6 +2,7 @@ package controller
 
 import (
     "encoding/json"
+    "fmt"
     "github.com/gin-contrib/sessions"
     "github.com/gin-gonic/gin"
     "net/http"
@@ -351,6 +352,9 @@ func UpdateUser(c *gin.Context) {
         })
         return
     }
+    if originUser.Quota != updatedUser.Quota {
+        model.RecordLog(originUser.Id, model.LogTypeManage, fmt.Sprintf("管理员将用户额度从 %d 点修改为 %d 点", originUser.Quota, updatedUser.Quota))
+    }
     c.JSON(http.StatusOK, gin.H{
         "success": true,
         "message": "",
@@ -7,6 +7,7 @@ import (
     "one-api/common"
     "one-api/model"
     "strconv"
+    "strings"
 )
 
 type ModelRequest struct {
@@ -64,6 +65,11 @@ func Distribute() func(c *gin.Context) {
             c.Abort()
             return
         }
+        if strings.HasPrefix(c.Request.URL.Path, "/v1/moderations") {
+            if modelRequest.Model == "" {
+                modelRequest.Model = "text-moderation-stable"
+            }
+        }
         userId := c.GetInt("id")
         userGroup, _ := model.GetUserGroup(userId)
         channel, err = model.GetRandomSatisfiedChannel(userGroup, modelRequest.Model)
@@ -82,12 +88,10 @@ func Distribute() func(c *gin.Context) {
         c.Set("channel_id", channel.Id)
         c.Set("channel_name", channel.Name)
         c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
-        if channel.Type == common.ChannelTypeCustom || channel.Type == common.ChannelTypeAzure {
         c.Set("base_url", channel.BaseURL)
         if channel.Type == common.ChannelTypeAzure {
             c.Set("api_version", channel.Other)
         }
-        }
         c.Next()
     }
 }
@@ -13,9 +13,6 @@ type Ability struct {
 }
 
 func GetRandomSatisfiedChannel(group string, model string) (*Channel, error) {
-    if group == "default" {
-        return GetRandomChannel()
-    }
     ability := Ability{}
     var err error = nil
     if common.UsingSQLite {
model/log.go (new file, 67 lines)

@@ -0,0 +1,67 @@
package model

import (
    "gorm.io/gorm"
    "one-api/common"
)

type Log struct {
    Id        int    `json:"id"`
    UserId    int    `json:"user_id" gorm:"index"`
    CreatedAt int64  `json:"created_at" gorm:"bigint"`
    Type      int    `json:"type" gorm:"index"`
    Content   string `json:"content"`
}

const (
    LogTypeUnknown = iota
    LogTypeTopup
    LogTypeConsume
    LogTypeManage
    LogTypeSystem
)

func RecordLog(userId int, logType int, content string) {
    log := &Log{
        UserId:    userId,
        CreatedAt: common.GetTimestamp(),
        Type:      logType,
        Content:   content,
    }
    err := DB.Create(log).Error
    if err != nil {
        common.SysError("failed to record log: " + err.Error())
    }
}

func GetAllLogs(logType int, startIdx int, num int) (logs []*Log, err error) {
    var tx *gorm.DB
    if logType == LogTypeUnknown {
        tx = DB
    } else {
        tx = DB.Where("type = ?", logType)
    }
    err = tx.Order("id desc").Limit(num).Offset(startIdx).Find(&logs).Error
    return logs, err
}

func GetUserLogs(userId int, logType int, startIdx int, num int) (logs []*Log, err error) {
    var tx *gorm.DB
    if logType == LogTypeUnknown {
        tx = DB.Where("user_id = ?", userId)
    } else {
        tx = DB.Where("user_id = ? and type = ?", userId, logType)
    }
    err = tx.Order("id desc").Limit(num).Offset(startIdx).Omit("id").Find(&logs).Error
    return logs, err
}

func SearchAllLogs(keyword string) (logs []*Log, err error) {
    err = DB.Where("type = ? or content LIKE ?", keyword, keyword+"%").Order("id desc").Limit(common.MaxRecentItems).Find(&logs).Error
    return logs, err
}

func SearchUserLogs(userId int, keyword string) (logs []*Log, err error) {
    err = DB.Where("user_id = ? and type = ?", userId, keyword).Order("id desc").Limit(common.MaxRecentItems).Omit("id").Find(&logs).Error
    return logs, err
}
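RecordLog is fire-and-forget (errors are only written to the system log), while the Get and Search helpers back the new /api/log endpoints. A short usage sketch of the pagination convention used by controller/log.go (offset = page * ItemsPerPage); the helper itself is illustrative and not part of the repository:

```go
package example

import (
	"fmt"
	"time"

	"one-api/common"
	"one-api/model"
)

// printConsumeLogs pages through one user's consume logs, mirroring the
// offset convention in controller/log.go.
func printConsumeLogs(userId int, page int) error {
	logs, err := model.GetUserLogs(userId, model.LogTypeConsume, page*common.ItemsPerPage, common.ItemsPerPage)
	if err != nil {
		return err
	}
	for _, l := range logs {
		fmt.Printf("%s | %s\n", time.Unix(l.CreatedAt, 0).Format(time.RFC3339), l.Content)
	}
	return nil
}
```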
@@ -79,6 +79,10 @@ func InitDB() (err error) {
         if err != nil {
             return err
         }
+        err = db.AutoMigrate(&Log{})
+        if err != nil {
+            return err
+        }
         err = createRootAccountIfNeed()
         return err
     } else {
@@ -2,6 +2,7 @@ package model
 
 import (
     "errors"
+    "fmt"
     "one-api/common"
 )
 
@@ -65,6 +66,7 @@ func Redeem(key string, userId int) (quota int, err error) {
         if err != nil {
             common.SysError("更新兑换码状态失败:" + err.Error())
         }
+        RecordLog(userId, LogTypeTopup, fmt.Sprintf("通过兑换码充值 %d 点额度", redemption.Quota))
     }()
     return redemption.Quota, nil
 }
@@ -83,7 +85,7 @@ func (redemption *Redemption) SelectUpdate() error {
 // Update Make sure your token's fields is completed, because this will update non-zero values
 func (redemption *Redemption) Update() error {
     var err error
-    err = DB.Model(redemption).Select("name", "status", "redeemed_time").Updates(redemption).Error
+    err = DB.Model(redemption).Select("name", "status", "quota", "redeemed_time").Updates(redemption).Error
     return err
 }
 
@@ -2,6 +2,7 @@ package model
 
 import (
     "errors"
+    "fmt"
     "gorm.io/gorm"
     "one-api/common"
     "strings"
@@ -73,8 +74,14 @@ func (user *User) Insert() error {
     }
     user.Quota = common.QuotaForNewUser
     user.AccessToken = common.GetUUID()
-    err = DB.Create(user).Error
-    return err
+    result := DB.Create(user)
+    if result.Error != nil {
+        return result.Error
+    }
+    if common.QuotaForNewUser > 0 {
+        RecordLog(user.Id, LogTypeSystem, fmt.Sprintf("新用户注册赠送 %d 点额度", common.QuotaForNewUser))
+    }
+    return nil
 }
 
 func (user *User) Update(updatePassword bool) error {
@@ -93,5 +93,10 @@ func SetApiRouter(router *gin.Engine) {
             redemptionRoute.PUT("/", controller.UpdateRedemption)
             redemptionRoute.DELETE("/:id", controller.DeleteRedemption)
         }
+        logRoute := apiRouter.Group("/log")
+        logRoute.GET("/", middleware.AdminAuth(), controller.GetAllLogs)
+        logRoute.GET("/search", middleware.AdminAuth(), controller.SearchAllLogs)
+        logRoute.GET("/self", middleware.UserAuth(), controller.GetUserLogs)
+        logRoute.GET("/self/search", middleware.UserAuth(), controller.SearchUserLogs)
     }
 }
@@ -17,7 +17,7 @@ func SetRelayRouter(router *gin.Engine) {
     relayV1Router := router.Group("/v1")
     relayV1Router.Use(middleware.TokenAuth(), middleware.Distribute())
     {
-        relayV1Router.POST("/completions", controller.RelayNotImplemented)
+        relayV1Router.POST("/completions", controller.Relay)
         relayV1Router.POST("/chat/completions", controller.Relay)
         relayV1Router.POST("/edits", controller.RelayNotImplemented)
         relayV1Router.POST("/images/generations", controller.RelayNotImplemented)
@@ -37,6 +37,6 @@ func SetRelayRouter(router *gin.Engine) {
         relayV1Router.POST("/fine-tunes/:id/cancel", controller.RelayNotImplemented)
         relayV1Router.GET("/fine-tunes/:id/events", controller.RelayNotImplemented)
         relayV1Router.DELETE("/models/:model", controller.RelayNotImplemented)
-        relayV1Router.POST("/moderations", controller.RelayNotImplemented)
+        relayV1Router.POST("/moderations", controller.Relay)
     }
 }
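With /v1/completions and /v1/moderations now routed to controller.Relay, and the distributor defaulting a missing moderation model to text-moderation-stable, a moderation request can omit the model field entirely. An illustrative client call follows; the host, port, and token format are placeholders, and the omission of "model" relies on the defaulting shown in the distributor and relay hunks above:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	payload := []byte(`{"input": "some text to screen"}`) // no "model": the gateway fills it in
	req, err := http.NewRequest("POST", "http://localhost:3000/v1/moderations", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer sk-xxxx") // placeholder one-api token
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}
```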
@@ -22,6 +22,7 @@ import EditChannel from './pages/Channel/EditChannel';
 import Redemption from './pages/Redemption';
 import EditRedemption from './pages/Redemption/EditRedemption';
 import TopUp from './pages/TopUp';
+import Log from './pages/Log';
 
 const Home = lazy(() => import('./pages/Home'));
 const About = lazy(() => import('./pages/About'));
@@ -250,6 +251,14 @@ function App() {
             </PrivateRoute>
           }
         />
+        <Route
+          path='/log'
+          element={
+            <PrivateRoute>
+              <Log />
+            </PrivateRoute>
+          }
+        />
         <Route
           path='/about'
           element={
@@ -41,6 +41,11 @@ const headerButtons = [
     icon: 'user',
     admin: true,
   },
+  {
+    name: '日志',
+    to: '/log',
+    icon: 'book',
+  },
   {
     name: '设置',
     to: '/setting',
web/src/components/LogsTable.js (new file, 256 lines)

@@ -0,0 +1,256 @@
import React, { useEffect, useState } from 'react';
import { Button, Label, Pagination, Select, Table } from 'semantic-ui-react';
import { API, isAdmin, showError, timestamp2string } from '../helpers';

import { ITEMS_PER_PAGE } from '../constants';

function renderTimestamp(timestamp) {
  return (
    <>
      {timestamp2string(timestamp)}
    </>
  );
}

const MODE_OPTIONS = [
  { key: 'all', text: '全部用户', value: 'all' },
  { key: 'self', text: '当前用户', value: 'self' },
];

const LOG_OPTIONS = [
  { key: '0', text: '全部', value: 0 },
  { key: '1', text: '充值', value: 1 },
  { key: '2', text: '消费', value: 2 },
  { key: '3', text: '管理', value: 3 },
  { key: '4', text: '系统', value: 4 }
];

function renderType(type) {
  switch (type) {
    case 1:
      return <Label basic color='green'> 充值 </Label>;
    case 2:
      return <Label basic color='olive'> 消费 </Label>;
    case 3:
      return <Label basic color='orange'> 管理 </Label>;
    case 4:
      return <Label basic color='purple'> 系统 </Label>;
    default:
      return <Label basic color='black'> 未知 </Label>;
  }
}

const LogsTable = () => {
  const [logs, setLogs] = useState([]);
  const [loading, setLoading] = useState(true);
  const [activePage, setActivePage] = useState(1);
  const [searchKeyword, setSearchKeyword] = useState('');
  const [searching, setSearching] = useState(false);
  const [logType, setLogType] = useState(0);
  const [mode, setMode] = useState('self'); // all, self
  const showModePanel = isAdmin();

  const loadLogs = async (startIdx) => {
    let url = `/api/log/self/?p=${startIdx}&type=${logType}`;
    if (mode === 'all') {
      url = `/api/log/?p=${startIdx}&type=${logType}`;
    }
    const res = await API.get(url);
    const { success, message, data } = res.data;
    if (success) {
      if (startIdx === 0) {
        setLogs(data);
      } else {
        let newLogs = logs;
        newLogs.push(...data);
        setLogs(newLogs);
      }
    } else {
      showError(message);
    }
    setLoading(false);
  };

  const onPaginationChange = (e, { activePage }) => {
    (async () => {
      if (activePage === Math.ceil(logs.length / ITEMS_PER_PAGE) + 1) {
        // In this case we have to load more data and then append them.
        await loadLogs(activePage - 1);
      }
      setActivePage(activePage);
    })();
  };

  const refresh = async () => {
    setLoading(true);
    await loadLogs(0);
  };

  useEffect(() => {
    loadLogs(0)
      .then()
      .catch((reason) => {
        showError(reason);
      });
  }, []);

  useEffect(() => {
    refresh().then();
  }, [mode, logType]);

  const searchLogs = async () => {
    if (searchKeyword === '') {
      // if keyword is blank, load files instead.
      await loadLogs(0);
      setActivePage(1);
      return;
    }
    setSearching(true);
    const res = await API.get(`/api/log/self/search?keyword=${searchKeyword}`);
    const { success, message, data } = res.data;
    if (success) {
      setLogs(data);
      setActivePage(1);
    } else {
      showError(message);
    }
    setSearching(false);
  };

  const handleKeywordChange = async (e, { value }) => {
    setSearchKeyword(value.trim());
  };

  const sortLog = (key) => {
    if (logs.length === 0) return;
    setLoading(true);
    let sortedLogs = [...logs];
    sortedLogs.sort((a, b) => {
      return ('' + a[key]).localeCompare(b[key]);
    });
    if (sortedLogs[0].id === logs[0].id) {
      sortedLogs.reverse();
    }
    setLogs(sortedLogs);
    setLoading(false);
  };

  return (
    <>
      <Table basic>
        <Table.Header>
          <Table.Row>
            <Table.HeaderCell
              style={{ cursor: 'pointer' }}
              onClick={() => {
                sortLog('created_time');
              }}
              width={3}
            >
              时间
            </Table.HeaderCell>
            {
              showModePanel && (
                <Table.HeaderCell
                  style={{ cursor: 'pointer' }}
                  onClick={() => {
                    sortLog('user_id');
                  }}
                  width={1}
                >
                  用户
                </Table.HeaderCell>
              )
            }
            <Table.HeaderCell
              style={{ cursor: 'pointer' }}
              onClick={() => {
                sortLog('type');
              }}
              width={2}
            >
              类型
            </Table.HeaderCell>
            <Table.HeaderCell
              style={{ cursor: 'pointer' }}
              onClick={() => {
                sortLog('content');
              }}
              width={showModePanel ? 10 : 11}
            >
              详情
            </Table.HeaderCell>
          </Table.Row>
        </Table.Header>

        <Table.Body>
          {logs
            .slice(
              (activePage - 1) * ITEMS_PER_PAGE,
              activePage * ITEMS_PER_PAGE
            )
            .map((log, idx) => {
              if (log.deleted) return <></>;
              return (
                <Table.Row key={log.created_at}>
                  <Table.Cell>{renderTimestamp(log.created_at)}</Table.Cell>
                  {
                    showModePanel && (
                      <Table.Cell><Label>{log.user_id}</Label></Table.Cell>
                    )
                  }
                  <Table.Cell>{renderType(log.type)}</Table.Cell>
                  <Table.Cell>{log.content}</Table.Cell>
                </Table.Row>
              );
            })}
        </Table.Body>

        <Table.Footer>
          <Table.Row>
            <Table.HeaderCell colSpan={showModePanel ? '5' : '4'}>
              {
                showModePanel && (
                  <Select
                    placeholder='选择模式'
                    options={MODE_OPTIONS}
                    style={{ marginRight: '8px' }}
                    name='mode'
                    value={mode}
                    onChange={(e, { name, value }) => {
                      setMode(value);
                    }}
                  />
                )
              }
              <Select
                placeholder='选择明细分类'
                options={LOG_OPTIONS}
                style={{ marginRight: '8px' }}
                name='logType'
                value={logType}
                onChange={(e, { name, value }) => {
                  setLogType(value);
                }}
              />
              <Button size='small' onClick={refresh} loading={loading}>刷新</Button>
              <Pagination
                floated='right'
                activePage={activePage}
                onPageChange={onPaginationChange}
                size='small'
                siblingRange={1}
                totalPages={
                  Math.ceil(logs.length / ITEMS_PER_PAGE) +
                  (logs.length % ITEMS_PER_PAGE === 0 ? 1 : 0)
                }
              />
            </Table.HeaderCell>
          </Table.Row>
        </Table.Footer>
      </Table>
    </>
  );
};

export default LogsTable;
@@ -198,6 +198,20 @@ const EditChannel = () => {
             handleInputChange(null, { name: 'models', value: fullModels });
           }}>填入所有模型</Button>
         </div>
+        {
+          inputs.type === 1 && (
+            <Form.Field>
+              <Form.Input
+                label='代理'
+                name='base_url'
+                placeholder={'请输入 OpenAI API 代理地址,如果不需要请留空,格式为:https://api.openai.com'}
+                onChange={handleInputChange}
+                value={inputs.base_url}
+                autoComplete='new-password'
+              />
+            </Form.Field>
+          )
+        }
         {
           batch ? <Form.Field>
             <Form.TextArea
web/src/pages/Log/index.js (new file, 14 lines)

@@ -0,0 +1,14 @@
import React from 'react';
import { Header, Segment } from 'semantic-ui-react';
import LogsTable from '../../components/LogsTable';

const Token = () => (
  <>
    <Segment>
      <Header as='h3'>额度明细</Header>
      <LogsTable />
    </Segment>
  </>
);

export default Token;