merge upstream

Signed-off-by: wozulong <>
wozulong 2024-08-15 22:30:28 +08:00
commit fefe5913e9
17 changed files with 276 additions and 213 deletions

View File

@@ -33,6 +33,7 @@ var defaultModelRatio = map[string]float64{
 	"gpt-4-32k-0613":         30,
 	"gpt-4o-mini":            0.075, // $0.00015 / 1K tokens
 	"gpt-4o-mini-2024-07-18": 0.075,
+	"chatgpt-4o-latest":      2.5,  // $0.01 / 1K tokens
 	"gpt-4o":                 2.5,  // $0.005 / 1K tokens
 	"gpt-4o-2024-05-13":      2.5,  // $0.005 / 1K tokens
 	"gpt-4o-2024-08-06":      1.25, // $0.01 / 1K tokens
@@ -343,6 +344,9 @@ func GetCompletionRatio(name string) float64 {
 		}
 		return 2
 	}
+	if name == "chatgpt-4o-latest" {
+		return 3
+	}
 	if strings.HasPrefix(name, "claude-instant-1") {
 		return 3
 	} else if strings.HasPrefix(name, "claude-2") {

View File

@@ -1,18 +1,19 @@
 package controller
 
 import (
-	"github.com/gin-gonic/gin"
 	"net/http"
 	"one-api/common"
 	"one-api/model"
 	"strconv"
+
+	"github.com/gin-gonic/gin"
 )
 
 func GetAllLogs(c *gin.Context) {
 	p, _ := strconv.Atoi(c.Query("p"))
 	pageSize, _ := strconv.Atoi(c.Query("page_size"))
-	if p < 0 {
-		p = 0
+	if p < 1 {
+		p = 1
 	}
 	if pageSize < 0 {
 		pageSize = common.ItemsPerPage
@@ -24,7 +25,7 @@ func GetAllLogs(c *gin.Context) {
 	tokenName := c.Query("token_name")
 	modelName := c.Query("model_name")
 	channel, _ := strconv.Atoi(c.Query("channel"))
-	logs, total, err := model.GetAllLogs(logType, startTimestamp, endTimestamp, modelName, username, tokenName, p*pageSize, pageSize, channel)
+	logs, total, err := model.GetAllLogs(logType, startTimestamp, endTimestamp, modelName, username, tokenName, (p-1)*pageSize, pageSize, channel)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{
 			"success": false,
@@ -35,17 +36,20 @@ func GetAllLogs(c *gin.Context) {
 	c.JSON(http.StatusOK, gin.H{
 		"success": true,
 		"message": "",
+		"data": map[string]any{
+			"items": logs,
 			"total": total,
-		"data":  logs,
+			"page":      p,
+			"page_size": pageSize,
+		},
 	})
-	return
 }
 
 func GetUserLogs(c *gin.Context) {
 	p, _ := strconv.Atoi(c.Query("p"))
 	pageSize, _ := strconv.Atoi(c.Query("page_size"))
-	if p < 0 {
-		p = 0
+	if p < 1 {
+		p = 1
 	}
 	if pageSize < 0 {
 		pageSize = common.ItemsPerPage
@@ -59,7 +63,7 @@ func GetUserLogs(c *gin.Context) {
 	endTimestamp, _ := strconv.ParseInt(c.Query("end_timestamp"), 10, 64)
 	tokenName := c.Query("token_name")
 	modelName := c.Query("model_name")
-	logs, total, err := model.GetUserLogs(userId, logType, startTimestamp, endTimestamp, modelName, tokenName, p*pageSize, pageSize)
+	logs, total, err := model.GetUserLogs(userId, logType, startTimestamp, endTimestamp, modelName, tokenName, (p-1)*pageSize, pageSize)
 	if err != nil {
 		c.JSON(http.StatusOK, gin.H{
 			"success": false,
@@ -70,8 +74,12 @@ func GetUserLogs(c *gin.Context) {
 	c.JSON(http.StatusOK, gin.H{
 		"success": true,
 		"message": "",
+		"data": map[string]any{
+			"items": logs,
 			"total": total,
-		"data":  logs,
+			"page":      p,
+			"page_size": pageSize,
+		},
 	})
 	return
 }
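The hunks above switch the log endpoints to 1-based page numbers and wrap the rows in a `data` object. A minimal sketch of that paging convention, assuming a standalone helper (the name `pageToOffset` is illustrative, not part of the repository):

```go
package main

import "fmt"

// pageToOffset sketches the 1-based paging convention introduced above:
// page numbers below 1 are clamped to 1 and the SQL offset becomes (p-1)*pageSize.
func pageToOffset(p, pageSize int) (page, offset int) {
	if p < 1 {
		p = 1
	}
	return p, (p - 1) * pageSize
}

func main() {
	for _, p := range []int{0, 1, 2, 5} {
		page, offset := pageToOffset(p, 10)
		fmt.Printf("p=%d -> page=%d offset=%d\n", p, page, offset)
	}
	// The handlers now return data as {items, total, page, page_size}
	// instead of a bare list, so the frontend can read the total directly.
}
```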

View File

@@ -42,6 +42,11 @@ func main() {
 	if err != nil {
 		common.FatalLog("failed to initialize database: " + err.Error())
 	}
+	// Initialize SQL Database
+	err = model.InitLogDB()
+	if err != nil {
+		common.FatalLog("failed to initialize database: " + err.Error())
+	}
 	defer func() {
 		err := model.CloseDB()
 		if err != nil {

View File

@@ -106,16 +106,23 @@ func SearchChannels(keyword string, group string, model string) ([]*Channel, err
 	// build the WHERE clause
 	var whereClause string
 	var args []interface{}
-	if group != "" {
-		whereClause = "(id = ? OR name LIKE ? OR " + keyCol + " = ?) AND " + groupCol + " = ? AND " + modelsCol + " LIKE ?"
-		args = append(args, common.String2Int(keyword), "%"+keyword+"%", keyword, group, "%"+model+"%")
+	if group != "" && group != "null" {
+		var groupCondition string
+		if common.UsingMySQL {
+			groupCondition = `CONCAT(',', ` + groupCol + `, ',') LIKE ?`
+		} else {
+			// sqlite, PostgreSQL
+			groupCondition = `(',' || ` + groupCol + ` || ',') LIKE ?`
+		}
+		whereClause = "(id = ? OR name LIKE ? OR " + keyCol + " = ?) AND " + modelsCol + ` LIKE ? AND ` + groupCondition
+		args = append(args, common.String2Int(keyword), "%"+keyword+"%", keyword, "%"+model+"%", "%,"+group+",%")
 	} else {
 		whereClause = "(id = ? OR name LIKE ? OR " + keyCol + " = ?) AND " + modelsCol + " LIKE ?"
 		args = append(args, common.String2Int(keyword), "%"+keyword+"%", keyword, "%"+model+"%")
 	}
 
 	// run the query
-	err := baseQuery.Where(whereClause, args...).Find(&channels).Error
+	err := baseQuery.Where(whereClause, args...).Order("priority desc").Find(&channels).Error
 	if err != nil {
 		return nil, err
 	}
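The new group filter above relies on wrapping both the stored group list and the searched group in commas, so a LIKE match can only hit whole group names. A hedged sketch of that trick as a standalone helper (assumed names, not the repository's actual code):

```go
package main

import (
	"fmt"
	"strings"
)

// buildGroupCondition builds the comma-wrapped LIKE condition: the stored
// group column (e.g. "default,vip") and the searched group are both wrapped
// in commas, so "vip" matches ",default,vip," but never ",default,svip,".
func buildGroupCondition(usingMySQL bool, groupCol, group string) (condition, arg string) {
	if usingMySQL {
		condition = "CONCAT(',', " + groupCol + ", ',') LIKE ?"
	} else {
		// SQLite and PostgreSQL concatenate strings with ||.
		condition = "(',' || " + groupCol + " || ',') LIKE ?"
	}
	arg = "%," + group + ",%"
	return condition, arg
}

func main() {
	cond, arg := buildGroupCondition(false, "`group`", "vip")
	fmt.Println(cond, arg)
	needle := strings.Trim(arg, "%") // ",vip,"
	fmt.Println(strings.Contains(",default,vip,", needle))  // true
	fmt.Println(strings.Contains(",default,svip,", needle)) // false
}
```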

View File

@@ -3,11 +3,12 @@ package model
 import (
 	"context"
 	"fmt"
-	"github.com/bytedance/gopkg/util/gopool"
-	"gorm.io/gorm"
 	"one-api/common"
 	"strings"
 	"time"
+
+	"github.com/bytedance/gopkg/util/gopool"
+	"gorm.io/gorm"
 )
 
 type Log struct {
@@ -38,7 +39,7 @@ const (
 )
 
 func GetLogByKey(key string) (logs []*Log, err error) {
-	err = DB.Joins("left join tokens on tokens.id = logs.token_id").Where("tokens.key = ?", strings.TrimPrefix(key, "sk-")).Find(&logs).Error
+	err = LOG_DB.Joins("left join tokens on tokens.id = logs.token_id").Where("tokens.key = ?", strings.TrimPrefix(key, "sk-")).Find(&logs).Error
 	return logs, err
 }
@@ -54,7 +55,7 @@ func RecordLog(userId int, logType int, content string) {
 		Type:    logType,
 		Content: content,
 	}
-	err := DB.Create(log).Error
+	err := LOG_DB.Create(log).Error
 	if err != nil {
 		common.SysError("failed to record log: " + err.Error())
 	}
@@ -84,7 +85,7 @@ func RecordConsumeLog(ctx context.Context, userId int, channelId int, promptToke
 		IsStream: isStream,
 		Other:    otherStr,
 	}
-	err := DB.Create(log).Error
+	err := LOG_DB.Create(log).Error
 	if err != nil {
 		common.LogError(ctx, "failed to record log: "+err.Error())
 	}
@@ -98,9 +99,9 @@ func RecordConsumeLog(ctx context.Context, userId int, channelId int, promptToke
 func GetAllLogs(logType int, startTimestamp int64, endTimestamp int64, modelName string, username string, tokenName string, startIdx int, num int, channel int) (logs []*Log, total int64, err error) {
 	var tx *gorm.DB
 	if logType == LogTypeUnknown {
-		tx = DB
+		tx = LOG_DB
 	} else {
-		tx = DB.Where("type = ?", logType)
+		tx = LOG_DB.Where("type = ?", logType)
 	}
 	if modelName != "" {
 		tx = tx.Where("model_name like ?", modelName)
@@ -120,22 +121,23 @@ func GetAllLogs(logType int, startTimestamp int64, endTimestamp int64, modelName
 	if channel != 0 {
 		tx = tx.Where("channel_id = ?", channel)
 	}
-
 	err = tx.Model(&Log{}).Count(&total).Error
 	if err != nil {
 		return nil, 0, err
 	}
-
 	err = tx.Order("id desc").Limit(num).Offset(startIdx).Find(&logs).Error
+	if err != nil {
+		return nil, 0, err
+	}
 	return logs, total, err
 }
 
 func GetUserLogs(userId int, logType int, startTimestamp int64, endTimestamp int64, modelName string, tokenName string, startIdx int, num int) (logs []*Log, total int64, err error) {
 	var tx *gorm.DB
 	if logType == LogTypeUnknown {
-		tx = DB.Where("user_id = ?", userId)
+		tx = LOG_DB.Where("user_id = ?", userId)
 	} else {
-		tx = DB.Where("user_id = ? and type = ?", userId, logType)
+		tx = LOG_DB.Where("user_id = ? and type = ?", userId, logType)
 	}
 	if modelName != "" {
 		tx = tx.Where("model_name like ?", modelName)
@@ -149,14 +151,11 @@ func GetUserLogs(userId int, logType int, startTimestamp int64, endTimestamp int
 	if endTimestamp != 0 {
 		tx = tx.Where("created_at <= ?", endTimestamp)
 	}
-
 	err = tx.Model(&Log{}).Count(&total).Error
 	if err != nil {
 		return nil, 0, err
 	}
-
 	err = tx.Order("id desc").Limit(num).Offset(startIdx).Omit("id").Find(&logs).Error
-	return logs, total, err
 	for i := range logs {
 		var otherMap map[string]interface{}
 		otherMap = common.StrToMap(logs[i].Other)
@@ -170,12 +169,12 @@ func GetUserLogs(userId int, logType int, startTimestamp int64, endTimestamp int
 }
 
 func SearchAllLogs(keyword string) (logs []*Log, err error) {
-	err = DB.Where("type = ? or content LIKE ?", keyword, keyword+"%").Order("id desc").Limit(common.MaxRecentItems).Find(&logs).Error
+	err = LOG_DB.Where("type = ? or content LIKE ?", keyword, keyword+"%").Order("id desc").Limit(common.MaxRecentItems).Find(&logs).Error
 	return logs, err
 }
 
 func SearchUserLogs(userId int, keyword string) (logs []*Log, err error) {
-	err = DB.Where("user_id = ? and type = ?", userId, keyword).Order("id desc").Limit(common.MaxRecentItems).Omit("id").Find(&logs).Error
+	err = LOG_DB.Where("user_id = ? and type = ?", userId, keyword).Order("id desc").Limit(common.MaxRecentItems).Omit("id").Find(&logs).Error
 	return logs, err
 }
@@ -186,10 +185,10 @@ type Stat struct {
 }
 
 func SumUsedQuota(logType int, startTimestamp int64, endTimestamp int64, modelName string, username string, tokenName string, channel int) (stat Stat) {
-	tx := DB.Table("logs").Select("sum(quota) quota")
+	tx := LOG_DB.Table("logs").Select("sum(quota) quota")
 	// create a separate query for rpm and tpm
-	rpmTpmQuery := DB.Table("logs").Select("count(*) rpm, sum(prompt_tokens) + sum(completion_tokens) tpm")
+	rpmTpmQuery := LOG_DB.Table("logs").Select("count(*) rpm, sum(prompt_tokens) + sum(completion_tokens) tpm")
 
 	if username != "" {
 		tx = tx.Where("username = ?", username)
@@ -206,8 +205,8 @@ func SumUsedQuota(logType int, startTimestamp int64, endTimestamp int64, modelNa
 		tx = tx.Where("created_at <= ?", endTimestamp)
 	}
 	if modelName != "" {
-		tx = tx.Where("model_name = ?", modelName)
-		rpmTpmQuery = rpmTpmQuery.Where("model_name = ?", modelName)
+		tx = tx.Where("model_name like ?", modelName)
+		rpmTpmQuery = rpmTpmQuery.Where("model_name like ?", modelName)
 	}
 	if channel != 0 {
 		tx = tx.Where("channel_id = ?", channel)
@@ -228,7 +227,7 @@ func SumUsedQuota(logType int, startTimestamp int64, endTimestamp int64, modelNa
 }
 
 func SumUsedToken(logType int, startTimestamp int64, endTimestamp int64, modelName string, username string, tokenName string) (token int) {
-	tx := DB.Table("logs").Select("ifnull(sum(prompt_tokens),0) + ifnull(sum(completion_tokens),0)")
+	tx := LOG_DB.Table("logs").Select("ifnull(sum(prompt_tokens),0) + ifnull(sum(completion_tokens),0)")
 	if username != "" {
 		tx = tx.Where("username = ?", username)
 	}
@@ -249,6 +248,6 @@ func SumUsedToken(logType int, startTimestamp int64, endTimestamp int64, modelNa
 }
 
 func DeleteOldLog(targetTimestamp int64) (int64, error) {
-	result := DB.Where("created_at < ?", targetTimestamp).Delete(&Log{})
+	result := LOG_DB.Where("created_at < ?", targetTimestamp).Delete(&Log{})
 	return result.RowsAffected, result.Error
 }

View File

@@ -15,6 +15,8 @@ import (
 
 var DB *gorm.DB
 
+var LOG_DB *gorm.DB
+
 func createRootAccountIfNeed() error {
 	var user User
 	//if user.Status != common.UserStatusEnabled {
@@ -38,9 +40,9 @@ func createRootAccountIfNeed() error {
 	return nil
 }
 
-func chooseDB() (*gorm.DB, error) {
-	if os.Getenv("SQL_DSN") != "" {
-		dsn := os.Getenv("SQL_DSN")
+func chooseDB(envName string) (*gorm.DB, error) {
+	dsn := os.Getenv(envName)
+	if dsn != "" {
 		if strings.HasPrefix(dsn, "postgres://") {
 			// Use PostgreSQL
 			common.SysLog("using PostgreSQL as database")
@@ -52,6 +54,13 @@ func chooseDB() (*gorm.DB, error) {
 				PrepareStmt: true, // precompile SQL
 			})
 		}
+		if strings.HasPrefix(dsn, "local") {
+			common.SysLog("SQL_DSN not set, using SQLite as database")
+			common.UsingSQLite = true
+			return gorm.Open(sqlite.Open(common.SQLitePath), &gorm.Config{
+				PrepareStmt: true, // precompile SQL
+			})
+		}
 		// Use MySQL
 		common.SysLog("using MySQL as database")
 		// check parseTime
@@ -76,7 +85,7 @@ func chooseDB() (*gorm.DB, error) {
 }
 
 func InitDB() (err error) {
-	db, err := chooseDB()
+	db, err := chooseDB("SQL_DSN")
 	if err == nil {
 		if common.DebugEnabled {
 			db = db.Debug()
@@ -100,52 +109,7 @@ func InitDB() (err error) {
 		//	_, _ = sqlDB.Exec("ALTER TABLE midjourneys MODIFY status VARCHAR(20);") // TODO: delete this line when most users have upgraded
 		//}
 		common.SysLog("database migration started")
-		err = db.AutoMigrate(&Channel{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&Token{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&User{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&Option{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&Redemption{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&Ability{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&Log{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&Midjourney{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&TopUp{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&QuotaData{})
-		if err != nil {
-			return err
-		}
-		err = db.AutoMigrate(&Task{})
-		if err != nil {
-			return err
-		}
-		common.SysLog("database migrated")
-		err = createRootAccountIfNeed()
+		err = migrateDB()
 		return err
 	} else {
 		common.FatalLog(err)
@@ -153,8 +117,103 @@ func InitDB() (err error) {
 	return err
 }
 
-func CloseDB() error {
-	sqlDB, err := DB.DB()
+func InitLogDB() (err error) {
+	if os.Getenv("LOG_SQL_DSN") == "" {
+		LOG_DB = DB
+		return
+	}
+	db, err := chooseDB("LOG_SQL_DSN")
+	if err == nil {
+		if common.DebugEnabled {
+			db = db.Debug()
+		}
+		LOG_DB = db
+		sqlDB, err := LOG_DB.DB()
+		if err != nil {
+			return err
+		}
+		sqlDB.SetMaxIdleConns(common.GetEnvOrDefault("SQL_MAX_IDLE_CONNS", 100))
+		sqlDB.SetMaxOpenConns(common.GetEnvOrDefault("SQL_MAX_OPEN_CONNS", 1000))
+		sqlDB.SetConnMaxLifetime(time.Second * time.Duration(common.GetEnvOrDefault("SQL_MAX_LIFETIME", 60)))
+		if !common.IsMasterNode {
+			return nil
+		}
+		//if common.UsingMySQL {
+		//	_, _ = sqlDB.Exec("DROP INDEX idx_channels_key ON channels;") // TODO: delete this line when most users have upgraded
+		//	_, _ = sqlDB.Exec("ALTER TABLE midjourneys MODIFY action VARCHAR(40);") // TODO: delete this line when most users have upgraded
+		//	_, _ = sqlDB.Exec("ALTER TABLE midjourneys MODIFY progress VARCHAR(30);") // TODO: delete this line when most users have upgraded
+		//	_, _ = sqlDB.Exec("ALTER TABLE midjourneys MODIFY status VARCHAR(20);") // TODO: delete this line when most users have upgraded
+		//}
+		common.SysLog("database migration started")
+		err = migrateLOGDB()
+		return err
+	} else {
+		common.FatalLog(err)
+	}
+	return err
+}
+
+func migrateDB() error {
+	err := DB.AutoMigrate(&Channel{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&Token{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&User{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&Option{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&Redemption{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&Ability{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&Log{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&Midjourney{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&TopUp{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&QuotaData{})
+	if err != nil {
+		return err
+	}
+	err = DB.AutoMigrate(&Task{})
+	if err != nil {
+		return err
+	}
+	common.SysLog("database migrated")
+	err = createRootAccountIfNeed()
+	return err
+}
+
+func migrateLOGDB() error {
+	var err error
+	if err = LOG_DB.AutoMigrate(&Log{}); err != nil {
+		return err
+	}
+	return nil
+}
+
+func closeDB(db *gorm.DB) error {
+	sqlDB, err := db.DB()
 	if err != nil {
 		return err
 	}
@@ -162,6 +221,16 @@ func CloseDB() error {
 	return err
 }
 
+func CloseDB() error {
+	if LOG_DB != DB {
+		err := closeDB(LOG_DB)
+		if err != nil {
+			return err
+		}
+	}
+	return closeDB(DB)
+}
+
 var (
 	lastPingTime time.Time
 	pingMutex    sync.Mutex
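The hunks above route log reads and writes through LOG_DB, which either aliases the main handle or points at a second database selected from the environment. A small sketch of that selection logic, assuming only the two environment variables drive it (the helper name below is illustrative, not the repository's API):

```go
package main

import (
	"fmt"
	"os"
)

// logDSN returns the DSN used for the log database: when LOG_SQL_DSN is
// unset, logs share the main database connection; otherwise a dedicated
// log database is opened from LOG_SQL_DSN.
func logDSN() (dsn string, separate bool) {
	if v := os.Getenv("LOG_SQL_DSN"); v != "" {
		return v, true // dedicated log database
	}
	return os.Getenv("SQL_DSN"), false // reuse the main DB handle
}

func main() {
	dsn, separate := logDSN()
	fmt.Printf("log dsn=%q separate=%v\n", dsn, separate)
}
```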

View File

@@ -33,7 +33,7 @@ type ClaudeMessage struct {
 type Tool struct {
 	Name        string `json:"name"`
 	Description string `json:"description,omitempty"`
-	InputSchema InputSchema `json:"input_schema"`
+	InputSchema map[string]interface{} `json:"input_schema"`
 }
 
 type InputSchema struct {

View File

@@ -63,15 +63,21 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
 	for _, tool := range textRequest.Tools {
 		if params, ok := tool.Function.Parameters.(map[string]any); ok {
-			claudeTools = append(claudeTools, Tool{
+			claudeTool := Tool{
 				Name:        tool.Function.Name,
 				Description: tool.Function.Description,
-				InputSchema: InputSchema{
-					Type:       params["type"].(string),
-					Properties: params["properties"],
-					Required:   params["required"],
-				},
-			})
+			}
+			claudeTool.InputSchema = make(map[string]interface{})
+			claudeTool.InputSchema["type"] = params["type"].(string)
+			claudeTool.InputSchema["properties"] = params["properties"]
+			claudeTool.InputSchema["required"] = params["required"]
+			for s, a := range params {
+				if s == "type" || s == "properties" || s == "required" {
+					continue
+				}
+				claudeTool.InputSchema[s] = a
+			}
+			claudeTools = append(claudeTools, claudeTool)
 		}
 	}

View File

@@ -8,6 +8,7 @@ var ModelList = []string{
 	"gpt-4-32k", "gpt-4-32k-0314", "gpt-4-32k-0613",
 	"gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
 	"gpt-4-vision-preview",
+	"chatgpt-4o-latest",
 	"gpt-4o", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06",
 	"gpt-4o-mini", "gpt-4o-mini-2024-07-18",
 	"text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large",

View File

@@ -75,7 +75,7 @@ func AudioHelper(c *gin.Context) *dto.OpenAIErrorWithStatusCode {
 		return service.OpenAIErrorWrapperLocal(err, "get_user_quota_failed", http.StatusInternalServerError)
 	}
 	if userQuota-preConsumedQuota < 0 {
-		return service.OpenAIErrorWrapperLocal(errors.New("user quota is not enough"), "insufficient_user_quota", http.StatusForbidden)
+		return service.OpenAIErrorWrapperLocal(errors.New(fmt.Sprintf("audio pre-consumed quota failed, user quota: %d, need quota: %d", userQuota, preConsumedQuota)), "insufficient_user_quota", http.StatusBadRequest)
 	}
 	err = model.CacheDecreaseUserQuota(relayInfo.UserId, preConsumedQuota)
 	if err != nil {

View File

@@ -125,7 +125,7 @@ func ImageHelper(c *gin.Context, relayMode int) *dto.OpenAIErrorWithStatusCode {
 	quota := int(imageRatio * groupRatio * common.QuotaPerUnit)
 
 	if userQuota-quota < 0 {
-		return service.OpenAIErrorWrapperLocal(errors.New("user quota is not enough"), "insufficient_user_quota", http.StatusForbidden)
+		return service.OpenAIErrorWrapperLocal(errors.New(fmt.Sprintf("image pre-consumed quota failed, user quota: %d, need quota: %d", userQuota, quota)), "insufficient_user_quota", http.StatusBadRequest)
 	}
 
 	adaptor := GetAdaptor(relayInfo.ApiType)

View File

@@ -247,9 +247,12 @@ func preConsumeQuota(c *gin.Context, preConsumedQuota int, relayInfo *relaycommo
 	if err != nil {
 		return 0, 0, service.OpenAIErrorWrapperLocal(err, "get_user_quota_failed", http.StatusInternalServerError)
 	}
-	if userQuota <= 0 || userQuota-preConsumedQuota < 0 {
+	if userQuota <= 0 {
 		return 0, 0, service.OpenAIErrorWrapperLocal(errors.New("user quota is not enough"), "insufficient_user_quota", http.StatusForbidden)
 	}
+	if userQuota-preConsumedQuota < 0 {
+		return 0, 0, service.OpenAIErrorWrapperLocal(errors.New(fmt.Sprintf("chat pre-consumed quota failed, user quota: %d, need quota: %d", userQuota, preConsumedQuota)), "insufficient_user_quota", http.StatusBadRequest)
+	}
 	err = model.CacheDecreaseUserQuota(relayInfo.UserId, preConsumedQuota)
 	if err != nil {
 		return 0, 0, service.OpenAIErrorWrapperLocal(err, "decrease_user_quota_failed", http.StatusInternalServerError)

View File

@@ -28,13 +28,11 @@ func MidjourneyErrorWithStatusCodeWrapper(code int, desc string, statusCode int)
 // OpenAIErrorWrapper wraps an error into an OpenAIErrorWithStatusCode
 func OpenAIErrorWrapper(err error, code string, statusCode int) *dto.OpenAIErrorWithStatusCode {
 	text := err.Error()
-	// define a regex to match the upstream URL
-	if strings.Contains(text, "Post") || strings.Contains(text, "dial") {
+	lowerText := strings.ToLower(text)
+	if strings.Contains(lowerText, "post") || strings.Contains(lowerText, "dial") || strings.Contains(lowerText, "http") {
 		common.SysLog(fmt.Sprintf("error: %s", text))
 		text = "请求上游地址失败"
 	}
-	// avoid exposing internal errors
-
 	openAIError := dto.OpenAIError{
 		Message: text,
 		Type:    "new_api_error",
@@ -113,14 +111,12 @@ func TaskErrorWrapperLocal(err error, code string, statusCode int) *dto.TaskErro
 func TaskErrorWrapper(err error, code string, statusCode int) *dto.TaskError {
 	text := err.Error()
-
-	// define a regex to match the upstream URL
-	if strings.Contains(text, "Post") || strings.Contains(text, "dial") {
+	lowerText := strings.ToLower(text)
+	if strings.Contains(lowerText, "post") || strings.Contains(lowerText, "dial") || strings.Contains(lowerText, "http") {
 		common.SysLog(fmt.Sprintf("error: %s", text))
 		text = "请求上游地址失败"
 	}
 	// avoid exposing internal errors
-
 	taskError := &dto.TaskError{
 		Code:    code,
 		Message: text,

View File

@@ -50,7 +50,7 @@
     ]
   },
   "devDependencies": {
-    "@so1ve/prettier-config": "^2.0.0",
+    "@so1ve/prettier-config": "^3.1.0",
     "@vitejs/plugin-react": "^4.2.1",
     "prettier": "^3.0.0",
     "typescript": "4.4.2",

View File

@@ -12,10 +12,10 @@ importers:
         version: 2.53.2(react@18.2.0)
       '@douyinfe/semi-ui':
         specifier: ^2.55.3
-        version: 2.55.3(react-dom@18.2.0)(react@18.2.0)
+        version: 2.55.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@visactor/react-vchart':
         specifier: ~1.8.8
-        version: 1.8.11(react-dom@18.2.0)(react@18.2.0)
+        version: 1.8.11(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@visactor/vchart':
         specifier: ~1.8.8
         version: 1.8.11
@@ -48,26 +48,26 @@ importers:
         version: 1.0.4
       react-router-dom:
         specifier: ^6.3.0
-        version: 6.22.2(react-dom@18.2.0)(react@18.2.0)
+        version: 6.22.2(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       react-telegram-login:
         specifier: ^1.1.2
         version: 1.1.2(react@18.2.0)
       react-toastify:
         specifier: ^9.0.8
-        version: 9.1.3(react-dom@18.2.0)(react@18.2.0)
+        version: 9.1.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       react-turnstile:
         specifier: ^1.0.5
-        version: 1.1.3(react-dom@18.2.0)(react@18.2.0)
+        version: 1.1.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       semantic-ui-offline:
         specifier: ^2.5.0
         version: 2.5.0
       semantic-ui-react:
         specifier: ^2.1.3
-        version: 2.1.5(react-dom@18.2.0)(react@18.2.0)
+        version: 2.1.5(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
     devDependencies:
       '@so1ve/prettier-config':
-        specifier: ^2.0.0
-        version: 2.0.0(prettier@3.2.5)
+        specifier: ^3.1.0
+        version: 3.1.0(prettier@3.2.5)
       '@vitejs/plugin-react':
         specifier: ^4.2.1
         version: 4.2.1(vite@5.2.5)
@@ -89,10 +89,10 @@ packages:
       }
     engines: { node: '>=6.0.0' }
 
-  '@astrojs/compiler@1.8.2':
+  '@astrojs/compiler@2.10.2':
     resolution:
       {
-        integrity: sha512-o/ObKgtMzl8SlpIdzaxFnt7SATKPxu4oIP/1NL+HDJRzxfJcAkOTAb/ZKMRyULbz4q+1t2/DAebs2Z1QairkZw==,
+        integrity: sha512-bvH+v8AirwpRWCkYJEyWYdc5Cs/BjG2ZTxIJzttHilXgfKJAdW2496KsUQKzf5j2tOHtaHXKKn9hb9WZiBGpEg==,
       }
 
   '@babel/code-frame@7.23.5':
@@ -873,18 +873,18 @@ packages:
       react: ^16.0.0 || ^17.0.0 || ^18.0.0
       react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0
 
-  '@so1ve/prettier-config@2.0.0':
+  '@so1ve/prettier-config@3.1.0':
    resolution:
       {
-        integrity: sha512-s6qsH5Rf4Bl+J0LU9rKmSWe/rYRdsYw0ELyXhDDDqEaTWtah4NpHKJuVWARuKqj0TWLBeWmyWUoIH/Bkp/DHaw==,
+        integrity: sha512-9GJ1yXKBC4DzqCTTaZoBf8zw7WWkVuXcccZt1Aqk4lj6ab/GiNUnjPGajUVYLjaqAEOKqM7jUSUfTjk2JTjCAg==,
       }
     peerDependencies:
       prettier: ^3.0.0
 
-  '@so1ve/prettier-plugin-toml@2.0.0':
+  '@so1ve/prettier-plugin-toml@3.1.0':
     resolution:
       {
-        integrity: sha512-GvuFdTqhs3qxbhKTiCXWMXITmNLSdndUp7ql1yJbzzWaGqAdb3UH+R+0ZhtAEctBSx90MWAWW3kkW/Iba02tCg==,
+        integrity: sha512-8WZAGjAVNIJlkfWL6wHKxlUuEBY45fdd5qY5bR/Z6r/txgzKXk/r9qi1DTwc17gi/WcNuRrcRugecRT+mWbIYg==,
       }
     peerDependencies:
       prettier: ^3.0.0
@@ -1887,17 +1887,17 @@ packages:
       }
     hasBin: true
 
-  prettier-plugin-astro@0.13.0:
+  prettier-plugin-astro@0.14.1:
     resolution:
       {
-        integrity: sha512-5HrJNnPmZqTUNoA97zn4gNQv9BgVhv+et03314WpQ9H9N8m2L9OSV798olwmG2YLXPl1iSstlJCR1zB3x5xG4g==,
+        integrity: sha512-RiBETaaP9veVstE4vUwSIcdATj6dKmXljouXc/DDNwBSPTp8FRkLGDSGFClKsAFeeg+13SB0Z1JZvbD76bigJw==,
       }
     engines: { node: ^14.15.0 || >=16.0.0 }
 
-  prettier-plugin-curly-and-jsdoc@2.0.0:
+  prettier-plugin-curly-and-jsdoc@3.1.0:
     resolution:
       {
-        integrity: sha512-uSjWOWmX8+yrCrfhJSI58ODqtX7lXx07M8JYeOC1hfRv+vCttfiDlZoM27mNChGitJNKI+pCBvMMBYh8JiV0HQ==,
+        integrity: sha512-4QMOHnLlkP2jTRWS0MFH6j+cuOiXLvXOqCLKbtwwVd8PPyq8NenW5AAwfwqiTNHBQG/DmzViPphRrwgN0XkUVQ==,
      }
     peerDependencies:
       prettier: ^3.0.0
@@ -2417,7 +2417,7 @@ snapshots:
       '@jridgewell/gen-mapping': 0.3.5
       '@jridgewell/trace-mapping': 0.3.24
 
-  '@astrojs/compiler@1.8.2': {}
+  '@astrojs/compiler@2.10.2': {}
 
   '@babel/code-frame@7.23.5':
     dependencies:
@@ -2565,7 +2565,7 @@ snapshots:
       react: 18.2.0
       tslib: 2.6.2
 
-  '@dnd-kit/core@6.1.0(react-dom@18.2.0)(react@18.2.0)':
+  '@dnd-kit/core@6.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
     dependencies:
       '@dnd-kit/accessibility': 3.1.0(react@18.2.0)
       '@dnd-kit/utilities': 3.2.2(react@18.2.0)
@@ -2573,9 +2573,9 @@ snapshots:
       react-dom: 18.2.0(react@18.2.0)
       tslib: 2.6.2
 
-  '@dnd-kit/sortable@7.0.2(@dnd-kit/core@6.1.0)(react@18.2.0)':
+  '@dnd-kit/sortable@7.0.2(@dnd-kit/core@6.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)':
     dependencies:
-      '@dnd-kit/core': 6.1.0(react-dom@18.2.0)(react@18.2.0)
+      '@dnd-kit/core': 6.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@dnd-kit/utilities': 3.2.2(react@18.2.0)
       react: 18.2.0
       tslib: 2.6.2
@@ -2627,10 +2627,10 @@ snapshots:
     dependencies:
       glob: 7.2.3
 
-  '@douyinfe/semi-ui@2.55.3(react-dom@18.2.0)(react@18.2.0)':
+  '@douyinfe/semi-ui@2.55.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
     dependencies:
-      '@dnd-kit/core': 6.1.0(react-dom@18.2.0)(react@18.2.0)
-      '@dnd-kit/sortable': 7.0.2(@dnd-kit/core@6.1.0)(react@18.2.0)
+      '@dnd-kit/core': 6.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
+      '@dnd-kit/sortable': 7.0.2(@dnd-kit/core@6.1.0(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(react@18.2.0)
       '@dnd-kit/utilities': 3.2.2(react@18.2.0)
      '@douyinfe/semi-animation': 2.55.3
      '@douyinfe/semi-animation-react': 2.55.3
@@ -2648,8 +2648,8 @@ snapshots:
       prop-types: 15.8.1
       react: 18.2.0
       react-dom: 18.2.0(react@18.2.0)
-      react-resizable: 3.0.5(react-dom@18.2.0)(react@18.2.0)
-      react-window: 1.8.10(react-dom@18.2.0)(react@18.2.0)
+      react-resizable: 3.0.5(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
+      react-window: 1.8.10(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       scroll-into-view-if-needed: 2.2.31
       utility-types: 3.11.0
@@ -2722,13 +2722,13 @@ snapshots:
   '@esbuild/win32-x64@0.20.2':
     optional: true
 
-  '@fluentui/react-component-event-listener@0.63.1(react-dom@18.2.0)(react@18.2.0)':
+  '@fluentui/react-component-event-listener@0.63.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
     dependencies:
       '@babel/runtime': 7.24.0
       react: 18.2.0
       react-dom: 18.2.0(react@18.2.0)
 
-  '@fluentui/react-component-ref@0.63.1(react-dom@18.2.0)(react@18.2.0)':
+  '@fluentui/react-component-ref@0.63.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
     dependencies:
       '@babel/runtime': 7.24.0
       react: 18.2.0
@@ -2846,22 +2846,22 @@ snapshots:
   '@rollup/rollup-win32-x64-msvc@4.13.0':
     optional: true
 
-  '@semantic-ui-react/event-stack@3.1.3(react-dom@18.2.0)(react@18.2.0)':
+  '@semantic-ui-react/event-stack@3.1.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
     dependencies:
       exenv: 1.2.2
       prop-types: 15.8.1
       react: 18.2.0
       react-dom: 18.2.0(react@18.2.0)
 
-  '@so1ve/prettier-config@2.0.0(prettier@3.2.5)':
+  '@so1ve/prettier-config@3.1.0(prettier@3.2.5)':
     dependencies:
-      '@so1ve/prettier-plugin-toml': 2.0.0(prettier@3.2.5)
+      '@so1ve/prettier-plugin-toml': 3.1.0(prettier@3.2.5)
       prettier: 3.2.5
-      prettier-plugin-astro: 0.13.0
-      prettier-plugin-curly-and-jsdoc: 2.0.0(prettier@3.2.5)
+      prettier-plugin-astro: 0.14.1
+      prettier-plugin-curly-and-jsdoc: 3.1.0(prettier@3.2.5)
       prettier-plugin-pkgsort: 0.2.1(prettier@3.2.5)
 
-  '@so1ve/prettier-plugin-toml@2.0.0(prettier@3.2.5)':
+  '@so1ve/prettier-plugin-toml@3.1.0(prettier@3.2.5)':
     dependencies:
       prettier: 3.2.5
@@ -2926,7 +2926,7 @@ snapshots:
   '@types/parse-json@4.0.2': {}
 
-  '@visactor/react-vchart@1.8.11(react-dom@18.2.0)(react@18.2.0)':
+  '@visactor/react-vchart@1.8.11(react-dom@18.2.0(react@18.2.0))(react@18.2.0)':
     dependencies:
       '@visactor/vchart': 1.8.11
       '@visactor/vgrammar-core': 0.10.11
@@ -3503,13 +3503,13 @@ snapshots:
       sort-object-keys: 1.1.3
       sort-order: 1.1.2
 
-  prettier-plugin-astro@0.13.0:
+  prettier-plugin-astro@0.14.1:
     dependencies:
-      '@astrojs/compiler': 1.8.2
+      '@astrojs/compiler': 2.10.2
       prettier: 3.2.5
       sass-formatter: 0.7.9
 
-  prettier-plugin-curly-and-jsdoc@2.0.0(prettier@3.2.5):
+  prettier-plugin-curly-and-jsdoc@3.1.0(prettier@3.2.5):
     dependencies:
       prettier: 3.2.5
@@ -3534,7 +3534,7 @@ snapshots:
       react: 18.2.0
       scheduler: 0.23.0
 
-  react-draggable@4.4.6(react-dom@18.2.0)(react@18.2.0):
+  react-draggable@4.4.6(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       clsx: 1.2.1
       prop-types: 15.8.1
@@ -3556,7 +3556,7 @@ snapshots:
   react-is@18.2.0: {}
 
-  react-popper@2.3.0(@popperjs/core@2.11.8)(react-dom@18.2.0)(react@18.2.0):
+  react-popper@2.3.0(@popperjs/core@2.11.8)(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       '@popperjs/core': 2.11.8
       react: 18.2.0
@@ -3566,15 +3566,15 @@ snapshots:
   react-refresh@0.14.0: {}
 
-  react-resizable@3.0.5(react-dom@18.2.0)(react@18.2.0):
+  react-resizable@3.0.5(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       prop-types: 15.8.1
       react: 18.2.0
-      react-draggable: 4.4.6(react-dom@18.2.0)(react@18.2.0)
+      react-draggable: 4.4.6(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
     transitivePeerDependencies:
       - react-dom
 
-  react-router-dom@6.22.2(react-dom@18.2.0)(react@18.2.0):
+  react-router-dom@6.22.2(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       '@remix-run/router': 1.15.2
       react: 18.2.0
@@ -3590,18 +3590,18 @@ snapshots:
     dependencies:
       react: 18.2.0
 
-  react-toastify@9.1.3(react-dom@18.2.0)(react@18.2.0):
+  react-toastify@9.1.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       clsx: 1.2.1
       react: 18.2.0
       react-dom: 18.2.0(react@18.2.0)
 
-  react-turnstile@1.1.3(react-dom@18.2.0)(react@18.2.0):
+  react-turnstile@1.1.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       react: 18.2.0
       react-dom: 18.2.0(react@18.2.0)
 
-  react-window@1.8.10(react-dom@18.2.0)(react@18.2.0):
+  react-window@1.8.10(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       '@babel/runtime': 7.24.0
       memoize-one: 5.2.1
@@ -3683,13 +3683,13 @@ snapshots:
       fs-extra: 4.0.3
       jquery: 3.7.1
 
-  semantic-ui-react@2.1.5(react-dom@18.2.0)(react@18.2.0):
+  semantic-ui-react@2.1.5(react-dom@18.2.0(react@18.2.0))(react@18.2.0):
     dependencies:
       '@babel/runtime': 7.24.0
-      '@fluentui/react-component-event-listener': 0.63.1(react-dom@18.2.0)(react@18.2.0)
-      '@fluentui/react-component-ref': 0.63.1(react-dom@18.2.0)(react@18.2.0)
+      '@fluentui/react-component-event-listener': 0.63.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
+      '@fluentui/react-component-ref': 0.63.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       '@popperjs/core': 2.11.8
-      '@semantic-ui-react/event-stack': 3.1.3(react-dom@18.2.0)(react@18.2.0)
+      '@semantic-ui-react/event-stack': 3.1.3(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       clsx: 1.2.1
       keyboard-key: 1.1.0
       lodash: 4.17.21
@@ -3698,7 +3698,7 @@ snapshots:
       react: 18.2.0
       react-dom: 18.2.0(react@18.2.0)
       react-is: 18.2.0
-      react-popper: 2.3.0(@popperjs/core@2.11.8)(react-dom@18.2.0)(react@18.2.0)
+      react-popper: 2.3.0(@popperjs/core@2.11.8)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)
       shallowequal: 1.1.0
 
   semver@6.3.1: {}

View File

@@ -749,7 +749,8 @@ const ChannelsTable = () => {
           <Form.Select
             field='group'
             label='分组'
-            optionList={groupOptions}
+            optionList={[{ label: '选择分组', value: null }, ...groupOptions]}
+            initValue={null}
             onChange={(v) => {
               setSearchGroup(v);
               searchChannels(searchKeyword, v, searchModel);

View File

@@ -415,8 +415,6 @@ const LogsTable = () => {
   const [activePage, setActivePage] = useState(1);
   const [logCount, setLogCount] = useState(ITEMS_PER_PAGE);
   const [pageSize, setPageSize] = useState(ITEMS_PER_PAGE);
-  const [searchKeyword, setSearchKeyword] = useState('');
-  const [searching, setSearching] = useState(false);
   const [logType, setLogType] = useState(0);
   const isAdminUser = isAdmin();
   let now = new Date();
@@ -518,10 +516,7 @@ const LogsTable = () => {
       logs[i].timestamp2string = timestamp2string(logs[i].created_at);
       logs[i].key = '' + logs[i].id;
     }
-    // data.key = '' + data.id
     setLogs(logs);
-    setLogCount(logs.length + ITEMS_PER_PAGE);
-    // console.log(logCount);
   };
@@ -539,37 +534,28 @@ const LogsTable = () => {
     const res = await API.get(url);
     const { success, message, data } = res.data;
     if (success) {
-      if (startIdx === 0) {
-        setLogsFormat(data);
-      } else {
-        let newLogs = [...logs];
-        newLogs.splice(startIdx * pageSize, data.length, ...data);
-        setLogsFormat(newLogs);
-      }
+      const newPageData = data.items;
+      setActivePage(data.page);
+      setPageSize(data.page_size);
+      setLogCount(data.total);
+      setLogsFormat(newPageData);
     } else {
       showError(message);
     }
     setLoading(false);
   };
 
-  const pageData = logs.slice(
-    (activePage - 1) * pageSize,
-    activePage * pageSize,
-  );
-
   const handlePageChange = (page) => {
     setActivePage(page);
-    if (page === Math.ceil(logs.length / pageSize) + 1) {
-      // In this case we have to load more data and then append them.
-      loadLogs(page - 1, pageSize, logType).then((r) => {});
-    }
+    loadLogs(page, pageSize, logType).then((r) => {});
   };
 
   const handlePageSizeChange = async (size) => {
     localStorage.setItem('page-size', size + '');
     setPageSize(size);
     setActivePage(1);
-    loadLogs(0, size)
+    loadLogs(activePage, size)
       .then()
       .catch((reason) => {
         showError(reason);
@@ -577,27 +563,24 @@ const LogsTable = () => {
   };
 
   const refresh = async () => {
-    // setLoading(true);
     setActivePage(1);
     handleEyeClick();
-    await loadLogs(0, pageSize, logType);
+    await loadLogs(activePage, pageSize, logType);
   };
 
   const copyText = async (text) => {
     if (await copy(text)) {
       showSuccess('已复制:' + text);
     } else {
-      // setSearchKeyword(text);
       Modal.error({ title: '无法复制到剪贴板,请手动复制', content: text });
     }
   };
 
   useEffect(() => {
-    // console.log('default effect')
     const localPageSize =
       parseInt(localStorage.getItem('page-size')) || ITEMS_PER_PAGE;
     setPageSize(localPageSize);
-    loadLogs(0, localPageSize)
+    loadLogs(activePage, localPageSize)
       .then()
       .catch((reason) => {
         showError(reason);
@@ -605,25 +588,6 @@ const LogsTable = () => {
     handleEyeClick();
   }, []);
 
-  const searchLogs = async () => {
-    if (searchKeyword === '') {
-      // if keyword is blank, load files instead.
-      await loadLogs(0, pageSize);
-      setActivePage(1);
-      return;
-    }
-    setSearching(true);
-    const res = await API.get(`/api/log/self/search?keyword=${searchKeyword}`);
-    const { success, message, data } = res.data;
-    if (success) {
-      setLogs(data);
-      setActivePage(1);
-    } else {
-      showError(message);
-    }
-    setSearching(false);
-  };
-
   return (
     <>
       <Layout>
@@ -722,7 +686,7 @@ const LogsTable = () => {
         <Table
           style={{ marginTop: 5 }}
          columns={columns}
-          dataSource={pageData}
+          dataSource={logs}
          pagination={{
            currentPage: activePage,
            pageSize: pageSize,
@@ -730,7 +694,7 @@ const LogsTable = () => {
            pageSizeOpts: [10, 20, 50, 100],
            showSizeChanger: true,
            onPageSizeChange: (size) => {
-              handlePageSizeChange(size).then();
+              handlePageSizeChange(size);
            },
            onPageChange: handlePageChange,
          }}