Compare commits

...

26 Commits

Author SHA1 Message Date
CaIon
a33f685f3c fix: log page type error (close #154) 2024-04-03 23:57:49 +08:00
CaIon
3d0f77ffb6 Merge remote-tracking branch 'origin/main' 2024-04-03 23:51:32 +08:00
CaIon
5ce8e6dab6 fix: update user quota (close #161) 2024-04-03 23:51:25 +08:00
Calcium-Ion
5a5b7d618d Merge pull request #171 from QuentinHsu/perf-setting-tab-navigation
perf(Setting): setting tab navigation
2024-04-03 23:32:19 +08:00
Calcium-Ion
ad8ce915ec Merge pull request #175 from ye4293/test
Fix users being able to register with temporary email addresses
2024-04-03 23:31:50 +08:00
Calcium-Ion
456fb875de Merge pull request #176 from QuentinHsu/perf-helpers-renderGroup
refactor(helpers): renderGroup function
2024-04-03 23:31:02 +08:00
QuentinHsu
3e90b6d516 refactor(helpers): renderGroup function 2024-04-02 13:16:02 +08:00
QuentinHsu
d6e373fbe4 fix(helpers): add key prop to Tag components 2024-04-02 10:58:44 +08:00
Ghostz
224746b45a Update misc.go 2024-04-02 01:13:12 +08:00
Calcium-Ion
ac827b1862 Merge pull request #174 from AI-ASS/main 2024-04-01 19:51:02 +08:00
GAI Group
658bf2ad57 Rename .prettierrc.mjs to web/.prettierrc.mjs 2024-04-01 19:49:56 +08:00
Calcium-Ion
c25f48b7c5 Merge pull request #172 from MapleEve/main
Support Claude TopK
2024-04-01 18:15:45 +08:00
QuentinHsu
290dcf7587 perf(Setting): add useEffect and useNavigate hooks to Setting component 2024-04-01 16:59:07 +08:00
Maple Gao
278fd39195 feat: add Claude TopK 2024-04-01 14:33:58 +08:00
QuentinHsu
aa23c51a53 perf(Setting): add tabActiveKey state to Setting component 2024-04-01 13:33:57 +08:00
Calcium-Ion
87919b032d Merge pull request #167 from weikecloud/main
Add detection of upstream MJ image-generation failures
2024-03-30 16:27:03 +08:00
Calcium-Ion
f7a4f18aff Update midjourney.go 2024-03-30 16:26:39 +08:00
余生一个白恩
706449dede Add upstream image-generation failure detection 2024-03-30 13:21:05 +08:00
CaIon
36d164be0e fix: SearchUsers (close #160) 2024-03-29 22:49:08 +08:00
CaIon
d80a7d3c97 Merge remote-tracking branch 'origin/main' 2024-03-29 22:28:10 +08:00
CaIon
44a8ade4ba fix: remove sensitive check on completion (close #157) 2024-03-29 22:20:14 +08:00
Xyfacai
2cca2a989e Merge pull request #165 from xyfacai/fork/mj-mode-path
fix: support the /mj-{mode} path
2024-03-29 17:45:23 +08:00
Xiangyuan Liu
3065bf92ae fix: support the /mj-{mode} path 2024-03-29 17:45:00 +08:00
Xiangyuan Liu
2e595bdafb fix: support the /mj-{mode} path 2024-03-29 16:58:19 +08:00
Xiangyuan Liu
49df4b6eed feat: support the /mj-{mode} path 2024-03-29 16:48:50 +08:00
CaIon
5c39f54040 feat: able to set smtp ssl 2024-03-28 12:18:11 +08:00
41 changed files with 268 additions and 261 deletions

View File

@@ -75,6 +75,7 @@ var LogConsumeEnabled = true
var SMTPServer = ""
var SMTPPort = 587
var SMTPSSLEnabled = false
var SMTPAccount = ""
var SMTPFrom = ""
var SMTPToken = ""

View File

@@ -24,7 +24,7 @@ func SendEmail(subject string, receiver string, content string) error {
addr := fmt.Sprintf("%s:%d", SMTPServer, SMTPPort)
to := strings.Split(receiver, ";")
var err error
if SMTPPort == 465 {
if SMTPPort == 465 || SMTPSSLEnabled {
tlsConfig := &tls.Config{
InsecureSkipVerify: true,
ServerName: SMTPServer,
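
For context alongside this change, here is a minimal sketch of the connection setup the widened condition implies. This is illustrative only: it assumes the same package-level SMTPServer, SMTPPort, and SMTPSSLEnabled variables shown above, and uses only the standard library.

package common

import (
	"crypto/tls"
	"fmt"
	"net/smtp"
)

// dialSMTP is a hypothetical helper, not the project's code: port 465, or the
// new SMTPSSLEnabled flag, forces an implicit-TLS connection before the SMTP
// handshake; anything else uses a plain dial.
func dialSMTP() (*smtp.Client, error) {
	addr := fmt.Sprintf("%s:%d", SMTPServer, SMTPPort)
	if SMTPPort == 465 || SMTPSSLEnabled {
		conn, err := tls.Dial("tcp", addr, &tls.Config{ServerName: SMTPServer})
		if err != nil {
			return nil, err
		}
		return smtp.NewClient(conn, SMTPServer)
	}
	return smtp.Dial(addr)
}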

View File

@@ -4,7 +4,8 @@ import "strings"
var CheckSensitiveEnabled = true
var CheckSensitiveOnPromptEnabled = true
var CheckSensitiveOnCompletionEnabled = true
//var CheckSensitiveOnCompletionEnabled = true
// StopOnSensitiveEnabled controls whether generation stops immediately when a sensitive word is detected; otherwise the word is replaced
var StopOnSensitiveEnabled = true
@@ -37,6 +38,6 @@ func ShouldCheckPromptSensitive() bool {
return CheckSensitiveEnabled && CheckSensitiveOnPromptEnabled
}
func ShouldCheckCompletionSensitive() bool {
return CheckSensitiveEnabled && CheckSensitiveOnCompletionEnabled
}
//func ShouldCheckCompletionSensitive() bool {
// return CheckSensitiveEnabled && CheckSensitiveOnCompletionEnabled
//}

View File

@@ -87,7 +87,7 @@ func testChannel(channel *model.Channel, testModel string) (err error, openaiErr
err := relaycommon.RelayErrorHandler(resp)
return fmt.Errorf("status code %d: %s", resp.StatusCode, err.Error.Message), &err.Error
}
usage, respErr, _ := adaptor.DoResponse(c, resp, meta)
usage, respErr := adaptor.DoResponse(c, resp, meta)
if respErr != nil {
return fmt.Errorf("%s", respErr.Error.Message), &respErr.Error
}

View File

@@ -147,7 +147,7 @@ func UpdateMidjourneyTaskBulk() {
task.Buttons = string(buttonStr)
}
if task.Progress != "100%" && responseItem.FailReason != "" {
if (task.Progress != "100%" && responseItem.FailReason != "") || (task.Progress == "100%" && task.Status == "FAILURE") {
common.LogInfo(ctx, task.MjId+" 构建失败,"+task.FailReason)
task.Progress = "100%"
err = model.CacheUpdateUserQuota(task.UserId)
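
Restated as a standalone predicate (names here are illustrative, not the project's), the widened check now treats two shapes as failures:

package main

import "fmt"

// mjTaskFailed mirrors the condition above: an in-flight task that already
// reports a failure reason, or a finished task whose status is FAILURE.
func mjTaskFailed(progress, status, failReason string) bool {
	if progress != "100%" && failReason != "" {
		return true
	}
	return progress == "100%" && status == "FAILURE"
}

func main() {
	fmt.Println(mjTaskFailed("50%", "IN_PROGRESS", "banned prompt")) // true
	fmt.Println(mjTaskFailed("100%", "FAILURE", ""))                 // true (newly caught)
	fmt.Println(mjTaskFailed("100%", "SUCCESS", ""))                 // false
}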

View File

@@ -121,17 +121,27 @@ func SendEmailVerification(c *gin.Context) {
return
}
if common.EmailDomainRestrictionEnabled {
parts := strings.Split(email, "@")
localPart := parts[0]
domainPart := parts[1]
containsSpecialSymbols := strings.Contains(localPart, "+") || strings.Count(localPart, ".") > 1
allowed := false
for _, domain := range common.EmailDomainWhitelist {
if strings.HasSuffix(email, "@"+domain) {
if domainPart == domain {
allowed = true
break
}
}
if !allowed {
if allowed && !containsSpecialSymbols {
c.JSON(http.StatusOK, gin.H{
"success": true,
"message": "Your email address is allowed.",
})
} else {
c.JSON(http.StatusOK, gin.H{
"success": false,
"message": "管理员启用了邮箱域名白名单,您的邮箱地址的域名不在白名单中",
"message": "The administrator has enabled the email domain name whitelist, and your email address is not allowed due to special symbols or it's not in the whitelist.",
})
return
}
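
One thing worth noting: the handler above indexes parts[1] without first checking that the address contains exactly one "@". A hedged restatement of the same policy as a standalone function, with that guard added:

package main

import "strings"

// emailAllowed is illustrative only: the domain must match the whitelist
// exactly, and local parts with "+" sub-addressing or more than one "." are
// rejected, as in the handler above.
func emailAllowed(email string, whitelist []string) bool {
	parts := strings.Split(email, "@")
	if len(parts) != 2 {
		return false
	}
	local, domain := parts[0], parts[1]
	if strings.Contains(local, "+") || strings.Count(local, ".") > 1 {
		return false
	}
	for _, d := range whitelist {
		if domain == d {
			return true
		}
	}
	return false
}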

View File

@@ -11,6 +11,12 @@ type TextResponseWithError struct {
Error OpenAIError `json:"error"`
}
type SimpleResponse struct {
Usage `json:"usage"`
Error OpenAIError `json:"error"`
Choices []OpenAITextResponseChoice `json:"choices"`
}
type TextResponse struct {
Id string `json:"id"`
Object string `json:"object"`

View File

@@ -44,7 +44,7 @@ func Distribute() func(c *gin.Context) {
// Select a channel for the user
var modelRequest ModelRequest
var err error
if strings.HasPrefix(c.Request.URL.Path, "/mj") {
if strings.Contains(c.Request.URL.Path, "/mj/") {
relayMode := relayconstant.Path2RelayModeMidjourney(c.Request.URL.Path)
if relayMode == relayconstant.RelayModeMidjourneyTaskFetch ||
relayMode == relayconstant.RelayModeMidjourneyTaskFetchByCondition ||

View File

@@ -50,6 +50,7 @@ func InitOptionMap() {
common.OptionMap["SMTPPort"] = strconv.Itoa(common.SMTPPort)
common.OptionMap["SMTPAccount"] = ""
common.OptionMap["SMTPToken"] = ""
common.OptionMap["SMTPSSLEnabled"] = strconv.FormatBool(common.SMTPSSLEnabled)
common.OptionMap["Notice"] = ""
common.OptionMap["About"] = ""
common.OptionMap["HomePageContent"] = ""
@@ -92,7 +93,7 @@ func InitOptionMap() {
common.OptionMap["MjNotifyEnabled"] = strconv.FormatBool(constant.MjNotifyEnabled)
common.OptionMap["CheckSensitiveEnabled"] = strconv.FormatBool(constant.CheckSensitiveEnabled)
common.OptionMap["CheckSensitiveOnPromptEnabled"] = strconv.FormatBool(constant.CheckSensitiveOnPromptEnabled)
common.OptionMap["CheckSensitiveOnCompletionEnabled"] = strconv.FormatBool(constant.CheckSensitiveOnCompletionEnabled)
//common.OptionMap["CheckSensitiveOnCompletionEnabled"] = strconv.FormatBool(constant.CheckSensitiveOnCompletionEnabled)
common.OptionMap["StopOnSensitiveEnabled"] = strconv.FormatBool(constant.StopOnSensitiveEnabled)
common.OptionMap["SensitiveWords"] = constant.SensitiveWordsToString()
common.OptionMap["StreamCacheQueueLength"] = strconv.Itoa(constant.StreamCacheQueueLength)
@@ -195,10 +196,12 @@ func updateOptionMap(key string, value string) (err error) {
constant.CheckSensitiveEnabled = boolValue
case "CheckSensitiveOnPromptEnabled":
constant.CheckSensitiveOnPromptEnabled = boolValue
case "CheckSensitiveOnCompletionEnabled":
constant.CheckSensitiveOnCompletionEnabled = boolValue
//case "CheckSensitiveOnCompletionEnabled":
// constant.CheckSensitiveOnCompletionEnabled = boolValue
case "StopOnSensitiveEnabled":
constant.StopOnSensitiveEnabled = boolValue
case "SMTPSSLEnabled":
common.SMTPSSLEnabled = boolValue
}
}
switch key {

View File

@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"one-api/common"
"strconv"
"strings"
"time"
@@ -72,8 +73,26 @@ func GetAllUsers(startIdx int, num int) (users []*User, err error) {
return users, err
}
func SearchUsers(keyword string) (users []*User, err error) {
err = DB.Omit("password").Where("id = ? or username LIKE ? or email LIKE ? or display_name LIKE ?", keyword, keyword+"%", keyword+"%", keyword+"%").Find(&users).Error
func SearchUsers(keyword string) ([]*User, error) {
var users []*User
var err error
// Try to convert the keyword to an integer ID
keywordInt, err := strconv.Atoi(keyword)
if err == nil {
// If the conversion succeeded, search users by ID
err = DB.Unscoped().Omit("password").Where("id = ?", keywordInt).Find(&users).Error
if err != nil || len(users) > 0 {
// If a user was found by ID, or an error occurred, return the result or the error
return users, err
}
}
// If the ID conversion failed, or no user was found, fall back to a fuzzy search on the other fields
err = DB.Unscoped().Omit("password").
Where("username LIKE ? OR email LIKE ? OR display_name LIKE ?", keyword+"%", keyword+"%", keyword+"%").
Find(&users).Error
return users, err
}
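
As a reading aid, two hypothetical call patterns; note that Unscoped() makes both branches include soft-deleted users, and the ID branch only short-circuits when it actually finds rows or errors:

users, err := SearchUsers("42")    // numeric keyword: exact-ID lookup first, fuzzy fallback otherwise
users, err = SearchUsers("alice")  // non-numeric: prefix match on username, email, display_name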
@@ -206,10 +225,11 @@ func (user *User) Update(updatePassword bool) error {
}
newUser := *user
DB.First(&user, user.Id)
err = DB.Model(user).Updates(newUser).Error
err = DB.Model(user).Select("*").Updates(newUser).Error
if err == nil {
if common.RedisEnabled {
_ = common.RedisSet(fmt.Sprintf("user_group:%d", user.Id), user.Group, time.Duration(UserId2GroupCacheSeconds)*time.Second)
_ = common.RedisSet(fmt.Sprintf("user_quota:%d", user.Id), strconv.Itoa(user.Quota), time.Duration(UserId2QuotaCacheSeconds)*time.Second)
}
}
return err
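
The added Select("*") matters because GORM's Updates with a struct skips zero-value fields; forcing all columns means values like an emptied group or a zero quota are actually written. A minimal illustration, model and values hypothetical:

// Without Select("*"), GORM omits zero values: Quota 0 would not be written.
DB.Model(user).Updates(User{Quota: 0})
// With Select("*"), every column is updated, zero values included.
DB.Model(user).Select("*").Updates(User{Quota: 0})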

View File

@@ -15,7 +15,7 @@ type Adaptor interface {
SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error
ConvertRequest(c *gin.Context, relayMode int, request *dto.GeneralOpenAIRequest) (any, error)
DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error)
DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse)
DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode)
GetModelList() []string
GetChannelName() string
}

View File

@@ -57,7 +57,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
err, usage = aliStreamHandler(c, resp)
} else {

View File

@@ -69,7 +69,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
err, usage = baiduStreamHandler(c, resp)
} else {

View File

@@ -63,7 +63,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
err, usage = claudeStreamHandler(a.RequestMode, info.UpstreamModelName, info.PromptTokens, c, resp)
} else {

View File

@@ -8,7 +8,6 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/constant"
"one-api/dto"
"one-api/service"
"strings"
@@ -35,6 +34,7 @@ func requestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeR
StopSequences: nil,
Temperature: textRequest.Temperature,
TopP: textRequest.TopP,
TopK: textRequest.TopK,
Stream: textRequest.Stream,
}
if claudeRequest.MaxTokensToSample == 0 {
@@ -64,6 +64,7 @@ func requestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
StopSequences: nil,
Temperature: textRequest.Temperature,
TopP: textRequest.TopP,
TopK: textRequest.TopK,
Stream: textRequest.Stream,
}
if claudeRequest.MaxTokens == 0 {
@@ -317,7 +318,7 @@ func claudeHandler(requestMode int, c *gin.Context, resp *http.Response, promptT
}, nil
}
fullTextResponse := responseClaude2OpenAI(requestMode, &claudeResponse)
completionTokens, err, _ := service.CountTokenText(claudeResponse.Completion, model, constant.ShouldCheckCompletionSensitive())
completionTokens, err, _ := service.CountTokenText(claudeResponse.Completion, model, false)
if err != nil {
return service.OpenAIErrorWrapper(err, "count_token_text_failed", http.StatusInternalServerError), nil
}
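
The TopK passthrough in the two hunks above assumes both request DTOs carry a top-k field; schematically, with hypothetical struct shapes:

// Hypothetical field shapes, for illustration only.
type GeneralOpenAIRequest struct {
	TopK int `json:"top_k,omitempty"`
}
type ClaudeRequest struct {
	TopK int `json:"top_k,omitempty"`
}
// The conversion then simply copies the value through, as above:
// claudeRequest.TopK = textRequest.TopK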

View File

@@ -47,7 +47,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
var responseText string
err, responseText = geminiChatStreamHandler(c, resp)

View File

@@ -7,7 +7,6 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/constant"
"one-api/dto"
relaycommon "one-api/relay/common"
"one-api/service"
@@ -257,7 +256,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo
}, nil
}
fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
completionTokens, _, _ := service.CountTokenText(geminiResponse.GetResponseText(), model, constant.ShouldCheckCompletionSensitive())
completionTokens, _, _ := service.CountTokenText(geminiResponse.GetResponseText(), model, false)
usage := dto.Usage{
PromptTokens: promptTokens,
CompletionTokens: completionTokens,

View File

@@ -49,16 +49,16 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
var responseText string
err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
} else {
if info.RelayMode == relayconstant.RelayModeEmbeddings {
err, usage, sensitiveResp = ollamaEmbeddingHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
err, usage = ollamaEmbeddingHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
} else {
err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
}
}
return

View File

@@ -45,19 +45,19 @@ func requestOpenAI2Embeddings(request dto.GeneralOpenAIRequest) *OllamaEmbedding
}
}
func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens int, model string, relayMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage, *dto.SensitiveResponse) {
func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens int, model string, relayMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
var ollamaEmbeddingResponse OllamaEmbeddingResponse
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
err = json.Unmarshal(responseBody, &ollamaEmbeddingResponse)
if err != nil {
return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
data := make([]dto.OpenAIEmbeddingResponseItem, 0, 1)
data = append(data, dto.OpenAIEmbeddingResponseItem{
@@ -77,7 +77,7 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
}
doResponseBody, err := json.Marshal(embeddingResponse)
if err != nil {
return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
}
resp.Body = io.NopCloser(bytes.NewBuffer(doResponseBody))
// We shouldn't set the header before we parse the response body, because the parse part may fail.
@@ -98,11 +98,11 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
c.Writer.WriteHeader(resp.StatusCode)
_, err = io.Copy(c.Writer, resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
return nil, usage, nil
return nil, usage
}

View File

@@ -69,13 +69,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
var responseText string
err, responseText = OpenaiStreamHandler(c, resp, info.RelayMode)
usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
} else {
err, usage, sensitiveResp = OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
err, usage = OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
}
return
}

View File

@@ -4,14 +4,10 @@ import (
"bufio"
"bytes"
"encoding/json"
"errors"
"fmt"
"github.com/gin-gonic/gin"
"io"
"log"
"net/http"
"one-api/common"
"one-api/constant"
"one-api/dto"
relayconstant "one-api/relay/constant"
"one-api/service"
@@ -21,7 +17,7 @@ import (
)
func OpenaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*dto.OpenAIErrorWithStatusCode, string) {
checkSensitive := constant.ShouldCheckCompletionSensitive()
//checkSensitive := constant.ShouldCheckCompletionSensitive()
var responseTextBuilder strings.Builder
scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
@@ -53,20 +49,11 @@ func OpenaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*d
if data[:6] != "data: " && data[:6] != "[DONE]" {
continue
}
sensitive := false
if checkSensitive {
// check sensitive
sensitive, _, data = service.SensitiveWordReplace(data, false)
}
dataChan <- data
data = data[6:]
if !strings.HasPrefix(data, "[DONE]") {
streamItems = append(streamItems, data)
}
if sensitive && constant.StopOnSensitiveEnabled {
dataChan <- "data: [DONE]"
break
}
}
streamResp := "[" + strings.Join(streamItems, ",") + "]"
switch relayMode {
@@ -142,118 +129,56 @@ func OpenaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*d
return nil, responseTextBuilder.String()
}
func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model string, relayMode int) (*dto.OpenAIErrorWithStatusCode, *dto.Usage, *dto.SensitiveResponse) {
var responseWithError dto.TextResponseWithError
func OpenaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
var simpleResponse dto.SimpleResponse
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
err = json.Unmarshal(responseBody, &responseWithError)
err = json.Unmarshal(responseBody, &simpleResponse)
if err != nil {
log.Printf("unmarshal_response_body_failed: body: %s, err: %v", string(responseBody), err)
return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil, nil
return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
}
if responseWithError.Error.Type != "" {
if simpleResponse.Error.Type != "" {
return &dto.OpenAIErrorWithStatusCode{
Error: responseWithError.Error,
Error: simpleResponse.Error,
StatusCode: resp.StatusCode,
}, nil, nil
}, nil
}
// Reset response body
resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
// We shouldn't set the header before we parse the response body, because the parse part may fail.
// And then we will have to send an error response, but in this case, the header has already been set.
// So the httpClient will be confused by the response.
// For example, Postman will report error, and we cannot check the response at all.
for k, v := range resp.Header {
c.Writer.Header().Set(k, v[0])
}
c.Writer.WriteHeader(resp.StatusCode)
_, err = io.Copy(c.Writer, resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
}
err = resp.Body.Close()
if err != nil {
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
}
checkSensitive := constant.ShouldCheckCompletionSensitive()
sensitiveWords := make([]string, 0)
triggerSensitive := false
usage := &responseWithError.Usage
//textResponse := &dto.TextResponse{
// Choices: responseWithError.Choices,
// Usage: responseWithError.Usage,
//}
var doResponseBody []byte
switch relayMode {
case relayconstant.RelayModeEmbeddings:
embeddingResponse := &dto.OpenAIEmbeddingResponse{
Object: responseWithError.Object,
Data: responseWithError.Data,
Model: responseWithError.Model,
Usage: *usage,
if simpleResponse.Usage.TotalTokens == 0 {
completionTokens := 0
for _, choice := range simpleResponse.Choices {
ctkm, _, _ := service.CountTokenText(string(choice.Message.Content), model, false)
completionTokens += ctkm
}
doResponseBody, err = json.Marshal(embeddingResponse)
default:
if responseWithError.Usage.TotalTokens == 0 || checkSensitive {
completionTokens := 0
for i, choice := range responseWithError.Choices {
stringContent := string(choice.Message.Content)
ctkm, _, _ := service.CountTokenText(stringContent, model, false)
completionTokens += ctkm
if checkSensitive {
sensitive, words, stringContent := service.SensitiveWordReplace(stringContent, false)
if sensitive {
triggerSensitive = true
msg := choice.Message
msg.Content = common.StringToByteSlice(stringContent)
responseWithError.Choices[i].Message = msg
sensitiveWords = append(sensitiveWords, words...)
}
}
}
responseWithError.Usage = dto.Usage{
PromptTokens: promptTokens,
CompletionTokens: completionTokens,
TotalTokens: promptTokens + completionTokens,
}
}
textResponse := &dto.TextResponse{
Id: responseWithError.Id,
Created: responseWithError.Created,
Object: responseWithError.Object,
Choices: responseWithError.Choices,
Model: responseWithError.Model,
Usage: *usage,
}
doResponseBody, err = json.Marshal(textResponse)
}
if checkSensitive && triggerSensitive && constant.StopOnSensitiveEnabled {
sensitiveWords = common.RemoveDuplicate(sensitiveWords)
return service.OpenAIErrorWrapper(errors.New(fmt.Sprintf("sensitive words detected on response: %s",
strings.Join(sensitiveWords, ", "))), "sensitive_words_detected", http.StatusBadRequest),
usage, &dto.SensitiveResponse{
SensitiveWords: sensitiveWords,
}
} else {
// Reset response body
resp.Body = io.NopCloser(bytes.NewBuffer(doResponseBody))
// We shouldn't set the header before we parse the response body, because the parse part may fail.
// And then we will have to send an error response, but in this case, the header has already been set.
// So the httpClient will be confused by the response.
// For example, Postman will report error, and we cannot check the response at all.
// Copy headers
for k, v := range resp.Header {
// Remove any existing values for this header to avoid duplicate headers
c.Writer.Header().Del(k)
for _, vv := range v {
c.Writer.Header().Add(k, vv)
}
}
// reset content length
c.Writer.Header().Del("Content-Length")
c.Writer.Header().Set("Content-Length", fmt.Sprintf("%d", len(doResponseBody)))
c.Writer.WriteHeader(resp.StatusCode)
_, err = io.Copy(c.Writer, resp.Body)
if err != nil {
return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil, nil
}
err = resp.Body.Close()
if err != nil {
return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil, nil
simpleResponse.Usage = dto.Usage{
PromptTokens: promptTokens,
CompletionTokens: completionTokens,
TotalTokens: promptTokens + completionTokens,
}
}
return nil, usage, nil
return nil, &simpleResponse.Usage
}

View File

@@ -39,7 +39,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
var responseText string
err, responseText = palmStreamHandler(c, resp)

View File

@@ -7,7 +7,6 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/constant"
"one-api/dto"
relaycommon "one-api/relay/common"
"one-api/service"
@@ -157,7 +156,7 @@ func palmHandler(c *gin.Context, resp *http.Response, promptTokens int, model st
}, nil
}
fullTextResponse := responsePaLM2OpenAI(&palmResponse)
completionTokens, _, _ := service.CountTokenText(palmResponse.Candidates[0].Content, model, constant.ShouldCheckCompletionSensitive())
completionTokens, _, _ := service.CountTokenText(palmResponse.Candidates[0].Content, model, false)
usage := dto.Usage{
PromptTokens: promptTokens,
CompletionTokens: completionTokens,

View File

@@ -43,13 +43,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
var responseText string
err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
} else {
err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
}
return
}

View File

@@ -53,7 +53,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
var responseText string
err, responseText = tencentStreamHandler(c, resp)

View File

@@ -43,13 +43,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return dummyResp, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
splits := strings.Split(info.ApiKey, "|")
if len(splits) != 3 {
return nil, service.OpenAIErrorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest), nil
return nil, service.OpenAIErrorWrapper(errors.New("invalid auth"), "invalid_auth", http.StatusBadRequest)
}
if a.request == nil {
return nil, service.OpenAIErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest), nil
return nil, service.OpenAIErrorWrapper(errors.New("request is nil"), "request_is_nil", http.StatusBadRequest)
}
if info.IsStream {
err, usage = xunfeiStreamHandler(c, *a.request, splits[0], splits[1], splits[2])

View File

@@ -46,7 +46,7 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
err, usage = zhipuStreamHandler(c, resp)
} else {

View File

@@ -44,13 +44,13 @@ func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, request
return channel.DoApiRequest(a, c, info, requestBody)
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode, sensitiveResp *dto.SensitiveResponse) {
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
if info.IsStream {
var responseText string
err, responseText = openai.OpenaiStreamHandler(c, resp, info.RelayMode)
usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
} else {
err, usage, sensitiveResp = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName, info.RelayMode)
err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
}
return
}

View File

@@ -56,29 +56,29 @@ func Path2RelayMode(path string) int {
func Path2RelayModeMidjourney(path string) int {
relayMode := RelayModeUnknown
if strings.HasPrefix(path, "/mj/submit/action") {
if strings.HasSuffix(path, "/mj/submit/action") {
// midjourney plus
relayMode = RelayModeMidjourneyAction
} else if strings.HasPrefix(path, "/mj/submit/modal") {
} else if strings.HasSuffix(path, "/mj/submit/modal") {
// midjourney plus
relayMode = RelayModeMidjourneyModal
} else if strings.HasPrefix(path, "/mj/submit/shorten") {
} else if strings.HasSuffix(path, "/mj/submit/shorten") {
// midjourney plus
relayMode = RelayModeMidjourneyShorten
} else if strings.HasPrefix(path, "/mj/insight-face/swap") {
} else if strings.HasSuffix(path, "/mj/insight-face/swap") {
// midjourney plus
relayMode = RelayModeSwapFace
} else if strings.HasPrefix(path, "/mj/submit/imagine") {
} else if strings.HasSuffix(path, "/mj/submit/imagine") {
relayMode = RelayModeMidjourneyImagine
} else if strings.HasPrefix(path, "/mj/submit/blend") {
} else if strings.HasSuffix(path, "/mj/submit/blend") {
relayMode = RelayModeMidjourneyBlend
} else if strings.HasPrefix(path, "/mj/submit/describe") {
} else if strings.HasSuffix(path, "/mj/submit/describe") {
relayMode = RelayModeMidjourneyDescribe
} else if strings.HasPrefix(path, "/mj/notify") {
} else if strings.HasSuffix(path, "/mj/notify") {
relayMode = RelayModeMidjourneyNotify
} else if strings.HasPrefix(path, "/mj/submit/change") {
} else if strings.HasSuffix(path, "/mj/submit/change") {
relayMode = RelayModeMidjourneyChange
} else if strings.HasPrefix(path, "/mj/submit/simple-change") {
} else if strings.HasSuffix(path, "/mj/submit/simple-change") {
relayMode = RelayModeMidjourneyChange
} else if strings.HasSuffix(path, "/fetch") {
relayMode = RelayModeMidjourneyTaskFetch
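
A quick illustration of why the prefix checks became suffix checks: the /mj-{mode} route group registered later in this compare puts a mode segment in front of the path, so a prefix match no longer fires:

package main

import (
	"fmt"
	"strings"
)

func main() {
	path := "/mj-fast/mj/submit/imagine" // path shape produced by the new /:mode/mj routes
	fmt.Println(strings.HasPrefix(path, "/mj/submit/imagine")) // false: the old check misses it
	fmt.Println(strings.HasSuffix(path, "/mj/submit/imagine")) // true: the new check matches
}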

View File

@@ -173,7 +173,7 @@ func AudioHelper(c *gin.Context, relayMode int) *dto.OpenAIErrorWithStatusCode {
if strings.HasPrefix(audioRequest.Model, "tts-1") {
quota = promptTokens
} else {
quota, err, _ = service.CountAudioToken(audioResponse.Text, audioRequest.Model, constant.ShouldCheckCompletionSensitive())
quota, err, _ = service.CountAudioToken(audioResponse.Text, audioRequest.Model, false)
}
quota = int(float64(quota) * ratio)
if ratio != 0 && quota <= 0 {

View File

@@ -180,7 +180,7 @@ func RelaySwapFace(c *gin.Context) *dto.MidjourneyResponse {
Description: "quota_not_enough",
}
}
requestURL := c.Request.URL.String()
requestURL := getMjRequestPath(c.Request.URL.String())
baseURL := c.GetString("base_url")
fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL)
mjResp, _, err := service.DoMidjourneyHttpRequest(c, time.Second*60, fullRequestURL)
@@ -260,7 +260,7 @@ func RelayMidjourneyTaskImageSeed(c *gin.Context) *dto.MidjourneyResponse {
c.Set("channel_id", originTask.ChannelId)
c.Request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", channel.Key))
requestURL := c.Request.URL.String()
requestURL := getMjRequestPath(c.Request.URL.String())
fullRequestURL := fmt.Sprintf("%s%s", channel.GetBaseURL(), requestURL)
midjResponseWithStatus, _, err := service.DoMidjourneyHttpRequest(c, time.Second*30, fullRequestURL)
if err != nil {
@@ -440,7 +440,7 @@ func RelayMidjourneySubmit(c *gin.Context, relayMode int) *dto.MidjourneyRespons
}
//baseURL := common.ChannelBaseURLs[channelType]
requestURL := c.Request.URL.String()
requestURL := getMjRequestPath(c.Request.URL.String())
baseURL := c.GetString("base_url")
@@ -605,3 +605,15 @@ type taskChangeParams struct {
Action string
Index int
}
func getMjRequestPath(path string) string {
requestURL := path
if strings.Contains(requestURL, "/mj-") {
urls := strings.Split(requestURL, "/mj/")
if len(urls) < 2 {
return requestURL
}
requestURL = "/mj/" + urls[1]
}
return requestURL
}
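
Illustrative calls showing what the helper does: it strips a leading mode segment back down to the canonical /mj path and leaves plain paths untouched:

getMjRequestPath("/mj-fast/mj/submit/imagine") // "/mj/submit/imagine"
getMjRequestPath("/mj/submit/imagine")         // unchanged: no "/mj-" present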

View File

@@ -165,21 +165,12 @@ func TextHelper(c *gin.Context) *dto.OpenAIErrorWithStatusCode {
return service.OpenAIErrorWrapper(fmt.Errorf("bad response status code: %d", resp.StatusCode), "bad_response_status_code", resp.StatusCode)
}
usage, openaiErr, sensitiveResp := adaptor.DoResponse(c, resp, relayInfo)
usage, openaiErr := adaptor.DoResponse(c, resp, relayInfo)
if openaiErr != nil {
if sensitiveResp == nil { // no sensitive-word check result
returnPreConsumedQuota(c, relayInfo.TokenId, userQuota, preConsumedQuota)
return openaiErr
} else {
// a sensitive-word result exists: do not refund the pre-consumed quota, keep charging it
postConsumeQuota(c, relayInfo, *textRequest, usage, ratio, preConsumedQuota, userQuota, modelRatio, groupRatio, modelPrice, sensitiveResp)
if constant.StopOnSensitiveEnabled { // whether to return the error directly
return openaiErr
}
return nil
}
returnPreConsumedQuota(c, relayInfo.TokenId, userQuota, preConsumedQuota)
return openaiErr
}
postConsumeQuota(c, relayInfo, *textRequest, usage, ratio, preConsumedQuota, userQuota, modelRatio, groupRatio, modelPrice, nil)
postConsumeQuota(c, relayInfo, *textRequest, usage, ratio, preConsumedQuota, userQuota, modelRatio, groupRatio, modelPrice)
return nil
}
@@ -258,7 +249,7 @@ func returnPreConsumedQuota(c *gin.Context, tokenId int, userQuota int, preConsu
func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, textRequest dto.GeneralOpenAIRequest,
usage *dto.Usage, ratio float64, preConsumedQuota int, userQuota int, modelRatio float64, groupRatio float64,
modelPrice float64, sensitiveResp *dto.SensitiveResponse) {
modelPrice float64) {
useTimeSeconds := time.Now().Unix() - relayInfo.StartTime.Unix()
promptTokens := usage.PromptTokens
@@ -293,9 +284,9 @@ func postConsumeQuota(ctx *gin.Context, relayInfo *relaycommon.RelayInfo, textRe
logContent += fmt.Sprintf("(可能是上游超时)")
common.LogError(ctx, fmt.Sprintf("total tokens is 0, cannot consume quota, userId %d, channelId %d, tokenId %d, model %s pre-consumed quota %d", relayInfo.UserId, relayInfo.ChannelId, relayInfo.TokenId, textRequest.Model, preConsumedQuota))
} else {
if sensitiveResp != nil {
logContent += fmt.Sprintf(",敏感词:%s", strings.Join(sensitiveResp.SensitiveWords, ", "))
}
//if sensitiveResp != nil {
// logContent += fmt.Sprintf(",敏感词:%s", strings.Join(sensitiveResp.SensitiveWords, ", "))
//}
quotaDelta := quota - preConsumedQuota
err := model.PostConsumeTokenQuota(relayInfo.TokenId, userQuota, quotaDelta, preConsumedQuota, true)
if err != nil {

View File

@@ -43,7 +43,16 @@ func SetRelayRouter(router *gin.Engine) {
relayV1Router.DELETE("/models/:model", controller.RelayNotImplemented)
relayV1Router.POST("/moderations", controller.Relay)
}
relayMjRouter := router.Group("/mj")
registerMjRouterGroup(relayMjRouter)
relayMjModeRouter := router.Group("/:mode/mj")
registerMjRouterGroup(relayMjModeRouter)
//relayMjRouter.Use()
}
func registerMjRouterGroup(relayMjRouter *gin.RouterGroup) {
relayMjRouter.GET("/image/:id", relay.RelayMidjourneyImage)
relayMjRouter.Use(middleware.TokenAuth(), middleware.Distribute())
{
@@ -61,5 +70,4 @@ func SetRelayRouter(router *gin.Engine) {
relayMjRouter.POST("/task/list-by-condition", controller.RelayMidjourney)
relayMjRouter.POST("/insight-face/swap", controller.RelayMidjourney)
}
//relayMjRouter.Use()
}
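
For orientation, a hedged sketch of the double registration above, assuming a gin version that, like this project's, lets a static /mj group and a parameterized /:mode/mj group coexist:

package main

import "github.com/gin-gonic/gin"

func main() {
	r := gin.Default()
	register := func(g *gin.RouterGroup) {
		g.POST("/submit/imagine", func(c *gin.Context) {
			// c.Param("mode") is "" for /mj/..., "mj-fast" for /mj-fast/mj/...
			c.String(200, "mode=%q", c.Param("mode"))
		})
	}
	register(r.Group("/mj"))
	register(r.Group("/:mode/mj"))
	_ = r.Run(":3000")
}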

web/.prettierrc.mjs Normal file
View File

@@ -0,0 +1 @@
module.exports = require("@so1ve/prettier-config");

View File

@@ -1 +0,0 @@
module.exports = require("@so1ve/prettier-config");

View File

@@ -471,10 +471,10 @@ const LogsTable = () => {
});
};
const refresh = async (localLogType) => {
const refresh = async () => {
// setLoading(true);
setActivePage(1);
await loadLogs(0, pageSize, localLogType);
await loadLogs(0, pageSize, logType);
};
const copyText = async (text) => {
@@ -635,7 +635,7 @@ const LogsTable = () => {
style={{ width: 120 }}
onChange={(value) => {
setLogType(parseInt(value));
refresh(parseInt(value)).then();
loadLogs(0, pageSize, parseInt(value));
}}
>
<Select.Option value='0'>全部</Select.Option>

View File

@@ -330,21 +330,21 @@ const OperationSetting = () => {
name='CheckSensitiveOnPromptEnabled'
onChange={handleInputChange}
/>
<Form.Checkbox
checked={inputs.CheckSensitiveOnCompletionEnabled === 'true'}
label='启用生成内容检查'
name='CheckSensitiveOnCompletionEnabled'
onChange={handleInputChange}
/>
</Form.Group>
<Form.Group inline>
<Form.Checkbox
checked={inputs.StopOnSensitiveEnabled === 'true'}
label='在检测到屏蔽词时,立刻停止生成,否则替换屏蔽词'
name='StopOnSensitiveEnabled'
onChange={handleInputChange}
/>
{/*<Form.Checkbox*/}
{/* checked={inputs.CheckSensitiveOnCompletionEnabled === 'true'}*/}
{/* label='启用生成内容检查'*/}
{/* name='CheckSensitiveOnCompletionEnabled'*/}
{/* onChange={handleInputChange}*/}
{/*/>*/}
</Form.Group>
{/*<Form.Group inline>*/}
{/* <Form.Checkbox*/}
{/* checked={inputs.StopOnSensitiveEnabled === 'true'}*/}
{/* label='在检测到屏蔽词时,立刻停止生成,否则替换屏蔽词'*/}
{/* name='StopOnSensitiveEnabled'*/}
{/* onChange={handleInputChange}*/}
{/* />*/}
{/*</Form.Group>*/}
{/*<Form.Group>*/}
{/* <Form.Input*/}
{/* label="流模式下缓存队列,默认不缓存,设置越大检测越准确,但是回复会有卡顿感"*/}

View File

@@ -42,6 +42,7 @@ const SystemSetting = () => {
TurnstileSecretKey: '',
RegisterEnabled: '',
EmailDomainRestrictionEnabled: '',
SMTPSSLEnabled: '',
EmailDomainWhitelist: [],
// telegram login
TelegramOAuthEnabled: '',
@@ -98,6 +99,7 @@ const SystemSetting = () => {
case 'TelegramOAuthEnabled':
case 'TurnstileCheckEnabled':
case 'EmailDomainRestrictionEnabled':
case 'SMTPSSLEnabled':
case 'RegisterEnabled':
value = inputs[key] === 'true' ? 'false' : 'true';
break;
@@ -134,7 +136,7 @@ const SystemSetting = () => {
}
if (
name === 'Notice' ||
name.startsWith('SMTP') ||
(name.startsWith('SMTP') && name !== 'SMTPSSLEnabled') ||
name === 'ServerAddress' ||
name === 'EpayId' ||
name === 'EpayKey' ||
@@ -570,6 +572,14 @@ const SystemSetting = () => {
placeholder='敏感信息不会发送到前端显示'
/>
</Form.Group>
<Form.Group widths={3}>
<Form.Checkbox
label='启用SMTP SSL465端口强制开启'
name='SMTPSSLEnabled'
onChange={handleInputChange}
checked={inputs.SMTPSSLEnabled === 'true'}
/>
</Form.Group>
<Form.Button onClick={submitSMTP}>保存 SMTP 设置</Form.Button>
<Divider />
<Header as='h3'>

View File

@@ -8,39 +8,37 @@ export function renderText(text, limit) {
return text;
}
/**
* Render group tags based on the input group string
* @param {string} group - The input group string
* @returns {JSX.Element} - The rendered group tags
*/
export function renderGroup(group) {
if (group === '') {
return <Tag size='large'>default</Tag>;
return <Tag size='large' key='default'>default</Tag>;
}
let groups = group.split(',');
groups.sort();
const tagColors = {
'vip': 'yellow',
'pro': 'yellow',
'svip': 'red',
'premium': 'red'
};
const groups = group.split(',').sort();
return (
<>
{groups.map((group) => {
if (group === 'vip' || group === 'pro') {
return (
<Tag size='large' color='yellow'>
{group}
</Tag>
);
} else if (group === 'svip' || group === 'premium') {
return (
<Tag size='large' color='red'>
{group}
</Tag>
);
}
if (group === 'default') {
return <Tag size='large'>{group}</Tag>;
} else {
return (
<Tag size='large' color={stringToColor(group)}>
{group}
</Tag>
);
}
})}
</>
<span key={group}>
{groups.map((group) => (
<Tag
size='large'
color={tagColors[group] || stringToColor(group)}
key={group}
>
{group}
</Tag>
))}
</span>
);
}

View File

@@ -1,17 +1,21 @@
import React from 'react';
import React, { useEffect, useState } from 'react';
import { Layout, TabPane, Tabs } from '@douyinfe/semi-ui';
import { useNavigate, useLocation } from 'react-router-dom';
import SystemSetting from '../../components/SystemSetting';
import { isRoot } from '../../helpers';
import OtherSetting from '../../components/OtherSetting';
import PersonalSetting from '../../components/PersonalSetting';
import OperationSetting from '../../components/OperationSetting';
import { Layout, TabPane, Tabs } from '@douyinfe/semi-ui';
const Setting = () => {
const navigate = useNavigate();
const location = useLocation();
const [tabActiveKey, setTabActiveKey] = useState('1');
let panes = [
{
tab: '个人设置',
content: <PersonalSetting />,
itemKey: '1',
itemKey: 'personal',
},
];
@@ -19,28 +23,44 @@ const Setting = () => {
panes.push({
tab: '运营设置',
content: <OperationSetting />,
itemKey: '2',
itemKey: 'operation',
});
panes.push({
tab: '系统设置',
content: <SystemSetting />,
itemKey: '3',
itemKey: 'system',
});
panes.push({
tab: '其他设置',
content: <OtherSetting />,
itemKey: '4',
itemKey: 'other',
});
}
const onChangeTab = (key) => {
setTabActiveKey(key);
navigate(`?tab=${key}`);
};
useEffect(() => {
const searchParams = new URLSearchParams(window.location.search);
const tab = searchParams.get('tab');
if (tab) {
setTabActiveKey(tab);
} else {
onChangeTab('personal');
}
}, [location.search]);
return (
<div>
<Layout>
<Layout.Content>
<Tabs type='line' defaultActiveKey='1'>
<Tabs
type='line'
activeKey={tabActiveKey}
onChange={(key) => onChangeTab(key)}
>
{panes.map((pane) => (
<TabPane itemKey={pane.itemKey} tab={pane.tab} key={pane.itemKey}>
{pane.content}
{tabActiveKey === pane.itemKey && pane.content}
</TabPane>
))}
</Tabs>

View File

@@ -223,13 +223,6 @@ const EditUser = (props) => {
placeholder='此项只读,需要用户通过个人设置页面的相关绑定按钮进行绑定,不可直接修改'
readonly
/>
<Input
name='telegram_id'
value={telegram_id}
autoComplete='new-password'
placeholder='此项只读,需要用户通过个人设置页面的相关绑定按钮进行绑定,不可直接修改'
readonly
/>
<div style={{ marginTop: 20 }}>
<Typography.Text>已绑定的邮箱账户</Typography.Text>
</div>
@@ -240,6 +233,16 @@ const EditUser = (props) => {
placeholder='此项只读,需要用户通过个人设置页面的相关绑定按钮进行绑定,不可直接修改'
readonly
/>
<div style={{ marginTop: 20 }}>
<Typography.Text>已绑定的Telegram账户</Typography.Text>
</div>
<Input
name='telegram_id'
value={telegram_id}
autoComplete='new-password'
placeholder='此项只读,需要用户通过个人设置页面的相关绑定按钮进行绑定,不可直接修改'
readonly
/>
</Spin>
</SideSheet>
</>