refactor: remove consumeQuota
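
Before this change, RelayImageHelper read a consume_quota flag from the gin context and only parsed the request body, enforced the balance check, and settled usage when that flag was set; with the flag removed, those paths run unconditionally (post-consume is still skipped on non-200 upstream responses). A minimal sketch of the gate pattern being removed, assuming a plain gin handler with hypothetical stand-ins (handleImageOld, handleImageNew, billUser) rather than the project's real helpers:

package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

// billUser is a hypothetical stand-in for the quota bookkeeping the real
// handler performs via model.PostConsumeTokenQuota and related calls.
func billUser(userId int, quota int) {}

// Old shape: body parsing and billing were wrapped in "if consumeQuota".
func handleImageOld(c *gin.Context) {
	consumeQuota := c.GetBool("consume_quota")
	var req map[string]any
	if consumeQuota {
		if err := c.ShouldBindJSON(&req); err != nil {
			c.JSON(http.StatusBadRequest, gin.H{"error": "bind_request_body_failed"})
			return
		}
	}
	// ... relay the request upstream ...
	if consumeQuota {
		billUser(c.GetInt("id"), 1000)
	}
	c.Status(http.StatusOK)
}

// New shape after the refactor: the flag is gone, so parsing and billing
// always run.
func handleImageNew(c *gin.Context) {
	var req map[string]any
	if err := c.ShouldBindJSON(&req); err != nil {
		c.JSON(http.StatusBadRequest, gin.H{"error": "bind_request_body_failed"})
		return
	}
	// ... relay the request upstream ...
	billUser(c.GetInt("id"), 1000)
	c.Status(http.StatusOK)
}

func main() {
	r := gin.Default()
	r.POST("/old", handleImageOld)
	r.POST("/new", handleImageNew)
	_ = r.Run(":8080")
}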

CaIon 2024-03-13 22:30:10 +08:00
parent d3399d68f6
commit 9b2e5c2978


@@ -24,16 +24,13 @@ func RelayImageHelper(c *gin.Context, relayMode int) *dto.OpenAIErrorWithStatusC
 	channelType := c.GetInt("channel")
 	channelId := c.GetInt("channel_id")
 	userId := c.GetInt("id")
-	consumeQuota := c.GetBool("consume_quota")
 	group := c.GetString("group")
 	startTime := time.Now()
 	var imageRequest dto.ImageRequest
-	if consumeQuota {
-		err := common.UnmarshalBodyReusable(c, &imageRequest)
-		if err != nil {
-			return service.OpenAIErrorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
-		}
+	err := common.UnmarshalBodyReusable(c, &imageRequest)
+	if err != nil {
+		return service.OpenAIErrorWrapper(err, "bind_request_body_failed", http.StatusBadRequest)
 	}
 	if imageRequest.Model == "" {
@@ -136,7 +133,7 @@ func RelayImageHelper(c *gin.Context, relayMode int) *dto.OpenAIErrorWithStatusC
 	quota := int(ratio*sizeRatio*qualityRatio*1000) * imageRequest.N
-	if consumeQuota && userQuota-quota < 0 {
+	if userQuota-quota < 0 {
 		return service.OpenAIErrorWrapper(errors.New("user quota is not enough"), "insufficient_user_quota", http.StatusForbidden)
 	}
@@ -176,46 +173,42 @@ func RelayImageHelper(c *gin.Context, relayMode int) *dto.OpenAIErrorWithStatusC
 	var textResponse dto.ImageResponse
 	defer func(ctx context.Context) {
 		useTimeSeconds := time.Now().Unix() - startTime.Unix()
-		if consumeQuota {
-			if resp.StatusCode != http.StatusOK {
-				return
-			}
-			err := model.PostConsumeTokenQuota(tokenId, userQuota, quota, 0, true)
-			if err != nil {
-				common.SysError("error consuming token remain quota: " + err.Error())
-			}
-			err = model.CacheUpdateUserQuota(userId)
-			if err != nil {
-				common.SysError("error update user quota cache: " + err.Error())
-			}
-			if quota != 0 {
-				tokenName := c.GetString("token_name")
-				logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio)
-				model.RecordConsumeLog(ctx, userId, channelId, 0, 0, imageRequest.Model, tokenName, quota, logContent, tokenId, userQuota, int(useTimeSeconds), false)
-				model.UpdateUserUsedQuotaAndRequestCount(userId, quota)
-				channelId := c.GetInt("channel_id")
-				model.UpdateChannelUsedQuota(channelId, quota)
-			}
+		if resp.StatusCode != http.StatusOK {
+			return
+		}
+		err := model.PostConsumeTokenQuota(tokenId, userQuota, quota, 0, true)
+		if err != nil {
+			common.SysError("error consuming token remain quota: " + err.Error())
+		}
+		err = model.CacheUpdateUserQuota(userId)
+		if err != nil {
+			common.SysError("error update user quota cache: " + err.Error())
+		}
+		if quota != 0 {
+			tokenName := c.GetString("token_name")
+			logContent := fmt.Sprintf("模型倍率 %.2f,分组倍率 %.2f", modelRatio, groupRatio)
+			model.RecordConsumeLog(ctx, userId, channelId, 0, 0, imageRequest.Model, tokenName, quota, logContent, tokenId, userQuota, int(useTimeSeconds), false)
+			model.UpdateUserUsedQuotaAndRequestCount(userId, quota)
+			channelId := c.GetInt("channel_id")
+			model.UpdateChannelUsedQuota(channelId, quota)
 		}
 	}(c.Request.Context())
-	if consumeQuota {
-		responseBody, err := io.ReadAll(resp.Body)
-		if err != nil {
-			return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
-		}
-		err = resp.Body.Close()
-		if err != nil {
-			return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
-		}
-		err = json.Unmarshal(responseBody, &textResponse)
-		if err != nil {
-			return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
-		}
-		resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
+	responseBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
+	err = resp.Body.Close()
+	if err != nil {
+		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError)
+	}
+	err = json.Unmarshal(responseBody, &textResponse)
+	if err != nil {
+		return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+	}
+	resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
 	for k, v := range resp.Header {
 		c.Writer.Header().Set(k, v[0])