commit 42569c83c0
parent b373882814
Author: JustSong
Date:   2024-01-21 23:18:32 +08:00

    refactor: refactor config part

53 changed files with 745 additions and 708 deletions
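Every hunk below follows the same two moves: stateless utilities leave the catch-all one-api/common package for one-api/common/helper, and runtime settings leave it for one-api/common/config. A minimal sketch of what the relocated helpers plausibly look like, inferred from the call sites in the diffs — the uuid dependency and the dash-stripping are assumptions, not the repository's confirmed code:

// Package helper: a sketch of the relocated utilities, inferred from
// call sites such as helper.GetUUID() and helper.GetTimestamp().
package helper

import (
    "strings"
    "time"

    "github.com/google/uuid"
)

// GetTimestamp returns the current Unix time in seconds; call sites
// use it to fill the Created field of OpenAI-style responses.
func GetTimestamp() int64 {
    return time.Now().Unix()
}

// GetUUID returns a random identifier; call sites embed it in IDs
// like "chatcmpl-<uuid>", so dashes are stripped here (an assumption).
func GetUUID() string {
    return strings.ReplaceAll(uuid.New().String(), "-", "")
}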

View File

@@ -8,6 +8,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/helper"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
     "one-api/relay/constant"
@@ -51,9 +52,9 @@ func responseAIProxyLibrary2OpenAI(response *LibraryResponse) *openai.TextRespon
         FinishReason: "stop",
     }
     fullTextResponse := openai.TextResponse{
-        Id:      common.GetUUID(),
+        Id:      helper.GetUUID(),
         Object:  "chat.completion",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Choices: []openai.TextResponseChoice{choice},
     }
     return &fullTextResponse
@@ -64,9 +65,9 @@ func documentsAIProxyLibrary(documents []LibraryDocument) *openai.ChatCompletion
     choice.Delta.Content = aiProxyDocuments2Markdown(documents)
     choice.FinishReason = &constant.StopFinishReason
     return &openai.ChatCompletionsStreamResponse{
-        Id:      common.GetUUID(),
+        Id:      helper.GetUUID(),
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   "",
         Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
     }
@@ -76,9 +77,9 @@ func streamResponseAIProxyLibrary2OpenAI(response *LibraryStreamResponse) *opena
     var choice openai.ChatCompletionsStreamResponseChoice
     choice.Delta.Content = response.Content
     return &openai.ChatCompletionsStreamResponse{
-        Id:      common.GetUUID(),
+        Id:      helper.GetUUID(),
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   response.Model,
         Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
     }

View File

@@ -7,6 +7,7 @@ import (
     "io"
     "net/http"
    "one-api/common"
+    "one-api/common/helper"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
     "strings"
@@ -119,7 +120,7 @@ func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse {
     fullTextResponse := openai.TextResponse{
         Id:      response.RequestId,
         Object:  "chat.completion",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Choices: []openai.TextResponseChoice{choice},
         Usage: openai.Usage{
             PromptTokens: response.Usage.InputTokens,
@@ -140,7 +141,7 @@ func streamResponseAli2OpenAI(aliResponse *ChatResponse) *openai.ChatCompletions
     response := openai.ChatCompletionsStreamResponse{
         Id:      aliResponse.RequestId,
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   "qwen",
         Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
     }

View File

@@ -8,6 +8,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/helper"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
     "strings"
@@ -79,9 +80,9 @@ func responseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
         FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason),
     }
     fullTextResponse := openai.TextResponse{
-        Id:      fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
+        Id:      fmt.Sprintf("chatcmpl-%s", helper.GetUUID()),
         Object:  "chat.completion",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Choices: []openai.TextResponseChoice{choice},
     }
     return &fullTextResponse
@@ -89,8 +90,8 @@ func responseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {

 func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, string) {
     responseText := ""
-    responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
-    createdTime := common.GetTimestamp()
+    responseId := fmt.Sprintf("chatcmpl-%s", helper.GetUUID())
+    createdTime := helper.GetTimestamp()
     scanner := bufio.NewScanner(resp.Body)
     scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
         if atEOF && len(data) == 0 {
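The StreamHandler above feeds Claude's event stream through a bufio.Scanner with a custom split function; the hunk cuts off after the first line of that closure. A self-contained sketch of the typical shape of such a splitter — the "\r\n" delimiter is an assumption for illustration, not necessarily what the repository uses:

package main

import (
    "bufio"
    "bytes"
    "fmt"
    "strings"
)

// splitOnCRLF is a bufio.SplitFunc that yields one token per
// "\r\n"-terminated chunk (delimiter chosen for illustration).
func splitOnCRLF(data []byte, atEOF bool) (advance int, token []byte, err error) {
    if atEOF && len(data) == 0 {
        return 0, nil, nil // no more input: stop scanning
    }
    if i := bytes.Index(data, []byte("\r\n")); i >= 0 {
        return i + 2, data[:i], nil // consume the delimiter, emit the chunk
    }
    if atEOF {
        return len(data), data, nil // final unterminated chunk
    }
    return 0, nil, nil // request more data
}

func main() {
    scanner := bufio.NewScanner(strings.NewReader("event: a\r\nevent: b\r\n"))
    scanner.Split(splitOnCRLF)
    for scanner.Scan() {
        fmt.Println(scanner.Text()) // "event: a", then "event: b"
    }
}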

View File

@@ -7,6 +7,8 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/config"
+    "one-api/common/helper"
     "one-api/common/image"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
@@ -29,19 +31,19 @@ func ConvertGeminiRequest(textRequest openai.GeneralOpenAIRequest) *GeminiChatRe
         SafetySettings: []GeminiChatSafetySettings{
             {
                 Category:  "HARM_CATEGORY_HARASSMENT",
-                Threshold: common.GeminiSafetySetting,
+                Threshold: config.GeminiSafetySetting,
             },
             {
                 Category:  "HARM_CATEGORY_HATE_SPEECH",
-                Threshold: common.GeminiSafetySetting,
+                Threshold: config.GeminiSafetySetting,
             },
             {
                 Category:  "HARM_CATEGORY_SEXUALLY_EXPLICIT",
-                Threshold: common.GeminiSafetySetting,
+                Threshold: config.GeminiSafetySetting,
             },
             {
                 Category:  "HARM_CATEGORY_DANGEROUS_CONTENT",
-                Threshold: common.GeminiSafetySetting,
+                Threshold: config.GeminiSafetySetting,
             },
         },
         GenerationConfig: GeminiChatGenerationConfig{
@@ -152,9 +154,9 @@ type GeminiChatPromptFeedback struct {

 func responseGeminiChat2OpenAI(response *GeminiChatResponse) *openai.TextResponse {
     fullTextResponse := openai.TextResponse{
-        Id:      fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
+        Id:      fmt.Sprintf("chatcmpl-%s", helper.GetUUID()),
         Object:  "chat.completion",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Choices: make([]openai.TextResponseChoice, 0, len(response.Candidates)),
     }
     for i, candidate := range response.Candidates {
@@ -230,9 +232,9 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
     var choice openai.ChatCompletionsStreamResponseChoice
     choice.Delta.Content = dummy.Content
     response := openai.ChatCompletionsStreamResponse{
-        Id:      fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
+        Id:      fmt.Sprintf("chatcmpl-%s", helper.GetUUID()),
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   "gemini-pro",
         Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
     }
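Every common.X → config.X change in this commit points at the same new package. A sketch of what one-api/common/config plausibly declares, based only on the variables referenced in these diffs — types are inferred from usage and the default values are assumptions:

// Package config: a sketch of the relocated settings, inferred from
// call sites in this commit. Defaults shown here are assumptions.
package config

var (
    // GeminiSafetySetting fills the Threshold of every Gemini safety
    // category (string, per the GeminiChatSafetySettings struct).
    GeminiSafetySetting = "BLOCK_NONE"

    // ApproximateTokenEnabled switches token counting to the fast
    // character-count heuristic in getTokenNum.
    ApproximateTokenEnabled = false

    // PreConsumedQuota is the default quota reserved before a relay
    // request is forwarded (int: it is added to prompt token counts).
    PreConsumedQuota = 500

    // RelayTimeout, in seconds, configures the shared HTTP client;
    // zero means no timeout.
    RelayTimeout = 0

    // These gate automatic channel health toggling in
    // ShouldDisableChannel and ShouldEnableChannel.
    AutomaticDisableChannelEnabled = false
    AutomaticEnableChannelEnabled  = false
)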

View File

@@ -7,6 +7,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/helper"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
     "one-api/relay/constant"
@@ -72,8 +73,8 @@ func streamResponsePaLM2OpenAI(palmResponse *PaLMChatResponse) *openai.ChatCompl

 func PaLMStreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, string) {
     responseText := ""
-    responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
-    createdTime := common.GetTimestamp()
+    responseId := fmt.Sprintf("chatcmpl-%s", helper.GetUUID())
+    createdTime := helper.GetTimestamp()
     dataChan := make(chan string)
     stopChan := make(chan bool)
     go func() {
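PaLMStreamHandler pumps upstream chunks into dataChan from a goroutine and signals completion on stopChan; the hunk ends just as that goroutine opens. A minimal self-contained sketch of the producer/consumer pattern — the names mirror the diff, but the bodies are illustrative, not the repository's code:

package main

import "fmt"

func main() {
    dataChan := make(chan string)
    stopChan := make(chan bool)

    // Producer: in the real handler this reads the upstream HTTP
    // response and forwards each decoded chunk.
    go func() {
        for _, chunk := range []string{"Hel", "lo"} {
            dataChan <- chunk
        }
        stopChan <- true // signal end of stream
    }()

    // Consumer: in the real handler this is gin's streaming loop
    // writing SSE events back to the client.
    for {
        select {
        case chunk := <-dataChan:
            fmt.Print(chunk)
        case <-stopChan:
            fmt.Println()
            return
        }
    }
}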

View File

@@ -6,6 +6,7 @@ import (
     "github.com/pkoukk/tiktoken-go"
     "math"
     "one-api/common"
+    "one-api/common/config"
     "one-api/common/image"
     "one-api/common/logger"
     "strings"
@@ -56,7 +57,7 @@ func getTokenEncoder(model string) *tiktoken.Tiktoken {
 }

 func getTokenNum(tokenEncoder *tiktoken.Tiktoken, text string) int {
-    if common.ApproximateTokenEnabled {
+    if config.ApproximateTokenEnabled {
         return int(float64(len(text)) * 0.38)
     }
     return len(tokenEncoder.Encode(text, nil, nil))
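The heuristic path above estimates roughly 0.38 tokens per byte, so a 1,000-character English prompt counts as 380 tokens without touching the tokenizer; the exact path pays for a full BPE encode. A small sketch of the trade-off, using tiktoken-go's documented EncodingForModel/Encode API (the sample text and model name are arbitrary):

package main

import (
    "fmt"

    "github.com/pkoukk/tiktoken-go"
)

// approxTokens mirrors the heuristic above: ~0.38 tokens per byte,
// trading accuracy for zero encoding cost.
func approxTokens(text string) int {
    return int(float64(len(text)) * 0.38)
}

func main() {
    text := "The quick brown fox jumps over the lazy dog."
    fmt.Println("approximate:", approxTokens(text))

    // The exact count pays for a full BPE encode.
    enc, err := tiktoken.EncodingForModel("gpt-3.5-turbo")
    if err != nil {
        panic(err)
    }
    fmt.Println("exact:", len(enc.Encode(text, nil, nil)))
}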

View File

@@ -12,6 +12,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/helper"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
     "one-api/relay/constant"
@@ -47,9 +48,9 @@ func ConvertRequest(request openai.GeneralOpenAIRequest) *ChatRequest {
         stream = 1
     }
     return &ChatRequest{
-        Timestamp:   common.GetTimestamp(),
-        Expired:     common.GetTimestamp() + 24*60*60,
-        QueryID:     common.GetUUID(),
+        Timestamp:   helper.GetTimestamp(),
+        Expired:     helper.GetTimestamp() + 24*60*60,
+        QueryID:     helper.GetUUID(),
         Temperature: request.Temperature,
         TopP:        request.TopP,
         Stream:      stream,
@@ -60,7 +61,7 @@ func ConvertRequest(request openai.GeneralOpenAIRequest) *ChatRequest {
 func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse {
     fullTextResponse := openai.TextResponse{
         Object:  "chat.completion",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Usage:   response.Usage,
     }
     if len(response.Choices) > 0 {
@@ -80,7 +81,7 @@ func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse {
 func streamResponseTencent2OpenAI(TencentResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
     response := openai.ChatCompletionsStreamResponse{
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   "tencent-hunyuan",
     }
     if len(TencentResponse.Choices) > 0 {

View File

@@ -12,6 +12,7 @@ import (
     "net/http"
     "net/url"
     "one-api/common"
+    "one-api/common/helper"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
     "one-api/relay/constant"
@@ -69,7 +70,7 @@ func responseXunfei2OpenAI(response *ChatResponse) *openai.TextResponse {
     }
     fullTextResponse := openai.TextResponse{
         Object:  "chat.completion",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Choices: []openai.TextResponseChoice{choice},
         Usage:   response.Payload.Usage.Text,
     }
@@ -91,7 +92,7 @@ func streamResponseXunfei2OpenAI(xunfeiResponse *ChatResponse) *openai.ChatCompl
     }
     response := openai.ChatCompletionsStreamResponse{
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   "SparkDesk",
         Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
     }

View File

@@ -8,6 +8,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/helper"
     "one-api/common/logger"
     "one-api/relay/channel/openai"
     "one-api/relay/constant"
@@ -102,7 +103,7 @@ func responseZhipu2OpenAI(response *Response) *openai.TextResponse {
     fullTextResponse := openai.TextResponse{
         Id:      response.Data.TaskId,
         Object:  "chat.completion",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Choices: make([]openai.TextResponseChoice, 0, len(response.Data.Choices)),
         Usage:   response.Data.Usage,
     }
@@ -128,7 +129,7 @@ func streamResponseZhipu2OpenAI(zhipuResponse string) *openai.ChatCompletionsStr
     choice.Delta.Content = zhipuResponse
     response := openai.ChatCompletionsStreamResponse{
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   "chatglm",
         Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
     }
@@ -142,7 +143,7 @@ func streamMetaResponseZhipu2OpenAI(zhipuResponse *StreamMetaResponse) (*openai.
     response := openai.ChatCompletionsStreamResponse{
         Id:      zhipuResponse.RequestId,
         Object:  "chat.completion.chunk",
-        Created: common.GetTimestamp(),
+        Created: helper.GetTimestamp(),
         Model:   "chatglm",
         Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
     }

View File

@@ -11,6 +11,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/config"
     "one-api/common/logger"
     "one-api/model"
     "one-api/relay/channel/openai"
@@ -54,7 +55,7 @@ func RelayAudioHelper(c *gin.Context, relayMode int) *openai.ErrorWithStatusCode
         preConsumedQuota = int(float64(len(ttsRequest.Input)) * ratio)
         quota = preConsumedQuota
     default:
-        preConsumedQuota = int(float64(common.PreConsumedQuota) * ratio)
+        preConsumedQuota = int(float64(config.PreConsumedQuota) * ratio)
     }
     userQuota, err := model.CacheGetUserQuota(userId)
     if err != nil {
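The two branches above size the reservation differently: a TTS request pre-consumes in proportion to its input — for example, a 200-character input at a model ratio of 5 reserves 200 × 5 = 1,000 quota — while every other audio mode reserves the flat config.PreConsumedQuota scaled by the same ratio, with the difference settled once actual usage is known.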

View File

@@ -8,6 +8,7 @@ import (
     "math"
     "net/http"
     "one-api/common"
+    "one-api/common/config"
     "one-api/common/logger"
     "one-api/model"
     "one-api/relay/channel/openai"
@@ -52,7 +53,7 @@ func RelayTextHelper(c *gin.Context, relayMode int) *openai.ErrorWithStatusCode
     case constant.RelayModeModerations:
         promptTokens = openai.CountTokenInput(textRequest.Input, textRequest.Model)
     }
-    preConsumedTokens := common.PreConsumedQuota
+    preConsumedTokens := config.PreConsumedQuota
     if textRequest.MaxTokens != 0 {
         preConsumedTokens = promptTokens + textRequest.MaxTokens
     }
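The fallback logic here reads: without max_tokens the worst case is unknown, so the flat config.PreConsumedQuota is reserved; once the client sets max_tokens the worst case is bounded, so the reservation tightens to promptTokens + MaxTokens. A 120-token prompt with max_tokens set to 1000, for example, reserves 1,120 tokens up front instead of the flat default.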

View File

@@ -9,6 +9,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/helper"
     "one-api/relay/channel/aiproxy"
     "one-api/relay/channel/ali"
     "one-api/relay/channel/anthropic"
@@ -66,7 +67,7 @@ func GetRequestURL(requestURL string, apiType int, relayMode int, meta *util.Rel
     case constant.APITypePaLM:
         fullRequestURL = fmt.Sprintf("%s/v1beta2/models/chat-bison-001:generateMessage", meta.BaseURL)
     case constant.APITypeGemini:
-        version := common.AssignOrDefault(meta.APIVersion, "v1")
+        version := helper.AssignOrDefault(meta.APIVersion, "v1")
        action := "generateContent"
         if textRequest.Stream {
             action = "streamGenerateContent"
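helper.AssignOrDefault picks the configured Gemini API version or falls back to "v1". A plausible sketch matching the call site — the signature is inferred, and the repository may implement it differently (e.g., as a generic):

package main

import "fmt"

// AssignOrDefault returns value unless it is empty, in which case it
// returns defaultValue. Signature inferred from the call site
// helper.AssignOrDefault(meta.APIVersion, "v1").
func AssignOrDefault(value string, defaultValue string) string {
    if value == "" {
        return defaultValue
    }
    return value
}

func main() {
    fmt.Println(AssignOrDefault("", "v1"))       // v1
    fmt.Println(AssignOrDefault("v1beta", "v1")) // v1beta
}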

View File

@@ -7,6 +7,7 @@ import (
     "io"
     "net/http"
     "one-api/common"
+    "one-api/common/config"
     "one-api/common/logger"
     "one-api/model"
     "one-api/relay/channel/openai"
@@ -17,7 +18,7 @@ import (
 )

 func ShouldDisableChannel(err *openai.Error, statusCode int) bool {
-    if !common.AutomaticDisableChannelEnabled {
+    if !config.AutomaticDisableChannelEnabled {
         return false
     }
     if err == nil {
@@ -33,7 +34,7 @@ func ShouldDisableChannel(err *openai.Error, statusCode int) bool {
 }

 func ShouldEnableChannel(err error, openAIErr *openai.Error) bool {
-    if !common.AutomaticEnableChannelEnabled {
+    if !config.AutomaticEnableChannelEnabled {
         return false
     }
     if err != nil {
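Both toggles now read from config and short-circuit before any error inspection. A condensed, self-contained sketch of that gating pattern — the Error shape, the status code, and the error type checked are illustrative assumptions, not an exact copy of the truncated function bodies:

package main

import (
    "fmt"
    "net/http"
)

// Error stands in for openai.Error; the field name is an assumption.
type Error struct {
    Type string
}

var automaticDisableChannelEnabled = true // stand-in for the config flag

// shouldDisableChannel: the feature flag decides first, then the
// upstream error does. The concrete checks below are assumptions.
func shouldDisableChannel(err *Error, statusCode int) bool {
    if !automaticDisableChannelEnabled {
        return false // toggled off: never auto-disable
    }
    if err == nil {
        return false
    }
    if statusCode == http.StatusUnauthorized {
        return true // e.g. a revoked upstream key (assumption)
    }
    return err.Type == "insufficient_quota" // assumption
}

func main() {
    fmt.Println(shouldDisableChannel(&Error{Type: "insufficient_quota"}, http.StatusOK)) // true
}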

View File

@@ -2,7 +2,7 @@ package util

 import (
     "net/http"
-    "one-api/common"
+    "one-api/common/config"
     "time"
 )
@@ -10,11 +10,11 @@ var HTTPClient *http.Client
 var ImpatientHTTPClient *http.Client

 func init() {
-    if common.RelayTimeout == 0 {
+    if config.RelayTimeout == 0 {
         HTTPClient = &http.Client{}
     } else {
         HTTPClient = &http.Client{
-            Timeout: time.Duration(common.RelayTimeout) * time.Second,
+            Timeout: time.Duration(config.RelayTimeout) * time.Second,
         }
     }
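The branch above exists because an http.Client with a zero Timeout never times out, so config.RelayTimeout == 0 means "no deadline" rather than "instant failure". A self-contained sketch of the same decision, with a local parameter standing in for config.RelayTimeout:

package main

import (
    "fmt"
    "net/http"
    "time"
)

// newRelayClient mirrors the init logic above: zero keeps the
// client's default behavior (no timeout at all); anything else sets
// a hard per-request deadline.
func newRelayClient(relayTimeout int) *http.Client {
    if relayTimeout == 0 {
        return &http.Client{} // zero Timeout means no timeout
    }
    return &http.Client{Timeout: time.Duration(relayTimeout) * time.Second}
}

func main() {
    client := newRelayClient(30)
    fmt.Println(client.Timeout) // 30s
}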