refactor: refactor relay part (#957)

* refactor: refactor relay part

* refactor: refactor config part
Author: JustSong
Date: 2024-01-21 23:21:42 +08:00
Committed by: GitHub
Parent: e2ed0399f0
Commit: 2d760d4a01
81 changed files with 1795 additions and 1459 deletions

File: relay/channel/aiproxy (new file)

@@ -0,0 +1,22 @@
package aiproxy
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/aiproxy

@@ -8,6 +8,8 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/helper"
"one-api/common/logger"
"one-api/relay/channel/openai"
"one-api/relay/constant"
"strconv"
@@ -50,9 +52,9 @@ func responseAIProxyLibrary2OpenAI(response *LibraryResponse) *openai.TextRespon
FinishReason: "stop",
}
fullTextResponse := openai.TextResponse{
Id: common.GetUUID(),
Id: helper.GetUUID(),
Object: "chat.completion",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
}
return &fullTextResponse
@@ -63,9 +65,9 @@ func documentsAIProxyLibrary(documents []LibraryDocument) *openai.ChatCompletion
choice.Delta.Content = aiProxyDocuments2Markdown(documents)
choice.FinishReason = &constant.StopFinishReason
return &openai.ChatCompletionsStreamResponse{
Id: common.GetUUID(),
Id: helper.GetUUID(),
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: "",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
@@ -75,9 +77,9 @@ func streamResponseAIProxyLibrary2OpenAI(response *LibraryStreamResponse) *opena
var choice openai.ChatCompletionsStreamResponseChoice
choice.Delta.Content = response.Content
return &openai.ChatCompletionsStreamResponse{
Id: common.GetUUID(),
Id: helper.GetUUID(),
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: response.Model,
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
@@ -122,7 +124,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
var AIProxyLibraryResponse LibraryStreamResponse
err := json.Unmarshal([]byte(data), &AIProxyLibraryResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
if len(AIProxyLibraryResponse.Documents) != 0 {
@@ -131,7 +133,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
response := streamResponseAIProxyLibrary2OpenAI(&AIProxyLibraryResponse)
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
@@ -140,7 +142,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
response := documentsAIProxyLibrary(documents)
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
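
Throughout this diff, UUID and timestamp helpers move from one-api/common into one-api/common/helper, and logging into one-api/common/logger. The helper package itself is not shown, but the call sites pin down its surface; a sketch with signatures inferred from the diff (bodies illustrative only, assuming github.com/google/uuid):

package helper

import (
	"strings"
	"time"

	"github.com/google/uuid"
)

// GetUUID returns a fresh UUID; stripping the dashes matches how the IDs
// are embedded in chatcmpl-style identifiers (illustrative body).
func GetUUID() string {
	return strings.ReplaceAll(uuid.New().String(), "-", "")
}

// GetTimestamp returns the current Unix time in seconds; the diff assigns
// it to the Created field of OpenAI-style responses.
func GetTimestamp() int64 {
	return time.Now().Unix()
}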

File: relay/channel/ali (new file)

@@ -0,0 +1,22 @@
package ali
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/ali

@@ -7,6 +7,8 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/helper"
"one-api/common/logger"
"one-api/relay/channel/openai"
"strings"
)
@@ -118,7 +120,7 @@ func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse {
fullTextResponse := openai.TextResponse{
Id: response.RequestId,
Object: "chat.completion",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
Usage: openai.Usage{
PromptTokens: response.Usage.InputTokens,
@@ -139,7 +141,7 @@ func streamResponseAli2OpenAI(aliResponse *ChatResponse) *openai.ChatCompletions
response := openai.ChatCompletionsStreamResponse{
Id: aliResponse.RequestId,
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: "qwen",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
@@ -185,7 +187,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
var aliResponse ChatResponse
err := json.Unmarshal([]byte(data), &aliResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
if aliResponse.Usage.OutputTokens != 0 {
@@ -198,7 +200,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
//lastResponseText = aliResponse.Output.Text
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})

File: relay/channel/anthropic (new file)

@@ -0,0 +1,22 @@
package anthropic
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/anthropic

@@ -8,6 +8,8 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/helper"
"one-api/common/logger"
"one-api/relay/channel/openai"
"strings"
)
@@ -78,9 +80,9 @@ func responseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
FinishReason: stopReasonClaude2OpenAI(claudeResponse.StopReason),
}
fullTextResponse := openai.TextResponse{
Id: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Id: fmt.Sprintf("chatcmpl-%s", helper.GetUUID()),
Object: "chat.completion",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
}
return &fullTextResponse
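
The stopReasonClaude2OpenAI call above translates Anthropic stop reasons into OpenAI finish reasons; the function body sits outside this hunk. A plausible sketch, assuming the legacy Claude completion API's reason strings:

// Illustrative only; the actual mapping is not shown in this diff.
func stopReasonClaude2OpenAI(reason string) string {
	switch reason {
	case "stop_sequence":
		return "stop"
	case "max_tokens":
		return "length"
	default:
		return reason
	}
}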
@@ -88,8 +90,8 @@ func responseClaude2OpenAI(claudeResponse *Response) *openai.TextResponse {
func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, string) {
responseText := ""
responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
createdTime := common.GetTimestamp()
responseId := fmt.Sprintf("chatcmpl-%s", helper.GetUUID())
createdTime := helper.GetTimestamp()
scanner := bufio.NewScanner(resp.Body)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
@@ -125,7 +127,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
var claudeResponse Response
err := json.Unmarshal([]byte(data), &claudeResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
responseText += claudeResponse.Completion
@@ -134,7 +136,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
response.Created = createdTime
jsonStr, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})

File: relay/channel/baidu (new file)

@@ -0,0 +1,22 @@
package baidu
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/baidu

@@ -9,6 +9,7 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/logger"
"one-api/relay/channel/openai"
"one-api/relay/constant"
"one-api/relay/util"
@@ -19,49 +20,49 @@ import (
// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/flfmc9do2
type BaiduTokenResponse struct {
type TokenResponse struct {
ExpiresIn int `json:"expires_in"`
AccessToken string `json:"access_token"`
}
type BaiduMessage struct {
type Message struct {
Role string `json:"role"`
Content string `json:"content"`
}
type BaiduChatRequest struct {
Messages []BaiduMessage `json:"messages"`
Stream bool `json:"stream"`
UserId string `json:"user_id,omitempty"`
type ChatRequest struct {
Messages []Message `json:"messages"`
Stream bool `json:"stream"`
UserId string `json:"user_id,omitempty"`
}
type BaiduError struct {
type Error struct {
ErrorCode int `json:"error_code"`
ErrorMsg string `json:"error_msg"`
}
var baiduTokenStore sync.Map
func ConvertRequest(request openai.GeneralOpenAIRequest) *BaiduChatRequest {
messages := make([]BaiduMessage, 0, len(request.Messages))
func ConvertRequest(request openai.GeneralOpenAIRequest) *ChatRequest {
messages := make([]Message, 0, len(request.Messages))
for _, message := range request.Messages {
if message.Role == "system" {
messages = append(messages, BaiduMessage{
messages = append(messages, Message{
Role: "user",
Content: message.StringContent(),
})
messages = append(messages, BaiduMessage{
messages = append(messages, Message{
Role: "assistant",
Content: "Okay",
})
} else {
messages = append(messages, BaiduMessage{
messages = append(messages, Message{
Role: message.Role,
Content: message.StringContent(),
})
}
}
return &BaiduChatRequest{
return &ChatRequest{
Messages: messages,
Stream: request.Stream,
}
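
Dropping the Baidu prefix from these type names leans on Go package qualification: external callers already write baidu.X, so the prefix only stuttered. A hypothetical caller showing the effect (buildBaiduRequest is not part of this commit):

package relay // illustrative location

import (
	"one-api/relay/channel/baidu"
	"one-api/relay/channel/openai"
)

// With the rename, the qualified name reads baidu.ChatRequest instead of
// the old baidu.BaiduChatRequest.
func buildBaiduRequest(textRequest openai.GeneralOpenAIRequest) *baidu.ChatRequest {
	return baidu.ConvertRequest(textRequest)
}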
@@ -160,7 +161,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
var baiduResponse ChatStreamResponse
err := json.Unmarshal([]byte(data), &baiduResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
if baiduResponse.Usage.TotalTokens != 0 {
@@ -171,7 +172,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
response := streamResponseBaidu2OpenAI(&baiduResponse)
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})

File: relay/channel/baidu (type definitions)

@@ -13,7 +13,7 @@ type ChatResponse struct {
IsTruncated bool `json:"is_truncated"`
NeedClearHistory bool `json:"need_clear_history"`
Usage openai.Usage `json:"usage"`
BaiduError
Error
}
type ChatStreamResponse struct {
@@ -38,7 +38,7 @@ type EmbeddingResponse struct {
Created int64 `json:"created"`
Data []EmbeddingData `json:"data"`
Usage openai.Usage `json:"usage"`
BaiduError
Error
}
type AccessToken struct {

File: relay/channel/google (new file)

@@ -0,0 +1,22 @@
package google
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/google (Gemini)

@@ -7,7 +7,10 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/config"
"one-api/common/helper"
"one-api/common/image"
"one-api/common/logger"
"one-api/relay/channel/openai"
"one-api/relay/constant"
"strings"
@@ -28,19 +31,19 @@ func ConvertGeminiRequest(textRequest openai.GeneralOpenAIRequest) *GeminiChatRe
SafetySettings: []GeminiChatSafetySettings{
{
Category: "HARM_CATEGORY_HARASSMENT",
Threshold: common.GeminiSafetySetting,
Threshold: config.GeminiSafetySetting,
},
{
Category: "HARM_CATEGORY_HATE_SPEECH",
Threshold: common.GeminiSafetySetting,
Threshold: config.GeminiSafetySetting,
},
{
Category: "HARM_CATEGORY_SEXUALLY_EXPLICIT",
Threshold: common.GeminiSafetySetting,
Threshold: config.GeminiSafetySetting,
},
{
Category: "HARM_CATEGORY_DANGEROUS_CONTENT",
Threshold: common.GeminiSafetySetting,
Threshold: config.GeminiSafetySetting,
},
},
GenerationConfig: GeminiChatGenerationConfig{
@@ -151,9 +154,9 @@ type GeminiChatPromptFeedback struct {
func responseGeminiChat2OpenAI(response *GeminiChatResponse) *openai.TextResponse {
fullTextResponse := openai.TextResponse{
Id: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Id: fmt.Sprintf("chatcmpl-%s", helper.GetUUID()),
Object: "chat.completion",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Choices: make([]openai.TextResponseChoice, 0, len(response.Candidates)),
}
for i, candidate := range response.Candidates {
@@ -229,15 +232,15 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
var choice openai.ChatCompletionsStreamResponseChoice
choice.Delta.Content = dummy.Content
response := openai.ChatCompletionsStreamResponse{
Id: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
Id: fmt.Sprintf("chatcmpl-%s", helper.GetUUID()),
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: "gemini-pro",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
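
Earlier in this file, all four harm categories share the single threshold held in config.GeminiSafetySetting. Going by Google's documented HarmBlockThreshold enum, that string should be one of BLOCK_NONE, BLOCK_ONLY_HIGH, BLOCK_MEDIUM_AND_ABOVE, or BLOCK_LOW_AND_ABOVE; a hypothetical load-time guard (not in this commit) could enforce that:

// validGeminiThresholds mirrors Google's HarmBlockThreshold values; the
// guard itself is illustrative.
var validGeminiThresholds = map[string]bool{
	"BLOCK_NONE":             true,
	"BLOCK_ONLY_HIGH":        true,
	"BLOCK_MEDIUM_AND_ABOVE": true,
	"BLOCK_LOW_AND_ABOVE":    true,
}

func isValidGeminiSafetySetting(s string) bool {
	return validGeminiThresholds[s]
}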

File: relay/channel/google (PaLM)

@@ -7,6 +7,8 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/helper"
"one-api/common/logger"
"one-api/relay/channel/openai"
"one-api/relay/constant"
)
@@ -71,27 +73,27 @@ func streamResponsePaLM2OpenAI(palmResponse *PaLMChatResponse) *openai.ChatCompl
func PaLMStreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, string) {
responseText := ""
responseId := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
createdTime := common.GetTimestamp()
responseId := fmt.Sprintf("chatcmpl-%s", helper.GetUUID())
createdTime := helper.GetTimestamp()
dataChan := make(chan string)
stopChan := make(chan bool)
go func() {
responseBody, err := io.ReadAll(resp.Body)
if err != nil {
common.SysError("error reading stream response: " + err.Error())
logger.SysError("error reading stream response: " + err.Error())
stopChan <- true
return
}
err = resp.Body.Close()
if err != nil {
common.SysError("error closing stream response: " + err.Error())
logger.SysError("error closing stream response: " + err.Error())
stopChan <- true
return
}
var palmResponse PaLMChatResponse
err = json.Unmarshal(responseBody, &palmResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
stopChan <- true
return
}
@@ -103,7 +105,7 @@ func PaLMStreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithSt
}
jsonResponse, err := json.Marshal(fullTextResponse)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
stopChan <- true
return
}

File: relay/channel (new file)

@@ -0,0 +1,15 @@
package channel
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor interface {
GetRequestURL() string
Auth(c *gin.Context) error
ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error)
DoRequest(request *openai.GeneralOpenAIRequest) error
DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error)
}
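
This interface is the pivot of the refactor: each channel package gains an Adaptor, so the relay controller can drive every upstream through one call sequence. A sketch of that sequence under the signatures above; the glue, including where the http.Response comes from given that this draft of DoRequest returns only an error, is not pinned down by this commit:

package relay // hypothetical driver, not part of this commit

import (
	"net/http"

	"github.com/gin-gonic/gin"

	"one-api/relay/channel"
	"one-api/relay/channel/openai"
)

// relayWithAdaptor drives any channel through the shared Adaptor surface.
// resp is taken as a parameter because DoRequest does not yet return one.
func relayWithAdaptor(c *gin.Context, a channel.Adaptor, req *openai.GeneralOpenAIRequest, resp *http.Response) (*openai.Usage, error) {
	if err := a.Auth(c); err != nil {
		return nil, err
	}
	if _, err := a.ConvertRequest(req); err != nil {
		return nil, err
	}
	if err := a.DoRequest(req); err != nil {
		return nil, err
	}
	// DoResponse translates the upstream reply back to the OpenAI wire
	// format; a real driver would also surface the ErrorWithStatusCode.
	_, usage, err := a.DoResponse(c, resp)
	return usage, err
}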

File: relay/channel/openai (new file)

@@ -0,0 +1,21 @@
package openai
import (
"github.com/gin-gonic/gin"
"net/http"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*ErrorWithStatusCode, *Usage, error) {
return nil, nil, nil
}

File: relay/channel/openai (stream handling)

@@ -8,6 +8,7 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/logger"
"one-api/relay/constant"
"strings"
)
@@ -46,7 +47,7 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*ErrorWi
var streamResponse ChatCompletionsStreamResponse
err := json.Unmarshal([]byte(data), &streamResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
continue // just ignore the error
}
for _, choice := range streamResponse.Choices {
@@ -56,7 +57,7 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*ErrorWi
var streamResponse CompletionsStreamResponse
err := json.Unmarshal([]byte(data), &streamResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
continue
}
for _, choice := range streamResponse.Choices {

File: relay/channel/openai (type definitions)

@@ -207,6 +207,11 @@ type Usage struct {
TotalTokens int `json:"total_tokens"`
}
type UsageOrResponseText struct {
*Usage
ResponseText string
}
type Error struct {
Message string `json:"message"`
Type string `json:"type"`
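
UsageOrResponseText embeds Usage as a pointer so that "upstream reported no usage" is representable as nil, with ResponseText carried alongside as the raw text to estimate tokens from instead. A hypothetical consumer inside this package (resolveUsage is not part of this commit; CountTokenText is assumed from the same package):

// Prefer upstream-reported usage; otherwise estimate completion tokens
// from the captured response text.
func resolveUsage(u UsageOrResponseText, model string, promptTokens int) Usage {
	if u.Usage != nil {
		return *u.Usage
	}
	completionTokens := CountTokenText(u.ResponseText, model)
	return Usage{
		PromptTokens:     promptTokens,
		CompletionTokens: completionTokens,
		TotalTokens:      promptTokens + completionTokens,
	}
}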

File: relay/channel/openai (token counting)

@@ -6,7 +6,9 @@ import (
"github.com/pkoukk/tiktoken-go"
"math"
"one-api/common"
"one-api/common/config"
"one-api/common/image"
"one-api/common/logger"
"strings"
)
@@ -15,15 +17,15 @@ var tokenEncoderMap = map[string]*tiktoken.Tiktoken{}
var defaultTokenEncoder *tiktoken.Tiktoken
func InitTokenEncoders() {
common.SysLog("initializing token encoders")
logger.SysLog("initializing token encoders")
gpt35TokenEncoder, err := tiktoken.EncodingForModel("gpt-3.5-turbo")
if err != nil {
common.FatalLog(fmt.Sprintf("failed to get gpt-3.5-turbo token encoder: %s", err.Error()))
logger.FatalLog(fmt.Sprintf("failed to get gpt-3.5-turbo token encoder: %s", err.Error()))
}
defaultTokenEncoder = gpt35TokenEncoder
gpt4TokenEncoder, err := tiktoken.EncodingForModel("gpt-4")
if err != nil {
common.FatalLog(fmt.Sprintf("failed to get gpt-4 token encoder: %s", err.Error()))
logger.FatalLog(fmt.Sprintf("failed to get gpt-4 token encoder: %s", err.Error()))
}
for model, _ := range common.ModelRatio {
if strings.HasPrefix(model, "gpt-3.5") {
@@ -34,7 +36,7 @@ func InitTokenEncoders() {
tokenEncoderMap[model] = nil
}
}
common.SysLog("token encoders initialized")
logger.SysLog("token encoders initialized")
}
func getTokenEncoder(model string) *tiktoken.Tiktoken {
@@ -45,7 +47,7 @@ func getTokenEncoder(model string) *tiktoken.Tiktoken {
if ok {
tokenEncoder, err := tiktoken.EncodingForModel(model)
if err != nil {
common.SysError(fmt.Sprintf("failed to get token encoder for model %s: %s, using encoder for gpt-3.5-turbo", model, err.Error()))
logger.SysError(fmt.Sprintf("failed to get token encoder for model %s: %s, using encoder for gpt-3.5-turbo", model, err.Error()))
tokenEncoder = defaultTokenEncoder
}
tokenEncoderMap[model] = tokenEncoder
@@ -55,7 +57,7 @@ func getTokenEncoder(model string) *tiktoken.Tiktoken {
}
func getTokenNum(tokenEncoder *tiktoken.Tiktoken, text string) int {
if common.ApproximateTokenEnabled {
if config.ApproximateTokenEnabled {
return int(float64(len(text)) * 0.38)
}
return len(tokenEncoder.Encode(text, nil, nil))
@@ -99,7 +101,7 @@ func CountTokenMessages(messages []Message, model string) int {
}
imageTokens, err := countImageTokens(url, detail)
if err != nil {
common.SysError("error counting image tokens: " + err.Error())
logger.SysError("error counting image tokens: " + err.Error())
} else {
tokenNum += imageTokens
}
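
In getTokenNum above, enabling config.ApproximateTokenEnabled skips tiktoken entirely and estimates 0.38 tokens per character: a 1,000-character prompt is counted as int(float64(1000) * 0.38) = 380 tokens. English prose under tiktoken tends to land near 0.25 tokens per character while CJK text runs much higher, so 0.38 reads as a middle-ground constant that trades per-request accuracy for skipping the encoder.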

File: relay/channel/tencent (new file)

@@ -0,0 +1,22 @@
package tencent
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/tencent

@@ -12,6 +12,8 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/helper"
"one-api/common/logger"
"one-api/relay/channel/openai"
"one-api/relay/constant"
"sort"
@@ -46,9 +48,9 @@ func ConvertRequest(request openai.GeneralOpenAIRequest) *ChatRequest {
stream = 1
}
return &ChatRequest{
Timestamp: common.GetTimestamp(),
Expired: common.GetTimestamp() + 24*60*60,
QueryID: common.GetUUID(),
Timestamp: helper.GetTimestamp(),
Expired: helper.GetTimestamp() + 24*60*60,
QueryID: helper.GetUUID(),
Temperature: request.Temperature,
TopP: request.TopP,
Stream: stream,
@@ -59,7 +61,7 @@ func ConvertRequest(request openai.GeneralOpenAIRequest) *ChatRequest {
func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse {
fullTextResponse := openai.TextResponse{
Object: "chat.completion",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Usage: response.Usage,
}
if len(response.Choices) > 0 {
@@ -79,7 +81,7 @@ func responseTencent2OpenAI(response *ChatResponse) *openai.TextResponse {
func streamResponseTencent2OpenAI(TencentResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
response := openai.ChatCompletionsStreamResponse{
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: "tencent-hunyuan",
}
if len(TencentResponse.Choices) > 0 {
@@ -131,7 +133,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
var TencentResponse ChatResponse
err := json.Unmarshal([]byte(data), &TencentResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
response := streamResponseTencent2OpenAI(&TencentResponse)
@@ -140,7 +142,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
}
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})

File: relay/channel/xunfei (new file)

@@ -0,0 +1,22 @@
package xunfei
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/xunfei

@@ -12,6 +12,8 @@ import (
"net/http"
"net/url"
"one-api/common"
"one-api/common/helper"
"one-api/common/logger"
"one-api/relay/channel/openai"
"one-api/relay/constant"
"strings"
@@ -68,7 +70,7 @@ func responseXunfei2OpenAI(response *ChatResponse) *openai.TextResponse {
}
fullTextResponse := openai.TextResponse{
Object: "chat.completion",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Choices: []openai.TextResponseChoice{choice},
Usage: response.Payload.Usage.Text,
}
@@ -90,7 +92,7 @@ func streamResponseXunfei2OpenAI(xunfeiResponse *ChatResponse) *openai.ChatCompl
}
response := openai.ChatCompletionsStreamResponse{
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: "SparkDesk",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
@@ -140,7 +142,7 @@ func StreamHandler(c *gin.Context, textRequest openai.GeneralOpenAIRequest, appI
response := streamResponseXunfei2OpenAI(&xunfeiResponse)
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
@@ -215,20 +217,20 @@ func xunfeiMakeRequest(textRequest openai.GeneralOpenAIRequest, domain, authUrl,
for {
_, msg, err := conn.ReadMessage()
if err != nil {
common.SysError("error reading stream response: " + err.Error())
logger.SysError("error reading stream response: " + err.Error())
break
}
var response ChatResponse
err = json.Unmarshal(msg, &response)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
break
}
dataChan <- response
if response.Payload.Choices.Status == 2 {
err := conn.Close()
if err != nil {
common.SysError("error closing websocket connection: " + err.Error())
logger.SysError("error closing websocket connection: " + err.Error())
}
break
}
@@ -247,7 +249,7 @@ func getXunfeiAuthUrl(c *gin.Context, apiKey string, apiSecret string) (string,
}
if apiVersion == "" {
apiVersion = "v1.1"
common.SysLog("api_version not found, use default: " + apiVersion)
logger.SysLog("api_version not found, use default: " + apiVersion)
}
domain := "general"
if apiVersion != "v1.1" {

File: relay/channel/zhipu (new file)

@@ -0,0 +1,22 @@
package zhipu
import (
"github.com/gin-gonic/gin"
"net/http"
"one-api/relay/channel/openai"
)
type Adaptor struct {
}
func (a *Adaptor) Auth(c *gin.Context) error {
return nil
}
func (a *Adaptor) ConvertRequest(request *openai.GeneralOpenAIRequest) (any, error) {
return nil, nil
}
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatusCode, *openai.Usage, error) {
return nil, nil, nil
}

File: relay/channel/zhipu

@@ -8,6 +8,8 @@ import (
"io"
"net/http"
"one-api/common"
"one-api/common/helper"
"one-api/common/logger"
"one-api/relay/channel/openai"
"one-api/relay/constant"
"strings"
@@ -34,7 +36,7 @@ func GetToken(apikey string) string {
split := strings.Split(apikey, ".")
if len(split) != 2 {
common.SysError("invalid zhipu key: " + apikey)
logger.SysError("invalid zhipu key: " + apikey)
return ""
}
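
GetToken rejects any key that does not split on "." into exactly two parts: Zhipu keys take the form {id}.{secret}, and the elided remainder of the function presumably signs those halves into the JWT it returns. A sketch of that step per Zhipu's published auth scheme (illustrative, using github.com/golang-jwt/jwt/v5; not the commit's code):

package zhipu // sketch only

import (
	"strings"
	"time"

	"github.com/golang-jwt/jwt/v5"
)

// makeZhipuToken HS256-signs {api_key, exp, timestamp} with the secret
// half of an "{id}.{secret}" key; claim and header names follow Zhipu's
// documented scheme.
func makeZhipuToken(apikey string) (string, error) {
	parts := strings.Split(apikey, ".")
	id, secret := parts[0], parts[1]
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"api_key":   id,
		"exp":       time.Now().Add(5 * time.Minute).UnixMilli(),
		"timestamp": time.Now().UnixMilli(),
	})
	token.Header["sign_type"] = "SIGN"
	return token.SignedString([]byte(secret))
}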
@@ -101,7 +103,7 @@ func responseZhipu2OpenAI(response *Response) *openai.TextResponse {
fullTextResponse := openai.TextResponse{
Id: response.Data.TaskId,
Object: "chat.completion",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Choices: make([]openai.TextResponseChoice, 0, len(response.Data.Choices)),
Usage: response.Data.Usage,
}
@@ -127,7 +129,7 @@ func streamResponseZhipu2OpenAI(zhipuResponse string) *openai.ChatCompletionsStr
choice.Delta.Content = zhipuResponse
response := openai.ChatCompletionsStreamResponse{
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: "chatglm",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
@@ -141,7 +143,7 @@ func streamMetaResponseZhipu2OpenAI(zhipuResponse *StreamMetaResponse) (*openai.
response := openai.ChatCompletionsStreamResponse{
Id: zhipuResponse.RequestId,
Object: "chat.completion.chunk",
Created: common.GetTimestamp(),
Created: helper.GetTimestamp(),
Model: "chatglm",
Choices: []openai.ChatCompletionsStreamResponseChoice{choice},
}
@@ -193,7 +195,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
response := streamResponseZhipu2OpenAI(data)
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
@@ -202,13 +204,13 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*openai.ErrorWithStatus
var zhipuResponse StreamMetaResponse
err := json.Unmarshal([]byte(data), &zhipuResponse)
if err != nil {
common.SysError("error unmarshalling stream response: " + err.Error())
logger.SysError("error unmarshalling stream response: " + err.Error())
return true
}
response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse)
jsonResponse, err := json.Marshal(response)
if err != nil {
common.SysError("error marshalling stream response: " + err.Error())
logger.SysError("error marshalling stream response: " + err.Error())
return true
}
usage = zhipuUsage