Mirror of https://github.com/songquanpeng/one-api.git, synced 2025-10-31 22:03:41 +08:00
Compare commits: v0.6.4-alp...v0.6.5-alp (6 commits)
Commits in this range:

- e3cfb1fa52
- f89ae5ad58
- 06a3fc5421
- a9c464ec5a
- 3f3c13c98c
- 2ba28c72cb

common/conv/any.go (new file, +6):
```diff
@@ -0,0 +1,6 @@
+package conv
+
+func AsString(v any) string {
+	str, _ := v.(string)
+	return str
+}
```
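This helper exists because `Delta.Content` becomes `any` later in this changeset (see the `model.Message` diff below), so stream handlers can no longer concatenate it directly. A minimal usage sketch; the `main` wrapper is illustrative, not part of the repository:

```go
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/common/conv"
)

func main() {
	var delta any = "partial text"
	fmt.Println(conv.AsString(delta)) // "partial text"

	// Non-string content (e.g. structured multimodal parts) yields ""
	// instead of panicking, because the comma-ok assertion discards
	// the failure.
	fmt.Println(conv.AsString(map[string]any{"type": "image_url"})) // ""
}
```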
```diff
@@ -75,7 +75,7 @@ var ModelRatio = map[string]float64{
 	"ERNIE-Bot":       0.8572,     // ¥0.012 / 1k tokens
 	"ERNIE-Bot-turbo": 0.5715,     // ¥0.008 / 1k tokens
 	"ERNIE-Bot-4":     0.12 * RMB, // ¥0.12 / 1k tokens
-	"ERNIE-Bot-8k":    0.024 * RMB,
+	"ERNIE-Bot-8K":    0.024 * RMB,
 	"Embedding-V1":    0.1429, // ¥0.002 / 1k tokens
 	"bge-large-zh":    0.002 * RMB,
 	"bge-large-en":    0.002 * RMB,
```
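For the arithmetic behind these entries: a ratio of 1.0 corresponds to $0.002 per 1K tokens in one-api's quota system, and `RMB` rescales a per-1K-token CNY price into that unit. A sketch, assuming the `USD`/`USD2RMB` constants match those defined alongside `ModelRatio`:

```go
package main

import "fmt"

// Assumed to mirror the constants next to ModelRatio: ratio 1.0 means
// $0.002 per 1K tokens, so $1 corresponds to 500 and ¥1 to 500/7.
const (
	USD2RMB = 7.0
	USD     = 500.0
	RMB     = USD / USD2RMB
)

func main() {
	// ¥0.012 per 1K tokens -> about 0.857, in line with the hard-coded
	// 0.8572 entry for ERNIE-Bot above (rounded slightly differently).
	fmt.Printf("%.4f\n", 0.012*RMB) // 0.8571
}
```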
```diff
@@ -48,7 +48,10 @@ func ConvertRequest(request model.GeneralOpenAIRequest) *ChatRequest {
 			MaxTokens:         request.MaxTokens,
 			Temperature:       request.Temperature,
 			TopP:              request.TopP,
+			TopK:              request.TopK,
+			ResultFormat:      "message",
 		},
+		Tools: request.Tools,
 	}
 }
```
```diff
@@ -117,19 +120,11 @@ func embeddingResponseAli2OpenAI(response *EmbeddingResponse) *openai.EmbeddingR
 }
 
 func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse {
-	choice := openai.TextResponseChoice{
-		Index: 0,
-		Message: model.Message{
-			Role:    "assistant",
-			Content: response.Output.Text,
-		},
-		FinishReason: response.Output.FinishReason,
-	}
 	fullTextResponse := openai.TextResponse{
 		Id:      response.RequestId,
 		Object:  "chat.completion",
 		Created: helper.GetTimestamp(),
-		Choices: []openai.TextResponseChoice{choice},
+		Choices: response.Output.Choices,
 		Usage: model.Usage{
 			PromptTokens:     response.Usage.InputTokens,
 			CompletionTokens: response.Usage.OutputTokens,
@@ -140,10 +135,14 @@ func responseAli2OpenAI(response *ChatResponse) *openai.TextResponse {
 }
 
 func streamResponseAli2OpenAI(aliResponse *ChatResponse) *openai.ChatCompletionsStreamResponse {
+	if len(aliResponse.Output.Choices) == 0 {
+		return nil
+	}
+	aliChoice := aliResponse.Output.Choices[0]
 	var choice openai.ChatCompletionsStreamResponseChoice
-	choice.Delta.Content = aliResponse.Output.Text
-	if aliResponse.Output.FinishReason != "null" {
-		finishReason := aliResponse.Output.FinishReason
+	choice.Delta = aliChoice.Message
+	if aliChoice.FinishReason != "null" {
+		finishReason := aliChoice.FinishReason
 		choice.FinishReason = &finishReason
 	}
 	response := openai.ChatCompletionsStreamResponse{
@@ -204,6 +203,9 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
 				usage.TotalTokens = aliResponse.Usage.InputTokens + aliResponse.Usage.OutputTokens
 			}
 			response := streamResponseAli2OpenAI(&aliResponse)
+			if response == nil {
+				return true
+			}
 			//response.Choices[0].Delta.Content = strings.TrimPrefix(response.Choices[0].Delta.Content, lastResponseText)
 			//lastResponseText = aliResponse.Output.Text
 			jsonResponse, err := json.Marshal(response)
@@ -226,6 +228,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
 }
 
 func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *model.Usage) {
+	ctx := c.Request.Context()
 	var aliResponse ChatResponse
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
@@ -235,6 +238,7 @@ func Handler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *
 	if err != nil {
 		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
 	}
+	logger.Debugf(ctx, "response body: %s\n", responseBody)
 	err = json.Unmarshal(responseBody, &aliResponse)
 	if err != nil {
 		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
```
```diff
@@ -1,5 +1,10 @@
 package ali
 
+import (
+	"github.com/songquanpeng/one-api/relay/channel/openai"
+	"github.com/songquanpeng/one-api/relay/model"
+)
+
 type Message struct {
 	Content string `json:"content"`
 	Role    string `json:"role"`
@@ -18,12 +23,14 @@ type Parameters struct {
 	IncrementalOutput bool    `json:"incremental_output,omitempty"`
 	MaxTokens         int     `json:"max_tokens,omitempty"`
 	Temperature       float64 `json:"temperature,omitempty"`
+	ResultFormat      string  `json:"result_format,omitempty"`
 }
 
 type ChatRequest struct {
-	Model      string     `json:"model"`
-	Input      Input      `json:"input"`
-	Parameters Parameters `json:"parameters,omitempty"`
+	Model      string       `json:"model"`
+	Input      Input        `json:"input"`
+	Parameters Parameters   `json:"parameters,omitempty"`
+	Tools      []model.Tool `json:"tools,omitempty"`
 }
 
 type EmbeddingRequest struct {
@@ -62,8 +69,9 @@ type Usage struct {
 }
 
 type Output struct {
-	Text         string `json:"text"`
-	FinishReason string `json:"finish_reason"`
+	//Text         string                      `json:"text"`
+	//FinishReason string                      `json:"finish_reason"`
+	Choices []openai.TextResponseChoice `json:"choices"`
 }
 
 type ChatResponse struct {
```
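The practical effect of the `Output` change: with `result_format: "message"` requested above, Qwen replies with an OpenAI-style `choices` array rather than a flat `text` field, so `responseAli2OpenAI` can pass `Output.Choices` straight through. A decode sketch using local stand-in types; the JSON body is a plausible example of the shape these structs imply, not a captured API response:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins mirroring the structs in this diff.
type message struct {
	Role    string `json:"role"`
	Content any    `json:"content"`
}

type textChoice struct {
	Message      message `json:"message"`
	FinishReason string  `json:"finish_reason"`
}

type output struct {
	Choices []textChoice `json:"choices"`
}

type chatResponse struct {
	Output output `json:"output"`
}

func main() {
	body := `{"output":{"choices":[{"finish_reason":"stop","message":{"role":"assistant","content":"你好"}}]}}`
	var resp chatResponse
	if err := json.Unmarshal([]byte(body), &resp); err != nil {
		panic(err)
	}
	// responseAli2OpenAI can now forward Output.Choices unchanged.
	fmt.Println(resp.Output.Choices[0].Message.Content) // 你好
}
```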
```diff
@@ -38,6 +38,7 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
 		MaxTokens:   textRequest.MaxTokens,
 		Temperature: textRequest.Temperature,
 		TopP:        textRequest.TopP,
+		TopK:        textRequest.TopK,
 		Stream:      textRequest.Stream,
 	}
 	if claudeRequest.MaxTokens == 0 {
```
```diff
@@ -70,8 +70,10 @@ func (a *Adaptor) DoRequest(c *gin.Context, meta *util.RelayMeta, requestBody io
 func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *util.RelayMeta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
 	if meta.IsStream {
 		var responseText string
-		err, responseText, _ = StreamHandler(c, resp, meta.Mode)
-		usage = ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
+		err, responseText, usage = StreamHandler(c, resp, meta.Mode)
+		if usage == nil {
+			usage = ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
+		}
 	} else {
 		err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
 	}
```
```diff
@@ -6,6 +6,7 @@ import (
 	"encoding/json"
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common"
+	"github.com/songquanpeng/one-api/common/conv"
 	"github.com/songquanpeng/one-api/common/logger"
 	"github.com/songquanpeng/one-api/relay/constant"
 	"github.com/songquanpeng/one-api/relay/model"
@@ -53,7 +54,7 @@ func StreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*model.E
 						continue // just ignore the error
 					}
 					for _, choice := range streamResponse.Choices {
-						responseText += choice.Delta.Content
+						responseText += conv.AsString(choice.Delta.Content)
 					}
 					if streamResponse.Usage != nil {
 						usage = streamResponse.Usage
```
```diff
@@ -118,12 +118,9 @@ type ImageResponse struct {
 }
 
 type ChatCompletionsStreamResponseChoice struct {
-	Index int `json:"index"`
-	Delta struct {
-		Content string `json:"content"`
-		Role    string `json:"role,omitempty"`
-	} `json:"delta"`
-	FinishReason *string `json:"finish_reason,omitempty"`
+	Index        int           `json:"index"`
+	Delta        model.Message `json:"delta"`
+	FinishReason *string       `json:"finish_reason,omitempty"`
 }
 
 type ChatCompletionsStreamResponse struct {
```
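Widening `Delta` from an inline `{content, role}` struct to `model.Message` is what lets a stream chunk carry `tool_calls` (the Xunfei changes below rely on this), at the cost of `Content` becoming `any`, which is why `conv.AsString` appears above. A marshaling sketch with local stand-ins:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Stand-ins for model.Function, model.Tool, and model.Message.
type function struct {
	Name      string `json:"name"`
	Arguments any    `json:"arguments,omitempty"`
}

type tool struct {
	Id       string   `json:"id,omitempty"`
	Type     string   `json:"type"`
	Function function `json:"function"`
}

type message struct {
	Role      string `json:"role,omitempty"`
	Content   any    `json:"content,omitempty"`
	ToolCalls []tool `json:"tool_calls,omitempty"`
}

type streamChoice struct {
	Index int     `json:"index"`
	Delta message `json:"delta"`
}

func main() {
	// A delta that carries a tool call instead of text content.
	c := streamChoice{Delta: message{ToolCalls: []tool{{
		Id:       "call_abc123",
		Type:     "function",
		Function: function{Name: "get_weather", Arguments: `{"city":"Beijing"}`},
	}}}}
	b, _ := json.Marshal(c)
	fmt.Println(string(b)) // {"index":0,"delta":{"tool_calls":[...]}}
}
```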
```diff
@@ -10,6 +10,7 @@ import (
 	"fmt"
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common"
+	"github.com/songquanpeng/one-api/common/conv"
 	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/common/logger"
 	"github.com/songquanpeng/one-api/relay/channel/openai"
@@ -129,7 +130,7 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
 			}
 			response := streamResponseTencent2OpenAI(&TencentResponse)
 			if len(response.Choices) != 0 {
-				responseText += response.Choices[0].Delta.Content
+				responseText += conv.AsString(response.Choices[0].Delta.Content)
 			}
 			jsonResponse, err := json.Marshal(response)
 			if err != nil {
```
```diff
@@ -26,7 +26,11 @@ import (
 
 func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string, domain string) *ChatRequest {
 	messages := make([]Message, 0, len(request.Messages))
+	var lastToolCalls []model.Tool
 	for _, message := range request.Messages {
+		if message.ToolCalls != nil {
+			lastToolCalls = message.ToolCalls
+		}
 		messages = append(messages, Message{
 			Role:    message.Role,
 			Content: message.StringContent(),
@@ -39,9 +43,33 @@ func requestOpenAI2Xunfei(request model.GeneralOpenAIRequest, xunfeiAppId string
 	xunfeiRequest.Parameter.Chat.TopK = request.N
 	xunfeiRequest.Parameter.Chat.MaxTokens = request.MaxTokens
 	xunfeiRequest.Payload.Message.Text = messages
+	if len(lastToolCalls) != 0 {
+		for _, toolCall := range lastToolCalls {
+			xunfeiRequest.Payload.Functions.Text = append(xunfeiRequest.Payload.Functions.Text, toolCall.Function)
+		}
+	}
+
 	return &xunfeiRequest
 }
 
+func getToolCalls(response *ChatResponse) []model.Tool {
+	var toolCalls []model.Tool
+	if len(response.Payload.Choices.Text) == 0 {
+		return toolCalls
+	}
+	item := response.Payload.Choices.Text[0]
+	if item.FunctionCall == nil {
+		return toolCalls
+	}
+	toolCall := model.Tool{
+		Id:       fmt.Sprintf("call_%s", helper.GetUUID()),
+		Type:     "function",
+		Function: *item.FunctionCall,
+	}
+	toolCalls = append(toolCalls, toolCall)
+	return toolCalls
+}
+
 func responseXunfei2OpenAI(response *ChatResponse) *openai.TextResponse {
 	if len(response.Payload.Choices.Text) == 0 {
 		response.Payload.Choices.Text = []ChatResponseTextItem{
@@ -53,8 +81,9 @@ func responseXunfei2OpenAI(response *ChatResponse) *openai.TextResponse {
 	choice := openai.TextResponseChoice{
 		Index: 0,
 		Message: model.Message{
-			Role:    "assistant",
-			Content: response.Payload.Choices.Text[0].Content,
+			Role:      "assistant",
+			Content:   response.Payload.Choices.Text[0].Content,
+			ToolCalls: getToolCalls(response),
 		},
 		FinishReason: constant.StopFinishReason,
 	}
@@ -78,6 +107,7 @@ func streamResponseXunfei2OpenAI(xunfeiResponse *ChatResponse) *openai.ChatCompl
 	}
 	var choice openai.ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = xunfeiResponse.Payload.Choices.Text[0].Content
+	choice.Delta.ToolCalls = getToolCalls(xunfeiResponse)
 	if xunfeiResponse.Payload.Choices.Status == 2 {
 		choice.FinishReason = &constant.StopFinishReason
 	}
```
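Note the last-wins semantics in `requestOpenAI2Xunfei`: the loop keeps overwriting `lastToolCalls`, so only the tool calls attached to the most recent message that has any are forwarded to Spark as `payload.functions.text`. A sketch isolating that behavior with stand-in types:

```go
package main

import "fmt"

// Stand-ins for model.Tool / model.Function and the request messages.
type function struct{ Name string }
type tool struct{ Function function }
type chatMessage struct{ ToolCalls []tool }

func main() {
	msgs := []chatMessage{
		{ToolCalls: []tool{{function{"search"}}}},
		{}, // no tool calls: lastToolCalls keeps its previous value
		{ToolCalls: []tool{{function{"get_weather"}}, {function{"get_time"}}}},
	}
	var lastToolCalls []tool
	for _, m := range msgs {
		if m.ToolCalls != nil {
			lastToolCalls = m.ToolCalls
		}
	}
	var functionsText []function
	for _, tc := range lastToolCalls {
		functionsText = append(functionsText, tc.Function)
	}
	// The earlier "search" definition is dropped; only the last set survives.
	fmt.Println(functionsText) // [{get_weather} {get_time}]
}
```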
```diff
@@ -26,13 +26,18 @@ type ChatRequest struct {
 		Message struct {
 			Text []Message `json:"text"`
 		} `json:"message"`
+		Functions struct {
+			Text []model.Function `json:"text,omitempty"`
+		} `json:"functions"`
 	} `json:"payload"`
 }
 
 type ChatResponseTextItem struct {
-	Content string `json:"content"`
-	Role    string `json:"role"`
-	Index   int    `json:"index"`
+	Content      string          `json:"content"`
+	Role         string          `json:"role"`
+	Index        int             `json:"index"`
+	ContentType  string          `json:"content_type"`
+	FunctionCall *model.Function `json:"function_call"`
 }
 
 type ChatResponse struct {
```
```diff
@@ -5,25 +5,29 @@ type ResponseFormat struct {
 }
 
 type GeneralOpenAIRequest struct {
-	Model            string          `json:"model,omitempty"`
 	Messages         []Message       `json:"messages,omitempty"`
-	Prompt           any             `json:"prompt,omitempty"`
-	Stream           bool            `json:"stream,omitempty"`
-	MaxTokens        int             `json:"max_tokens,omitempty"`
-	Temperature      float64         `json:"temperature,omitempty"`
-	TopP             float64         `json:"top_p,omitempty"`
-	N                int             `json:"n,omitempty"`
-	Input            any             `json:"input,omitempty"`
-	Instruction      string          `json:"instruction,omitempty"`
-	Size             string          `json:"size,omitempty"`
-	Functions        any             `json:"functions,omitempty"`
+	Model            string          `json:"model,omitempty"`
 	FrequencyPenalty float64         `json:"frequency_penalty,omitempty"`
+	MaxTokens        int             `json:"max_tokens,omitempty"`
+	N                int             `json:"n,omitempty"`
 	PresencePenalty  float64         `json:"presence_penalty,omitempty"`
 	ResponseFormat   *ResponseFormat `json:"response_format,omitempty"`
 	Seed             float64         `json:"seed,omitempty"`
-	Tools            any             `json:"tools,omitempty"`
+	Stream           bool            `json:"stream,omitempty"`
+	Temperature      float64         `json:"temperature,omitempty"`
+	TopP             float64         `json:"top_p,omitempty"`
+	TopK             int             `json:"top_k,omitempty"`
+	Tools            []Tool          `json:"tools,omitempty"`
 	ToolChoice       any             `json:"tool_choice,omitempty"`
+	FunctionCall     any             `json:"function_call,omitempty"`
+	Functions        any             `json:"functions,omitempty"`
 	User             string          `json:"user,omitempty"`
+	Prompt           any             `json:"prompt,omitempty"`
+	Input            any             `json:"input,omitempty"`
+	EncodingFormat   string          `json:"encoding_format,omitempty"`
+	Dimensions       int             `json:"dimensions,omitempty"`
+	Instruction      string          `json:"instruction,omitempty"`
+	Size             string          `json:"size,omitempty"`
 }
 
 func (r GeneralOpenAIRequest) ParseInput() []string {
```
```diff
@@ -1,9 +1,10 @@
 package model
 
 type Message struct {
-	Role    string  `json:"role"`
-	Content any     `json:"content"`
-	Name    *string `json:"name,omitempty"`
+	Role      string  `json:"role,omitempty"`
+	Content   any     `json:"content,omitempty"`
+	Name      *string `json:"name,omitempty"`
+	ToolCalls []Tool  `json:"tool_calls,omitempty"`
 }
 
 func (m Message) IsStringContent() bool {
```
relay/model/tool.go (new file, +14):
```diff
@@ -0,0 +1,14 @@
+package model
+
+type Tool struct {
+	Id       string   `json:"id,omitempty"`
+	Type     string   `json:"type"`
+	Function Function `json:"function"`
+}
+
+type Function struct {
+	Description string `json:"description,omitempty"`
+	Name        string `json:"name"`
+	Parameters  any    `json:"parameters,omitempty"` // request
+	Arguments   any    `json:"arguments,omitempty"`  // response
+}
```
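`Function` deliberately serves both directions, as its inline comments note: `Parameters` carries the JSON-schema-like definition on requests, `Arguments` the serialized call arguments on responses, and `omitempty` keeps the unused field out of the payload. A round-trip sketch (the weather schema is made up for illustration):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/songquanpeng/one-api/relay/model"
)

func main() {
	// Request direction: a tool definition with a made-up parameter schema.
	def := model.Tool{
		Type: "function",
		Function: model.Function{
			Name:        "get_weather",
			Description: "Look up current weather",
			Parameters: map[string]any{
				"type":       "object",
				"properties": map[string]any{"city": map[string]any{"type": "string"}},
			},
		},
	}
	b, _ := json.Marshal(def)
	fmt.Println(string(b)) // no "arguments" key, thanks to omitempty

	// Response direction: a tool call carries Arguments instead.
	call := model.Tool{
		Id:       "call_abc123",
		Type:     "function",
		Function: model.Function{Name: "get_weather", Arguments: `{"city":"Beijing"}`},
	}
	b, _ = json.Marshal(call)
	fmt.Println(string(b)) // "arguments" present, "parameters" absent
}
```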