Mirror of https://github.com/songquanpeng/one-api.git (synced 2025-11-04 15:53:42 +08:00)
* optimize: unify finish_reason field
* refactor: use a global stop finish reason

Co-authored-by: JustSong <songquanpeng@foxmail.com>
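Every per-adaptor hunk below applies the same pattern: the streaming choice's FinishReason becomes a *string that stays nil while tokens are still flowing and is pointed at one shared value once the upstream signals completion. A minimal sketch of that pattern, assuming a stand-in choice type in place of the repository's ChatCompletionsStreamResponseChoice:

package main

import "fmt"

// Shared sentinel, as introduced by the diff.
var stopFinishReason = "stop"

// Stand-in for ChatCompletionsStreamResponseChoice; only the relevant fields.
type choice struct {
	Content      string
	FinishReason *string // nil until the stream has finished
}

// buildChoice mirrors what each adaptor's streamResponse*2OpenAI helper now does.
func buildChoice(content string, done bool) choice {
	c := choice{Content: content}
	if done {
		c.FinishReason = &stopFinishReason
	}
	return c
}

func main() {
	mid := buildChoice("partial", false)
	last := buildChoice("", true)
	fmt.Println(mid.FinishReason == nil) // true: intermediate chunk carries no finish reason
	fmt.Println(*last.FinishReason)      // "stop"
}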
@@ -121,7 +121,10 @@ func responseAli2OpenAI(response *AliChatResponse) *OpenAITextResponse {
 func streamResponseAli2OpenAI(aliResponse *AliChatResponse) *ChatCompletionsStreamResponse {
 	var choice ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = aliResponse.Output.Text
-	choice.FinishReason = aliResponse.Output.FinishReason
+	if aliResponse.Output.FinishReason != "null" {
+		finishReason := aliResponse.Output.FinishReason
+		choice.FinishReason = &finishReason
+	}
 	response := ChatCompletionsStreamResponse{
 		Id:      aliResponse.RequestId,
 		Object:  "chat.completion.chunk",

@@ -120,7 +120,9 @@ func responseBaidu2OpenAI(response *BaiduChatResponse) *OpenAITextResponse {
 func streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStreamResponse) *ChatCompletionsStreamResponse {
 	var choice ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = baiduResponse.Result
-	choice.FinishReason = "stop"
+	if baiduResponse.IsEnd {
+		choice.FinishReason = &stopFinishReason
+	}
 	response := ChatCompletionsStreamResponse{
 		Id:      baiduResponse.Id,
 		Object:  "chat.completion.chunk",

@@ -81,7 +81,10 @@ func requestOpenAI2Claude(textRequest GeneralOpenAIRequest) *ClaudeRequest {
 func streamResponseClaude2OpenAI(claudeResponse *ClaudeResponse) *ChatCompletionsStreamResponse {
 	var choice ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = claudeResponse.Completion
-	choice.FinishReason = stopReasonClaude2OpenAI(claudeResponse.StopReason)
+	finishReason := stopReasonClaude2OpenAI(claudeResponse.StopReason)
+	if finishReason != "null" {
+		choice.FinishReason = &finishReason
+	}
 	var response ChatCompletionsStreamResponse
 	response.Object = "chat.completion.chunk"
 	response.Model = claudeResponse.Model

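The Ali and Claude hunks above share one more detail: the upstream reason is copied into a local variable before its address is taken, and the literal string "null" is treated as "not finished yet". A hypothetical helper (not part of the diff) capturing that shape:

package main

import "fmt"

// toFinishReason is a hypothetical helper, not in the repository: it returns
// nil while the upstream still reports the sentinel "null", and otherwise a
// pointer to a copy of the reason, so the pointer does not alias the upstream
// response struct.
func toFinishReason(upstream string) *string {
	if upstream == "null" {
		return nil
	}
	reason := upstream
	return &reason
}

func main() {
	fmt.Println(toFinishReason("null") == nil) // true: still streaming
	fmt.Println(*toFinishReason("stop"))       // "stop"
}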
@@ -94,7 +94,7 @@ func streamResponsePaLM2OpenAI(palmResponse *PaLMChatResponse) *ChatCompletionsStreamResponse {
 	if len(palmResponse.Candidates) > 0 {
 		choice.Delta.Content = palmResponse.Candidates[0].Content
 	}
-	choice.FinishReason = "stop"
+	choice.FinishReason = &stopFinishReason
 	var response ChatCompletionsStreamResponse
 	response.Object = "chat.completion.chunk"
 	response.Model = "palm2"

@@ -6,6 +6,8 @@ import (
 	"one-api/common"
 )
 
+var stopFinishReason = "stop"
+
 var tokenEncoderMap = map[string]*tiktoken.Tiktoken{}
 
 func getTokenEncoder(model string) *tiktoken.Tiktoken {

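stopFinishReason has to be a package-level variable rather than a constant because the adaptors take its address: Go forbids taking the address of a constant or of a string literal, so &stopFinishReason needs an addressable variable. A minimal illustration:

package main

import "fmt"

var stopFinishReason = "stop" // addressable: &stopFinishReason is legal

// const stopFinishReason = "stop" // a constant would not work: cannot take
//                                 // the address of a constant
// p := &"stop"                    // would not compile: cannot take the
//                                 // address of a string literal

func main() {
	p := &stopFinishReason
	fmt.Println(*p) // "stop"
}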
@@ -138,6 +138,9 @@ func streamResponseXunfei2OpenAI(xunfeiResponse *XunfeiChatResponse) *ChatCompletionsStreamResponse {
 	}
 	var choice ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = xunfeiResponse.Payload.Choices.Text[0].Content
+	if xunfeiResponse.Payload.Choices.Status == 2 {
+		choice.FinishReason = &stopFinishReason
+	}
 	response := ChatCompletionsStreamResponse{
 		Object:  "chat.completion.chunk",
 		Created: common.GetTimestamp(),

@@ -163,7 +163,6 @@ func responseZhipu2OpenAI(response *ZhipuResponse) *OpenAITextResponse {
 func streamResponseZhipu2OpenAI(zhipuResponse string) *ChatCompletionsStreamResponse {
 	var choice ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = zhipuResponse
-	choice.FinishReason = ""
 	response := ChatCompletionsStreamResponse{
 		Object:  "chat.completion.chunk",
 		Created: common.GetTimestamp(),

@@ -176,7 +175,7 @@ func streamResponseZhipu2OpenAI(zhipuResponse string) *ChatCompletionsStreamResponse {
 func streamMetaResponseZhipu2OpenAI(zhipuResponse *ZhipuStreamMetaResponse) (*ChatCompletionsStreamResponse, *Usage) {
 	var choice ChatCompletionsStreamResponseChoice
 	choice.Delta.Content = ""
-	choice.FinishReason = "stop"
+	choice.FinishReason = &stopFinishReason
 	response := ChatCompletionsStreamResponse{
 		Id:      zhipuResponse.RequestId,
 		Object:  "chat.completion.chunk",

@@ -124,7 +124,7 @@ type ChatCompletionsStreamResponseChoice struct {
 	Delta struct {
 		Content string `json:"content"`
 	} `json:"delta"`
-	FinishReason string `json:"finish_reason,omitempty"`
+	FinishReason *string `json:"finish_reason"`
 }
 
 type ChatCompletionsStreamResponse struct {

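The struct change is what makes the rest visible on the wire: with the old string field and omitempty, intermediate chunks dropped the finish_reason key entirely; with *string and no omitempty, every chunk carries finish_reason, null until the final chunk, which matches OpenAI's streaming responses. A small sketch of the difference (field tags follow the diff; the program itself is illustrative):

package main

import (
	"encoding/json"
	"fmt"
)

var stopFinishReason = "stop"

type oldChoice struct {
	FinishReason string `json:"finish_reason,omitempty"` // key vanishes when empty
}

type newChoice struct {
	FinishReason *string `json:"finish_reason"` // always present: null or a value
}

func main() {
	a, _ := json.Marshal(oldChoice{})                                // {}
	b, _ := json.Marshal(newChoice{})                                // {"finish_reason":null}
	c, _ := json.Marshal(newChoice{FinishReason: &stopFinishReason}) // {"finish_reason":"stop"}
	fmt.Println(string(a), string(b), string(c))
}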