Mirror of https://github.com/songquanpeng/one-api.git, synced 2025-11-11 02:43:44 +08:00
feat: support CozeV3
@@ -4,19 +4,18 @@ import (
 	"bufio"
 	"encoding/json"
 	"fmt"
-	"github.com/songquanpeng/one-api/common/render"
-	"io"
-	"net/http"
-	"strings"
-
 	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common"
 	"github.com/songquanpeng/one-api/common/conv"
 	"github.com/songquanpeng/one-api/common/helper"
 	"github.com/songquanpeng/one-api/common/logger"
+	"github.com/songquanpeng/one-api/common/render"
 	"github.com/songquanpeng/one-api/relay/adaptor/coze/constant/messagetype"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 	"github.com/songquanpeng/one-api/relay/model"
+	"io"
+	"net/http"
+	"strings"
 )
 
 // https://www.coze.com/open
@@ -45,12 +44,12 @@ func ConvertRequest(textRequest model.GeneralOpenAIRequest) *Request {
 	}
 	for i, message := range textRequest.Messages {
 		if i == len(textRequest.Messages)-1 {
-			cozeRequest.Query = message.StringContent()
+			cozeRequest.Query = message.CozeV3StringContent()
 			continue
 		}
 		cozeMessage := Message{
 			Role:    message.Role,
-			Content: message.StringContent(),
+			Content: message.CozeV3StringContent(),
 		}
 		cozeRequest.ChatHistory = append(cozeRequest.ChatHistory, cozeMessage)
 	}
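
The hunk above swaps StringContent for CozeV3StringContent, a new helper on model.Message that this file does not define. As a rough illustration only: a self-contained sketch of what such a helper might do, assuming Coze V3 accepts only plain text and that multimodal content arrives as OpenAI-style typed parts (the Message shape below is simplified for the sketch and is not the repo's actual type):

package main

import (
	"fmt"
	"strings"
)

// Simplified stand-in for model.Message: Content is either a plain
// string or a list of OpenAI-style typed parts.
type Message struct {
	Role    string
	Content any
}

// CozeV3StringContent (hypothetical reconstruction): collapse the
// message content to plain text, keeping only "text" parts.
func (m Message) CozeV3StringContent() string {
	switch content := m.Content.(type) {
	case string:
		return content
	case []any:
		var sb strings.Builder
		for _, p := range content {
			part, ok := p.(map[string]any)
			if !ok || part["type"] != "text" {
				continue
			}
			if text, ok := part["text"].(string); ok {
				sb.WriteString(text)
			}
		}
		return sb.String()
	default:
		return ""
	}
}

func main() {
	m := Message{Role: "user", Content: []any{
		map[string]any{"type": "text", "text": "hello coze"},
	}}
	fmt.Println(m.CozeV3StringContent()) // prints: hello coze
}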
@@ -80,6 +79,28 @@ func StreamResponseCoze2OpenAI(cozeResponse *StreamResponse) (*openai.ChatComple
 	return &openaiResponse, response
 }
 
+func V3StreamResponseCoze2OpenAI(cozeResponse *V3StreamResponse) (*openai.ChatCompletionsStreamResponse, *Response) {
+	var response *Response
+	var choice openai.ChatCompletionsStreamResponseChoice
+
+	choice.Delta.Role = cozeResponse.Role
+	choice.Delta.Content = cozeResponse.Content
+
+	var openaiResponse openai.ChatCompletionsStreamResponse
+	openaiResponse.Object = "chat.completion.chunk"
+	openaiResponse.Choices = []openai.ChatCompletionsStreamResponseChoice{choice}
+	openaiResponse.Id = cozeResponse.ConversationId
+
+	if cozeResponse.Usage.TokenCount > 0 {
+		openaiResponse.Usage = &model.Usage{
+			PromptTokens:     cozeResponse.Usage.InputCount,
+			CompletionTokens: cozeResponse.Usage.OutputCount,
+			TotalTokens:      cozeResponse.Usage.TokenCount,
+		}
+	}
+	return &openaiResponse, response
+}
+
 func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
 	var responseText string
 	for _, message := range cozeResponse.Messages {
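
V3StreamResponse is defined elsewhere in the commit; the conversion above only reveals its shape through field accesses. A sketch of the struct it implies, with JSON tags guessed from Coze's snake_case V3 event payloads:

// Inferred from V3StreamResponseCoze2OpenAI; the JSON tags are assumptions.
type V3StreamResponse struct {
	Role           string `json:"role"`
	Content        string `json:"content"`
	ConversationId string `json:"conversation_id"`
	Usage          struct {
		TokenCount  int `json:"token_count"`
		InputCount  int `json:"input_count"`
		OutputCount int `json:"output_count"`
	} `json:"usage"`
}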
@@ -107,6 +128,26 @@ func ResponseCoze2OpenAI(cozeResponse *Response) *openai.TextResponse {
 	return &fullTextResponse
 }
 
+func V3ResponseCoze2OpenAI(cozeResponse *V3Response) *openai.TextResponse {
+	choice := openai.TextResponseChoice{
+		Index: 0,
+		Message: model.Message{
+			Role:    "assistant",
+			Content: cozeResponse.Data.Content,
+			Name:    nil,
+		},
+		FinishReason: "stop",
+	}
+	fullTextResponse := openai.TextResponse{
+		Id:      fmt.Sprintf("chatcmpl-%s", cozeResponse.Data.ConversationId),
+		Model:   "coze-bot",
+		Object:  "chat.completion",
+		Created: helper.GetTimestamp(),
+		Choices: []openai.TextResponseChoice{choice},
+	}
+	return &fullTextResponse
+}
+
 func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
 	var responseText string
 	createdTime := helper.GetTimestamp()
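
Likewise V3Response is not shown in this diff. Combining the accesses here (Data.Content, Data.ConversationId) with those in V3Handler further down (Code, Msg) suggests a shape like the following sketch (JSON tags again assumed):

// Inferred from V3ResponseCoze2OpenAI and V3Handler; tags are assumptions.
type V3Response struct {
	Code int    `json:"code"`
	Msg  string `json:"msg"`
	Data struct {
		Content        string `json:"content"`
		ConversationId string `json:"conversation_id"`
	} `json:"data"`
}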
@@ -162,6 +203,63 @@ func StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusC
 	return nil, &responseText
 }
 
+func V3StreamHandler(c *gin.Context, resp *http.Response) (*model.ErrorWithStatusCode, *string) {
+	var responseText string
+	createdTime := helper.GetTimestamp()
+	scanner := bufio.NewScanner(resp.Body)
+	scanner.Split(splitOnDoubleNewline)
+	common.SetEventStreamHeaders(c)
+	var modelName string
+	for scanner.Scan() {
+		part := scanner.Text()
+		part = strings.TrimPrefix(part, "\n")
+		parts := strings.Split(part, "\n")
+		if len(parts) != 2 {
+			continue
+		}
+		// Skip frames that are not a well-formed event:/data: pair,
+		// since the slicing below assumes both prefixes are present.
+		if !strings.HasPrefix(parts[0], "event:") || !strings.HasPrefix(parts[1], "data:") {
+			continue
+		}
+		event, data := strings.TrimSpace(parts[0][6:]), strings.TrimSpace(parts[1][5:])
+		if event == "conversation.message.delta" || event == "conversation.chat.completed" {
+			data = strings.TrimSuffix(data, "\r")
+			var cozeResponse V3StreamResponse
+			err := json.Unmarshal([]byte(data), &cozeResponse)
+			if err != nil {
+				logger.SysError("error unmarshalling stream response: " + err.Error())
+				continue
+			}
+
+			response, _ := V3StreamResponseCoze2OpenAI(&cozeResponse)
+			if response == nil {
+				continue
+			}
+
+			for _, choice := range response.Choices {
+				responseText += conv.AsString(choice.Delta.Content)
+			}
+			response.Model = modelName
+			response.Created = createdTime
+
+			err = render.ObjectData(c, response)
+			if err != nil {
+				logger.SysError(err.Error())
+			}
+		}
+	}
+	if err := scanner.Err(); err != nil {
+		logger.SysError("error reading stream: " + err.Error())
+	}
+
+	render.Done(c)
+	err := resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
+
+	return nil, &responseText
+}
+
 func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
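
V3StreamHandler relies on splitOnDoubleNewline, a bufio.SplitFunc that exists elsewhere in this file, to cut the SSE body into blank-line-separated frames of one event: line and one data: line. A plausible sketch of such a split function (the repo's actual implementation may differ):

package main

import "bytes"

// splitOnDoubleNewline (sketch): yield one token per SSE frame, where
// frames are separated by a blank line, e.g.
//
//	event: conversation.message.delta
//	data: {"role":"assistant","content":"Hi","conversation_id":"123"}
//
func splitOnDoubleNewline(data []byte, atEOF bool) (advance int, token []byte, err error) {
	if atEOF && len(data) == 0 {
		return 0, nil, nil // nothing left to scan
	}
	if i := bytes.Index(data, []byte("\n\n")); i >= 0 {
		// Found a complete frame; consume it plus the separator.
		return i + 2, data[:i], nil
	}
	if atEOF {
		// No trailing separator: emit the remainder as the final frame.
		return len(data), data, nil
	}
	// Ask the scanner for more input.
	return 0, nil, nil
}

This matches how the handler consumes each token: trim a leading newline, split on the single interior newline, and require exactly an event:/data: pair before slicing off the prefixes. Note that modelName is declared but never assigned in V3StreamHandler, so streamed chunks carry an empty Model field.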
@@ -200,3 +298,42 @@ func Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName st
 	}
 	return nil, &responseText
 }
+
+func V3Handler(c *gin.Context, resp *http.Response, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
+	responseBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+	}
+	err = resp.Body.Close()
+	if err != nil {
+		return openai.ErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
+	var cozeResponse V3Response
+	err = json.Unmarshal(responseBody, &cozeResponse)
+	if err != nil {
+		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	if cozeResponse.Code != 0 {
+		return &model.ErrorWithStatusCode{
+			Error: model.Error{
+				Message: cozeResponse.Msg,
+				Code:    cozeResponse.Code,
+			},
+			StatusCode: resp.StatusCode,
+		}, nil
+	}
+	fullTextResponse := V3ResponseCoze2OpenAI(&cozeResponse)
+	fullTextResponse.Model = modelName
+	jsonResponse, err := json.Marshal(fullTextResponse)
+	if err != nil {
+		return openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	c.Writer.Header().Set("Content-Type", "application/json")
+	c.Writer.WriteHeader(resp.StatusCode)
+	_, err = c.Writer.Write(jsonResponse)
+	var responseText string
+	if len(fullTextResponse.Choices) > 0 {
+		responseText = fullTextResponse.Choices[0].Message.StringContent()
+	}
+	return nil, &responseText
+}
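
Nothing in this file chooses between the V2 and V3 code paths, so the adaptor's response wiring elsewhere in the commit presumably dispatches on the channel's API version. A purely illustrative sketch of that dispatch, with the isV3 flag and doResponse name invented for this example:

// Illustrative only: the real routing lives outside this diff.
func doResponse(c *gin.Context, resp *http.Response, isV3, stream bool, promptTokens int, modelName string) (*model.ErrorWithStatusCode, *string) {
	switch {
	case isV3 && stream:
		return V3StreamHandler(c, resp)
	case isV3:
		return V3Handler(c, resp, promptTokens, modelName)
	case stream:
		return StreamHandler(c, resp)
	default:
		return Handler(c, resp, promptTokens, modelName)
	}
}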