Mirror of https://github.com/linux-do/new-api.git (synced 2025-09-20 01:16:36 +08:00)
feat: support claude stream_options
Commit e8b93ed6ec (parent b0e234e8f5)
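For context: stream_options is the OpenAI-compatible request field that lets a streaming client ask the relay to append one final usage chunk before the stream terminates. A minimal sketch of how that field is typically modelled, assuming the dto package follows the usual OpenAI shape (these names are illustrative and not taken from this diff):

// Illustrative only; the real request DTO is not shown in this commit.
type StreamOptions struct {
	// When true, the server emits one last chunk whose choices list is empty
	// and whose usage object carries the token counts for the whole stream.
	IncludeUsage bool `json:"include_usage,omitempty"`
}

// The request struct would then carry it as an optional pointer, e.g.:
//     StreamOptions *StreamOptions `json:"stream_options,omitempty"`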
@@ -102,6 +102,7 @@ type ChatCompletionsStreamResponse struct {
 	Model             string                                `json:"model"`
 	SystemFingerprint *string                               `json:"system_fingerprint"`
 	Choices           []ChatCompletionsStreamResponseChoice `json:"choices"`
+	Usage             *Usage                                `json:"usage"`
 }
 
 type ChatCompletionsStreamResponseSimple struct {
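Because the new Usage field is a pointer with a plain json:"usage" tag (no omitempty), ordinary delta chunks serialize it as "usage": null and only the final chunk carries real numbers. A runnable sketch of that behaviour, using stand-in types for the parts of the dto package this hunk does not show:

package main

import (
	"encoding/json"
	"fmt"
)

// Stand-in types for illustration; the real structs have more fields.
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

type ChatCompletionsStreamResponse struct {
	Id      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
	Model   string `json:"model"`
	Usage   *Usage `json:"usage"`
}

func main() {
	// Ordinary delta chunk: Usage stays nil, so the JSON shows "usage": null.
	chunk := ChatCompletionsStreamResponse{Id: "chatcmpl-xyz", Object: "chat.completion.chunk", Created: 1716000000, Model: "claude-3-opus-20240229"}
	b, _ := json.Marshal(chunk)
	fmt.Println(string(b))

	// Final chunk: Usage is populated so the client can read the token counts.
	chunk.Usage = &Usage{PromptTokens: 12, CompletionTokens: 34, TotalTokens: 46}
	b, _ = json.Marshal(chunk)
	fmt.Println(string(b))
}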
@@ -330,22 +330,15 @@ func claudeStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.
 			response.Created = createdTime
 			response.Model = info.UpstreamModelName
 
-			jsonStr, err := json.Marshal(response)
+			err = service.ObjectData(c, response)
 			if err != nil {
-				common.SysError("error marshalling stream response: " + err.Error())
-				return true
+				common.SysError(err.Error())
 			}
-			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
 			return true
 		case <-stopChan:
-			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
 			return false
 		}
 	})
-	err := resp.Body.Close()
-	if err != nil {
-		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
-	}
 	if requestMode == RequestModeCompletion {
 		usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, info.PromptTokens)
 	} else {
@@ -356,6 +349,16 @@ func claudeStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.
 			usage, _ = service.ResponseText2Usage(responseText, info.UpstreamModelName, usage.PromptTokens)
 		}
 	}
+	response := service.GenerateFinalUsageResponse(responseId, createdTime, info.UpstreamModelName, *usage)
+	err := service.ObjectData(c, response)
+	if err != nil {
+		common.SysError(err.Error())
+	}
+	service.Done(c)
+	err = resp.Body.Close()
+	if err != nil {
+		return service.OpenAIErrorWrapperLocal(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
 	return nil, usage
 }
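The stream handler now delegates SSE writing to service.ObjectData and service.Done instead of marshalling and rendering inline, and it closes the response body only after the final usage chunk has gone out. Those helpers are not part of this diff; a plausible sketch of them, assuming they wrap the same marshal-and-render pattern the removed lines used (the import path is assumed, not confirmed by this commit):

package service

import (
	"encoding/json"

	"github.com/gin-gonic/gin"

	"one-api/common" // assumed module path for the common package used above
)

// ObjectData marshals any payload and writes it as one SSE "data:" event.
func ObjectData(c *gin.Context, object interface{}) error {
	jsonStr, err := json.Marshal(object)
	if err != nil {
		return err
	}
	c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonStr)})
	return nil
}

// Done terminates the stream the way the OpenAI API does.
func Done(c *gin.Context) {
	c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
}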
@@ -82,7 +82,7 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *dto.Gen
 		return nil, errors.New("request is nil")
 	}
 	// If StreamOptions is not supported, set StreamOptions to nil
-	if !a.SupportStreamOptions {
+	if !a.SupportStreamOptions || !request.Stream {
 		request.StreamOptions = nil
 	} else {
 		// If StreamOptions is supported and the request does not set it, apply the default from the configuration
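The tightened guard also clears stream_options for non-streaming requests, where usage comes back in the normal response body and a usage-only chunk would make no sense. A self-contained sketch of the guard with hypothetical stand-in types (the real Adaptor and request definitions are not reproduced here):

package main

import "fmt"

// Stand-in types for illustration only.
type StreamOptions struct {
	IncludeUsage bool
}

type Request struct {
	Stream        bool
	StreamOptions *StreamOptions
}

type Adaptor struct {
	SupportStreamOptions bool
}

// normalize mirrors the changed condition: drop stream_options when the
// channel does not support it or when the request is not streaming at all.
func (a *Adaptor) normalize(req *Request) {
	if !a.SupportStreamOptions || !req.Stream {
		req.StreamOptions = nil
	}
}

func main() {
	a := &Adaptor{SupportStreamOptions: true}

	nonStream := &Request{Stream: false, StreamOptions: &StreamOptions{IncludeUsage: true}}
	a.normalize(nonStream)
	fmt.Println(nonStream.StreamOptions == nil) // true: cleared for non-streaming calls

	stream := &Request{Stream: true, StreamOptions: &StreamOptions{IncludeUsage: true}}
	a.normalize(stream)
	fmt.Println(stream.StreamOptions != nil) // true: kept for streaming calls
}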
@@ -24,3 +24,15 @@ func ResponseText2Usage(responseText string, modeName string, promptTokens int)
 	usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
 	return usage, err
 }
+
+func GenerateFinalUsageResponse(id string, createAt int64, model string, usage dto.Usage) *dto.ChatCompletionsStreamResponse {
+	return &dto.ChatCompletionsStreamResponse{
+		Id:                id,
+		Object:            "chat.completion.chunk",
+		Created:           createAt,
+		Model:             model,
+		SystemFingerprint: nil,
+		Choices:           nil,
+		Usage:             &usage,
+	}
+}
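Put together, the last two events a streaming client sees are a usage-only chunk built by GenerateFinalUsageResponse and then the done marker. A hedged sketch of a handler tail that reuses these helpers (the function, its placement, and the import paths are illustrative, not code from the repository):

package relay // illustrative placement

import (
	"github.com/gin-gonic/gin"

	"one-api/common"  // assumed module path
	"one-api/dto"     // assumed module path
	"one-api/service" // assumed module path
)

// finishStream is a hypothetical helper showing the emit order of the tail.
func finishStream(c *gin.Context, responseId string, createdTime int64, model string, usage dto.Usage) {
	final := service.GenerateFinalUsageResponse(responseId, createdTime, model, usage)
	if err := service.ObjectData(c, final); err != nil { // data: {"id":...,"choices":null,"usage":{...}}
		common.SysError(err.Error())
	}
	service.Done(c) // data: [DONE]
}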