feat: support llm chat on replicate

Author: Laisky.Cai
Date:   2024-12-19 03:14:32 +00:00
Parent: 4c86e7c506
Commit: 502cf3315d

9 changed files with 376 additions and 28 deletions


@@ -149,14 +149,20 @@ func isErrorHappened(meta *meta.Meta, resp *http.Response) bool {
 		}
 		return true
 	}
-	if resp.StatusCode != http.StatusOK {
+	if resp.StatusCode != http.StatusOK &&
+		// Replicate returns 201 when it creates a task.
+		resp.StatusCode != http.StatusCreated {
 		return true
 	}
 	if meta.ChannelType == channeltype.DeepL {
 		// skip stream check for deepl
 		return false
 	}
-	if meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json") {
+	if meta.IsStream && strings.HasPrefix(resp.Header.Get("Content-Type"), "application/json") &&
+		// Even if stream mode is enabled, Replicate first returns task info in JSON format,
+		// requiring the client to request the stream endpoint given in that task info.
+		meta.ChannelType != channeltype.Replicate {
 		return true
 	}
 	return false

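For context on the two Replicate special cases above: Replicate's predictions API answers a create request with HTTP 201 and a JSON task object, and even when streaming is requested the client must follow the stream URL carried in that object rather than reading SSE from the create response. A minimal client-side sketch of that flow (the endpoint and field names follow Replicate's public predictions API; the token and model version are placeholders):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// prediction mirrors the subset of Replicate's task object used here.
type prediction struct {
	ID   string `json:"id"`
	URLs struct {
		Get    string `json:"get"`
		Stream string `json:"stream"`
	} `json:"urls"`
}

func createPrediction(token string) (*prediction, error) {
	payload := []byte(`{"version": "<model-version>", "input": {"prompt": "hi"}, "stream": true}`)
	req, err := http.NewRequest(http.MethodPost,
		"https://api.replicate.com/v1/predictions", bytes.NewReader(payload))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// Task creation is signalled with 201 Created, not 200 OK.
	if resp.StatusCode != http.StatusCreated {
		return nil, fmt.Errorf("unexpected status %d", resp.StatusCode)
	}

	// The 201 body is JSON task info even in stream mode; the SSE
	// stream itself lives at urls.stream.
	var p prediction
	if err := json.NewDecoder(resp.Body).Decode(&p); err != nil {
		return nil, err
	}
	return &p, nil
}

func main() {
	p, err := createPrediction("<api-token>") // placeholder token
	if err != nil {
		panic(err)
	}
	fmt.Println("stream endpoint:", p.URLs.Stream)
}

This is why isErrorHappened must treat 201 as success and skip the JSON-content-type stream check for the Replicate channel.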

@@ -24,7 +24,7 @@ import (
 	relaymodel "github.com/songquanpeng/one-api/relay/model"
 )
 
-func getImageRequest(c *gin.Context, relayMode int) (*relaymodel.ImageRequest, error) {
+func getImageRequest(c *gin.Context, _ int) (*relaymodel.ImageRequest, error) {
 	imageRequest := &relaymodel.ImageRequest{}
 	err := common.UnmarshalBodyReusable(c, imageRequest)
 	if err != nil {
@@ -67,7 +67,7 @@ func getImageSizeRatio(model string, size string) float64 {
 	return 1
 }
 
-func validateImageRequest(imageRequest *relaymodel.ImageRequest, meta *meta.Meta) *relaymodel.ErrorWithStatusCode {
+func validateImageRequest(imageRequest *relaymodel.ImageRequest, _ *meta.Meta) *relaymodel.ErrorWithStatusCode {
 	// check prompt length
 	if imageRequest.Prompt == "" {
 		return openai.ErrorWrapper(errors.New("prompt is required"), "prompt_missing", http.StatusBadRequest)
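The two image-relay signature changes above drop parameters that the function bodies no longer read, using Go's blank identifier so call sites stay unchanged while unused-parameter warnings go away. A tiny illustration of the idiom (hypothetical function, not from the repo):

// greet must match an expected two-argument signature but only uses
// the first parameter; the blank identifier records that the second
// is intentionally ignored.
func greet(name string, _ int) string {
	return "hello " + name
}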