Merge commit '2369025842b828ac38f4427fd1ebab8d03b1fe7f'

This commit is contained in:
Laisky.Cai
2024-04-20 01:07:29 +00:00
139 changed files with 2642 additions and 2625 deletions

View File

@@ -1,145 +1,146 @@
package zhipu
// import (
// "github.com/Laisky/errors/v2"
// "fmt"
// "github.com/gin-gonic/gin"
// "github.com/songquanpeng/one-api/relay/adaptor"
// "github.com/songquanpeng/one-api/relay/adaptor/openai"
// "github.com/songquanpeng/one-api/relay/meta"
// "github.com/songquanpeng/one-api/relay/model"
// "github.com/songquanpeng/one-api/relay/relaymode"
// "io"
// "math"
// "net/http"
// "strings"
// )
import (
"errors"
"fmt"
"io"
"math"
"net/http"
"strings"
// type Adaptor struct {
// APIVersion string
// }
"github.com/Laisky/one-api/relay/adaptor"
"github.com/Laisky/one-api/relay/adaptor/openai"
"github.com/Laisky/one-api/relay/meta"
"github.com/Laisky/one-api/relay/model"
"github.com/Laisky/one-api/relay/relaymode"
"github.com/gin-gonic/gin"
)
// Adaptor implements the zhipu relay adaptor. It dispatches between the
// legacy v3 model API and the newer v4 API depending on the model name.
type Adaptor struct {
	// APIVersion is "v3" or "v4"; it is set by SetVersionByModeName.
	APIVersion string
}
// Init is a no-op for the zhipu adaptor: the API version is resolved
// lazily per request via SetVersionByModeName (see GetRequestURL and
// ConvertRequest) rather than at initialization time.
func (a *Adaptor) Init(meta *meta.Meta) {
}
// SetVersionByModeName selects the upstream API version from the model
// name: "glm-"-prefixed models use the v4 API, everything else falls back
// to the legacy v3 API.
func (a *Adaptor) SetVersionByModeName(modelName string) {
	if strings.HasPrefix(modelName, "glm-") {
		a.APIVersion = "v4"
	} else {
		a.APIVersion = "v3"
	}
}
// GetRequestURL builds the upstream zhipu endpoint for the request
// described by meta. Image generation and embedding requests always use
// the v4 API; chat requests use v4 for glm-* models and the legacy v3
// model-api (invoke / sse-invoke) otherwise.
func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
	switch meta.Mode {
	case relaymode.ImagesGenerations:
		return fmt.Sprintf("%s/api/paas/v4/images/generations", meta.BaseURL), nil
	case relaymode.Embeddings:
		return fmt.Sprintf("%s/api/paas/v4/embeddings", meta.BaseURL), nil
	}
	a.SetVersionByModeName(meta.ActualModelName)
	if a.APIVersion == "v4" {
		return fmt.Sprintf("%s/api/paas/v4/chat/completions", meta.BaseURL), nil
	}
	// The v3 API distinguishes streaming via the URL method segment.
	method := "invoke"
	if meta.IsStream {
		method = "sse-invoke"
	}
	return fmt.Sprintf("%s/api/paas/v3/model-api/%s/%s", meta.BaseURL, meta.ActualModelName, method), nil
}
// SetupRequestHeader applies the common relay headers and sets the
// Authorization header to the token produced by GetToken from the
// channel API key. It never returns a non-nil error.
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *meta.Meta) error {
	adaptor.SetupCommonRequestHeader(c, req, meta)
	token := GetToken(meta.APIKey)
	req.Header.Set("Authorization", token)
	return nil
}
// ConvertRequest translates an OpenAI-style request into the payload the
// zhipu upstream expects. Embedding requests are converted directly; for
// all other modes TopP and Temperature are clamped into zhipu's open
// interval (0.0, 1.0), then the request is forwarded unchanged for v4
// (glm-*) models or converted to the legacy v3 shape otherwise.
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	switch relayMode {
	case relaymode.Embeddings:
		embeddingRequest := ConvertEmbeddingRequest(*request)
		return embeddingRequest, nil
	default:
		// zhipu rejects the closed-interval extremes, so clamp
		// TopP into (0.0, 1.0).
		request.TopP = math.Min(0.99, request.TopP)
		request.TopP = math.Max(0.01, request.TopP)

		// Temperature is clamped into (0.0, 1.0) for the same reason.
		request.Temperature = math.Min(0.99, request.Temperature)
		request.Temperature = math.Max(0.01, request.Temperature)
		a.SetVersionByModeName(request.Model)
		if a.APIVersion == "v4" {
			return request, nil
		}
		return ConvertRequest(*request), nil
	}
}
// ConvertImageRequest maps an OpenAI image request onto zhipu's
// ImageRequest, carrying over the model, prompt, and user identifier.
func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	newRequest := ImageRequest{
		Model:  request.Model,
		Prompt: request.Prompt,
		UserId: request.User,
	}
	return newRequest, nil
}
// DoRequest forwards the prepared request body upstream via the shared
// adaptor helper, which uses this adaptor's URL and header hooks.
func (a *Adaptor) DoRequest(c *gin.Context, meta *meta.Meta, requestBody io.Reader) (*http.Response, error) {
	return adaptor.DoRequestHelper(a, c, meta, requestBody)
}
// DoResponseV4 handles responses from the v4 (OpenAI-compatible) API by
// delegating to the shared openai stream / non-stream handlers.
func (a *Adaptor) DoResponseV4(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
	if meta.IsStream {
		// StreamHandler's middle return value (response text) is not
		// needed here; only the usage is propagated.
		err, _, usage = openai.StreamHandler(c, resp, meta.Mode)
	} else {
		err, usage = openai.Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
	}
	return
}
// DoResponse dispatches upstream response handling by relay mode and API
// version: embeddings and image generations use dedicated handlers, v4
// chat goes through DoResponseV4, and legacy v3 chat uses the local
// stream / non-stream handlers.
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Meta) (usage *model.Usage, err *model.ErrorWithStatusCode) {
	switch meta.Mode {
	case relaymode.Embeddings:
		err, usage = EmbeddingsHandler(c, resp)
		return
	case relaymode.ImagesGenerations:
		err, usage = openai.ImageHandler(c, resp)
		return
	}
	if a.APIVersion == "v4" {
		return a.DoResponseV4(c, resp, meta)
	}
	if meta.IsStream {
		err, usage = StreamHandler(c, resp)
	} else {
		// Embeddings mode already returned in the switch above, so the
		// plain v3 handler is the only remaining non-stream path (the
		// old nested Embeddings check here was unreachable).
		err, usage = Handler(c, resp)
	}
	return
}
// ConvertEmbeddingRequest maps an OpenAI embedding request onto zhipu's
// EmbeddingRequest, always targeting the "embedding-2" model regardless
// of the requested model name.
//
// NOTE(review): request.Input is asserted to string and will panic if a
// caller supplies a non-string input (e.g. a list of strings) — confirm
// callers guarantee a string, or add a checked assertion upstream.
func ConvertEmbeddingRequest(request model.GeneralOpenAIRequest) *EmbeddingRequest {
	return &EmbeddingRequest{
		Model: "embedding-2",
		Input: request.Input.(string),
	}
}
// GetModelList reports the model names supported by the zhipu channel.
func (a *Adaptor) GetModelList() []string {
	return ModelList
}
// GetChannelName reports the channel identifier for this adaptor.
func (a *Adaptor) GetChannelName() string {
	const channelName = "zhipu"
	return channelName
}