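// Package baidu implements the relay adaptor for Baidu's Wenxin Workshop
// (ERNIE) API.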
package baidu

import (
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/dto"
	"one-api/relay/channel"
	relaycommon "one-api/relay/common"
	"one-api/relay/constant"
	"strings"
)

type Adaptor struct {
}

// Init prepares the adaptor for a request; the Baidu adaptor keeps no
// per-request state, so this is a no-op.
func (a *Adaptor) Init(info *relaycommon.RelayInfo, request dto.GeneralOpenAIRequest) {
}

// GetRequestURL maps the upstream model name to its Wenxin Workshop endpoint
// and appends the Baidu access token as a query parameter.
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
	// https://cloud.baidu.com/doc/WENXINWORKSHOP/s/clntwmv7t
	suffix := "chat/"
	// Embedding-style models are routed to the embeddings endpoint instead of chat.
	if strings.HasPrefix(info.UpstreamModelName, "Embedding") {
		suffix = "embeddings/"
	}
	if strings.HasPrefix(info.UpstreamModelName, "bge-large") {
		suffix = "embeddings/"
	}
	if strings.HasPrefix(info.UpstreamModelName, "tao-8k") {
		suffix = "embeddings/"
	}
	switch info.UpstreamModelName {
	case "ERNIE-4.0":
		suffix += "completions_pro"
	case "ERNIE-Bot-4":
		suffix += "completions_pro"
	case "ERNIE-Bot":
		suffix += "completions"
	case "ERNIE-Bot-turbo":
		suffix += "eb-instant"
	case "ERNIE-Speed":
		suffix += "ernie_speed"
	case "ERNIE-4.0-8K":
		suffix += "completions_pro"
	case "ERNIE-3.5-8K":
		suffix += "completions"
	case "ERNIE-3.5-8K-0205":
		suffix += "ernie-3.5-8k-0205"
	case "ERNIE-3.5-8K-1222":
		suffix += "ernie-3.5-8k-1222"
	case "ERNIE-Bot-8K":
		suffix += "ernie_bot_8k"
	case "ERNIE-3.5-4K-0205":
		suffix += "ernie-3.5-4k-0205"
	case "ERNIE-Speed-8K":
		suffix += "ernie_speed"
	case "ERNIE-Speed-128K":
		suffix += "ernie-speed-128k"
	case "ERNIE-Lite-8K-0922":
		suffix += "eb-instant"
	case "ERNIE-Lite-8K-0308":
		suffix += "ernie-lite-8k"
	case "ERNIE-Tiny-8K":
		suffix += "ernie-tiny-8k"
	case "BLOOMZ-7B":
		suffix += "bloomz_7b1"
	case "Embedding-V1":
		suffix += "embedding-v1"
	case "bge-large-zh":
		suffix += "bge_large_zh"
	case "bge-large-en":
		suffix += "bge_large_en"
	case "tao-8k":
		suffix += "tao_8k"
	default:
		// Unknown models fall back to their lower-cased name as the endpoint suffix.
		suffix += strings.ToLower(info.UpstreamModelName)
	}
	fullRequestURL := fmt.Sprintf("%s/rpc/2.0/ai_custom/v1/wenxinworkshop/%s", info.BaseUrl, suffix)
	var accessToken string
	var err error
	if accessToken, err = getBaiduAccessToken(info.ApiKey); err != nil {
		return "", err
	}
	fullRequestURL += "?access_token=" + accessToken
	return fullRequestURL, nil
}
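
// Illustrative example (a sketch, not part of the upstream code): with an
// assumed BaseUrl of "https://aip.baidubce.com" and UpstreamModelName
// "ERNIE-4.0", GetRequestURL resolves to
//
//	https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro?access_token=<token>
//
// where <token> is whatever getBaiduAccessToken returns for the channel's API key.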

// SetupRequestHeader applies the shared relay headers and sets a bearer
// Authorization header from the channel's API key.
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
	channel.SetupApiRequestHeader(info, c, req)
	req.Header.Set("Authorization", "Bearer "+info.ApiKey)
	return nil
}

// ConvertRequest converts an OpenAI-format request into the Baidu request body,
// using the embedding format when the relay mode asks for embeddings and the
// chat format otherwise.
func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *dto.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	switch relayMode {
	case constant.RelayModeEmbeddings:
		baiduEmbeddingRequest := embeddingRequestOpenAI2Baidu(*request)
		return baiduEmbeddingRequest, nil
	default:
		baiduRequest := requestOpenAI2Baidu(*request)
		return baiduRequest, nil
	}
}

// DoRequest performs the upstream HTTP request through the shared channel helper.
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error) {
	return channel.DoApiRequest(a, c, info, requestBody)
}

// DoResponse dispatches the upstream response to the streaming, embedding, or
// chat handler and returns the resulting token usage.
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
	if info.IsStream {
		err, usage = baiduStreamHandler(c, resp)
	} else {
		switch info.RelayMode {
		case constant.RelayModeEmbeddings:
			err, usage = baiduEmbeddingHandler(c, resp)
		default:
			err, usage = baiduHandler(c, resp)
		}
	}
	return
}

// GetModelList returns the models supported by this adaptor.
func (a *Adaptor) GetModelList() []string {
	return ModelList
}

// GetChannelName returns the display name of the Baidu channel.
func (a *Adaptor) GetChannelName() string {
	return ChannelName
}