Mirror of https://github.com/songquanpeng/one-api.git
Merge remote-tracking branch 'origin/upstream/main'
@@ -6,13 +6,14 @@ import (
 	"net/http"
 
 	"github.com/Laisky/errors/v2"
+	"github.com/gin-gonic/gin"
 	"github.com/songquanpeng/one-api/common/config"
 	"github.com/songquanpeng/one-api/common/helper"
 	channelhelper "github.com/songquanpeng/one-api/relay/adaptor"
 	"github.com/songquanpeng/one-api/relay/adaptor/openai"
 	"github.com/songquanpeng/one-api/relay/meta"
 	"github.com/songquanpeng/one-api/relay/model"
-	"github.com/gin-gonic/gin"
+	"github.com/songquanpeng/one-api/relay/relaymode"
 )
 
 type Adaptor struct {
@@ -24,7 +25,14 @@ func (a *Adaptor) Init(meta *meta.Meta) {
 
 func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 	version := helper.AssignOrDefault(meta.Config.APIVersion, config.GeminiVersion)
-	action := "generateContent"
+	action := ""
+	switch meta.Mode {
+	case relaymode.Embeddings:
+		action = "batchEmbedContents"
+	default:
+		action = "generateContent"
+	}
+
 	if meta.IsStream {
 		action = "streamGenerateContent?alt=sse"
 	}
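The new switch only picks the action string; the rest of GetRequestURL, which is not part of this hunk, assembles the final endpoint. As a rough sketch, assuming the usual Gemini URL shape {base}/{version}/models/{model}:{action} and a hypothetical buildURL helper, the branches resolve to URLs like these:

package main

import "fmt"

// buildURL is a hypothetical helper; the real GetRequestURL formats the URL
// inline. Gemini endpoints follow {base}/{version}/models/{model}:{action}.
func buildURL(baseURL, version, model, action string) string {
	return fmt.Sprintf("%s/%s/models/%s:%s", baseURL, version, model, action)
}

func main() {
	base := "https://generativelanguage.googleapis.com"
	// Default branch of the new switch: chat completion.
	fmt.Println(buildURL(base, "v1beta", "gemini-pro", "generateContent"))
	// relaymode.Embeddings branch: batch embedding.
	fmt.Println(buildURL(base, "v1beta", "text-embedding-004", "batchEmbedContents"))
	// Streaming still overrides the action afterwards.
	fmt.Println(buildURL(base, "v1beta", "gemini-pro", "streamGenerateContent?alt=sse"))
}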
@@ -42,7 +50,14 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *model.G
 	if request == nil {
 		return nil, errors.New("request is nil")
 	}
-	return ConvertRequest(*request), nil
+	switch relayMode {
+	case relaymode.Embeddings:
+		geminiEmbeddingRequest := ConvertEmbeddingRequest(*request)
+		return geminiEmbeddingRequest, nil
+	default:
+		geminiRequest := ConvertRequest(*request)
+		return geminiRequest, nil
+	}
 }
 
 func (a *Adaptor) ConvertImageRequest(request *model.ImageRequest) (any, error) {
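ConvertEmbeddingRequest is referenced here but defined elsewhere in the adaptor, so its body is not part of this diff. As an illustrative sketch, assuming the Gemini batchEmbedContents payload of one sub-request per input text (the types and function below are local stand-ins, not the repository's own definitions), the conversion from a flat OpenAI-style input list could look roughly like this:

package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-in types mirroring the Gemini batchEmbedContents request body.
type part struct {
	Text string `json:"text"`
}

type embContent struct {
	Parts []part `json:"parts"`
}

type embeddingRequest struct {
	Model   string     `json:"model"`
	Content embContent `json:"content"`
}

type batchEmbeddingRequest struct {
	Requests []embeddingRequest `json:"requests"`
}

// convertEmbeddingRequest maps a flat list of input strings (as an OpenAI-style
// embedding request carries) onto one Gemini sub-request per input.
func convertEmbeddingRequest(model string, inputs []string) batchEmbeddingRequest {
	reqs := make([]embeddingRequest, 0, len(inputs))
	for _, in := range inputs {
		reqs = append(reqs, embeddingRequest{
			Model:   "models/" + model,
			Content: embContent{Parts: []part{{Text: in}}},
		})
	}
	return batchEmbeddingRequest{Requests: reqs}
}

func main() {
	body, _ := json.MarshalIndent(convertEmbeddingRequest("text-embedding-004", []string{"hello", "world"}), "", "  ")
	fmt.Println(string(body))
}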
@@ -62,7 +77,12 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, meta *meta.Met
 		err, responseText = StreamHandler(c, resp)
 		usage = openai.ResponseText2Usage(responseText, meta.ActualModelName, meta.PromptTokens)
 	} else {
-		err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
+		switch meta.Mode {
+		case relaymode.Embeddings:
+			err, usage = EmbeddingHandler(c, resp)
+		default:
+			err, usage = Handler(c, resp, meta.PromptTokens, meta.ActualModelName)
+		}
 	}
 	return
 }
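EmbeddingHandler is likewise referenced but not shown in this diff. A minimal sketch of the reshaping it would have to do, assuming Gemini's batchEmbedContents response carries one vector per input under embeddings[].values and the relay returns OpenAI-style data[].embedding entries (all types below are local stand-ins, not the repository's):

package main

import "fmt"

// Local stand-in types; the repository defines its own response structs.
type geminiEmbedding struct {
	Values []float64 `json:"values"`
}

type geminiEmbeddingResponse struct {
	Embeddings []geminiEmbedding `json:"embeddings"`
}

type openaiEmbeddingData struct {
	Object    string    `json:"object"`
	Index     int       `json:"index"`
	Embedding []float64 `json:"embedding"`
}

// toOpenAIData reshapes Gemini's per-input vectors into OpenAI-style entries;
// the real handler would also fill in the model name and a usage block.
func toOpenAIData(resp geminiEmbeddingResponse) []openaiEmbeddingData {
	out := make([]openaiEmbeddingData, 0, len(resp.Embeddings))
	for i, e := range resp.Embeddings {
		out = append(out, openaiEmbeddingData{Object: "embedding", Index: i, Embedding: e.Values})
	}
	return out
}

func main() {
	resp := geminiEmbeddingResponse{Embeddings: []geminiEmbedding{
		{Values: []float64{0.1, 0.2}},
		{Values: []float64{0.3, 0.4}},
	}}
	fmt.Printf("%+v\n", toOpenAIData(resp))
}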