✨ add: add images api
@@ -38,6 +38,9 @@ func CreateOpenAIProvider(c *gin.Context, baseURL string) *OpenAIProvider {
 			AudioSpeech:         "/v1/audio/speech",
 			AudioTranscriptions: "/v1/audio/transcriptions",
 			AudioTranslations:   "/v1/audio/translations",
+			ImagesGenerations:   "/v1/images/generations",
+			ImagesEdit:          "/v1/images/edit",
+			ImagesVariations:    "/v1/images/variations",
 			Context:             c,
 		},
 		IsAzure: false,
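For context, the three new routes accept the standard OpenAI images payloads. Below is a minimal standalone sketch (not part of this commit) of the JSON body a client would POST to the new /v1/images/generations route; the model, prompt, and size values are placeholders for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Example payload per the public OpenAI Images API; values are placeholders.
	body, err := json.Marshal(map[string]any{
		"model":  "dall-e-3",
		"prompt": "a watercolor painting of a lighthouse at dawn",
		"n":      1,
		"size":   "1024x1024",
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}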
@@ -50,7 +53,13 @@ func (p *OpenAIProvider) GetFullRequestURL(requestURL string, modelName string)
 
 	if p.IsAzure {
 		apiVersion := p.Context.GetString("api_version")
-		requestURL = fmt.Sprintf("/openai/deployments/%s%s?api-version=%s", modelName, requestURL, apiVersion)
+		if modelName == "dall-e-2" {
+			// dall-e-3 requires api-version=2023-12-01-preview, but that version
+			// no longer offers dall-e-2, so the dall-e-2 version is hard-coded for now.
+			requestURL = fmt.Sprintf("/openai/%s:submit?api-version=2023-09-01-preview", requestURL)
+		} else {
+			requestURL = fmt.Sprintf("/openai/deployments/%s%s?api-version=%s", modelName, requestURL, apiVersion)
+		}
 	}
 
 	if strings.HasPrefix(baseURL, "https://gateway.ai.cloudflare.com") {
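For clarity, a small standalone sketch (not part of the commit) that mirrors the Azure branch above and shows the two URL shapes it produces; the deployment name and api-version value passed in main are assumed for illustration.

package main

import "fmt"

// azureImageURL mirrors the Azure branch of GetFullRequestURL above, for illustration only.
func azureImageURL(modelName, requestURL, apiVersion string) string {
	if modelName == "dall-e-2" {
		// dall-e-2 is pinned to the 2023-09-01-preview :submit endpoint.
		return fmt.Sprintf("/openai/%s:submit?api-version=2023-09-01-preview", requestURL)
	}
	return fmt.Sprintf("/openai/deployments/%s%s?api-version=%s", modelName, requestURL, apiVersion)
}

func main() {
	// Assumed inputs; the real handler may transform requestURL before or after this step.
	fmt.Println(azureImageURL("dall-e-2", "/v1/images/generations", "2023-12-01-preview"))
	fmt.Println(azureImageURL("dall-e-3", "/v1/images/generations", "2023-12-01-preview"))
}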
@@ -78,7 +87,7 @@ func (p *OpenAIProvider) GetRequestHeaders() (headers map[string]string) {
 }
 
 // Get the request body
-func (p *OpenAIProvider) getRequestBody(request any, isModelMapped bool) (requestBody io.Reader, err error) {
+func (p *OpenAIProvider) GetRequestBody(request any, isModelMapped bool) (requestBody io.Reader, err error) {
 	if isModelMapped {
 		jsonStr, err := json.Marshal(request)
 		if err != nil {
@@ -26,7 +26,7 @@ func (c *OpenAIProviderChatStreamResponse) responseStreamHandler() (responseText
 }
 
 func (p *OpenAIProvider) ChatAction(request *types.ChatCompletionRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
-	requestBody, err := p.getRequestBody(&request, isModelMapped)
+	requestBody, err := p.GetRequestBody(&request, isModelMapped)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
 	}
@@ -26,7 +26,7 @@ func (c *OpenAIProviderCompletionResponse) responseStreamHandler() (responseText
 }
 
 func (p *OpenAIProvider) CompleteAction(request *types.CompletionRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
-	requestBody, err := p.getRequestBody(&request, isModelMapped)
+	requestBody, err := p.GetRequestBody(&request, isModelMapped)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
 	}
@@ -19,7 +19,7 @@ func (c *OpenAIProviderEmbeddingsResponse) ResponseHandler(resp *http.Response)
 
 func (p *OpenAIProvider) EmbeddingsAction(request *types.EmbeddingRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
 
-	requestBody, err := p.getRequestBody(&request, isModelMapped)
+	requestBody, err := p.GetRequestBody(&request, isModelMapped)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
 	}
providers/openai/image_generations.go (new file, 49 lines)
@@ -0,0 +1,49 @@
+package openai
+
+import (
+	"net/http"
+	"one-api/common"
+	"one-api/types"
+)
+
+func (c *OpenAIProviderImageResponseResponse) ResponseHandler(resp *http.Response) (OpenAIResponse any, errWithCode *types.OpenAIErrorWithStatusCode) {
+	if c.Error.Type != "" {
+		errWithCode = &types.OpenAIErrorWithStatusCode{
+			OpenAIError: c.Error,
+			StatusCode:  resp.StatusCode,
+		}
+		return
+	}
+	return nil, nil
+}
+
+func (p *OpenAIProvider) ImageGenerationsAction(request *types.ImageRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
+
+	requestBody, err := p.GetRequestBody(&request, isModelMapped)
+	if err != nil {
+		return nil, types.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
+	}
+
+	fullRequestURL := p.GetFullRequestURL(p.ImagesGenerations, request.Model)
+	headers := p.GetRequestHeaders()
+
+	client := common.NewClient()
+	req, err := client.NewRequest(p.Context.Request.Method, fullRequestURL, common.WithBody(requestBody), common.WithHeader(headers))
+	if err != nil {
+		return nil, types.ErrorWrapper(err, "new_request_failed", http.StatusInternalServerError)
+	}
+
+	openAIProviderImageResponseResponse := &OpenAIProviderImageResponseResponse{}
+	errWithCode = p.SendRequest(req, openAIProviderImageResponseResponse, true)
+	if errWithCode != nil {
+		return
+	}
+
+	usage = &types.Usage{
+		PromptTokens:     promptTokens,
+		CompletionTokens: 0,
+		TotalTokens:      promptTokens,
+	}
+
+	return
+}
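For orientation, a minimal sketch (not part of this commit) of how a relay handler might call the new ImageGenerationsAction; the caller name, the empty baseURL, the token-counting helper, and the request fields used for counting are assumptions, not code from this repository.

package openai

import (
	"one-api/common"
	"one-api/types"

	"github.com/gin-gonic/gin"
)

// relayImageGenerations is a hypothetical caller, for illustration only.
func relayImageGenerations(c *gin.Context, request *types.ImageRequest) {
	provider := CreateOpenAIProvider(c, "") // assumed: empty baseURL falls back to the channel default
	promptTokens := common.CountTokenText(request.Prompt, request.Model) // assumed helper and fields

	usage, errWithCode := provider.ImageGenerationsAction(request, false, promptTokens)
	if errWithCode != nil {
		c.JSON(errWithCode.StatusCode, gin.H{"error": errWithCode.OpenAIError})
		return
	}
	_ = usage // in the real relay, usage would feed quota accounting
}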
@@ -19,7 +19,7 @@ func (c *OpenAIProviderModerationResponse) ResponseHandler(resp *http.Response)
 
 func (p *OpenAIProvider) ModerationAction(request *types.ModerationRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
 
-	requestBody, err := p.getRequestBody(&request, isModelMapped)
+	requestBody, err := p.GetRequestBody(&request, isModelMapped)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
 	}
@@ -8,7 +8,7 @@ import (
 
 func (p *OpenAIProvider) SpeechAction(request *types.SpeechAudioRequest, isModelMapped bool, promptTokens int) (usage *types.Usage, errWithCode *types.OpenAIErrorWithStatusCode) {
 
-	requestBody, err := p.getRequestBody(&request, isModelMapped)
+	requestBody, err := p.GetRequestBody(&request, isModelMapped)
 	if err != nil {
 		return nil, types.ErrorWrapper(err, "json_marshal_failed", http.StatusInternalServerError)
 	}
@@ -37,3 +37,8 @@ type OpenAIProviderTranscriptionsTextResponse string
 func (a *OpenAIProviderTranscriptionsTextResponse) GetString() *string {
 	return (*string)(a)
 }
+
+type OpenAIProviderImageResponseResponse struct {
+	types.ImageResponse
+	types.OpenAIErrorResponse
+}