Merge commit '1c2654320e5b6268b13b6efca40ce37a523d032b'

Author: Laisky.Cai
Date:   2024-04-28 01:53:31 +00:00

162 changed files with 1351 additions and 731 deletions

View File

@@ -3,12 +3,12 @@ package openai
 import (
     "fmt"
     "github.com/Laisky/errors/v2"
-    "github.com/Laisky/one-api/relay/adaptor"
-    "github.com/Laisky/one-api/relay/adaptor/minimax"
-    "github.com/Laisky/one-api/relay/channeltype"
-    "github.com/Laisky/one-api/relay/meta"
-    "github.com/Laisky/one-api/relay/model"
-    "github.com/Laisky/one-api/relay/relaymode"
+    "github.com/songquanpeng/one-api/relay/adaptor"
+    "github.com/songquanpeng/one-api/relay/adaptor/minimax"
+    "github.com/songquanpeng/one-api/relay/channeltype"
+    "github.com/songquanpeng/one-api/relay/meta"
+    "github.com/songquanpeng/one-api/relay/model"
+    "github.com/songquanpeng/one-api/relay/relaymode"
     "github.com/gin-gonic/gin"
     "io"
     "net/http"
@@ -29,17 +29,17 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
     if meta.Mode == relaymode.ImagesGenerations {
         // https://learn.microsoft.com/en-us/azure/ai-services/openai/dall-e-quickstart?tabs=dalle3%2Ccommand-line&pivots=rest-api
         // https://{resource_name}.openai.azure.com/openai/deployments/dall-e-3/images/generations?api-version=2024-03-01-preview
-        fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.APIVersion)
+        fullRequestURL := fmt.Sprintf("%s/openai/deployments/%s/images/generations?api-version=%s", meta.BaseURL, meta.ActualModelName, meta.Config.APIVersion)
         return fullRequestURL, nil
     }
     // https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
     requestURL := strings.Split(meta.RequestURLPath, "?")[0]
-    requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.APIVersion)
+    requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, meta.Config.APIVersion)
     task := strings.TrimPrefix(requestURL, "/v1/")
     model_ := meta.ActualModelName
     model_ = strings.Replace(model_, ".", "", -1)
-    //https://github.com/Laisky/one-api/issues/1191
+    //https://github.com/songquanpeng/one-api/issues/1191
     // {your endpoint}/openai/deployments/{your azure_model}/chat/completions?api-version={api_version}
     requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
     return GetFullRequestURL(meta.BaseURL, requestURL, meta.ChannelType), nil
@@ -58,7 +58,7 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, meta *me
     }
     req.Header.Set("Authorization", "Bearer "+meta.APIKey)
     if meta.ChannelType == channeltype.OpenRouter {
-        req.Header.Set("HTTP-Referer", "https://github.com/Laisky/one-api")
+        req.Header.Set("HTTP-Referer", "https://github.com/songquanpeng/one-api")
         req.Header.Set("X-Title", "One API")
     }
     return nil
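
For reference, the sketch below mirrors the Azure URL mapping performed in the hunk above, with the api-version now taken from the channel config (meta.Config.APIVersion) rather than from meta directly. The helper name and sample values are illustrative stand-ins, not code from the repository.

```go
// Minimal sketch, not the adaptor's real code: plain strings replace *meta.Meta.
package main

import (
	"fmt"
	"strings"
)

// buildAzureChatURL (hypothetical name) follows the same steps as GetRequestURL:
// drop the query string, append the configured api-version, strip the /v1/ prefix,
// and route through an Azure deployment named after the model with dots removed.
func buildAzureChatURL(baseURL, requestURLPath, modelName, apiVersion string) string {
	requestURL := strings.Split(requestURLPath, "?")[0]
	requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion)
	task := strings.TrimPrefix(requestURL, "/v1/")
	deployment := strings.ReplaceAll(modelName, ".", "") // "gpt-3.5-turbo" -> "gpt-35-turbo"
	return fmt.Sprintf("%s/openai/deployments/%s/%s", baseURL, deployment, task)
}

func main() {
	fmt.Println(buildAzureChatURL(
		"https://example.openai.azure.com", // sample resource endpoint
		"/v1/chat/completions",
		"gpt-3.5-turbo",
		"2024-03-01-preview",
	))
	// Output:
	// https://example.openai.azure.com/openai/deployments/gpt-35-turbo/chat/completions?api-version=2024-03-01-preview
}
```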

View File

@@ -1,16 +1,16 @@
 package openai
 import (
-    "github.com/Laisky/one-api/relay/adaptor/ai360"
-    "github.com/Laisky/one-api/relay/adaptor/baichuan"
-    "github.com/Laisky/one-api/relay/adaptor/deepseek"
-    "github.com/Laisky/one-api/relay/adaptor/groq"
-    "github.com/Laisky/one-api/relay/adaptor/lingyiwanwu"
-    "github.com/Laisky/one-api/relay/adaptor/minimax"
-    "github.com/Laisky/one-api/relay/adaptor/mistral"
-    "github.com/Laisky/one-api/relay/adaptor/moonshot"
-    "github.com/Laisky/one-api/relay/adaptor/stepfun"
-    "github.com/Laisky/one-api/relay/channeltype"
+    "github.com/songquanpeng/one-api/relay/adaptor/ai360"
+    "github.com/songquanpeng/one-api/relay/adaptor/baichuan"
+    "github.com/songquanpeng/one-api/relay/adaptor/deepseek"
+    "github.com/songquanpeng/one-api/relay/adaptor/groq"
+    "github.com/songquanpeng/one-api/relay/adaptor/lingyiwanwu"
+    "github.com/songquanpeng/one-api/relay/adaptor/minimax"
+    "github.com/songquanpeng/one-api/relay/adaptor/mistral"
+    "github.com/songquanpeng/one-api/relay/adaptor/moonshot"
+    "github.com/songquanpeng/one-api/relay/adaptor/stepfun"
+    "github.com/songquanpeng/one-api/relay/channeltype"
 )
 var CompatibleChannels = []int{

View File

@@ -2,8 +2,8 @@ package openai
 import (
     "fmt"
-    "github.com/Laisky/one-api/relay/channeltype"
-    "github.com/Laisky/one-api/relay/model"
+    "github.com/songquanpeng/one-api/relay/channeltype"
+    "github.com/songquanpeng/one-api/relay/model"
     "strings"
 )

View File

@@ -6,7 +6,7 @@ import (
     "io"
     "net/http"
-    "github.com/Laisky/one-api/relay/model"
+    "github.com/songquanpeng/one-api/relay/model"
     "github.com/gin-gonic/gin"
 )

View File

@@ -8,11 +8,11 @@ import (
     "net/http"
     "strings"
-    "github.com/Laisky/one-api/common"
-    "github.com/Laisky/one-api/common/conv"
-    "github.com/Laisky/one-api/common/logger"
-    "github.com/Laisky/one-api/relay/model"
-    "github.com/Laisky/one-api/relay/relaymode"
+    "github.com/songquanpeng/one-api/common"
+    "github.com/songquanpeng/one-api/common/conv"
+    "github.com/songquanpeng/one-api/common/logger"
+    "github.com/songquanpeng/one-api/relay/model"
+    "github.com/songquanpeng/one-api/relay/relaymode"
     "github.com/gin-gonic/gin"
 )

View File

@@ -1,6 +1,6 @@
 package openai
-import "github.com/Laisky/one-api/relay/model"
+import "github.com/songquanpeng/one-api/relay/model"
 type TextContent struct {
     Type string `json:"type,omitempty"`
@@ -134,7 +134,7 @@ type ChatCompletionsStreamResponse struct {
     Created int64 `json:"created"`
     Model string `json:"model"`
     Choices []ChatCompletionsStreamResponseChoice `json:"choices"`
-    Usage *model.Usage `json:"usage"`
+    Usage *model.Usage `json:"usage,omitempty"`
 }
 type CompletionsStreamResponse struct {
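
The only functional change in this file is the omitempty tag on the stream response's Usage field. A small, self-contained sketch of what that changes, using a trimmed stand-in struct rather than the real ChatCompletionsStreamResponse: a nil *Usage pointer is now dropped from the marshaled chunk instead of being serialized as "usage":null.

```go
// Sketch with a trimmed stand-in struct; only the json tag behavior matters here.
package main

import (
	"encoding/json"
	"fmt"
)

type Usage struct {
	TotalTokens int `json:"total_tokens"`
}

type streamChunk struct {
	Model string `json:"model"`
	Usage *Usage `json:"usage,omitempty"` // nil pointer: field omitted entirely
}

func main() {
	b, _ := json.Marshal(streamChunk{Model: "gpt-3.5-turbo"})
	fmt.Println(string(b)) // {"model":"gpt-3.5-turbo"}  (no "usage":null on intermediate chunks)

	b, _ = json.Marshal(streamChunk{Model: "gpt-3.5-turbo", Usage: &Usage{TotalTokens: 42}})
	fmt.Println(string(b)) // {"model":"gpt-3.5-turbo","usage":{"total_tokens":42}}
}
```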

View File

@@ -3,11 +3,11 @@ package openai
 import (
     "fmt"
     "github.com/Laisky/errors/v2"
-    "github.com/Laisky/one-api/common/config"
-    "github.com/Laisky/one-api/common/image"
-    "github.com/Laisky/one-api/common/logger"
-    billingratio "github.com/Laisky/one-api/relay/billing/ratio"
-    "github.com/Laisky/one-api/relay/model"
+    "github.com/songquanpeng/one-api/common/config"
+    "github.com/songquanpeng/one-api/common/image"
+    "github.com/songquanpeng/one-api/common/logger"
+    billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
+    "github.com/songquanpeng/one-api/relay/model"
     "github.com/pkoukk/tiktoken-go"
     "math"
     "strings"
@@ -253,3 +253,7 @@ func CountTokenText(text string, model string) int {
     tokenEncoder := getTokenEncoder(model)
     return getTokenNum(tokenEncoder, text)
 }
+func CountToken(text string) int {
+    return CountTokenInput(text, "gpt-3.5-turbo")
+}
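
The new CountToken helper added above simply delegates to the existing CountTokenInput with a fixed "gpt-3.5-turbo" encoding. A hedged usage sketch, assuming the import path used throughout this diff; the printed count depends on the tiktoken encoding and is not a fixed value.

```go
// Usage sketch only; the import path is taken from this diff, the count is illustrative.
package main

import (
	"fmt"

	"github.com/songquanpeng/one-api/relay/adaptor/openai"
)

func main() {
	n := openai.CountToken("Hello, how can I help you today?")
	fmt.Printf("prompt is %d tokens under the gpt-3.5-turbo encoding\n", n)
}
```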

View File

@@ -1,6 +1,6 @@
 package openai
-import "github.com/Laisky/one-api/relay/model"
+import "github.com/songquanpeng/one-api/relay/model"
 func ErrorWrapper(err error, code string, statusCode int) *model.ErrorWithStatusCode {
     Error := model.Error{