mirror of https://github.com/songquanpeng/one-api.git
Merge remote-tracking branch 'origin/upstream/main'
@@ -3,13 +3,13 @@ package openai
import (
	"fmt"
	"github.com/Laisky/errors/v2"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/relay/adaptor"
	"github.com/songquanpeng/one-api/relay/adaptor/minimax"
	"github.com/songquanpeng/one-api/relay/channeltype"
	"github.com/songquanpeng/one-api/relay/meta"
	"github.com/songquanpeng/one-api/relay/model"
	"github.com/songquanpeng/one-api/relay/relaymode"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"strings"
@@ -6,8 +6,8 @@ import (
	"io"
	"net/http"

	"github.com/songquanpeng/one-api/relay/model"
	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/relay/model"
)

// ImagesEditsHandler just copy response body to client
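
The comment above is the only description of ImagesEditsHandler visible in this hunk, so here is a minimal, self-contained sketch of a pass-through handler in the same spirit. Only the handler's purpose and the gin, io, net/http, and relay/model imports come from the diff; the name imagesEditsPassThrough, its signature, and the plain error return are assumptions for illustration, not the repository's actual definition.

package openai

import (
	"io"
	"net/http"

	"github.com/gin-gonic/gin"
)

// imagesEditsPassThrough relays an upstream images/edits response to the
// client without inspecting or rewriting the body.
func imagesEditsPassThrough(c *gin.Context, resp *http.Response) error {
	defer resp.Body.Close()

	// Mirror the upstream headers and status code verbatim.
	for key, values := range resp.Header {
		for _, value := range values {
			c.Writer.Header().Add(key, value)
		}
	}
	c.Writer.WriteHeader(resp.StatusCode)

	// Stream the body straight through to the caller.
	_, err := io.Copy(c.Writer, resp.Body)
	return err
}
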
@@ -8,12 +8,12 @@ import (
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/songquanpeng/one-api/common"
	"github.com/songquanpeng/one-api/common/conv"
	"github.com/songquanpeng/one-api/common/logger"
	"github.com/songquanpeng/one-api/relay/model"
	"github.com/songquanpeng/one-api/relay/relaymode"
	"github.com/gin-gonic/gin"
)

const (
@@ -3,12 +3,12 @@ package openai
import (
	"fmt"
	"github.com/Laisky/errors/v2"
	"github.com/pkoukk/tiktoken-go"
	"github.com/songquanpeng/one-api/common/config"
	"github.com/songquanpeng/one-api/common/image"
	"github.com/songquanpeng/one-api/common/logger"
	billingratio "github.com/songquanpeng/one-api/relay/billing/ratio"
	"github.com/songquanpeng/one-api/relay/model"
	"github.com/pkoukk/tiktoken-go"
	"math"
	"strings"
)
@@ -24,6 +24,10 @@ func InitTokenEncoders() {
		logger.FatalLog(fmt.Sprintf("failed to get gpt-3.5-turbo token encoder: %s", err.Error()))
	}
	defaultTokenEncoder = gpt35TokenEncoder
	gpt4oTokenEncoder, err := tiktoken.EncodingForModel("gpt-4o")
	if err != nil {
		logger.FatalLog(fmt.Sprintf("failed to get gpt-4o token encoder: %s", err.Error()))
	}
	gpt4TokenEncoder, err := tiktoken.EncodingForModel("gpt-4")
	if err != nil {
		logger.FatalLog(fmt.Sprintf("failed to get gpt-4 token encoder: %s", err.Error()))
@@ -31,6 +35,8 @@ func InitTokenEncoders() {
	for model := range billingratio.ModelRatio {
		if strings.HasPrefix(model, "gpt-3.5") {
			tokenEncoderMap[model] = gpt35TokenEncoder
		} else if strings.HasPrefix(model, "gpt-4o") {
			tokenEncoderMap[model] = gpt4oTokenEncoder
		} else if strings.HasPrefix(model, "gpt-4") {
			tokenEncoderMap[model] = gpt4TokenEncoder
		} else {
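
Taken together, the two hunks above add a dedicated gpt-4o encoder and a matching prefix branch to InitTokenEncoders. The sketch below restates that logic as a standalone program: tokenEncoderMap, defaultTokenEncoder, and the encoder variable names come from the diff, while the modelNames parameter, the switch form of the if/else chain, the sample model names, and the fallback in the default branch (the diff is truncated there) are illustrative assumptions.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/pkoukk/tiktoken-go"
)

var (
	tokenEncoderMap     = map[string]*tiktoken.Tiktoken{}
	defaultTokenEncoder *tiktoken.Tiktoken
)

// initTokenEncoders builds one encoder per model family and registers it for
// every model name sharing that family's prefix. The gpt-4o branch must be
// checked before the plain gpt-4 prefix, otherwise gpt-4o models would be
// counted with the gpt-4 encoder.
func initTokenEncoders(modelNames []string) {
	gpt35TokenEncoder, err := tiktoken.EncodingForModel("gpt-3.5-turbo")
	if err != nil {
		log.Fatalf("failed to get gpt-3.5-turbo token encoder: %s", err)
	}
	defaultTokenEncoder = gpt35TokenEncoder

	gpt4oTokenEncoder, err := tiktoken.EncodingForModel("gpt-4o")
	if err != nil {
		log.Fatalf("failed to get gpt-4o token encoder: %s", err)
	}
	gpt4TokenEncoder, err := tiktoken.EncodingForModel("gpt-4")
	if err != nil {
		log.Fatalf("failed to get gpt-4 token encoder: %s", err)
	}

	for _, model := range modelNames {
		switch {
		case strings.HasPrefix(model, "gpt-3.5"):
			tokenEncoderMap[model] = gpt35TokenEncoder
		case strings.HasPrefix(model, "gpt-4o"): // new branch introduced by this merge
			tokenEncoderMap[model] = gpt4oTokenEncoder
		case strings.HasPrefix(model, "gpt-4"):
			tokenEncoderMap[model] = gpt4TokenEncoder
		default:
			// Assumption: the truncated else branch falls back to the default encoder.
			tokenEncoderMap[model] = defaultTokenEncoder
		}
	}
}

func main() {
	initTokenEncoders([]string{"gpt-3.5-turbo", "gpt-4o-mini", "gpt-4-turbo"})
	// Token counts differ between the gpt-4o (o200k_base) and gpt-4 (cl100k_base) encoders.
	fmt.Println(len(tokenEncoderMap["gpt-4o-mini"].Encode("Hello, world!", nil, nil)))
}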