Merge branch 'Calcium-Ion:main' into main
@@ -42,6 +42,10 @@ var defaultModelRatio = map[string]float64{
     "gpt-4o":                 2.5,  // $0.01 / 1K tokens
     "gpt-4o-2024-05-13":      2.5,  // $0.01 / 1K tokens
     "gpt-4o-2024-08-06":      1.25, // $0.01 / 1K tokens
+    "o1-preview":             7.5,
+    "o1-preview-2024-09-12":  7.5,
+    "o1-mini":                1.5,
+    "o1-mini-2024-09-12":     1.5,
     "gpt-4o-mini":            0.075,
     "gpt-4o-mini-2024-07-18": 0.075,
     "gpt-4-turbo":            5, // $0.01 / 1K tokens
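These ratios appear to be denominated in units of $0.002 per 1K tokens ($2 per 1M): 7.5 x $2/M = $15/M for o1-preview input and 1.5 x $2/M = $3/M for o1-mini, which matches OpenAI's published o1 pricing. (By the same arithmetic, the "$0.01 / 1K tokens" comments on the gpt-4o lines look stale: a ratio of 2.5 works out to $0.005 / 1K.)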
@@ -106,8 +110,10 @@ var defaultModelRatio = map[string]float64{
     "gemini-pro-vision":            1, // $0.00025 / 1k characters -> $0.001 / 1k tokens
     "gemini-1.0-pro-vision-001":    1,
     "gemini-1.0-pro-001":           1,
-    "gemini-1.5-pro-latest":        1,
+    "gemini-1.5-pro-latest":        1.75, // $3.5 / 1M tokens
+    "gemini-1.5-pro-exp-0827":      1.75, // $3.5 / 1M tokens
     "gemini-1.5-flash-latest":      1,
+    "gemini-1.5-flash-exp-0827":    1,
     "gemini-1.0-pro-latest":        1,
     "gemini-1.0-pro-vision-latest": 1,
     "gemini-ultra":                 1,
@@ -329,17 +335,6 @@ func GetCompletionRatio(name string) float64 {
     if strings.HasPrefix(name, "gpt-4o-gizmo") {
         name = "gpt-4o-gizmo-*"
     }
-    if strings.HasPrefix(name, "gpt-3.5") {
-        if name == "gpt-3.5-turbo" || strings.HasSuffix(name, "0125") {
-            // https://openai.com/blog/new-embedding-models-and-api-updates
-            // Updated GPT-3.5 Turbo model and lower pricing
-            return 3
-        }
-        if strings.HasSuffix(name, "1106") {
-            return 2
-        }
-        return 4.0 / 3.0
-    }
     if strings.HasPrefix(name, "gpt-4") && !strings.HasSuffix(name, "-all") && !strings.HasSuffix(name, "-gizmo-*") {
         if strings.HasPrefix(name, "gpt-4-turbo") || strings.HasSuffix(name, "preview") {
             return 3
@@ -352,6 +347,9 @@ func GetCompletionRatio(name string) float64 {
         }
         return 2
     }
+    if strings.HasPrefix(name, "o1-") {
+        return 4
+    }
     if name == "chatgpt-4o-latest" {
         return 3
     }
@@ -362,6 +360,17 @@ func GetCompletionRatio(name string) float64 {
     } else if strings.Contains(name, "claude-3") {
         return 5
     }
+    if strings.HasPrefix(name, "gpt-3.5") {
+        if name == "gpt-3.5-turbo" || strings.HasSuffix(name, "0125") {
+            // https://openai.com/blog/new-embedding-models-and-api-updates
+            // Updated GPT-3.5 Turbo model and lower pricing
+            return 3
+        }
+        if strings.HasSuffix(name, "1106") {
+            return 2
+        }
+        return 4.0 / 3.0
+    }
     if strings.HasPrefix(name, "mistral-") {
         return 3
     }
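Taken together, these hunks give the o1 family a flat completion ratio of 4 (matching o1-preview's $15/M input vs $60/M output) and move the gpt-3.5 branch below the gpt-4 and chatgpt-4o checks. A compressed, runnable sketch of the resulting branch order; only the branches shown in the diff, the real function handles more cases:

package main

import (
    "fmt"
    "strings"
)

// completionRatio mirrors the branch order after this change; it is a
// reduced illustration, not the full GetCompletionRatio from the repo.
func completionRatio(name string) float64 {
    if strings.HasPrefix(name, "gpt-4") {
        if strings.HasPrefix(name, "gpt-4-turbo") || strings.HasSuffix(name, "preview") {
            return 3
        }
        return 2
    }
    if strings.HasPrefix(name, "o1-") { // new: o1 output costs 4x its input
        return 4
    }
    if name == "chatgpt-4o-latest" {
        return 3
    }
    if strings.HasPrefix(name, "gpt-3.5") { // moved below the newer checks
        if name == "gpt-3.5-turbo" || strings.HasSuffix(name, "0125") {
            return 3
        }
        if strings.HasSuffix(name, "1106") {
            return 2
        }
        return 4.0 / 3.0
    }
    return 1
}

func main() {
    for _, m := range []string{"o1-mini", "gpt-4-turbo", "chatgpt-4o-latest", "gpt-3.5-turbo-0125"} {
        fmt.Printf("%-20s %.3f\n", m, completionRatio(m))
    }
}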
@@ -24,7 +24,9 @@ var GeminiModelMap = map[string]string{
     "gemini-1.5-pro-001":        "v1beta",
     "gemini-1.5-pro":            "v1beta",
     "gemini-1.5-pro-exp-0801":   "v1beta",
+    "gemini-1.5-pro-exp-0827":   "v1beta",
     "gemini-1.5-flash-latest":   "v1beta",
+    "gemini-1.5-flash-exp-0827": "v1beta",
     "gemini-1.5-flash-001":      "v1beta",
     "gemini-1.5-flash":          "v1beta",
     "gemini-ultra":              "v1beta",
@@ -20,6 +20,7 @@ import (
     "one-api/relay/constant"
     "one-api/service"
     "strconv"
+    "strings"
     "sync"
     "time"
 
@@ -81,8 +82,7 @@ func testChannel(channel *model.Channel, testModel string) (err error, openAIErr
         return fmt.Errorf("invalid api type: %d, adaptor is nil", apiType), nil
     }
 
-    request := buildTestRequest()
-    request.Model = testModel
+    request := buildTestRequest(testModel)
     meta.UpstreamModelName = testModel
     common.SysLog(fmt.Sprintf("testing channel %d with model %s", channel.Id, testModel))
 
@@ -141,17 +141,22 @@ func testChannel(channel *model.Channel, testModel string) (err error, openAIErr
     return nil, nil
 }
 
-func buildTestRequest() *dto.GeneralOpenAIRequest {
+func buildTestRequest(model string) *dto.GeneralOpenAIRequest {
     testRequest := &dto.GeneralOpenAIRequest{
         Model:  "", // this will be set later
-        MaxTokens: 1,
         Stream: false,
     }
+    if strings.HasPrefix(model, "o1-") {
+        testRequest.MaxCompletionTokens = 1
+    } else {
+        testRequest.MaxTokens = 1
+    }
     content, _ := json.Marshal("hi")
     testMessage := dto.Message{
         Role:    "user",
         Content: content,
     }
+    testRequest.Model = model
     testRequest.Messages = append(testRequest.Messages, testMessage)
     return testRequest
 }
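buildTestRequest now takes the model name so it can choose the right limit field; o1 models reject max_tokens and expect max_completion_tokens instead. A minimal sketch of the branch, with a reduced stand-in for dto.GeneralOpenAIRequest (the real helper also sets Stream and a one-token "hi" message):

package main

import (
    "fmt"
    "strings"
)

// testReq is a simplified stand-in for dto.GeneralOpenAIRequest.
type testReq struct {
    Model               string
    MaxTokens           uint
    MaxCompletionTokens uint
}

func buildTestRequest(model string) *testReq {
    r := &testReq{Model: model}
    if strings.HasPrefix(model, "o1-") {
        r.MaxCompletionTokens = 1 // o1 models reject max_tokens
    } else {
        r.MaxTokens = 1
    }
    return r
}

func main() {
    fmt.Printf("%+v\n", *buildTestRequest("o1-mini"))     // MaxCompletionTokens:1
    fmt.Printf("%+v\n", *buildTestRequest("gpt-4o-mini")) // MaxTokens:1
}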
@@ -226,26 +231,22 @@ func testAllChannels(notify bool) error {
             tok := time.Now()
             milliseconds := tok.Sub(tik).Milliseconds()
 
-            ban := false
-            if milliseconds > disableThreshold {
-                err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0))
-                ban = true
-            }
+            shouldBanChannel := false
 
             // request error disables the channel
             if openaiWithStatusErr != nil {
                 oaiErr := openaiWithStatusErr.Error
                 err = errors.New(fmt.Sprintf("type %s, httpCode %d, code %v, message %s", oaiErr.Type, openaiWithStatusErr.StatusCode, oaiErr.Code, oaiErr.Message))
-                ban = service.ShouldDisableChannel(channel.Type, openaiWithStatusErr)
+                shouldBanChannel = service.ShouldDisableChannel(channel.Type, openaiWithStatusErr)
             }
 
-            // parse *int to bool
-            if !channel.GetAutoBan() {
-                ban = false
+            if milliseconds > disableThreshold {
+                err = errors.New(fmt.Sprintf("响应时间 %.2fs 超过阈值 %.2fs", float64(milliseconds)/1000.0, float64(disableThreshold)/1000.0))
+                shouldBanChannel = true
             }
 
             // disable channel
-            if ban && isChannelEnabled {
+            if isChannelEnabled && shouldBanChannel && channel.GetAutoBan() {
                 service.DisableChannel(channel.Id, channel.Name, err.Error())
             }
 
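Net effect of this refactor: the flag is renamed to shouldBanChannel, channel.GetAutoBan() is consulted once in the final predicate instead of resetting the flag midway through, and the latency check now runs after the error check, so a channel that is both slow and failing records the timeout message as its disable reason.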
@@ -13,6 +13,7 @@ type GeneralOpenAIRequest struct {
     Stream              bool           `json:"stream,omitempty"`
     StreamOptions       *StreamOptions `json:"stream_options,omitempty"`
     MaxTokens           uint           `json:"max_tokens,omitempty"`
+    MaxCompletionTokens uint           `json:"max_completion_tokens,omitempty"`
     Temperature         float64        `json:"temperature,omitempty"`
     TopP                float64        `json:"top_p,omitempty"`
     TopK                int            `json:"top_k,omitempty"`
@@ -34,6 +34,7 @@ type OpenAITextResponseChoice struct {
 
 type OpenAITextResponse struct {
     Id      string                     `json:"id"`
+    Model   string                     `json:"model"`
     Object  string                     `json:"object"`
     Created int64                      `json:"created"`
     Choices []OpenAITextResponseChoice `json:"choices"`
@@ -4,7 +4,6 @@ import (
     "bufio"
     "encoding/json"
     "fmt"
-    "github.com/gin-gonic/gin"
     "io"
     "net/http"
     "one-api/common"
@@ -12,6 +11,8 @@ import (
     relaycommon "one-api/relay/common"
     "one-api/service"
     "strings"
+
+    "github.com/gin-gonic/gin"
 )
 
 func stopReasonClaude2OpenAI(reason string) string {
@@ -108,13 +109,10 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
         }
     }
     formatMessages := make([]dto.Message, 0)
-    var lastMessage *dto.Message
+    lastMessage := dto.Message{
+        Role: "tool",
+    }
     for i, message := range textRequest.Messages {
-        //if message.Role == "system" {
-        //    if i != 0 {
-        //        message.Role = "user"
-        //    }
-        //}
         if message.Role == "" {
             textRequest.Messages[i].Role = "user"
         }
@@ -122,7 +120,13 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
             Role:    message.Role,
             Content: message.Content,
         }
-        if lastMessage != nil && lastMessage.Role == message.Role {
+        if message.Role == "tool" {
+            fmtMessage.ToolCallId = message.ToolCallId
+        }
+        if message.Role == "assistant" && message.ToolCalls != nil {
+            fmtMessage.ToolCalls = message.ToolCalls
+        }
+        if lastMessage.Role == message.Role && lastMessage.Role != "tool" {
             if lastMessage.IsStringContent() && message.IsStringContent() {
                 content, _ := json.Marshal(strings.Trim(fmt.Sprintf("%s %s", lastMessage.StringContent(), message.StringContent()), "\""))
                 fmtMessage.Content = content
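Seeding lastMessage with a sentinel Role of "tool" (instead of the previous nil pointer) lets the merge check above run without a nil guard on the first iteration, and the added lastMessage.Role != "tool" condition keeps consecutive tool-result messages from being merged into one another.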
@@ -135,7 +139,7 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
             fmtMessage.Content = content
         }
         formatMessages = append(formatMessages, fmtMessage)
-        lastMessage = &textRequest.Messages[i]
+        lastMessage = fmtMessage
     }
 
     claudeMessages := make([]ClaudeMessage, 0)
@@ -174,7 +178,35 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
         claudeMessage := ClaudeMessage{
             Role: message.Role,
         }
-        if message.IsStringContent() {
+        if message.Role == "tool" {
+            if len(claudeMessages) > 0 && claudeMessages[len(claudeMessages)-1].Role == "user" {
+                lastMessage := claudeMessages[len(claudeMessages)-1]
+                if content, ok := lastMessage.Content.(string); ok {
+                    lastMessage.Content = []ClaudeMediaMessage{
+                        {
+                            Type: "text",
+                            Text: content,
+                        },
+                    }
+                }
+                lastMessage.Content = append(lastMessage.Content.([]ClaudeMediaMessage), ClaudeMediaMessage{
+                    Type:      "tool_result",
+                    ToolUseId: message.ToolCallId,
+                    Content:   message.StringContent(),
+                })
+                claudeMessages[len(claudeMessages)-1] = lastMessage
+                continue
+            } else {
+                claudeMessage.Role = "user"
+                claudeMessage.Content = []ClaudeMediaMessage{
+                    {
+                        Type:      "tool_result",
+                        ToolUseId: message.ToolCallId,
+                        Content:   message.StringContent(),
+                    },
+                }
+            }
+        } else if message.IsStringContent() && message.ToolCalls == nil {
             claudeMessage.Content = message.StringContent()
         } else {
             claudeMediaMessages := make([]ClaudeMediaMessage, 0)
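Background for this branch: Anthropic's Messages API has no standalone tool role; a tool result must arrive as a tool_result content block inside a user-role message. The code therefore either appends the block to the immediately preceding user message (first promoting its plain-string content to a content-block array) or wraps the result in a fresh user message.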
@@ -207,6 +239,28 @@ func RequestOpenAI2ClaudeMessage(textRequest dto.GeneralOpenAIRequest) (*ClaudeR
                 }
                 claudeMediaMessages = append(claudeMediaMessages, claudeMediaMessage)
             }
+            if message.ToolCalls != nil {
+                for _, tc := range message.ToolCalls.([]interface{}) {
+                    toolCallJSON, _ := json.Marshal(tc)
+                    var toolCall dto.ToolCall
+                    err := json.Unmarshal(toolCallJSON, &toolCall)
+                    if err != nil {
+                        common.SysError("tool call is not a dto.ToolCall: " + fmt.Sprintf("%v", tc))
+                        continue
+                    }
+                    inputObj := make(map[string]any)
+                    if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &inputObj); err != nil {
+                        common.SysError("tool call function arguments is not a map[string]any: " + fmt.Sprintf("%v", toolCall.Function.Arguments))
+                        continue
+                    }
+                    claudeMediaMessages = append(claudeMediaMessages, ClaudeMediaMessage{
+                        Type:  "tool_use",
+                        Id:    toolCall.ID,
+                        Name:  toolCall.Function.Name,
+                        Input: inputObj,
+                    })
+                }
+            }
             claudeMessage.Content = claudeMediaMessages
         }
         claudeMessages = append(claudeMessages, claudeMessage)
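The loop decodes each tool call by round-tripping it through JSON, since message.ToolCalls is stored as an untyped value. A self-contained sketch of that trick, with simplified stand-ins for dto.ToolCall (field names assumed from the diff):

package main

import (
    "encoding/json"
    "fmt"
)

// functionCall and toolCall are reduced stand-ins for dto.ToolCall.
type functionCall struct {
    Name      string `json:"name"`
    Arguments string `json:"arguments"`
}

type toolCall struct {
    ID       string       `json:"id"`
    Function functionCall `json:"function"`
}

func main() {
    // message.ToolCalls arrives as untyped JSON, so each element is
    // re-marshalled and decoded into the typed struct, as the loop above does.
    var raw interface{} = map[string]interface{}{
        "id": "call_1",
        "function": map[string]interface{}{
            "name":      "get_weather",
            "arguments": `{"city":"Berlin"}`,
        },
    }
    b, _ := json.Marshal(raw)
    var tc toolCall
    if err := json.Unmarshal(b, &tc); err != nil {
        panic(err)
    }
    // Arguments is itself a JSON string; it becomes the "input" object
    // of Claude's tool_use block.
    input := map[string]any{}
    if err := json.Unmarshal([]byte(tc.Function.Arguments), &input); err != nil {
        panic(err)
    }
    fmt.Println(tc.ID, tc.Function.Name, input["city"]) // call_1 get_weather Berlin
}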
@@ -341,6 +395,7 @@ func ResponseClaude2OpenAI(reqMode int, claudeResponse *ClaudeResponse) *dto.Ope
     if len(tools) > 0 {
         choice.Message.ToolCalls = tools
     }
+    fullTextResponse.Model = claudeResponse.Model
     choices = append(choices, choice)
     fullTextResponse.Choices = choices
     return &fullTextResponse
@@ -8,7 +8,7 @@ type CohereRequest struct {
     Message    string `json:"message"`
     Stream     bool   `json:"stream"`
     MaxTokens  int    `json:"max_tokens"`
-    SafetyMode string `json:"safety_mode"`
+    SafetyMode string `json:"safety_mode,omitempty"`
 }
 
 type ChatHistory struct {
@@ -22,7 +22,9 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
         Message:   "",
         Stream:    textRequest.Stream,
         MaxTokens: textRequest.GetMaxTokens(),
-        SafetyMode: common.CohereSafetySetting,
     }
+    if common.CohereSafetySetting != "NONE" {
+        cohereReq.SafetyMode = common.CohereSafetySetting
+    }
     if cohereReq.MaxTokens == 0 {
         cohereReq.MaxTokens = 4000
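The ",omitempty" tag and the "NONE" guard work together: when no safety setting is configured, the field is dropped from the JSON body entirely rather than sent as an empty string. A minimal sketch with a reduced stand-in for CohereRequest:

package main

import (
    "encoding/json"
    "fmt"
)

// cohereRequest is a reduced stand-in showing only the fields involved.
type cohereRequest struct {
    Message    string `json:"message"`
    SafetyMode string `json:"safety_mode,omitempty"`
}

func main() {
    b, _ := json.Marshal(cohereRequest{Message: "hi"})
    fmt.Println(string(b)) // {"message":"hi"} -- no empty safety_mode key
    b, _ = json.Marshal(cohereRequest{Message: "hi", SafetyMode: "CONTEXTUAL"})
    fmt.Println(string(b)) // {"message":"hi","safety_mode":"CONTEXTUAL"}
}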
@@ -6,7 +6,7 @@ const (
 
 var ModelList = []string{
     "gemini-1.0-pro-latest", "gemini-1.0-pro-001", "gemini-1.5-pro-latest", "gemini-1.5-flash-latest", "gemini-ultra",
-    "gemini-1.0-pro-vision-latest", "gemini-1.0-pro-vision-001",
+    "gemini-1.0-pro-vision-latest", "gemini-1.0-pro-vision-001", "gemini-1.5-pro-exp-0827", "gemini-1.5-flash-exp-0827",
 }
 
 var ChannelName = "google gemini"
@@ -17,11 +17,25 @@ type OllamaRequest struct {
     PresencePenalty  float64 `json:"presence_penalty,omitempty"`
 }
 
+type Options struct {
+    Seed             int     `json:"seed,omitempty"`
+    Temperature      float64 `json:"temperature,omitempty"`
+    TopK             int     `json:"top_k,omitempty"`
+    TopP             float64 `json:"top_p,omitempty"`
+    FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
+    PresencePenalty  float64 `json:"presence_penalty,omitempty"`
+    NumPredict       int     `json:"num_predict,omitempty"`
+    NumCtx           int     `json:"num_ctx,omitempty"`
+}
+
 type OllamaEmbeddingRequest struct {
     Model   string   `json:"model,omitempty"`
-    Prompt  any      `json:"prompt,omitempty"`
+    Input   []string `json:"input"`
+    Options *Options `json:"options,omitempty"`
 }
 
 type OllamaEmbeddingResponse struct {
+    Error     string    `json:"error,omitempty"`
+    Model     string    `json:"model"`
     Embedding []float64 `json:"embedding,omitempty"`
 }
@@ -9,7 +9,6 @@ import (
     "net/http"
     "one-api/dto"
     "one-api/service"
-    "strings"
 )
 
 func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
@@ -46,7 +45,14 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
 func requestOpenAI2Embeddings(request dto.GeneralOpenAIRequest) *OllamaEmbeddingRequest {
     return &OllamaEmbeddingRequest{
         Model: request.Model,
-        Prompt: strings.Join(request.ParseInput(), " "),
+        Input: request.ParseInput(),
+        Options: &Options{
+            Seed:             int(request.Seed),
+            Temperature:      request.Temperature,
+            TopP:             request.TopP,
+            FrequencyPenalty: request.FrequencyPenalty,
+            PresencePenalty:  request.PresencePenalty,
+        },
     }
 }
 
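The embedding request switches from a single joined "prompt" string to an "input" array, presumably targeting Ollama's newer embed endpoint, which accepts a list of inputs. A reduced before/after sketch of the JSON payload (the model name is illustrative):

package main

import (
    "encoding/json"
    "fmt"
)

// oldReq and newReq are reduced stand-ins for the embedding request
// before and after this change.
type oldReq struct {
    Model  string `json:"model,omitempty"`
    Prompt any    `json:"prompt,omitempty"`
}

type newReq struct {
    Model string   `json:"model,omitempty"`
    Input []string `json:"input"`
}

func main() {
    ob, _ := json.Marshal(oldReq{Model: "nomic-embed-text", Prompt: "hello world"})
    nb, _ := json.Marshal(newReq{Model: "nomic-embed-text", Input: []string{"hello", "world"}})
    fmt.Println(string(ob)) // {"model":"nomic-embed-text","prompt":"hello world"}
    fmt.Println(string(nb)) // {"model":"nomic-embed-text","input":["hello","world"]}
}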
@@ -64,6 +70,9 @@ func ollamaEmbeddingHandler(c *gin.Context, resp *http.Response, promptTokens in
     if err != nil {
         return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
     }
+    if ollamaEmbeddingResponse.Error != "" {
+        return service.OpenAIErrorWrapper(err, "ollama_error", resp.StatusCode), nil
+    }
     data := make([]dto.OpenAIEmbeddingResponseItem, 0, 1)
     data = append(data, dto.OpenAIEmbeddingResponseItem{
         Embedding: ollamaEmbeddingResponse.Embedding,
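Note that the new branch passes err to OpenAIErrorWrapper even though err is necessarily nil at that point (the non-nil case returned just above); presumably the intent was to wrap an error built from ollamaEmbeddingResponse.Error.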
@@ -78,6 +78,12 @@ func (a *Adaptor) ConvertRequest(c *gin.Context, info *relaycommon.RelayInfo, re
     if info.ChannelType != common.ChannelTypeOpenAI {
         request.StreamOptions = nil
     }
+    if strings.HasPrefix(request.Model, "o1-") {
+        if request.MaxCompletionTokens == 0 && request.MaxTokens != 0 {
+            request.MaxCompletionTokens = request.MaxTokens
+            request.MaxTokens = 0
+        }
+    }
     return request, nil
 }
 
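The adaptor now rewrites the token limit for o1 models, which accept max_completion_tokens but reject max_tokens. A self-contained sketch of the rewrite, with a reduced stand-in for the request struct (field names taken from the dto hunk above):

package main

import (
    "encoding/json"
    "fmt"
    "strings"
)

// request is a minimal stand-in for dto.GeneralOpenAIRequest, cut down
// to the two fields this change touches.
type request struct {
    Model               string `json:"model"`
    MaxTokens           uint   `json:"max_tokens,omitempty"`
    MaxCompletionTokens uint   `json:"max_completion_tokens,omitempty"`
}

func main() {
    r := request{Model: "o1-mini", MaxTokens: 256}
    // Same rewrite as the adaptor: move the limit over for o1-* models.
    if strings.HasPrefix(r.Model, "o1-") && r.MaxCompletionTokens == 0 && r.MaxTokens != 0 {
        r.MaxCompletionTokens = r.MaxTokens
        r.MaxTokens = 0
    }
    b, _ := json.Marshal(r)
    fmt.Println(string(b)) // {"model":"o1-mini","max_completion_tokens":256}
}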
@@ -11,6 +11,8 @@ var ModelList = []string{
     "chatgpt-4o-latest",
     "gpt-4o", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06",
     "gpt-4o-mini", "gpt-4o-mini-2024-07-18",
+    "o1-preview", "o1-preview-2024-09-12",
+    "o1-mini", "o1-mini-2024-09-12",
     "text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large",
     "text-curie-001", "text-babbage-001", "text-ada-001",
     "text-moderation-latest", "text-moderation-stable",
@@ -59,12 +59,10 @@ const Footer = () => {
     <Layout>
       <Layout.Content style={{ textAlign: 'center' }}>
         {footer ? (
-          <Tooltip content={defaultFooter}>
           <div
             className='custom-footer'
             dangerouslySetInnerHTML={{ __html: footer }}
           ></div>
-          </Tooltip>
         ) : (
           defaultFooter
         )}