Compare commits

...

7 Commits

Author SHA1 Message Date
CalciumIon
4b48e490fa feat: 添加Mistral渠道 (close #546) 2024-11-05 17:11:33 +08:00
CalciumIon
3e2ae29ba0 fix: 修复聊天环境变量替换不完全 (close #542) 2024-11-05 16:02:10 +08:00
CalciumIon
fe0ed128c6 chore: update model ratio 2024-11-05 15:58:22 +08:00
Calcium-Ion
3785e9d754 Merge pull request #549 from HynoR/main
chore: 更新最新haiku模型倍率
2024-11-05 14:58:55 +08:00
HynoR
902a66b60f Sync Latest Claude Model 2024-11-05 10:17:11 +08:00
Calcium-Ion
aaf3f09eec Merge pull request #548 from utopeadia/main
ollama /api/embeddings is deprecated, use /api/embed.
2024-11-04 22:21:52 +08:00
HowieWood
e523555844 /api/embeddings is deprecated, use /api/embed.
/api/embeddings is deprecated, use /api/embed.
2024-11-04 22:03:41 +08:00
13 changed files with 163 additions and 24 deletions

View File

@@ -222,6 +222,7 @@ const (
ChannelCloudflare = 39
ChannelTypeSiliconFlow = 40
ChannelTypeVertexAi = 41
ChannelTypeMistral = 42
ChannelTypeDummy // this one is only for count, do not add any channel after this
@@ -270,4 +271,5 @@ var ChannelBaseURLs = []string{
"https://api.cloudflare.com", //39
"https://api.siliconflow.cn", //40
"", //41
"https://api.mistral.ai", //42
}

View File

@@ -32,25 +32,27 @@ var defaultModelRatio = map[string]float64{
"gpt-4-0613": 15,
"gpt-4-32k": 30,
//"gpt-4-32k-0314": 30, //deprecated
"gpt-4-32k-0613": 30,
"gpt-4-1106-preview": 5, // $0.01 / 1K tokens
"gpt-4-0125-preview": 5, // $0.01 / 1K tokens
"gpt-4-turbo-preview": 5, // $0.01 / 1K tokens
"gpt-4-vision-preview": 5, // $0.01 / 1K tokens
"gpt-4-1106-vision-preview": 5, // $0.01 / 1K tokens
"chatgpt-4o-latest": 2.5, // $0.01 / 1K tokens
"gpt-4o": 2.5, // $0.01 / 1K tokens
"gpt-4o-2024-05-13": 2.5, // $0.01 / 1K tokens
"gpt-4o-2024-08-06": 1.25, // $0.01 / 1K tokens
"o1-preview": 7.5,
"o1-preview-2024-09-12": 7.5,
"o1-mini": 1.5,
"o1-mini-2024-09-12": 1.5,
"gpt-4o-mini": 0.075,
"gpt-4o-mini-2024-07-18": 0.075,
"gpt-4-turbo": 5, // $0.01 / 1K tokens
"gpt-4-turbo-2024-04-09": 5, // $0.01 / 1K tokens
"gpt-3.5-turbo": 0.25, // $0.0015 / 1K tokens
"gpt-4-32k-0613": 30,
"gpt-4-1106-preview": 5, // $0.01 / 1K tokens
"gpt-4-0125-preview": 5, // $0.01 / 1K tokens
"gpt-4-turbo-preview": 5, // $0.01 / 1K tokens
"gpt-4-vision-preview": 5, // $0.01 / 1K tokens
"gpt-4-1106-vision-preview": 5, // $0.01 / 1K tokens
"chatgpt-4o-latest": 2.5, // $0.01 / 1K tokens
"gpt-4o": 1.25, // $0.01 / 1K tokens
"gpt-4o-audio-preview": 1.25, // $0.0015 / 1K tokens
"gpt-4o-audio-preview-2024-10-01": 1.25, // $0.0015 / 1K tokens
"gpt-4o-2024-08-06": 1.25, // $0.01 / 1K tokens
"gpt-4o-2024-05-13": 2.5,
"gpt-4o-realtime-preview": 2.5,
"o1-preview": 7.5,
"o1-preview-2024-09-12": 7.5,
"o1-mini": 1.5,
"o1-mini-2024-09-12": 1.5,
"gpt-4o-mini": 0.075,
"gpt-4o-mini-2024-07-18": 0.075,
"gpt-4-turbo": 5, // $0.01 / 1K tokens
"gpt-4-turbo-2024-04-09": 5, // $0.01 / 1K tokens
//"gpt-3.5-turbo-0301": 0.75, //deprecated
"gpt-3.5-turbo-0613": 0.75,
"gpt-3.5-turbo-16k": 1.5, // $0.003 / 1K tokens
@@ -86,6 +88,7 @@ var defaultModelRatio = map[string]float64{
"claude-2.0": 4, // $8 / 1M tokens
"claude-2.1": 4, // $8 / 1M tokens
"claude-3-haiku-20240307": 0.125, // $0.25 / 1M tokens
"claude-3-5-haiku-20241022": 0.5, // $1 / 1M tokens
"claude-3-sonnet-20240229": 1.5, // $3 / 1M tokens
"claude-3-5-sonnet-20240620": 1.5,
"claude-3-5-sonnet-20241022": 1.5,

View File

@@ -8,6 +8,7 @@ var ModelList = []string{
"claude-3-sonnet-20240229",
"claude-3-opus-20240229",
"claude-3-haiku-20240307",
"claude-3-5-haiku-20241022",
"claude-3-5-sonnet-20240620",
"claude-3-5-sonnet-20241022",
}

View File

@@ -0,0 +1,72 @@
package mistral
import (
"errors"
"github.com/gin-gonic/gin"
"io"
"net/http"
"one-api/dto"
"one-api/relay/channel"
"one-api/relay/channel/openai"
relaycommon "one-api/relay/common"
)
// Adaptor implements the relay channel adaptor interface for the
// Mistral AI API, which is wire-compatible with the OpenAI chat API.
type Adaptor struct {
}
// ConvertAudioRequest is not supported by the Mistral channel; it always
// returns a "not implemented" error.
func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
	//TODO implement me
	return nil, errors.New("not implemented")
}
// ConvertImageRequest is not supported by the Mistral channel; it always
// returns a "not implemented" error.
func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
	//TODO implement me
	return nil, errors.New("not implemented")
}
// Init performs no per-request initialisation for the Mistral channel.
func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
}
// GetRequestURL builds the full upstream URL from the channel base URL and
// the incoming request path (Mistral follows the OpenAI URL layout).
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
	return relaycommon.GetFullRequestURL(info.BaseUrl, info.RequestURLPath, info.ChannelType), nil
}
// SetupRequestHeader copies the common relay headers onto the upstream
// request and then adds the channel API key as a Bearer token.
func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
	channel.SetupApiRequestHeader(info, c, req)
	req.Header.Set("Authorization", "Bearer "+info.ApiKey)
	return nil
}
// ConvertRequest translates a general OpenAI-style chat request into the
// payload accepted by Mistral. A nil request is rejected with an error.
func (a *Adaptor) ConvertRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	converted := requestOpenAI2Mistral(*request)
	//common.LogJson(c, "body", converted)
	return converted, nil
}
// ConvertRerankRequest is not supported by the Mistral channel. It returns
// an explicit error instead of a silent nil payload so the caller fails
// fast, consistent with the other unimplemented conversions above.
func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) {
	return nil, errors.New("not implemented")
}
// DoRequest forwards the converted request body upstream via the shared
// channel HTTP helper.
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error) {
	return channel.DoApiRequest(a, c, info, requestBody)
}
// DoResponse relays the upstream response back to the client, delegating to
// the generic OpenAI handlers since Mistral speaks the OpenAI wire format.
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
	if info.IsStream {
		err, usage = openai.OaiStreamHandler(c, resp, info)
		return
	}
	err, usage = openai.OpenaiHandler(c, resp, info.PromptTokens, info.UpstreamModelName)
	return
}
// GetModelList returns the models exposed by the Mistral channel.
func (a *Adaptor) GetModelList() []string {
	return ModelList
}
// GetChannelName returns the identifier used for this channel in logs
// and configuration.
func (a *Adaptor) GetChannelName() string {
	return ChannelName
}

View File

@@ -0,0 +1,12 @@
package mistral
var (
	// ModelList enumerates the Mistral models served by this channel.
	ModelList = []string{
		"open-mistral-7b",
		"open-mixtral-8x7b",
		"mistral-small-latest",
		"mistral-medium-latest",
		"mistral-large-latest",
		"mistral-embed",
	}

	// ChannelName identifies this adaptor.
	ChannelName = "mistral"
)

View File

@@ -0,0 +1,40 @@
package mistral
import (
"encoding/json"
"one-api/dto"
)
// requestOpenAI2Mistral converts a general OpenAI-style chat request into the
// subset of fields Mistral accepts. Non-string (multimodal) message content is
// flattened: image parts keep only their URL value, and the resulting part
// list is re-serialised as the message content.
func requestOpenAI2Mistral(request dto.GeneralOpenAIRequest) *dto.GeneralOpenAIRequest {
	converted := make([]dto.Message, 0, len(request.Messages))
	for _, msg := range request.Messages {
		if !msg.IsStringContent() {
			parts := msg.ParseContent()
			for idx := range parts {
				if parts[idx].Type == dto.ContentTypeImageURL {
					// Replace the image_url object with the bare URL string.
					img := parts[idx].ImageUrl.(dto.MessageImageUrl)
					parts[idx].ImageUrl = img.Url
				}
			}
			raw, _ := json.Marshal(parts)
			msg.Content = raw
		}
		converted = append(converted, dto.Message{
			Role:       msg.Role,
			Content:    msg.Content,
			ToolCalls:  msg.ToolCalls,
			ToolCallId: msg.ToolCallId,
		})
	}
	return &dto.GeneralOpenAIRequest{
		Model:       request.Model,
		Stream:      request.Stream,
		Messages:    converted,
		Temperature: request.Temperature,
		TopP:        request.TopP,
		MaxTokens:   request.MaxTokens,
		Tools:       request.Tools,
		ToolChoice:  request.ToolChoice,
	}
}

View File

@@ -31,7 +31,7 @@ func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
switch info.RelayMode {
case relayconstant.RelayModeEmbeddings:
return info.BaseUrl + "/api/embeddings", nil
return info.BaseUrl + "/api/embed", nil
default:
return relaycommon.GetFullRequestURL(info.BaseUrl, info.RequestURLPath, info.ChannelType), nil
}

View File

@@ -13,6 +13,8 @@ var ModelList = []string{
"gpt-4o-mini", "gpt-4o-mini-2024-07-18",
"o1-preview", "o1-preview-2024-09-12",
"o1-mini", "o1-mini-2024-09-12",
"gpt-4o-audio-preview", "gpt-4o-audio-preview-2024-10-01",
"gpt-4o-realtime-preview", "gpt-4o-realtime-preview-2024-10-01",
"text-embedding-ada-002", "text-embedding-3-small", "text-embedding-3-large",
"text-curie-001", "text-babbage-001", "text-ada-001",
"text-moderation-latest", "text-moderation-stable",

View File

@@ -25,6 +25,7 @@ const (
APITypeCloudflare
APITypeSiliconFlow
APITypeVertexAi
APITypeMistral
APITypeDummy // this one is only for count, do not add any channel after this
)
@@ -72,6 +73,8 @@ func ChannelType2APIType(channelType int) (int, bool) {
apiType = APITypeSiliconFlow
case common.ChannelTypeVertexAi:
apiType = APITypeVertexAi
case common.ChannelTypeMistral:
apiType = APITypeMistral
}
if apiType == -1 {
return APITypeOpenAI, false

View File

@@ -12,6 +12,7 @@ import (
"one-api/relay/channel/dify"
"one-api/relay/channel/gemini"
"one-api/relay/channel/jina"
"one-api/relay/channel/mistral"
"one-api/relay/channel/ollama"
"one-api/relay/channel/openai"
"one-api/relay/channel/palm"
@@ -68,6 +69,8 @@ func GetAdaptor(apiType int) channel.Adaptor {
return &siliconflow.Adaptor{}
case constant.APITypeVertexAi:
return &vertex.Adaptor{}
case constant.APITypeMistral:
return &mistral.Adaptor{}
}
return nil
}

View File

@@ -393,8 +393,8 @@ const TokensTable = () => {
serverAddress = window.location.origin;
}
let encodedServerAddress = encodeURIComponent(serverAddress);
url = url.replace('{address}', encodedServerAddress);
url = url.replace('{key}', 'sk-' + record.key);
url = url.replaceAll('{address}', encodedServerAddress);
url = url.replaceAll('{key}', 'sk-' + record.key);
// console.log(url);
// const chatLink = localStorage.getItem('chat_link');
// const mjLink = localStorage.getItem('chat_link2');

View File

@@ -109,6 +109,7 @@ export const CHANNEL_OPTIONS = [
{ key: 37, text: 'Dify', value: 37, color: 'teal', label: 'Dify' },
{ key: 38, text: 'Jina', value: 38, color: 'blue', label: 'Jina' },
{ key: 40, text: 'SiliconCloud', value: 40, color: 'purple', label: 'SiliconCloud' },
{ key: 42, text: 'Mistral AI', value: 42, color: 'blue', label: 'Mistral AI' },
{ key: 8, text: '自定义渠道', value: 8, color: 'pink', label: '自定义渠道' },
{
key: 22,

View File

@@ -20,8 +20,8 @@ const ChatPage = () => {
if (Array.isArray(chats) && chats.length > 0) {
for (let k in chats[id]) {
link = chats[id][k];
link = link.replace('{address}', encodeURIComponent(serverAddress));
link = link.replace('{key}', 'sk-' + key);
link = link.replaceAll('{address}', encodeURIComponent(serverAddress));
link = link.replaceAll('{key}', 'sk-' + key);
}
}
}