Mirror of https://github.com/linux-do/new-api.git
feat: support Zhipu GLM-4V (智谱 GLM-4V)
Commit: b4645d1019 (parent: a3687b72f8)
@@ -199,6 +199,8 @@ const (
 	ChannelTypeFastGPT  = 22
 	ChannelTypeTencent  = 23
 	ChannelTypeGemini   = 24
+	ChannelTypeMoonshot = 25
+	ChannelTypeZhipu_v4 = 26
 )

 var ChannelBaseURLs = []string{
@@ -227,4 +229,6 @@ var ChannelBaseURLs = []string{
 	"https://fastgpt.run/api/openapi",   // 22
 	"https://hunyuan.cloud.tencent.com", // 23
 	"",                                  // 24
+	"https://api.moonshot.cn",           // 25
+	"https://open.bigmodel.cn",          // 26
 }
@@ -74,6 +74,9 @@ var ModelRatio = map[string]float64{
 	"chatglm_pro":       0.7143, // ¥0.01 / 1k tokens
 	"chatglm_std":       0.3572, // ¥0.005 / 1k tokens
 	"chatglm_lite":      0.1429, // ¥0.002 / 1k tokens
+	"glm-4":             7.143,  // ¥0.1 / 1k tokens
+	"glm-4v":            7.143,  // ¥0.1 / 1k tokens
+	"glm-3-turbo":       0.3572,
 	"qwen-turbo":        0.8572, // ¥0.012 / 1k tokens
 	"qwen-plus":         10,     // ¥0.14 / 1k tokens
 	"text-embedding-v1": 0.05,   // ¥0.0007 / 1k tokens
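For reference, the new ratios follow the convention of the surrounding entries: a ratio of 1 corresponds to $0.002 per 1k tokens, and the ¥ prices in the comments convert at roughly 7 CNY per USD (an assumed rate inferred from the existing chatglm entries, not stated in this diff). A quick sanity check in Go:

package main

import "fmt"

func main() {
	// Assumptions (not stated in this diff): ratio 1 == $0.002 per 1k tokens,
	// and the ¥ prices convert at about 7 CNY/USD.
	const usdPerRatioUnit = 0.002
	const cnyPerUsd = 7.0

	for _, p := range []struct {
		model    string
		cnyPer1k float64
	}{
		{"glm-4 / glm-4v", 0.1},
		{"chatglm_pro", 0.01},
	} {
		fmt.Printf("%s: ratio ≈ %.4f\n", p.model, p.cnyPer1k/cnyPerUsd/usdPerRatioUnit)
		// glm-4 / glm-4v: ratio ≈ 7.1429 -> table value 7.143
		// chatglm_pro:    ratio ≈ 0.7143 -> table value 0.7143
	}
}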
@@ -4,6 +4,10 @@ import (
 	"fmt"
 	"github.com/gin-gonic/gin"
 	"one-api/dto"
+	"one-api/relay"
+	"one-api/relay/channel/ai360"
+	"one-api/relay/channel/moonshot"
+	"one-api/relay/constant"
 )

 // https://platform.openai.com/docs/api-reference/models/list
@@ -53,574 +57,46 @@ func init() {
 		IsBlocking: false,
 	})
 	// https://platform.openai.com/docs/models/model-endpoint-compatibility
-	openAIModels = []OpenAIModels{
-		{Id: "midjourney", Object: "model", Created: 1677649963, OwnedBy: "Midjourney", Permission: permission, Root: "midjourney", Parent: nil},
-		{Id: "dall-e-2", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "dall-e-2", Parent: nil},
-		{Id: "dall-e-3", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "dall-e-3", Parent: nil},
-		{Id: "whisper-1", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "whisper-1", Parent: nil},
-		{Id: "tts-1", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "tts-1", Parent: nil},
-		{Id: "tts-1-1106", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "tts-1-1106", Parent: nil},
-		{Id: "tts-1-hd", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "tts-1-hd", Parent: nil},
-		{Id: "tts-1-hd-1106", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "tts-1-hd-1106", Parent: nil},
-		{Id: "gpt-3.5-turbo", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo", Parent: nil},
-		{Id: "gpt-3.5-turbo-0301", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo-0301", Parent: nil},
-		{Id: "gpt-3.5-turbo-0613", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo-0613", Parent: nil},
-		{Id: "gpt-3.5-turbo-16k", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo-16k", Parent: nil},
-		{Id: "gpt-3.5-turbo-16k-0613", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo-16k-0613", Parent: nil},
-		{Id: "gpt-3.5-turbo-1106", Object: "model", Created: 1699593571, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo-1106", Parent: nil},
-		{Id: "gpt-3.5-turbo-0125", Object: "model", Created: 1706232090, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo-0125", Parent: nil},
-		{Id: "gpt-3.5-turbo-instruct", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-3.5-turbo-instruct", Parent: nil},
-		{Id: "gpt-4", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-4", Parent: nil},
-		{Id: "gpt-4-0314", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-4-0314", Parent: nil},
-		{Id: "gpt-4-0613", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-4-0613", Parent: nil},
-		{Id: "gpt-4-32k", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-4-32k", Parent: nil},
-		{Id: "gpt-4-32k-0314", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-4-32k-0314", Parent: nil},
-		{Id: "gpt-4-32k-0613", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "gpt-4-32k-0613", Parent: nil},
-		{Id: "gpt-4-1106-preview", Object: "model", Created: 1699593571, OwnedBy: "openai", Permission: permission, Root: "gpt-4-1106-preview", Parent: nil},
-		{Id: "gpt-4-0125-preview", Object: "model", Created: 1699593571, OwnedBy: "openai", Permission: permission, Root: "gpt-4-0125-preview", Parent: nil},
-		{Id: "gpt-4-turbo-preview", Object: "model", Created: 1699593571, OwnedBy: "openai", Permission: permission, Root: "gpt-4-turbo-preview", Parent: nil},
-		{Id: "gpt-4-vision-preview", Object: "model", Created: 1699593571, OwnedBy: "openai", Permission: permission, Root: "gpt-4-vision-preview", Parent: nil},
-		{Id: "gpt-4-1106-vision-preview", Object: "model", Created: 1699593571, OwnedBy: "openai", Permission: permission, Root: "gpt-4-1106-vision-preview", Parent: nil},
-		{Id: "text-embedding-3-small", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-embedding-ada-002", Parent: nil},
-		{Id: "text-embedding-3-large", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-embedding-ada-002", Parent: nil},
-		{Id: "text-embedding-ada-002", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-embedding-ada-002", Parent: nil},
-		{Id: "text-davinci-003", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-davinci-003", Parent: nil},
-		{Id: "text-davinci-002", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-davinci-002", Parent: nil},
-		{Id: "text-curie-001", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-curie-001", Parent: nil},
-		{Id: "text-babbage-001", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-babbage-001", Parent: nil},
-		{Id: "text-ada-001", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-ada-001", Parent: nil},
-		{Id: "text-moderation-latest", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-moderation-latest", Parent: nil},
-		{Id: "text-moderation-stable", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-moderation-stable", Parent: nil},
-		{Id: "text-davinci-edit-001", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "text-davinci-edit-001", Parent: nil},
-		{Id: "code-davinci-edit-001", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "code-davinci-edit-001", Parent: nil},
-		{Id: "babbage-002", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "babbage-002", Parent: nil},
-		{Id: "davinci-002", Object: "model", Created: 1677649963, OwnedBy: "openai", Permission: permission, Root: "davinci-002", Parent: nil},
-		{Id: "claude-instant-1", Object: "model", Created: 1677649963, OwnedBy: "anthropic", Permission: permission, Root: "claude-instant-1", Parent: nil},
-		{Id: "claude-2", Object: "model", Created: 1677649963, OwnedBy: "anthropic", Permission: permission, Root: "claude-2", Parent: nil},
-		{Id: "ERNIE-Bot", Object: "model", Created: 1677649963, OwnedBy: "baidu", Permission: permission, Root: "ERNIE-Bot", Parent: nil},
-		{Id: "ERNIE-Bot-turbo", Object: "model", Created: 1677649963, OwnedBy: "baidu", Permission: permission, Root: "ERNIE-Bot-turbo", Parent: nil},
-		{Id: "ERNIE-Bot-4", Object: "model", Created: 1677649963, OwnedBy: "baidu", Permission: permission, Root: "ERNIE-Bot-4", Parent: nil},
-		{Id: "Embedding-V1", Object: "model", Created: 1677649963, OwnedBy: "baidu", Permission: permission, Root: "Embedding-V1", Parent: nil},
-		{Id: "PaLM-2", Object: "model", Created: 1677649963, OwnedBy: "google", Permission: permission, Root: "PaLM-2", Parent: nil},
-		{Id: "gemini-pro", Object: "model", Created: 1677649963, OwnedBy: "google", Permission: permission, Root: "gemini-pro", Parent: nil},
-		{Id: "gemini-pro-vision", Object: "model", Created: 1677649963, OwnedBy: "google", Permission: permission, Root: "gemini-pro-vision", Parent: nil},
-		{Id: "chatglm_turbo", Object: "model", Created: 1677649963, OwnedBy: "zhipu", Permission: permission, Root: "chatglm_turbo", Parent: nil},
-		{Id: "chatglm_pro", Object: "model", Created: 1677649963, OwnedBy: "zhipu", Permission: permission, Root: "chatglm_pro", Parent: nil},
-		{Id: "chatglm_std", Object: "model", Created: 1677649963, OwnedBy: "zhipu", Permission: permission, Root: "chatglm_std", Parent: nil},
-		{Id: "chatglm_lite", Object: "model", Created: 1677649963, OwnedBy: "zhipu", Permission: permission, Root: "chatglm_lite", Parent: nil},
-		{Id: "qwen-turbo", Object: "model", Created: 1677649963, OwnedBy: "ali", Permission: permission, Root: "qwen-turbo", Parent: nil},
-		{Id: "qwen-plus", Object: "model", Created: 1677649963, OwnedBy: "ali", Permission: permission, Root: "qwen-plus", Parent: nil},
-		{Id: "text-embedding-v1", Object: "model", Created: 1677649963, OwnedBy: "ali", Permission: permission, Root: "text-embedding-v1", Parent: nil},
-		{Id: "SparkDesk", Object: "model", Created: 1677649963, OwnedBy: "xunfei", Permission: permission, Root: "SparkDesk", Parent: nil},
-		{Id: "360GPT_S2_V9", Object: "model", Created: 1677649963, OwnedBy: "360", Permission: permission, Root: "360GPT_S2_V9", Parent: nil},
-		{Id: "embedding-bert-512-v1", Object: "model", Created: 1677649963, OwnedBy: "360", Permission: permission, Root: "embedding-bert-512-v1", Parent: nil},
-		{Id: "embedding_s1_v1", Object: "model", Created: 1677649963, OwnedBy: "360", Permission: permission, Root: "embedding_s1_v1", Parent: nil},
-		{Id: "semantic_similarity_s1_v1", Object: "model", Created: 1677649963, OwnedBy: "360", Permission: permission, Root: "semantic_similarity_s1_v1", Parent: nil},
-		{Id: "hunyuan", Object: "model", Created: 1677649963, OwnedBy: "tencent", Permission: permission, Root: "hunyuan", Parent: nil},
-	}
+	for i := 0; i < constant.APITypeDummy; i++ {
+		if i == constant.APITypeAIProxyLibrary {
+			continue
+		}
+		adaptor := relay.GetAdaptor(i)
+		channelName := adaptor.GetChannelName()
+		modelNames := adaptor.GetModelList()
+		for _, modelName := range modelNames {
+			openAIModels = append(openAIModels, OpenAIModels{
+				Id:         modelName,
+				Object:     "model",
+				Created:    1626777600,
+				OwnedBy:    channelName,
+				Permission: permission,
+				Root:       modelName,
+				Parent:     nil,
+			})
+		}
+	}
+	for _, modelName := range ai360.ModelList {
+		openAIModels = append(openAIModels, OpenAIModels{
+			Id:         modelName,
+			Object:     "model",
+			Created:    1626777600,
+			OwnedBy:    "360",
+			Permission: permission,
+			Root:       modelName,
+			Parent:     nil,
+		})
+	}
+	for _, modelName := range moonshot.ModelList {
+		openAIModels = append(openAIModels, OpenAIModels{
+			Id:         modelName,
+			Object:     "model",
+			Created:    1626777600,
+			OwnedBy:    "moonshot",
+			Permission: permission,
+			Root:       modelName,
+			Parent:     nil,
+		})
+	}
 	openAIModelsMap = make(map[string]OpenAIModels)
 	for _, model := range openAIModels {
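The rewritten init() stops hardcoding the model catalogue: it walks every API type (skipping only APITypeAIProxyLibrary) and asks each channel adaptor for its model ids and channel name, then appends the 360 and Moonshot lists separately since those channels ride on the OpenAI-compatible adaptor. The adaptor interface itself is not part of this diff; the following is only a sketch of what the loop appears to rely on, with the method set inferred from the adaptors touched later in this commit (the real definition in one-api/relay/channel may differ):

package channel

import (
	"io"
	"net/http"

	"github.com/gin-gonic/gin"

	"one-api/dto"
	relaycommon "one-api/relay/common"
)

// Sketch of the adaptor contract used by init(); inferred, not copied from the repo.
type Adaptor interface {
	Init(info *relaycommon.RelayInfo, request dto.GeneralOpenAIRequest)
	GetRequestURL(info *relaycommon.RelayInfo) (string, error)
	SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error
	ConvertRequest(c *gin.Context, relayMode int, request *dto.GeneralOpenAIRequest) (any, error)
	DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error)
	DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.Usage, *dto.OpenAIErrorWithStatusCode)
	GetModelList() []string // init() registers these ids under the channel
	GetChannelName() string // init() uses this as OwnedBy
}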
@@ -14,6 +14,7 @@ type GeneralOpenAIRequest struct {
 	MaxTokens   uint    `json:"max_tokens,omitempty"`
 	Temperature float64 `json:"temperature,omitempty"`
 	TopP        float64 `json:"top_p,omitempty"`
+	Stop        any     `json:"stop,omitempty"`
 	N           int     `json:"n,omitempty"`
 	Input       any     `json:"input,omitempty"`
 	Instruction string  `json:"instruction,omitempty"`
@@ -36,8 +36,11 @@ type OpenAIEmbeddingResponse struct {
 type ChatCompletionsStreamResponseChoice struct {
 	Delta struct {
 		Content   string `json:"content"`
+		Role      string `json:"role,omitempty"`
+		ToolCalls any    `json:"tool_calls,omitempty"`
 	} `json:"delta"`
 	FinishReason *string `json:"finish_reason,omitempty"`
+	Index        int     `json:"index,omitempty"`
 }

 type ChatCompletionsStreamResponse struct {
relay/channel/ai360/constants.go (new file, 8 lines)

package ai360

var ModelList = []string{
	"360GPT_S2_V9",
	"embedding-bert-512-v1",
	"embedding_s1_v1",
	"semantic_similarity_s1_v1",
}
@@ -9,15 +9,19 @@ import (
 	"one-api/common"
 	"one-api/dto"
 	"one-api/relay/channel"
+	"one-api/relay/channel/ai360"
+	"one-api/relay/channel/moonshot"
 	relaycommon "one-api/relay/common"
 	"one-api/service"
 	"strings"
 )

 type Adaptor struct {
+	ChannelType int
 }

 func (a *Adaptor) Init(info *relaycommon.RelayInfo, request dto.GeneralOpenAIRequest) {
+	a.ChannelType = info.ChannelType
 }

 func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
@@ -76,7 +80,14 @@ func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycom
 }

 func (a *Adaptor) GetModelList() []string {
+	switch a.ChannelType {
+	case common.ChannelType360:
+		return ai360.ModelList
+	case common.ChannelTypeMoonshot:
+		return moonshot.ModelList
+	default:
 		return ModelList
+	}
 }

 func (a *Adaptor) GetChannelName() string {
relay/channel/zhipu_v4/adaptor.go (new file, 57 lines)

package zhipu_v4

import (
	"errors"
	"fmt"
	"github.com/gin-gonic/gin"
	"io"
	"net/http"
	"one-api/dto"
	"one-api/relay/channel"
	relaycommon "one-api/relay/common"
)

type Adaptor struct {
}

func (a *Adaptor) Init(info *relaycommon.RelayInfo, request dto.GeneralOpenAIRequest) {
}

func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
	return fmt.Sprintf("%s/api/paas/v4/chat/completions", info.BaseUrl), nil
}

func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Request, info *relaycommon.RelayInfo) error {
	channel.SetupApiRequestHeader(info, c, req)
	token := getZhipuToken(info.ApiKey)
	req.Header.Set("Authorization", token)
	return nil
}

func (a *Adaptor) ConvertRequest(c *gin.Context, relayMode int, request *dto.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	return requestOpenAI2Zhipu(*request), nil
}

func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (*http.Response, error) {
	return channel.DoApiRequest(a, c, info, requestBody)
}

func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage *dto.Usage, err *dto.OpenAIErrorWithStatusCode) {
	if info.IsStream {
		err, usage = zhipuStreamHandler(c, resp)
	} else {
		err, usage = zhipuHandler(c, resp)
	}
	return
}

func (a *Adaptor) GetModelList() []string {
	return ModelList
}

func (a *Adaptor) GetChannelName() string {
	return ChannelName
}
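Putting the new channel together: ChannelBaseURLs[26] registered earlier is https://open.bigmodel.cn, GetRequestURL appends the v4 chat-completions path, and SetupRequestHeader sends the JWT minted by getZhipuToken (defined in relay-zhipu_v4.go below) in the Authorization header. A minimal sketch of the resulting request target; the relayInfo struct here is a hypothetical stand-in for relaycommon.RelayInfo, modelling only the two fields the adaptor reads:

package main

import "fmt"

// Hypothetical stand-in for relaycommon.RelayInfo (full type not shown in this commit).
type relayInfo struct {
	BaseUrl string
	ApiKey  string
}

func main() {
	info := relayInfo{
		BaseUrl: "https://open.bigmodel.cn", // ChannelBaseURLs[26] from this commit
		ApiKey:  "my-id.my-secret",          // placeholder; getZhipuToken expects "<id>.<secret>"
	}
	url := fmt.Sprintf("%s/api/paas/v4/chat/completions", info.BaseUrl)
	fmt.Println(url) // https://open.bigmodel.cn/api/paas/v4/chat/completions

	// The adaptor then sets the header the upstream expects:
	//   req.Header.Set("Authorization", getZhipuToken(info.ApiKey))
}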
relay/channel/zhipu_v4/constants.go (new file, 7 lines)

package zhipu_v4

var ModelList = []string{
	"glm-4", "glm-4v", "glm-3-turbo",
}

var ChannelName = "zhipu_v4"
relay/channel/zhipu_v4/dto.go (new file, 59 lines)

package zhipu_v4

import (
	"one-api/dto"
	"time"
)

// type ZhipuMessage struct {
//	Role       string `json:"role,omitempty"`
//	Content    string `json:"content,omitempty"`
//	ToolCalls  any    `json:"tool_calls,omitempty"`
//	ToolCallId any    `json:"tool_call_id,omitempty"`
// }
//
// type ZhipuRequest struct {
//	Model       string         `json:"model"`
//	Stream      bool           `json:"stream,omitempty"`
//	Messages    []ZhipuMessage `json:"messages"`
//	Temperature float64        `json:"temperature,omitempty"`
//	TopP        float64        `json:"top_p,omitempty"`
//	MaxTokens   int            `json:"max_tokens,omitempty"`
//	Stop        []string       `json:"stop,omitempty"`
//	RequestId   string         `json:"request_id,omitempty"`
//	Tools       any            `json:"tools,omitempty"`
//	ToolChoice  any            `json:"tool_choice,omitempty"`
// }
//
// type ZhipuV4TextResponseChoice struct {
//	Index        int    `json:"index"`
//	ZhipuMessage        `json:"message"`
//	FinishReason string `json:"finish_reason"`
// }

type ZhipuV4Response struct {
	Id                  string                         `json:"id"`
	Created             int64                          `json:"created"`
	Model               string                         `json:"model"`
	TextResponseChoices []dto.OpenAITextResponseChoice `json:"choices"`
	Usage               dto.Usage                      `json:"usage"`
	Error               dto.OpenAIError                `json:"error"`
}

//
//type ZhipuV4StreamResponseChoice struct {
//	Index        int          `json:"index,omitempty"`
//	Delta        ZhipuMessage `json:"delta"`
//	FinishReason *string      `json:"finish_reason,omitempty"`
//}

type ZhipuV4StreamResponse struct {
	Id      string                                    `json:"id"`
	Created int64                                     `json:"created"`
	Choices []dto.ChatCompletionsStreamResponseChoice `json:"choices"`
	Usage   dto.Usage                                 `json:"usage"`
}

type tokenData struct {
	Token      string
	ExpiryTime time.Time
}
relay/channel/zhipu_v4/relay-zhipu_v4.go (new file, 262 lines)

package zhipu_v4

import (
	"bufio"
	"bytes"
	"encoding/json"
	"github.com/gin-gonic/gin"
	"github.com/golang-jwt/jwt"
	"io"
	"net/http"
	"one-api/common"
	"one-api/dto"
	"one-api/service"
	"strings"
	"sync"
	"time"
)

// https://open.bigmodel.cn/doc/api#chatglm_std
// chatglm_std, chatglm_lite
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke

var zhipuTokens sync.Map
var expSeconds int64 = 24 * 3600

func getZhipuToken(apikey string) string {
	data, ok := zhipuTokens.Load(apikey)
	if ok {
		tokenData := data.(tokenData)
		if time.Now().Before(tokenData.ExpiryTime) {
			return tokenData.Token
		}
	}

	split := strings.Split(apikey, ".")
	if len(split) != 2 {
		common.SysError("invalid zhipu key: " + apikey)
		return ""
	}

	id := split[0]
	secret := split[1]

	expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6
	expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second)

	timestamp := time.Now().UnixNano() / 1e6

	payload := jwt.MapClaims{
		"api_key":   id,
		"exp":       expMillis,
		"timestamp": timestamp,
	}

	token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload)

	token.Header["alg"] = "HS256"
	token.Header["sign_type"] = "SIGN"

	tokenString, err := token.SignedString([]byte(secret))
	if err != nil {
		return ""
	}

	zhipuTokens.Store(apikey, tokenData{
		Token:      tokenString,
		ExpiryTime: expiryTime,
	})

	return tokenString
}

func requestOpenAI2Zhipu(request dto.GeneralOpenAIRequest) *dto.GeneralOpenAIRequest {
	messages := make([]dto.Message, 0, len(request.Messages))
	for _, message := range request.Messages {
		messages = append(messages, dto.Message{
			Role:       message.Role,
			Content:    message.Content,
			ToolCalls:  message.ToolCalls,
			ToolCallId: message.ToolCallId,
		})
	}
	str, ok := request.Stop.(string)
	var Stop []string
	if ok {
		Stop = []string{str}
	} else {
		Stop, _ = request.Stop.([]string)
	}
	return &dto.GeneralOpenAIRequest{
		Model:       request.Model,
		Stream:      request.Stream,
		Messages:    messages,
		Temperature: request.Temperature,
		TopP:        request.TopP,
		MaxTokens:   request.MaxTokens,
		Stop:        Stop,
		Tools:       request.Tools,
		ToolChoice:  request.ToolChoice,
	}
}

//func responseZhipu2OpenAI(response *dto.OpenAITextResponse) *dto.OpenAITextResponse {
//	fullTextResponse := dto.OpenAITextResponse{
//		Id:      response.Id,
//		Object:  "chat.completion",
//		Created: common.GetTimestamp(),
//		Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.TextResponseChoices)),
//		Usage:   response.Usage,
//	}
//	for i, choice := range response.TextResponseChoices {
//		content, _ := json.Marshal(strings.Trim(choice.Content, "\""))
//		openaiChoice := dto.OpenAITextResponseChoice{
//			Index: i,
//			Message: dto.Message{
//				Role:    choice.Role,
//				Content: content,
//			},
//			FinishReason: "",
//		}
//		if i == len(response.TextResponseChoices)-1 {
//			openaiChoice.FinishReason = "stop"
//		}
//		fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice)
//	}
//	return &fullTextResponse
//}

func streamResponseZhipu2OpenAI(zhipuResponse *ZhipuV4StreamResponse) *dto.ChatCompletionsStreamResponse {
	var choice dto.ChatCompletionsStreamResponseChoice
	choice.Delta.Content = zhipuResponse.Choices[0].Delta.Content
	choice.Delta.Role = zhipuResponse.Choices[0].Delta.Role
	choice.Delta.ToolCalls = zhipuResponse.Choices[0].Delta.ToolCalls
	choice.Index = zhipuResponse.Choices[0].Index
	choice.FinishReason = zhipuResponse.Choices[0].FinishReason
	response := dto.ChatCompletionsStreamResponse{
		Id:      zhipuResponse.Id,
		Object:  "chat.completion.chunk",
		Created: zhipuResponse.Created,
		Model:   "glm-4",
		Choices: []dto.ChatCompletionsStreamResponseChoice{choice},
	}
	return &response
}

func lastStreamResponseZhipuV42OpenAI(zhipuResponse *ZhipuV4StreamResponse) (*dto.ChatCompletionsStreamResponse, *dto.Usage) {
	response := streamResponseZhipu2OpenAI(zhipuResponse)
	return response, &zhipuResponse.Usage
}

func zhipuStreamHandler(c *gin.Context, resp *http.Response) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
	var usage *dto.Usage
	scanner := bufio.NewScanner(resp.Body)
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	dataChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			if len(data) < 6 { // ignore blank line or wrong format
				continue
			}
			if data[:6] != "data: " && data[:6] != "[DONE]" {
				continue
			}
			dataChan <- data
		}
		stopChan <- true
	}()
	service.SetEventStreamHeaders(c)
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			if strings.HasPrefix(data, "data: [DONE]") {
				data = data[:12]
			}
			// some implementations may add \r at the end of data
			data = strings.TrimSuffix(data, "\r")

			var streamResponse ZhipuV4StreamResponse
			err := json.Unmarshal([]byte(data), &streamResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
			}
			var response *dto.ChatCompletionsStreamResponse
			if strings.Contains(data, "prompt_tokens") {
				response, usage = lastStreamResponseZhipuV42OpenAI(&streamResponse)
			} else {
				response = streamResponseZhipu2OpenAI(&streamResponse)
			}
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, usage
}

func zhipuHandler(c *gin.Context, resp *http.Response) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
	var textResponse ZhipuV4Response
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	err = json.Unmarshal(responseBody, &textResponse)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	if textResponse.Error.Type != "" {
		return &dto.OpenAIErrorWithStatusCode{
			OpenAIError: textResponse.Error,
			StatusCode:  resp.StatusCode,
		}, nil
	}
	// Reset response body
	resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))

	// We shouldn't set the header before we parse the response body, because the parse part may fail.
	// And then we will have to send an error response, but in this case, the header has already been set.
	// So the HTTPClient will be confused by the response.
	// For example, Postman will report error, and we cannot check the response at all.
	for k, v := range resp.Header {
		c.Writer.Header().Set(k, v[0])
	}
	c.Writer.WriteHeader(resp.StatusCode)
	_, err = io.Copy(c.Writer, resp.Body)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}

	return nil, &textResponse.Usage
}
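zhipuStreamHandler rewrites each upstream event into an OpenAI-style chat.completion.chunk and treats the event that carries prompt_tokens as the final one, taking token usage from it via lastStreamResponseZhipuV42OpenAI. The sketch below decodes a hand-written example payload into trimmed copies of the DTOs above; the JSON body is illustrative only, not captured from the GLM API:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed local copies of the zhipu_v4 DTO shapes from this commit, limited to
// the fields used below so the sketch compiles on its own.
type usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

type streamChoice struct {
	Index int `json:"index,omitempty"`
	Delta struct {
		Content string `json:"content"`
		Role    string `json:"role,omitempty"`
	} `json:"delta"`
	FinishReason *string `json:"finish_reason,omitempty"`
}

type zhipuV4StreamResponse struct {
	Id      string         `json:"id"`
	Created int64          `json:"created"`
	Choices []streamChoice `json:"choices"`
	Usage   usage          `json:"usage"`
}

func main() {
	// Illustrative final chunk; field names follow the struct tags above.
	payload := `{"id":"chatcmpl-demo","created":1706000000,` +
		`"choices":[{"index":0,"delta":{"content":"你好","role":"assistant"},"finish_reason":"stop"}],` +
		`"usage":{"prompt_tokens":12,"completion_tokens":5,"total_tokens":17}}`

	var chunk zhipuV4StreamResponse
	if err := json.Unmarshal([]byte(payload), &chunk); err != nil {
		panic(err)
	}
	// This is the branch zhipuStreamHandler takes when it sees "prompt_tokens".
	fmt.Println(chunk.Choices[0].Delta.Content, chunk.Usage.TotalTokens) // 你好 17
}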
@@ -15,6 +15,7 @@ const (
 	APITypeAIProxyLibrary
 	APITypeTencent
 	APITypeGemini
+	APITypeZhipu_v4

 	APITypeDummy // this one is only for count, do not add any channel after this
 )
@@ -40,6 +41,8 @@ func ChannelType2APIType(channelType int) int {
 		apiType = APITypeTencent
 	case common.ChannelTypeGemini:
 		apiType = APITypeGemini
+	case common.ChannelTypeZhipu_v4:
+		apiType = APITypeZhipu_v4
 	}
 	return apiType
 }
@@ -11,6 +11,7 @@ import (
 	"one-api/relay/channel/tencent"
 	"one-api/relay/channel/xunfei"
 	"one-api/relay/channel/zhipu"
+	"one-api/relay/channel/zhipu_v4"
 	"one-api/relay/constant"
 )

@@ -36,6 +37,8 @@ func GetAdaptor(apiType int) channel.Adaptor {
 		return &xunfei.Adaptor{}
 	case constant.APITypeZhipu:
 		return &zhipu.Adaptor{}
+	case constant.APITypeZhipu_v4:
+		return &zhipu_v4.Adaptor{}
 	}
 	return nil
 }
@@ -154,6 +154,9 @@ func CountTokenMessages(messages []dto.Message, model string) (int, error) {
 			for _, m := range arrayContent {
 				if m.Type == "image_url" {
 					var imageTokenNum int
+					if model == "glm-4v" {
+						imageTokenNum = 1047
+					} else {
 					if str, ok := m.ImageUrl.(string); ok {
 						imageTokenNum, err = getImageToken(&dto.MessageImageUrl{Url: str, Detail: "auto"})
 					} else {
@@ -173,7 +176,7 @@ func CountTokenMessages(messages []dto.Message, model string) (int, error) {
 					if err != nil {
 						return 0, err
 					}
+					}
 					tokenNum += imageTokenNum
 					log.Printf("image token num: %d", imageTokenNum)
 				} else {
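The token counter gains a glm-4v special case: every image_url part is billed a flat 1047 tokens instead of going through getImageToken's detail-based estimate. A small illustration of that accounting; the non-glm-4v figure is a placeholder, not what getImageToken actually returns:

package main

import "fmt"

// countImageTokens mirrors the branch added above: glm-4v gets a flat per-image
// cost, other models would fall through to the detail-based getImageToken path
// (represented here by a placeholder value).
func countImageTokens(model string, images int) int {
	const glm4vImageTokens = 1047 // constant introduced in this commit
	perImage := 85                // placeholder for whatever getImageToken would return
	if model == "glm-4v" {
		perImage = glm4vImageTokens
	}
	return images * perImage
}

func main() {
	fmt.Println(countImageTokens("glm-4v", 2))                // 2094
	fmt.Println(countImageTokens("gpt-4-vision-preview", 2))  // 170 (placeholder math)
}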
@@ -9,6 +9,8 @@ export const CHANNEL_OPTIONS = [
     {key: 17, text: '阿里通义千问', value: 17, color: 'orange', label: '阿里通义千问'},
     {key: 18, text: '讯飞星火认知', value: 18, color: 'blue', label: '讯飞星火认知'},
     {key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet', label: '智谱 ChatGLM'},
+    {key: 16, text: '智谱 GLM-4V', value: 26, color: 'green', label: '智谱 GLM-4V'},
+    {key: 16, text: 'Moonshot', value: 25, color: 'green', label: 'Moonshot'},
     {key: 19, text: '360 智脑', value: 19, color: 'blue', label: '360 智脑'},
     {key: 23, text: '腾讯混元', value: 23, color: 'teal', label: '腾讯混元'},
     {key: 8, text: '自定义渠道', value: 8, color: 'pink', label: '自定义渠道'},
@@ -89,6 +89,9 @@ const EditChannel = (props) => {
             case 24:
                 localModels = ['gemini-pro'];
                 break;
+            case 26:
+                localModels = ['glm-4', 'glm-4v', 'glm-3-turbo'];
+                break;
         }
         setInputs((inputs) => ({...inputs, models: localModels}));
     }