Compare commits

...

6 Commits

Author    SHA1        Message                                                    Date
JustSong  3e81d8af45  chore: update i18n                                         2023-07-29 19:50:29 +08:00
JustSong  b8cb86c2c1  chore: adjust ui                                           2023-07-29 19:32:06 +08:00
JustSong  f45d586400  fix: fix model mapping cannot be deleted                   2023-07-29 19:17:26 +08:00
JustSong  50dec03ff3  fix: fix model mapping cannot be deleted                   2023-07-29 19:16:42 +08:00
JustSong  f31d400b6f  chore: automatically add related models when switch type  2023-07-29 12:24:23 +08:00
JustSong  130e6bfd83  feat: support baidu's embedding model (close #324)        2023-07-29 12:15:07 +08:00
11 changed files with 190 additions and 31 deletions

View File

@@ -42,6 +42,7 @@ var ModelRatio = map[string]float64{
 	"claude-2":        30,
 	"ERNIE-Bot":       0.8572, // ¥0.012 / 1k tokens
 	"ERNIE-Bot-turbo": 0.5715, // ¥0.008 / 1k tokens
+	"Embedding-V1":    0.1429, // ¥0.002 / 1k tokens
 	"PaLM-2":          1,
 	"chatglm_pro":     0.7143, // ¥0.01 / 1k tokens
 	"chatglm_std":     0.3572, // ¥0.005 / 1k tokens

View File

@@ -288,6 +288,15 @@ func init() {
 		Root:       "ERNIE-Bot-turbo",
 		Parent:     nil,
 	},
+	{
+		Id:         "Embedding-V1",
+		Object:     "model",
+		Created:    1677649963,
+		OwnedBy:    "baidu",
+		Permission: permission,
+		Root:       "Embedding-V1",
+		Parent:     nil,
+	},
 	{
 		Id:         "PaLM-2",
 		Object:     "model",

View File

@@ -54,6 +54,25 @@ type BaiduChatStreamResponse struct {
 	IsEnd bool `json:"is_end"`
 }
+
+type BaiduEmbeddingRequest struct {
+	Input []string `json:"input"`
+}
+
+type BaiduEmbeddingData struct {
+	Object    string    `json:"object"`
+	Embedding []float64 `json:"embedding"`
+	Index     int       `json:"index"`
+}
+
+type BaiduEmbeddingResponse struct {
+	Id      string               `json:"id"`
+	Object  string               `json:"object"`
+	Created int64                `json:"created"`
+	Data    []BaiduEmbeddingData `json:"data"`
+	Usage   Usage                `json:"usage"`
+	BaiduError
+}
 
 func requestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduChatRequest {
 	messages := make([]BaiduMessage, 0, len(request.Messages))
 	for _, message := range request.Messages {
@@ -112,6 +131,36 @@ func streamResponseBaidu2OpenAI(baiduResponse *BaiduChatStreamResponse) *ChatCom
 	return &response
 }
 
+func embeddingRequestOpenAI2Baidu(request GeneralOpenAIRequest) *BaiduEmbeddingRequest {
+	baiduEmbeddingRequest := BaiduEmbeddingRequest{
+		Input: nil,
+	}
+	switch request.Input.(type) {
+	case string:
+		baiduEmbeddingRequest.Input = []string{request.Input.(string)}
+	case []string:
+		baiduEmbeddingRequest.Input = request.Input.([]string)
+	}
+	return &baiduEmbeddingRequest
+}
+
+func embeddingResponseBaidu2OpenAI(response *BaiduEmbeddingResponse) *OpenAIEmbeddingResponse {
+	openAIEmbeddingResponse := OpenAIEmbeddingResponse{
+		Object: "list",
+		Data:   make([]OpenAIEmbeddingResponseItem, 0, len(response.Data)),
+		Model:  "baidu-embedding",
+		Usage:  response.Usage,
+	}
+	for _, item := range response.Data {
+		openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, OpenAIEmbeddingResponseItem{
+			Object:    item.Object,
+			Index:     item.Index,
+			Embedding: item.Embedding,
+		})
+	}
+	return &openAIEmbeddingResponse
+}
+
 func baiduStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
 	var usage Usage
 	scanner := bufio.NewScanner(resp.Body)
@@ -212,3 +261,39 @@ func baiduHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCo
 	_, err = c.Writer.Write(jsonResponse)
 	return nil, &fullTextResponse.Usage
 }
+
+func baiduEmbeddingHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
+	var baiduResponse BaiduEmbeddingResponse
+	responseBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+	}
+	err = resp.Body.Close()
+	if err != nil {
+		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
+	err = json.Unmarshal(responseBody, &baiduResponse)
+	if err != nil {
+		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	if baiduResponse.ErrorMsg != "" {
+		return &OpenAIErrorWithStatusCode{
+			OpenAIError: OpenAIError{
+				Message: baiduResponse.ErrorMsg,
+				Type:    "baidu_error",
+				Param:   "",
+				Code:    baiduResponse.ErrorCode,
+			},
+			StatusCode: resp.StatusCode,
+		}, nil
+	}
+	fullTextResponse := embeddingResponseBaidu2OpenAI(&baiduResponse)
+	jsonResponse, err := json.Marshal(fullTextResponse)
+	if err != nil {
+		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	c.Writer.Header().Set("Content-Type", "application/json")
+	c.Writer.WriteHeader(resp.StatusCode)
+	_, err = c.Writer.Write(jsonResponse)
+	return nil, &fullTextResponse.Usage
+}
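
To make the new conversion path concrete: a minimal, self-contained sketch of what embeddingRequestOpenAI2Baidu effectively produces for a single-string input. The BaiduEmbeddingRequest type is copied from the diff; the inline request struct with an any-typed "input" field is an assumption for illustration, not the repository's actual GeneralOpenAIRequest definition.

package main

import (
	"encoding/json"
	"fmt"
)

// Copied from the diff so the sketch compiles on its own.
type BaiduEmbeddingRequest struct {
	Input []string `json:"input"`
}

func main() {
	// An OpenAI-style embeddings body; "input" may be a string or an array of strings.
	body := []byte(`{"model":"Embedding-V1","input":"hello world"}`)

	var req struct {
		Model string `json:"model"`
		Input any    `json:"input"`
	}
	if err := json.Unmarshal(body, &req); err != nil {
		panic(err)
	}

	// Mirror the string branch above: a single string becomes a one-element
	// array in Baidu's {"input":[...]} request format.
	var baiduReq BaiduEmbeddingRequest
	if s, ok := req.Input.(string); ok {
		baiduReq.Input = []string{s}
	}

	out, _ := json.Marshal(baiduReq)
	fmt.Println(string(out)) // {"input":["hello world"]}
}

One caveat worth noting: when an "input" array is decoded into an untyped interface, Go surfaces it as []interface{} rather than []string, so whether the []string case in the switch above is reachable depends on how GeneralOpenAIRequest declares and populates Input, which this diff does not show.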

View File

@@ -74,7 +74,7 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 	// map model name
 	modelMapping := c.GetString("model_mapping")
 	isModelMapped := false
-	if modelMapping != "" {
+	if modelMapping != "" && modelMapping != "{}" {
 		modelMap := make(map[string]string)
 		err := json.Unmarshal([]byte(modelMapping), &modelMap)
 		if err != nil {
@@ -139,6 +139,8 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
case "BLOOMZ-7B":
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1"
case "Embedding-V1":
fullRequestURL = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1"
}
apiKey := c.Request.Header.Get("Authorization")
apiKey = strings.TrimPrefix(apiKey, "Bearer ")
@@ -212,12 +214,20 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 		}
 		requestBody = bytes.NewBuffer(jsonStr)
 	case APITypeBaidu:
-		baiduRequest := requestOpenAI2Baidu(textRequest)
-		jsonStr, err := json.Marshal(baiduRequest)
+		var jsonData []byte
+		var err error
+		switch relayMode {
+		case RelayModeEmbeddings:
+			baiduEmbeddingRequest := embeddingRequestOpenAI2Baidu(textRequest)
+			jsonData, err = json.Marshal(baiduEmbeddingRequest)
+		default:
+			baiduRequest := requestOpenAI2Baidu(textRequest)
+			jsonData, err = json.Marshal(baiduRequest)
+		}
 		if err != nil {
 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
 		}
-		requestBody = bytes.NewBuffer(jsonStr)
+		requestBody = bytes.NewBuffer(jsonData)
 	case APITypePaLM:
 		palmRequest := requestOpenAI2PaLM(textRequest)
 		jsonStr, err := json.Marshal(palmRequest)
@@ -386,7 +396,14 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			}
 			return nil
 		} else {
-			err, usage := baiduHandler(c, resp)
+			var err *OpenAIErrorWithStatusCode
+			var usage *Usage
+			switch relayMode {
+			case RelayModeEmbeddings:
+				err, usage = baiduEmbeddingHandler(c, resp)
+			default:
+				err, usage = baiduHandler(c, resp)
+			}
 			if err != nil {
 				return err
 			}
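
With both the URL selection and the relayMode dispatch in place, an OpenAI-style embeddings call for Embedding-V1 can now be sent through the proxy and is forwarded to Baidu's embedding endpoint. A minimal client-side sketch; the base URL, port, and token below are placeholders for your own deployment, not values from this diff:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	baseURL := "http://localhost:3000" // placeholder one-api deployment
	token := "sk-xxx"                  // placeholder token issued by one-api

	body := []byte(`{"model":"Embedding-V1","input":"hello world"}`)
	req, err := http.NewRequest(http.MethodPost, baseURL+"/v1/embeddings", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out)) // an OpenAI-shaped embedding response, produced by baiduEmbeddingHandler above
}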

View File

@@ -99,6 +99,19 @@ type OpenAITextResponse struct {
 	Usage   `json:"usage"`
 }
 
+type OpenAIEmbeddingResponseItem struct {
+	Object    string    `json:"object"`
+	Index     int       `json:"index"`
+	Embedding []float64 `json:"embedding"`
+}
+
+type OpenAIEmbeddingResponse struct {
+	Object string                        `json:"object"`
+	Data   []OpenAIEmbeddingResponseItem `json:"data"`
+	Model  string                        `json:"model"`
+	Usage  `json:"usage"`
+}
+
 type ImageResponse struct {
 	Created int `json:"created"`
 	Data    []struct {
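
For reference, a short sketch of the wire shape these two new types produce when marshalled. The item and response types are copied from the diff; the Usage struct itself is not part of this diff, so its fields here simply follow the standard OpenAI usage object and are an assumption:

package main

import (
	"encoding/json"
	"fmt"
)

// Assumed to mirror the repository's Usage type (standard OpenAI usage fields).
type Usage struct {
	PromptTokens     int `json:"prompt_tokens"`
	CompletionTokens int `json:"completion_tokens"`
	TotalTokens      int `json:"total_tokens"`
}

// Copied from the diff.
type OpenAIEmbeddingResponseItem struct {
	Object    string    `json:"object"`
	Index     int       `json:"index"`
	Embedding []float64 `json:"embedding"`
}

type OpenAIEmbeddingResponse struct {
	Object string                        `json:"object"`
	Data   []OpenAIEmbeddingResponseItem `json:"data"`
	Model  string                        `json:"model"`
	Usage  `json:"usage"`
}

func main() {
	resp := OpenAIEmbeddingResponse{
		Object: "list",
		Data: []OpenAIEmbeddingResponseItem{
			{Object: "embedding", Index: 0, Embedding: []float64{0.1, 0.2}},
		},
		Model: "baidu-embedding", // the model name set by embeddingResponseBaidu2OpenAI
		Usage: Usage{PromptTokens: 3, TotalTokens: 3},
	}
	out, _ := json.Marshal(resp)
	fmt.Println(string(out))
	// {"object":"list","data":[{"object":"embedding","index":0,"embedding":[0.1,0.2]}],
	//  "model":"baidu-embedding","usage":{"prompt_tokens":3,"completion_tokens":0,"total_tokens":3}}
}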

View File

@@ -503,5 +503,12 @@
   "请输入 AZURE_OPENAI_ENDPOINT": "Please enter AZURE_OPENAI_ENDPOINT",
   "请输入自定义渠道的 Base URL": "Please enter the Base URL of the custom channel",
   "Homepage URL 填": "Fill in the Homepage URL",
-  "Authorization callback URL 填": "Fill in the Authorization callback URL"
+  "Authorization callback URL 填": "Fill in the Authorization callback URL",
+  "请为通道命名": "Please name the channel",
+  "此项可选,用于修改请求体中的模型名称,为一个 JSON 字符串,键为请求中模型名称,值为要替换的模型名称,例如:": "This is optional, used to modify the model name in the request body, it's a JSON string, the key is the model name in the request, and the value is the model name to be replaced, for example:",
+  "模型重定向": "Model redirection",
+  "请输入渠道对应的鉴权密钥": "Please enter the authentication key corresponding to the channel",
+  "注意,": "Note that, ",
+  ",图片演示。": "related image demo.",
+  "令牌创建成功,请在列表页面点击复制获取令牌!": "Token created successfully, please click copy on the list page to get the token!"
 }

View File

@@ -363,9 +363,12 @@ const ChannelsTable = () => {
               </Table.Cell>
               <Table.Cell>
                 <Popup
-                  content={channel.balance_updated_time ? renderTimestamp(channel.balance_updated_time) : '未更新'}
-                  key={channel.id}
-                  trigger={renderBalance(channel.type, channel.balance)}
+                  trigger={<span onClick={() => {
+                    updateChannelBalance(channel.id, channel.name, idx);
+                  }} style={{ cursor: 'pointer' }}>
+                    {renderBalance(channel.type, channel.balance)}
+                  </span>}
+                  content="点击更新"
                   basic
                 />
               </Table.Cell>
@@ -380,16 +383,16 @@
                   >
                     测试
                   </Button>
-                  <Button
-                    size={'small'}
-                    positive
-                    loading={updatingBalance}
-                    onClick={() => {
-                      updateChannelBalance(channel.id, channel.name, idx);
-                    }}
-                  >
-                    更新余额
-                  </Button>
+                  {/*<Button*/}
+                  {/*  size={'small'}*/}
+                  {/*  positive*/}
+                  {/*  loading={updatingBalance}*/}
+                  {/*  onClick={() => {*/}
+                  {/*    updateChannelBalance(channel.id, channel.name, idx);*/}
+                  {/*  }}*/}
+                  {/*>*/}
+                  {/*  更新余额*/}
+                  {/*</Button>*/}
                   <Popup
                     trigger={
                       <Button size='small' negative>

View File

@@ -227,7 +227,7 @@ const UsersTable = () => {
                   content={user.email ? user.email : '未绑定邮箱地址'}
                   key={user.username}
                   header={user.display_name ? user.display_name : user.username}
-                  trigger={<span>{renderText(user.username, 10)}</span>}
+                  trigger={<span>{renderText(user.username, 15)}</span>}
                   hoverable
                 />
               </Table.Cell>

View File

@@ -1,5 +1,5 @@
 export const toastConstants = {
-  SUCCESS_TIMEOUT: 500,
+  SUCCESS_TIMEOUT: 1500,
   INFO_TIMEOUT: 3000,
   ERROR_TIMEOUT: 5000,
   WARNING_TIMEOUT: 10000,

View File

@@ -35,6 +35,27 @@ const EditChannel = () => {
   const [customModel, setCustomModel] = useState('');
   const handleInputChange = (e, { name, value }) => {
     setInputs((inputs) => ({ ...inputs, [name]: value }));
+    if (name === 'type' && inputs.models.length === 0) {
+      let localModels = [];
+      switch (value) {
+        case 14:
+          localModels = ['claude-instant-1', 'claude-2'];
+          break;
+        case 11:
+          localModels = ['PaLM-2'];
+          break;
+        case 15:
+          localModels = ['ERNIE-Bot', 'ERNIE-Bot-turbo', 'Embedding-V1'];
+          break;
+        case 17:
+          localModels = ['qwen-v1', 'qwen-plus-v1'];
+          break;
+        case 16:
+          localModels = ['chatglm_pro', 'chatglm_std', 'chatglm_lite'];
+          break;
+      }
+      setInputs((inputs) => ({ ...inputs, models: localModels }));
+    }
   };
 
   const loadChannel = async () => {
@@ -132,7 +153,10 @@ const EditChannel = () => {
       localInputs.base_url = localInputs.base_url.slice(0, localInputs.base_url.length - 1);
     }
     if (localInputs.type === 3 && localInputs.other === '') {
-      localInputs.other = '2023-03-15-preview';
+      localInputs.other = '2023-06-01-preview';
     }
+    if (localInputs.model_mapping === '') {
+      localInputs.model_mapping = '{}';
+    }
     let res;
     localInputs.models = localInputs.models.join(',');
@@ -192,7 +216,7 @@ const EditChannel = () => {
             <Form.Input
               label='默认 API 版本'
               name='other'
-              placeholder={'请输入默认 API 版本例如2023-03-15-preview该配置可以被实际的请求查询参数所覆盖'}
+              placeholder={'请输入默认 API 版本例如2023-06-01-preview该配置可以被实际的请求查询参数所覆盖'}
               onChange={handleInputChange}
               value={inputs.other}
               autoComplete='new-password'
@@ -270,8 +294,8 @@ const EditChannel = () => {
             }}>清除所有模型</Button>
             <Input
               action={
-                <Button type={'button'} onClick={()=>{
-                  if (customModel.trim() === "") return;
+                <Button type={'button'} onClick={() => {
+                  if (customModel.trim() === '') return;
                   if (inputs.models.includes(customModel)) return;
                   let localModels = [...inputs.models];
                   localModels.push(customModel);
@@ -279,9 +303,9 @@ const EditChannel = () => {
                   localModelOptions.push({
                     key: customModel,
                     text: customModel,
-                    value: customModel,
+                    value: customModel
                   });
-                  setModelOptions(modelOptions=>{
+                  setModelOptions(modelOptions => {
                     return [...modelOptions, ...localModelOptions];
                   });
                   setCustomModel('');
@@ -297,7 +321,7 @@ const EditChannel = () => {
         </div>
         <Form.Field>
           <Form.TextArea
-            label='模型映射'
+            label='模型重定向'
             placeholder={`此项可选,用于修改请求体中的模型名称,为一个 JSON 字符串,键为请求中模型名称,值为要替换的模型名称,例如:\n${JSON.stringify(MODEL_MAPPING_EXAMPLE, null, 2)}`}
             name='model_mapping'
             onChange={handleInputChange}
@@ -323,7 +347,7 @@ const EditChannel = () => {
             label='密钥'
             name='key'
             required
-            placeholder={inputs.type === 15 ? "请输入 access token当前版本暂不支持自动刷新请每 30 天更新一次" : '请输入渠道对应的鉴权密钥'}
+            placeholder={inputs.type === 15 ? '请输入 access token当前版本暂不支持自动刷新请每 30 天更新一次' : '请输入渠道对应的鉴权密钥'}
             onChange={handleInputChange}
             value={inputs.key}
             autoComplete='new-password'
@@ -354,7 +378,7 @@ const EditChannel = () => {
           </Form.Field>
         )
       }
-      <Button type={isEdit ? "button" : "submit"} positive onClick={submit}>提交</Button>
+      <Button type={isEdit ? 'button' : 'submit'} positive onClick={submit}>提交</Button>
       </Form>
     </Segment>
   </>

View File

@@ -83,7 +83,7 @@ const EditToken = () => {
       if (isEdit) {
         showSuccess('令牌更新成功!');
       } else {
-        showSuccess('令牌创建成功!');
+        showSuccess('令牌创建成功,请在列表页面点击复制获取令牌');
         setInputs(originInputs);
       }
     } else {