add baidu ai model api configurations

RockYang
2023-10-10 18:19:56 +08:00
parent f5ed71bcc6
commit 4fc01f3f7b
6 changed files with 274 additions and 8 deletions


@@ -39,12 +39,16 @@
<el-dialog
v-model="showDialog"
:title="title"
style="width: 90%; max-width: 600px;"
>
<el-alert title="注意如果是百度文心一言平台需要用竖线| API Key Secret Key 串接起来填入"
type="warning"
:closable="false"
show-icon
style="margin-bottom: 10px; font-size:14px;"/>
<el-form :model="item" label-width="120px" ref="formRef" :rules="rules">
<el-form-item label="所属平台" prop="platform">
<el-select v-model="item.platform" placeholder="请选择平台">
<el-option v-for="item in platforms" :value="item" :key="item">{{ item }}</el-option>
<el-option v-for="item in platforms" :value="item.value" :key="item.value">{{ item.name }}</el-option>
</el-select>
</el-form-item>
@@ -82,7 +86,13 @@ const rules = reactive({
const loading = ref(true)
const formRef = ref(null)
const title = ref("")
const platforms = ref(["Azure", "OpenAI", "ChatGLM"])
const platforms = ref([
{name: "清华智普ChatGLM", value: "ChatGLM"},
{name: "百度文心一言", value: "Baidu"},
{name: "微软Azure", value: "Azure"},
{name: "OpenAIChatGPT", value: "OpenAI"},
])
// Fetch data
httpGet('/api/admin/apikey/list').then((res) => {
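
Because Baidu's platform issues a separate API Key and Secret Key, the dialog above asks the admin to enter them as a single pipe-joined string. Below is a minimal sketch of how such a value could be split back into its two parts; the splitBaiduKey helper and its error message are illustrative assumptions, not code from this commit.

```js
// Hypothetical helper (not part of this commit): split a pipe-joined Baidu
// credential of the form "API Key|Secret Key" back into its two parts.
function splitBaiduKey(value) {
  const parts = value.split("|")
  if (parts.length !== 2 || !parts[0].trim() || !parts[1].trim()) {
    throw new Error("Baidu key must look like 'API Key|Secret Key'")
  }
  return {apiKey: parts[0].trim(), secretKey: parts[1].trim()}
}

// splitBaiduKey("my-api-key|my-secret-key")
// => {apiKey: "my-api-key", secretKey: "my-secret-key"}
```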


@@ -90,13 +90,25 @@
<el-input v-model="chat['chat_gml']['api_url']" placeholder="支持变量,{model} => 模型名称"/>
</el-form-item>
<el-form-item label="模型创意度">
<el-slider v-model="chat['chat_gml']['temperature']" :max="2" :step="0.1"/>
<el-slider v-model="chat['chat_gml']['temperature']" :max="1" :step="0.01"/>
<div class="tip">值越大 AI 回答越发散值越小回答越保守建议保持默认值</div>
</el-form-item>
<el-form-item label="最大响应长度">
<el-input v-model.number="chat['chat_gml']['max_tokens']" placeholder="回复的最大字数最大4096"/>
</el-form-item>
<el-divider content-position="center">Baidu ERNIE Bot (文心一言)</el-divider>
<el-form-item label="API URL" prop="baidu.api_url">
<el-input v-model="chat['baidu']['api_url']" placeholder="Supports variables: {model} => model name"/>
</el-form-item>
<el-form-item label="Model Creativity">
<el-slider v-model="chat['baidu']['temperature']" :max="1" :step="0.01"/>
<div class="tip">Higher values make the AI's replies more divergent; lower values make them more conservative. Keeping the default is recommended.</div>
</el-form-item>
<el-form-item label="Max Response Length">
<el-input v-model.number="chat['baidu']['max_tokens']" placeholder="Maximum number of characters in the reply (up to 4096)"/>
</el-form-item>
<el-form-item style="text-align: right">
<el-button type="primary" @click="save('chat')">保存</el-button>
</el-form-item>
@@ -116,7 +128,8 @@ const system = ref({models: []})
const chat = ref({
open_ai: {api_url: "", temperature: 1, max_tokens: 1024},
azure: {api_url: "", temperature: 1, max_tokens: 1024},
chat_gml: {api_url: "", temperature: 1, max_tokens: 1024},
chat_gml: {api_url: "", temperature: 0.95, max_tokens: 1024},
baidu: {api_url: "", temperature: 0.95, max_tokens: 1024},
context_deep: 0,
enable_context: true,
enable_history: true,
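
Like the ChatGLM entry, baidu.api_url supports a {model} placeholder, and the new defaults cap temperature at 0.95 on a 0–1 slider. The sketch below shows one way such a config entry could be turned into a request; buildBaiduRequest, the payload field names, and the "ernie-bot" model name are assumptions for illustration only.

```js
// Hypothetical sketch (not from this commit): expand the {model} placeholder in
// baidu.api_url and attach the tuning options stored alongside it.
function buildBaiduRequest(cfg, model, messages) {
  return {
    url: cfg.api_url.replace("{model}", model),
    body: {
      messages: messages,
      temperature: cfg.temperature, // 0–1 slider above, default 0.95
      max_tokens: cfg.max_tokens,   // field name assumed; Baidu may use a different key
    },
  }
}

// Example with the defaults declared above:
// buildBaiduRequest(
//   {api_url: "https://example.com/chat/{model}", temperature: 0.95, max_tokens: 1024},
//   "ernie-bot",
//   [{role: "user", content: "hello"}]
// )
```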