fix: playground max_tokens #512 #511

This commit is contained in:
1808837298@qq.com
2024-09-27 20:18:03 +08:00
parent 334a6f8280
commit f9ba10f180
2 changed files with 6 additions and 3 deletions

View File

@@ -75,7 +75,8 @@ func Playground(c *gin.Context) {
	}
	channel, err := model.CacheGetRandomSatisfiedChannel(group, playgroundRequest.Model, 0)
	if err != nil {
		message := fmt.Sprintf("当前分组 %s 下对于模型 %s 无可用渠道", group, playgroundRequest.Model)
		openaiErr = service.OpenAIErrorWrapperLocal(errors.New(message), "get_playground_channel_failed", http.StatusInternalServerError)
		return
	}
	middleware.SetupContextForSelectedChannel(c, channel, playgroundRequest.Model)

View File

@@ -133,8 +133,10 @@ const Playground = () => {
          completeMessage();
        } else {
          let text = payload.choices[0].delta.content;
          if (text) {
            generateMockResponse(text);
          }
        }
      } else {
        completeMessage();
      }
@@ -186,7 +188,7 @@ const Playground = () => {
      stream: true,
      model: inputs.model,
      group: inputs.group,
      max_tokens: parseInt(inputs.max_tokens),
      temperature: inputs.temperature,
    };
  };