GH Action - Upstream Sync
2023-12-27 00:59:19 +00:00
3 changed files with 9 additions and 2 deletions


@@ -122,6 +122,12 @@ export async function requestOpenai(req: NextRequest) {
// to disable nginx buffering
newHeaders.set("X-Accel-Buffering", "no");
// The latest version of the OpenAI API forces the content-encoding to be "br" in JSON responses,
// so when streaming is disabled we need to remove the content-encoding header.
// Vercel re-compresses the proxied response with gzip, so if the header is left in place
// the browser will try to decode the body as brotli and fail.
newHeaders.delete("content-encoding");
return new Response(res.body, {
status: res.status,
statusText: res.statusText,
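
For context, here is a minimal sketch of a proxy handler that applies the change above. The function name proxyOpenai, the hard-coded upstream URL, and the header forwarding are illustrative assumptions for this sketch, not the repository's actual requestOpenai implementation.

import type { NextRequest } from "next/server";

export async function proxyOpenai(req: NextRequest): Promise<Response> {
  // Hypothetical upstream URL and auth forwarding; the real handler derives
  // these from the incoming request and environment configuration.
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: req.headers.get("Authorization") ?? "",
    },
    body: await req.text(),
  });

  const newHeaders = new Headers(res.headers);
  // to disable nginx buffering
  newHeaders.set("X-Accel-Buffering", "no");
  // The upstream body may arrive brotli-encoded ("br"); Vercel re-compresses
  // the proxied response with gzip, so a stale content-encoding header would
  // make the browser attempt the wrong decode and fail.
  newHeaders.delete("content-encoding");

  return new Response(res.body, {
    status: res.status,
    statusText: res.statusText,
    headers: newHeaders,
  });
}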


@@ -144,6 +144,7 @@ export function getHeaders() {
const headers: Record<string, string> = {
"Content-Type": "application/json",
"x-requested-with": "XMLHttpRequest",
"Accept": "application/json",
};
const modelConfig = useChatStore.getState().currentSession().mask.modelConfig;
const isGoogle = modelConfig.model === "gemini-pro";
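
For context, a simplified sketch of how such a header map can be assembled and used; buildRequestHeaders and the apiKey parameter are hypothetical, whereas the real getHeaders() reads its model and auth state from the chat store as shown above.

// Builds the common request headers for a JSON API call.
function buildRequestHeaders(apiKey?: string): Record<string, string> {
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    "x-requested-with": "XMLHttpRequest",
    Accept: "application/json", // newly added: explicitly request a JSON response
  };
  if (apiKey) {
    headers["Authorization"] = `Bearer ${apiKey}`; // hypothetical auth wiring
  }
  return headers;
}

// Usage: pass the headers to fetch when calling the chat backend.
// fetch("/api/openai/v1/chat/completions", {
//   method: "POST",
//   headers: buildRequestHeaders(key),
//   body: JSON.stringify(payload),
// });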