Merge remote-tracking branch 'upstream/main' into klaas20240303

Klaas Reineke
2024-03-25 15:56:20 +01:00
134 changed files with 4418 additions and 920 deletions

app/client/platforms/openai.ts

@@ -1,3 +1,4 @@
+"use client";
 import {
   ApiPath,
   DEFAULT_API_HOST,
@@ -8,7 +9,14 @@ import {
 } from "@/app/constant";
 import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
-import { ChatOptions, getHeaders, LLMApi, LLMModel, LLMUsage } from "../api";
+import {
+  ChatOptions,
+  getHeaders,
+  LLMApi,
+  LLMModel,
+  LLMUsage,
+  MultimodalContent,
+} from "../api";
 import Locale from "../../locales";
 import {
   EventStreamContentType,
@@ -17,6 +25,11 @@ import {
 import { prettyObject } from "@/app/utils/format";
 import { getClientConfig } from "@/app/config/client";
 import { makeAzurePath } from "@/app/azure";
+import {
+  getMessageTextContent,
+  getMessageImages,
+  isVisionModel,
+} from "@/app/utils";
 //# for AdEx custom usage tracking (calls per model per user per datekey)
 // import { NextApiRequest, NextApiResponse } from 'next';
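
The three helpers imported from "@/app/utils" carry the vision support added in this commit. A minimal sketch of what they plausibly look like; the content-part shape and the keyword list below are assumptions, not the actual implementation:

type MultimodalContent = {
  type: "text" | "image_url";
  text?: string;
  image_url?: { url: string };
};

type RequestMessage = {
  role: string;
  content: string | MultimodalContent[];
};

// Flatten a multimodal message to plain text for non-vision models.
function getMessageTextContent(message: RequestMessage): string {
  if (typeof message.content === "string") return message.content;
  return message.content
    .filter((part) => part.type === "text")
    .map((part) => part.text ?? "")
    .join("\n");
}

// Collect the image URLs attached to a message.
function getMessageImages(message: RequestMessage): string[] {
  if (typeof message.content === "string") return [];
  return message.content
    .filter((part) => part.type === "image_url")
    .map((part) => part.image_url?.url ?? "");
}

// Name-based check; the real helper likely matches a longer keyword list.
function isVisionModel(model: string): boolean {
  return ["vision", "gpt-4-vision"].some((kw) => model.includes(kw));
}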
@@ -63,7 +76,9 @@ export class ChatGPTApi implements LLMApi {
     if (baseUrl.length === 0) {
       const isApp = !!getClientConfig()?.isApp;
-      baseUrl = isApp ? DEFAULT_API_HOST : ApiPath.OpenAI;
+      baseUrl = isApp
+        ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI
+        : ApiPath.OpenAI;
     }

     if (baseUrl.endsWith("/")) {
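
The rewritten branch sends packaged app builds through the /proxy route of the default host instead of hitting it directly, while web builds keep the relative API path. A hedged illustration; the constant values here are assumptions:

const DEFAULT_API_HOST = "https://api.nextchat.dev"; // assumed value
const ApiPath = { OpenAI: "/api/openai" } as const;  // assumed value

function resolveBaseUrl(isApp: boolean): string {
  return isApp
    ? DEFAULT_API_HOST + "/proxy" + ApiPath.OpenAI // app build: hosted proxy
    : ApiPath.OpenAI;                              // web build: same-origin route
}

resolveBaseUrl(true);  // "https://api.nextchat.dev/proxy/api/openai"
resolveBaseUrl(false); // "/api/openai"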
@@ -77,6 +92,8 @@ export class ChatGPTApi implements LLMApi {
       path = makeAzurePath(path, accessStore.azureApiVersion);
     }

+    console.log("[Proxy Endpoint] ", baseUrl, path);
+
     return [baseUrl, path].join("/");
   }
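
The new log line exposes the final endpoint before baseUrl and path are joined. For example, with the web-build values assumed above and the usual chat path:

// baseUrl = "/api/openai", path = "v1/chat/completions"
// logs:    [Proxy Endpoint]  /api/openai v1/chat/completions
// returns "/api/openai/v1/chat/completions"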
@@ -85,9 +102,10 @@
   }

   async chat(options: ChatOptions) {
+    const visionModel = isVisionModel(options.config.model);
     const messages = options.messages.map((v) => ({
       role: v.role,
-      content: v.content,
+      content: visionModel ? v.content : getMessageTextContent(v),
     }));

     const modelConfig = {
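
Only vision-capable models now receive the multimodal content array; every other model gets the message flattened to plain text, so stray image parts cannot break a text-only request. A sketch reusing the assumed types from the helper sketch above:

const userMessage: RequestMessage = {
  role: "user",
  content: [
    { type: "text", text: "What is in this picture?" },
    { type: "image_url", image_url: { url: "data:image/jpeg;base64,..." } }, // placeholder data URL
  ],
};

const visionModel = isVisionModel("gpt-4-vision-preview"); // true

const payloadMessage = {
  role: userMessage.role,
  // vision model: parts array passes through; otherwise: "What is in this picture?"
  content: visionModel ? userMessage.content : getMessageTextContent(userMessage),
};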
@@ -123,6 +141,16 @@
       // Please don't ask why max_tokens isn't sent by default; this param just causes trouble.
     };

+    // add max_tokens to vision model
+    if (visionModel) {
+      Object.defineProperty(requestPayload, "max_tokens", {
+        enumerable: true,
+        configurable: true,
+        writable: true,
+        value: modelConfig.max_tokens,
+      });
+    }
+
     console.log("[Request] openai payload: ", requestPayload);

     const modelIdentifier = modelConfig.model;
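
With enumerable, configurable, and writable all true, Object.defineProperty behaves exactly like a plain assignment, and JSON.stringify includes the key either way; the descriptor form mainly sidesteps a payload type that presumably does not declare max_tokens. A standalone sketch of the equivalence, with assumed types and values:

type RequestPayload = {
  model: string;
  messages: unknown[];
  max_tokens?: number; // declared here so the plain assignment typechecks
};

const requestPayload: RequestPayload = {
  model: "gpt-4-vision-preview",
  messages: [],
};
const modelConfig = { max_tokens: 4000 };

// Same observable effect as the defineProperty block in the diff.
requestPayload.max_tokens = modelConfig.max_tokens;

JSON.stringify(requestPayload);
// '{"model":"gpt-4-vision-preview","messages":[],"max_tokens":4000}'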
@@ -177,6 +205,9 @@
         if (finished || controller.signal.aborted) {
           responseText += remainText;
           console.log("[Response Animation] finished");
+          if (responseText?.length === 0) {
+            options.onError?.(new Error("empty response from server"));
+          }
           return;
         }
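
This guard fires when the response animation finishes without ever having received content, surfacing an error instead of silently rendering an empty assistant message. A minimal sketch of the pattern, with hypothetical names:

function onAnimationFinished(
  responseText: string,
  onError?: (err: Error) => void,
) {
  if (responseText.length === 0) {
    // The stream closed without a single token: report it to the UI.
    onError?.(new Error("empty response from server"));
  }
}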
@@ -377,6 +408,11 @@
     return chatModels.map((m) => ({
       name: m.id,
       available: true,
+      provider: {
+        id: "openai",
+        providerName: "OpenAI",
+        providerType: "openai",
+      },
     }));
   }
 }
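
Each entry returned by models() now carries provider metadata, which lets the UI group and label models by provider. The resulting shape, with a hypothetical model id:

const models = [
  {
    name: "gpt-4-vision-preview", // hypothetical id from the /models listing
    available: true,
    provider: {
      id: "openai",
      providerName: "OpenAI",
      providerType: "openai",
    },
  },
];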