Merge remote-tracking branch 'upstream/main'

Hk-Gosuto
2024-04-12 12:18:38 +08:00
31 changed files with 1146 additions and 61 deletions


@@ -44,6 +44,20 @@ export interface OpenAIListModelResponse {
   }>;
 }
 
+interface RequestPayload {
+  messages: {
+    role: "system" | "user" | "assistant";
+    content: string | MultimodalContent[];
+  }[];
+  stream?: boolean;
+  model: string;
+  temperature: number;
+  presence_penalty: number;
+  frequency_penalty: number;
+  top_p: number;
+  max_tokens?: number;
+}
+
 export class ChatGPTApi implements LLMApi {
   private disableListModels = true;
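The RequestPayload interface added in this hunk gives the request body an explicit shape, which is what lets the later max_tokens assignment be type-checked instead of going through Object.defineProperty. A minimal illustrative sketch (not part of the commit) of what the optional field allows:

    // Illustrative only: uses the RequestPayload type declared in the hunk above.
    const payload: RequestPayload = {
      messages: [{ role: "user", content: "hello" }],
      model: "gpt-4-vision-preview",
      temperature: 0.5,
      presence_penalty: 0,
      frequency_penalty: 0,
      top_p: 1,
    };
    // max_tokens is declared optional (max_tokens?: number), so this assignment
    // type-checks, while assigning a string here would be rejected at compile time.
    payload.max_tokens = 4000;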
@@ -181,7 +195,8 @@ export class ChatGPTApi implements LLMApi {
         model: options.config.model,
       },
     };
-    const requestPayload = {
+    const requestPayload: RequestPayload = {
       messages,
       stream: options.config.stream,
       model: modelConfig.model,
@@ -189,21 +204,13 @@ export class ChatGPTApi implements LLMApi {
       presence_penalty: modelConfig.presence_penalty,
       frequency_penalty: modelConfig.frequency_penalty,
       top_p: modelConfig.top_p,
-      max_tokens: modelConfig.model.includes("vision")
-        ? modelConfig.max_tokens
-        : null,
       // max_tokens: Math.max(modelConfig.max_tokens, 1024),
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
 
     // add max_tokens to vision model
     if (visionModel) {
-      Object.defineProperty(requestPayload, "max_tokens", {
-        enumerable: true,
-        configurable: true,
-        writable: true,
-        value: modelConfig.max_tokens,
-      });
+      requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }
 
     console.log("[Request] openai payload: ", requestPayload);
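The simplification in the last hunk is behavior-preserving: on a plain object, a direct indexed assignment creates exactly the enumerable, configurable, writable data property that the removed Object.defineProperty call spelled out by hand, and Math.max(modelConfig.max_tokens, 4000) guarantees vision models a floor of 4000 tokens. A standalone sketch (illustrative, not from the commit) showing the equivalence:

    // Both objects end up with an identical property descriptor.
    const viaDefine: Record<string, number> = {};
    Object.defineProperty(viaDefine, "max_tokens", {
      enumerable: true,
      configurable: true,
      writable: true,
      value: 4000,
    });

    const viaAssign: Record<string, number> = {};
    viaAssign["max_tokens"] = 4000;

    console.log(Object.getOwnPropertyDescriptor(viaDefine, "max_tokens"));
    console.log(Object.getOwnPropertyDescriptor(viaAssign, "max_tokens"));
    // both log: { value: 4000, writable: true, enumerable: true, configurable: true }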