feat: support more models

This commit is contained in:
Mokou 2023-04-05 14:14:41 +08:00
parent bd7567fa85
commit 62fab0c263
2 changed files with 24 additions and 13 deletions

View File

@ -9,6 +9,7 @@ const makeRequestParam = (
messages: Message[],
options?: {
filterBot?: boolean;
model?: string;
stream?: boolean;
},
): ChatRequest => {
@ -22,7 +23,7 @@ const makeRequestParam = (
}
return {
model: "gpt-3.5-turbo",
model: options?.model || "gpt-3.5-turbo",
messages: sendMessages,
stream: options?.stream,
};
@ -57,8 +58,11 @@ export function requestOpenaiClient(path: string) {
});
}
export async function requestChat(messages: Message[]) {
const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
export async function requestChat(messages: Message[], model: string) {
const req: ChatRequest = makeRequestParam(messages, {
filterBot: true,
model,
});
const res = await requestOpenaiClient("v1/chat/completions")(req);
@ -121,6 +125,7 @@ export async function requestChatStream(
const req = makeRequestParam(messages, {
stream: true,
filterBot: options?.filterBot,
model: options?.modelConfig?.model,
});
console.log("[Request] ", req);
@ -184,7 +189,11 @@ export async function requestChatStream(
}
}
export async function requestWithPrompt(messages: Message[], prompt: string) {
export async function requestWithPrompt(
messages: Message[],
prompt: string,
model: string,
) {
messages = messages.concat([
{
role: "user",
@ -193,7 +202,7 @@ export async function requestWithPrompt(messages: Message[], prompt: string) {
},
]);
const res = await requestChat(messages);
const res = await requestChat(messages, model);
return res?.choices?.at(0)?.message?.content ?? "";
}

View File

@ -417,6 +417,7 @@ export const useChatStore = create<ChatStore>()(
summarizeSession() {
const session = get().currentSession();
const config = get().config;
// should summarize topic after chatting more than 50 words
const SUMMARIZE_MIN_LEN = 50;
@ -424,16 +425,17 @@ export const useChatStore = create<ChatStore>()(
session.topic === DEFAULT_TOPIC &&
countMessages(session.messages) >= SUMMARIZE_MIN_LEN
) {
requestWithPrompt(session.messages, Locale.Store.Prompt.Topic).then(
(res) => {
requestWithPrompt(
session.messages,
Locale.Store.Prompt.Topic,
config.modelConfig.model,
).then((res) => {
get().updateCurrentSession(
(session) => (session.topic = trimTopic(res)),
);
},
);
});
}
const config = get().config;
let toBeSummarizedMsgs = session.messages.slice(
session.lastSummarizeIndex,
);