feat: support more models

This commit is contained in:
Mokou 2023-04-05 14:14:41 +08:00
parent bd7567fa85
commit 62fab0c263
2 changed files with 24 additions and 13 deletions

View File

@ -9,6 +9,7 @@ const makeRequestParam = (
messages: Message[], messages: Message[],
options?: { options?: {
filterBot?: boolean; filterBot?: boolean;
model?: string;
stream?: boolean; stream?: boolean;
}, },
): ChatRequest => { ): ChatRequest => {
@ -22,7 +23,7 @@ const makeRequestParam = (
} }
return { return {
model: "gpt-3.5-turbo", model: options?.model || "gpt-3.5-turbo",
messages: sendMessages, messages: sendMessages,
stream: options?.stream, stream: options?.stream,
}; };
@ -57,8 +58,11 @@ export function requestOpenaiClient(path: string) {
}); });
} }
export async function requestChat(messages: Message[]) { export async function requestChat(messages: Message[], model: string) {
const req: ChatRequest = makeRequestParam(messages, { filterBot: true }); const req: ChatRequest = makeRequestParam(messages, {
filterBot: true,
model,
});
const res = await requestOpenaiClient("v1/chat/completions")(req); const res = await requestOpenaiClient("v1/chat/completions")(req);
@ -121,6 +125,7 @@ export async function requestChatStream(
const req = makeRequestParam(messages, { const req = makeRequestParam(messages, {
stream: true, stream: true,
filterBot: options?.filterBot, filterBot: options?.filterBot,
model: options?.modelConfig?.model,
}); });
console.log("[Request] ", req); console.log("[Request] ", req);
@ -184,7 +189,11 @@ export async function requestChatStream(
} }
} }
export async function requestWithPrompt(messages: Message[], prompt: string) { export async function requestWithPrompt(
messages: Message[],
prompt: string,
model: string,
) {
messages = messages.concat([ messages = messages.concat([
{ {
role: "user", role: "user",
@ -193,7 +202,7 @@ export async function requestWithPrompt(messages: Message[], prompt: string) {
}, },
]); ]);
const res = await requestChat(messages); const res = await requestChat(messages, model);
return res?.choices?.at(0)?.message?.content ?? ""; return res?.choices?.at(0)?.message?.content ?? "";
} }

View File

@ -417,6 +417,7 @@ export const useChatStore = create<ChatStore>()(
summarizeSession() { summarizeSession() {
const session = get().currentSession(); const session = get().currentSession();
const config = get().config;
// should summarize topic after chatting more than 50 words // should summarize topic after chatting more than 50 words
const SUMMARIZE_MIN_LEN = 50; const SUMMARIZE_MIN_LEN = 50;
@ -424,16 +425,17 @@ export const useChatStore = create<ChatStore>()(
session.topic === DEFAULT_TOPIC && session.topic === DEFAULT_TOPIC &&
countMessages(session.messages) >= SUMMARIZE_MIN_LEN countMessages(session.messages) >= SUMMARIZE_MIN_LEN
) { ) {
requestWithPrompt(session.messages, Locale.Store.Prompt.Topic).then( requestWithPrompt(
(res) => { session.messages,
Locale.Store.Prompt.Topic,
config.modelConfig.model,
).then((res) => {
get().updateCurrentSession( get().updateCurrentSession(
(session) => (session.topic = trimTopic(res)), (session) => (session.topic = trimTopic(res)),
); );
}, });
);
} }
const config = get().config;
let toBeSummarizedMsgs = session.messages.slice( let toBeSummarizedMsgs = session.messages.slice(
session.lastSummarizeIndex, session.lastSummarizeIndex,
); );