From 62fab0c2634a9aaf526f84bc32455190ffa290b2 Mon Sep 17 00:00:00 2001
From: Mokou
Date: Wed, 5 Apr 2023 14:14:41 +0800
Subject: [PATCH] feat: support more models

---
 app/requests.ts  | 19 ++++++++++++++-----
 app/store/app.ts | 18 ++++++++++--------
 2 files changed, 24 insertions(+), 13 deletions(-)

diff --git a/app/requests.ts b/app/requests.ts
index da9b5c97f..06d2521e1 100644
--- a/app/requests.ts
+++ b/app/requests.ts
@@ -9,6 +9,7 @@ const makeRequestParam = (
   messages: Message[],
   options?: {
     filterBot?: boolean;
+    model?: string;
     stream?: boolean;
   },
 ): ChatRequest => {
@@ -22,7 +23,7 @@ const makeRequestParam = (
   }
 
   return {
-    model: "gpt-3.5-turbo",
+    model: options?.model || "gpt-3.5-turbo",
     messages: sendMessages,
     stream: options?.stream,
   };
@@ -57,8 +58,11 @@ export function requestOpenaiClient(path: string) {
     });
 }
 
-export async function requestChat(messages: Message[]) {
-  const req: ChatRequest = makeRequestParam(messages, { filterBot: true });
+export async function requestChat(messages: Message[], model: string) {
+  const req: ChatRequest = makeRequestParam(messages, {
+    filterBot: true,
+    model,
+  });
 
   const res = await requestOpenaiClient("v1/chat/completions")(req);
 
@@ -121,6 +125,7 @@ export async function requestChatStream(
   const req = makeRequestParam(messages, {
     stream: true,
     filterBot: options?.filterBot,
+    model: options?.modelConfig?.model,
   });
 
   console.log("[Request] ", req);
@@ -184,7 +189,11 @@ export async function requestChatStream(
   }
 }
 
-export async function requestWithPrompt(messages: Message[], prompt: string) {
+export async function requestWithPrompt(
+  messages: Message[],
+  prompt: string,
+  model: string,
+) {
   messages = messages.concat([
     {
       role: "user",
@@ -193,7 +202,7 @@ export async function requestWithPrompt(
     },
   ]);
 
-  const res = await requestChat(messages);
+  const res = await requestChat(messages, model);
 
   return res?.choices?.at(0)?.message?.content ?? "";
 }
diff --git a/app/store/app.ts b/app/store/app.ts
index d01e3cdd5..8c38a7ddf 100644
--- a/app/store/app.ts
+++ b/app/store/app.ts
@@ -417,6 +417,7 @@ export const useChatStore = create<ChatStore>()(
 
       summarizeSession() {
         const session = get().currentSession();
+        const config = get().config;
         // should summarize topic after chating more than 50 words
         const SUMMARIZE_MIN_LEN = 50;
         if (
@@ -424,16 +425,17 @@ export const useChatStore = create<ChatStore>()(
           session.topic === DEFAULT_TOPIC &&
           countMessages(session.messages) >= SUMMARIZE_MIN_LEN
         ) {
-          requestWithPrompt(session.messages, Locale.Store.Prompt.Topic).then(
-            (res) => {
-              get().updateCurrentSession(
-                (session) => (session.topic = trimTopic(res)),
-              );
-            },
-          );
+          requestWithPrompt(
+            session.messages,
+            Locale.Store.Prompt.Topic,
+            config.modelConfig.model,
+          ).then((res) => {
+            get().updateCurrentSession(
+              (session) => (session.topic = trimTopic(res)),
+            );
+          });
         }
 
-        const config = get().config;
         let toBeSummarizedMsgs = session.messages.slice(
          session.lastSummarizeIndex,
        );
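Usage sketch (illustrative, not part of the patch): after this change, callers pass the configured model through to the request body, and makeRequestParam falls back to "gpt-3.5-turbo" when no model is supplied. Only requestChat, requestWithPrompt, useChatStore, currentSession, and config.modelConfig.model come from the diff above; the wrapper function name, the relative import paths, and the literal prompt string are assumptions for illustration.

// Hypothetical caller sketching the new signatures; adjust import paths to this file's location.
import { requestChat, requestWithPrompt } from "./requests";
import { useChatStore } from "./store/app";

async function demoConfiguredModel() {
  const store = useChatStore.getState();
  const session = store.currentSession();
  const model = store.config.modelConfig.model;

  // Non-streaming chat completion now takes the model explicitly.
  const res = await requestChat(session.messages, model);
  console.log(res?.choices?.at(0)?.message?.content);

  // requestWithPrompt appends a user prompt and forwards the same model.
  const topic = await requestWithPrompt(
    session.messages,
    "Summarize the topic of this conversation", // hypothetical prompt text
    model,
  );
  console.log(topic);
}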