From 1d14a991eedb17a492d6e840de71567c8a6884a7 Mon Sep 17 00:00:00 2001
From: Dogtiti <499960698@qq.com>
Date: Mon, 11 Nov 2024 20:30:59 +0800
Subject: [PATCH 1/8] fix: use current session id to trigger rerender

---
 app/components/chat.tsx | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index ed51d926f..5669cc9a3 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -2071,6 +2071,6 @@ function _Chat() {
 
 export function Chat() {
   const chatStore = useChatStore();
-  const sessionIndex = chatStore.currentSessionIndex;
-  return <_Chat key={sessionIndex}></_Chat>;
+  const session = chatStore.currentSession();
+  return <_Chat key={session.id}></_Chat>;
 }

From 8d66fedb1f5093d6e29ac06a839316edb535512d Mon Sep 17 00:00:00 2001
From: LovelyGuYiMeng <76251800+LovelyGuYiMeng@users.noreply.github.com>
Date: Tue, 12 Nov 2024 14:28:11 +0800
Subject: [PATCH 2/8] Update visionKeywords

---
 app/utils.ts | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/app/utils.ts b/app/utils.ts
index 2dd80b8a3..1c359ef95 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -257,11 +257,11 @@ export function isVisionModel(model: string) {
   const excludeKeywords = ["claude-3-5-haiku-20241022"];
   const visionKeywords = [
     "vision",
-    "claude-3",
-    "gemini-1.5-pro",
-    "gemini-1.5-flash",
     "gpt-4o",
-    "gpt-4o-mini",
+    "claude-3",
+    "gemini-1.5",
+    "qwen-vl",
+    "qwen2-vl",
   ];
   const isGpt4Turbo =
     model.includes("gpt-4-turbo") && !model.includes("preview");

From 9a86c42c95be5b2bb85f44c0bdeb7714dc526a49 Mon Sep 17 00:00:00 2001
From: opchips
Date: Tue, 12 Nov 2024 16:33:55 +0800
Subject: [PATCH 3/8] update

---
 app/components/markdown.tsx | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/app/components/markdown.tsx b/app/components/markdown.tsx
index 8ea731123..f58b16427 100644
--- a/app/components/markdown.tsx
+++ b/app/components/markdown.tsx
@@ -90,7 +90,11 @@ export function PreCode(props: { children: any }) {
     const refText = ref.current.querySelector("code")?.innerText;
     if (htmlDom) {
       setHtmlCode((htmlDom as HTMLElement).innerText);
-    } else if (refText?.startsWith("<!DOCTYPE")) {
+    } else if (
+      refText?.startsWith("<!DOCTYPE") ||
+      refText?.startsWith("<svg") ||
+      refText?.startsWith("<?xml")
+    ) {
       setHtmlCode(refText);
     }
   }, 600);

From: DDMeaqua
Date: Wed, 13 Nov 2024 14:24:44 +0800
Subject: [PATCH 4/8] fix: [#5308] gemini conversation summary
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/client/platforms/google.ts | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 53ff00aee..d5aa93af1 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -51,8 +51,10 @@ export class GeminiProApi implements LLMApi {
     console.log("[Proxy Endpoint] ", baseUrl, path);
 
     let chatPath = [baseUrl, path].join("/");
+    if (!chatPath.includes("gemini-pro")) {
+      chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
+    }
 
-    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
     return chatPath;
   }
   extractMessage(res: any) {
@@ -60,6 +62,7 @@ export class GeminiProApi implements LLMApi {
 
     return (
       res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
+      res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
       res?.error?.message ||
       ""
     );
@@ -167,6 +170,7 @@ export class GeminiProApi implements LLMApi {
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
       const chatPath = this.path(Google.ChatPath(modelConfig.model));
+      console.log("[Chat Path] ", chatPath);
 
       const chatPayload = {
         method: "POST",

From 0628ddfc6f36479650d50281e3fa0ba1a847f777 Mon Sep 17 00:00:00 2001
From: DDMeaqua
Date: Wed, 13 Nov 2024 14:27:41 +0800
Subject: [PATCH 5/8] chore: update

---
 app/client/platforms/google.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index d5aa93af1..30f35359e 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -170,7 +170,6 @@ export class GeminiProApi implements LLMApi {
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
       const chatPath = this.path(Google.ChatPath(modelConfig.model));
-      console.log("[Chat Path] ", chatPath);
 
       const chatPayload = {
         method: "POST",

From b41c012d27d5495bec12f6aa6f9537ebb6873083 Mon Sep 17 00:00:00 2001
From: DDMeaqua
Date: Wed, 13 Nov 2024 15:12:46 +0800
Subject: [PATCH 6/8] chore: shouldStream

---
 app/client/platforms/google.ts | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/app/client/platforms/google.ts b/app/client/platforms/google.ts
index 30f35359e..a7bce4fc2 100644
--- a/app/client/platforms/google.ts
+++ b/app/client/platforms/google.ts
@@ -29,7 +29,7 @@ import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";
 
 export class GeminiProApi implements LLMApi {
-  path(path: string): string {
+  path(path: string, shouldStream = false): string {
     const accessStore = useAccessStore.getState();
 
     let baseUrl = "";
@@ -51,7 +51,7 @@ export class GeminiProApi implements LLMApi {
     console.log("[Proxy Endpoint] ", baseUrl, path);
 
     let chatPath = [baseUrl, path].join("/");
-    if (!chatPath.includes("gemini-pro")) {
+    if (shouldStream) {
       chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
     }
 
@@ -169,7 +169,10 @@ export class GeminiProApi implements LLMApi {
     options.onController?.(controller);
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
-      const chatPath = this.path(Google.ChatPath(modelConfig.model));
+      const chatPath = this.path(
+        Google.ChatPath(modelConfig.model),
+        shouldStream,
+      );
 
       const chatPayload = {
         method: "POST",

From 19facc7c85a0e509b5d4ca1eaa98782f29477c9a Mon Sep 17 00:00:00 2001
From: Sherlock <1075773551@qq.com>
Date: Thu, 14 Nov 2024 21:31:45 +0800
Subject: [PATCH 7/8] feat: support more user-friendly scrolling

---
 app/components/chat.tsx | 17 ++++++++++++++++-
 1 file changed, 16 insertions(+), 1 deletion(-)

diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index 5669cc9a3..51fe74fe7 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -960,9 +960,24 @@ function _Chat() {
           (scrollRef.current.scrollTop + scrollRef.current.clientHeight),
       ) <= 1
     : false;
+  const isAttachWithTop = useMemo(() => {
+    const lastMessage = scrollRef.current?.lastElementChild as HTMLElement;
+    // if scrollRef is not ready or no message, return false
+    if (!scrollRef?.current || !lastMessage) return false;
+    const topDistance =
+      lastMessage!.getBoundingClientRect().top -
+      scrollRef.current.getBoundingClientRect().top;
+    // leave some space for user question
+    return topDistance < 100;
+  }, [scrollRef?.current?.scrollHeight]);
+
+  const isTyping = userInput !== "";
+
+  // if user is typing, should auto scroll to bottom
+  // if user is not typing, should auto scroll to bottom only if already at bottom
   const { setAutoScroll, scrollDomToBottom } = useScrollToBottom(
     scrollRef,
-    isScrolledToBottom,
+    (isScrolledToBottom || isAttachWithTop) && !isTyping,
   );
   const [hitBottom, setHitBottom] = useState(true);
   const isMobileScreen = useMobileScreen();

From e56216549efe58c1b734f5094eb77bfaa6654c69 Mon Sep 17 00:00:00 2001
From: opchips
Date: Fri, 15 Nov 2024 11:56:26 +0800
Subject: [PATCH 8/8] fix: rendering error when a code block embeds a smaller code block
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/components/markdown.tsx | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/app/components/markdown.tsx b/app/components/markdown.tsx
index f58b16427..ba85f0970 100644
--- a/app/components/markdown.tsx
+++ b/app/components/markdown.tsx
@@ -248,6 +248,10 @@ function escapeBrackets(text: string) {
 
 function tryWrapHtmlCode(text: string) {
   // try add wrap html code (fixed: html codeblock include 2 newline)
+  // ignore embed codeblock
+  if (text.includes("```")) {
+    return text;
+  }
   return text
     .replace(
       /([`]*?)(\w*?)([\n\r]*?)(<!DOCTYPE html>)/g,