Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-10-03 16:46:40 +08:00)

Commit e0e5549c8b: Merge branch 'ChatGPTNextWeb:main' into main
@@ -29,7 +29,7 @@ import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";

 export class GeminiProApi implements LLMApi {
-  path(path: string): string {
+  path(path: string, shouldStream = false): string {
     const accessStore = useAccessStore.getState();

     let baseUrl = "";
@@ -51,8 +51,10 @@ export class GeminiProApi implements LLMApi {
     console.log("[Proxy Endpoint] ", baseUrl, path);

     let chatPath = [baseUrl, path].join("/");
+    if (shouldStream) {
+      chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
+    }

-    chatPath += chatPath.includes("?") ? "&alt=sse" : "?alt=sse";
     return chatPath;
   }
   extractMessage(res: any) {
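
Note: taking the two hunks above together, path() now receives a shouldStream flag and only appends Gemini's alt=sse query parameter, which switches the REST endpoint into server-sent-events mode, when a streamed response is actually wanted. A small standalone sketch of the separator logic follows; the example paths are made up and not part of this commit.

    // Sketch: how the ternary picks the right query-string separator.
    const withoutQuery = "models/gemini-pro:streamGenerateContent";
    const withQuery = "models/gemini-pro:streamGenerateContent?key=abc";

    const appendSse = (p: string) =>
      p + (p.includes("?") ? "&alt=sse" : "?alt=sse");

    console.log(appendSse(withoutQuery));
    // models/gemini-pro:streamGenerateContent?alt=sse
    console.log(appendSse(withQuery));
    // models/gemini-pro:streamGenerateContent?key=abc&alt=sse
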
@@ -60,6 +62,7 @@ export class GeminiProApi implements LLMApi {

     return (
       res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
+      res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
       res?.error?.message ||
       ""
     );
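
Note: the added fallback in extractMessage handles replies that arrive as an array of response chunks rather than a single object. A hedged sketch of what the new optional-chaining branch accepts; the payload shapes are illustrative only, not taken from this commit.

    // Illustrative payloads only.
    const objectShaped = {
      candidates: [{ content: { parts: [{ text: "hello" }] } }],
    };
    const arrayShaped = [
      { candidates: [{ content: { parts: [{ text: "hello" }] } }] },
    ];

    const extract = (res: any) =>
      res?.candidates?.at(0)?.content?.parts.at(0)?.text ||
      res?.at(0)?.candidates?.at(0)?.content?.parts.at(0)?.text ||
      res?.error?.message ||
      "";

    console.log(extract(objectShaped)); // "hello" via the original branch
    console.log(extract(arrayShaped)); // "hello" via the new array fallback
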
@@ -166,7 +169,10 @@ export class GeminiProApi implements LLMApi {
     options.onController?.(controller);
     try {
       // https://github.com/google-gemini/cookbook/blob/main/quickstarts/rest/Streaming_REST.ipynb
-      const chatPath = this.path(Google.ChatPath(modelConfig.model));
+      const chatPath = this.path(
+        Google.ChatPath(modelConfig.model),
+        shouldStream,
+      );

       const chatPayload = {
         method: "POST",
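
Note: the call site simply threads the streaming flag through to path(). The hunk does not show where shouldStream is defined; presumably it is derived from the request's streaming option earlier in chat(), for example something like const shouldStream = !!options.config.stream; (an assumption, not visible in this diff).
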
@@ -960,9 +960,24 @@ function _Chat() {
           (scrollRef.current.scrollTop + scrollRef.current.clientHeight),
       ) <= 1
     : false;
+  const isAttachWithTop = useMemo(() => {
+    const lastMessage = scrollRef.current?.lastElementChild as HTMLElement;
+    // if scrolllRef is not ready or no message, return false
+    if (!scrollRef?.current || !lastMessage) return false;
+    const topDistance =
+      lastMessage!.getBoundingClientRect().top -
+      scrollRef.current.getBoundingClientRect().top;
+    // leave some space for user question
+    return topDistance < 100;
+  }, [scrollRef?.current?.scrollHeight]);
+
+  const isTyping = userInput !== "";
+
+  // if user is typing, should auto scroll to bottom
+  // if user is not typing, should auto scroll to bottom only if already at bottom
   const { setAutoScroll, scrollDomToBottom } = useScrollToBottom(
     scrollRef,
-    isScrolledToBottom,
+    (isScrolledToBottom || isAttachWithTop) && !isTyping,
   );
   const [hitBottom, setHitBottom] = useState(true);
   const isMobileScreen = useMobileScreen();
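
Note: the scroll condition now also keeps following output while the newest message still sits near the top of the viewport, and it backs off while the user is typing. A rough standalone sketch of the isAttachWithTop geometry and the combined flag handed to useScrollToBottom; the element lookup and the 100px threshold come from the hunk, everything else is illustrative.

    // Sketch: is the last message still within ~100px of the container top?
    function isAttachedToTop(scrollEl: HTMLElement | null): boolean {
      const lastMessage = scrollEl?.lastElementChild as HTMLElement | null;
      if (!scrollEl || !lastMessage) return false;
      const topDistance =
        lastMessage.getBoundingClientRect().top -
        scrollEl.getBoundingClientRect().top;
      // leave some space for the user's question above the streaming answer
      return topDistance < 100;
    }

    // Sketch: the combined flag passed as the second argument to useScrollToBottom.
    function scrollFlag(
      isScrolledToBottom: boolean,
      attachedToTop: boolean,
      userInput: string,
    ): boolean {
      const isTyping = userInput !== "";
      return (isScrolledToBottom || attachedToTop) && !isTyping;
    }
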
@@ -2071,6 +2086,6 @@ function _Chat() {

 export function Chat() {
   const chatStore = useChatStore();
-  const sessionIndex = chatStore.currentSessionIndex;
-  return <_Chat key={sessionIndex}></_Chat>;
+  const session = chatStore.currentSession();
+  return <_Chat key={session.id}></_Chat>;
 }
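
Note: keying <_Chat> by the session's stable id instead of its list index means React only remounts the chat (and resets its local state) when the user actually switches sessions, not when the same session merely changes position, for example after another session is deleted. A minimal illustration of why index keys are fragile; the component and data below are invented for the example.

    import React from "react";

    // Hypothetical component and data, for illustration only.
    function SessionView({ session }: { session: { id: string; topic: string } }) {
      return <div>{session.topic}</div>;
    }

    const sessions = [
      { id: "a1", topic: "First chat" },
      { id: "b2", topic: "Second chat" },
    ];

    // Index key: deleting sessions[0] shifts every index, so the surviving
    // session inherits key={0} and React reuses the old component instance
    // (and its local state) for a different session.
    const byIndex = sessions.map((s, i) => <SessionView key={i} session={s} />);

    // Id key: component identity follows the session itself, so per-session
    // state cannot leak across deletions or reordering.
    const byId = sessions.map((s) => <SessionView key={s.id} session={s} />);
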
@@ -90,7 +90,11 @@ export function PreCode(props: { children: any }) {
       const refText = ref.current.querySelector("code")?.innerText;
       if (htmlDom) {
         setHtmlCode((htmlDom as HTMLElement).innerText);
-      } else if (refText?.startsWith("<!DOCTYPE")) {
+      } else if (
+        refText?.startsWith("<!DOCTYPE") ||
+        refText?.startsWith("<svg") ||
+        refText?.startsWith("<?xml")
+      ) {
         setHtmlCode(refText);
       }
     }, 600);
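
Note: the inline-HTML preview trigger previously only fired for code blocks beginning with <!DOCTYPE; it now also recognizes inline SVG and documents with an XML prologue. A small sketch of just the detection; the sample strings are invented.

    // Illustrative inputs only.
    const samples = [
      "<!DOCTYPE html><html><body>hi</body></html>",
      '<svg xmlns="http://www.w3.org/2000/svg"><circle r="4" /></svg>',
      '<?xml version="1.0"?><note>hi</note>',
      "console.log('not markup');",
    ];

    const looksLikeMarkup = (refText?: string) =>
      refText?.startsWith("<!DOCTYPE") ||
      refText?.startsWith("<svg") ||
      refText?.startsWith("<?xml");

    samples.forEach((s) => console.log(Boolean(looksLikeMarkup(s))));
    // true, true, true, false
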
@@ -244,6 +248,10 @@ function escapeBrackets(text: string) {

 function tryWrapHtmlCode(text: string) {
   // try add wrap html code (fixed: html codeblock include 2 newline)
+  // ignore embed codeblock
+  if (text.includes("```")) {
+    return text;
+  }
   return text
     .replace(
       /([`]*?)(\w*?)([\n\r]*?)(<!DOCTYPE html>)/g,
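
Note: the early return keeps tryWrapHtmlCode from re-wrapping text that already contains a fenced code block, which would otherwise corrupt HTML the model has already fenced. A simplified sketch of the guard; the fallback wrapping below only marks where the real regex replacement would run and is not the actual implementation.

    // Illustrative only.
    const alreadyFenced = "```html\n<!DOCTYPE html><p>hi</p>\n```";
    const bareHtml = "<!DOCTYPE html><p>hi</p>";

    function guardedWrap(text: string): string {
      // ignore embedded code blocks: wrapping again would corrupt them
      if (text.includes("```")) {
        return text;
      }
      // the real function applies the regex replacements shown in the hunk;
      // a naive stand-in is used here just to make the sketch runnable
      return "```html\n" + text + "\n```";
    }

    console.log(guardedWrap(alreadyFenced) === alreadyFenced); // true
    console.log(guardedWrap(bareHtml).startsWith("```html")); // true
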
@@ -257,11 +257,11 @@ export function isVisionModel(model: string) {
   const excludeKeywords = ["claude-3-5-haiku-20241022"];
   const visionKeywords = [
     "vision",
-    "claude-3",
-    "gemini-1.5-pro",
-    "gemini-1.5-flash",
     "gpt-4o",
-    "gpt-4o-mini",
+    "claude-3",
+    "gemini-1.5",
+    "qwen-vl",
+    "qwen2-vl",
   ];
   const isGpt4Turbo =
     model.includes("gpt-4-turbo") && !model.includes("preview");
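
Note: the keyword list now matches every gemini-1.5 variant with a single substring, drops the redundant gpt-4o-mini entry (already covered by "gpt-4o"), and adds Qwen's vision-language models. A sketch of the substring matching after the change; the helper below is a simplified stand-in for isVisionModel and ignores the excludeKeywords and gpt-4-turbo handling shown in the context lines.

    // Simplified stand-in for the keyword check inside isVisionModel.
    const visionKeywords = [
      "vision",
      "gpt-4o",
      "claude-3",
      "gemini-1.5",
      "qwen-vl",
      "qwen2-vl",
    ];

    const matchesVisionKeyword = (model: string) =>
      visionKeywords.some((keyword) => model.includes(keyword));

    console.log(matchesVisionKeyword("gemini-1.5-flash-8b")); // true via "gemini-1.5"
    console.log(matchesVisionKeyword("qwen2-vl-72b-instruct")); // true via "qwen2-vl"
    console.log(matchesVisionKeyword("gpt-4o-mini")); // still true via "gpt-4o"
    console.log(matchesVisionKeyword("gpt-3.5-turbo")); // false
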