Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-11-14 21:13:47 +08:00)
feat: web search
@@ -52,6 +52,8 @@ import PluginIcon from "../icons/plugin.svg";
 import ShortcutkeyIcon from "../icons/shortcutkey.svg";
 import ReloadIcon from "../icons/reload.svg";
 import HeadphoneIcon from "../icons/headphone.svg";
+import SearchCloseIcon from "../icons/search_close.svg";
+import SearchOpenIcon from "../icons/search_open.svg";
 import {
   ChatMessage,
   SubmitKey,
@@ -509,6 +511,17 @@ export function ChatActions(props: {
   const pluginStore = usePluginStore();
   const session = chatStore.currentSession();
 
+  // switch web search
+  const webSearch = chatStore.currentSession().mask.webSearch;
+  function switchWebSearch() {
+    chatStore.updateTargetSession(session, (session) => {
+      session.mask.webSearch =
+        !session.mask.webSearch &&
+        !isFunctionCallModel(currentModel) &&
+        isEnableWebSearch;
+    });
+  }
+
   // switch Plugins
   const usePlugins = chatStore.currentSession().mask.usePlugins;
   function switchUsePlugins() {
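The guard inside switchWebSearch means the toggle only ever lands on true when the current model has no native function calling and the server-side search flag is enabled. A minimal sketch of that truth table as a pure function, with hypothetical parameter names standing in for isFunctionCallModel(currentModel) and the memoized isEnableWebSearch:

// Sketch only, not the app's store code: the same guard as switchWebSearch.
function nextWebSearchState(
  current: boolean,
  modelHasFunctionCall: boolean, // stands in for isFunctionCallModel(currentModel)
  serverEnablesWebSearch: boolean, // stands in for accessStore.enableWebSearch()
): boolean {
  // Enabling only succeeds for non-function-call models with the server flag on;
  // toggling while already enabled always resolves back to false.
  return !current && !modelHasFunctionCall && serverEnablesWebSearch;
}

// nextWebSearchState(false, false, true) === true   // can turn on
// nextWebSearchState(true,  false, true) === false  // turns back off
// nextWebSearchState(false, true,  true) === false  // model calls tools natively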
@@ -593,6 +606,11 @@ export function ChatActions(props: {
     // eslint-disable-next-line react-hooks/exhaustive-deps
     [],
   );
+  const isEnableWebSearch = useMemo(
+    () => accessStore.enableWebSearch(),
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+    [],
+  );
 
   useEffect(() => {
     const show = isVisionModel(currentModel);
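With an empty dependency list, the memo above reads the server flag once per mount and reuses that snapshot on every re-render rather than subscribing to the store. A small illustration of the same pattern; the hook name here is mine, not part of the app:

import { useMemo } from "react";

// Sketch: evaluate a boolean accessor once per mount and reuse the snapshot,
// mirroring how isEnableWebSearch is computed in the hunk above.
function useStaticFlag(read: () => boolean): boolean {
  // eslint-disable-next-line react-hooks/exhaustive-deps
  return useMemo(() => read(), []);
}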
@@ -723,6 +741,17 @@ export function ChatActions(props: {
           text={currentModelName}
           icon={<RobotIcon />}
         />
+        {!isFunctionCallModel(currentModel) && isEnableWebSearch && (
+          <ChatAction
+            onClick={switchWebSearch}
+            text={
+              webSearch
+                ? Locale.Chat.InputActions.CloseWebSearch
+                : Locale.Chat.InputActions.OpenWebSearch
+            }
+            icon={webSearch ? <SearchOpenIcon /> : <SearchCloseIcon />}
+          />
+        )}
 
         {showModelSelector && (
           <SearchSelector
@@ -1351,7 +1380,12 @@ function _Chat() {
     const textContent = getMessageTextContent(userMessage);
     const images = getMessageImages(userMessage);
     chatStore
-      .onUserInput(textContent, images, userMessage.fileInfos)
+      .onUserInput(
+        textContent,
+        images,
+        userMessage.fileInfos,
+        userMessage.webSearchReferences,
+      )
       .then(() => setIsLoading(false));
     inputRef.current?.focus();
   };
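The resend path now forwards the message's saved Tavily results along with the text, images, and file list. A hedged reading of what the call site implies about the store method; the real declaration lives in the chat store and may differ, and FileInfo is a hypothetical stand-in:

import type { TavilySearchResponse } from "@tavily/core";

// Hypothetical placeholder for the app's file attachment type.
interface FileInfo {
  name: string;
  url: string;
}

// Signature inferred from the call above; not the store's actual declaration.
type OnUserInput = (
  content: string,
  images: string[],
  fileInfos?: FileInfo[],
  webSearchReferences?: TavilySearchResponse,
) => Promise<void>;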
@@ -1432,34 +1466,36 @@ function _Chat() {
 
   // preview messages
   const renderMessages = useMemo(() => {
-    return context
-      .concat(session.messages as RenderMessage[])
-      .concat(
-        isLoading
-          ? [
-              {
-                ...createMessage({
-                  role: "assistant",
-                  content: "……",
-                }),
-                preview: true,
-              },
-            ]
-          : [],
-      )
-      .concat(
-        userInput.length > 0 && config.sendPreviewBubble
-          ? [
-              {
-                ...createMessage({
-                  role: "user",
-                  content: userInput,
-                }),
-                preview: true,
-              },
-            ]
-          : [],
-      );
+    return (
+      context
+        .concat(session.messages as RenderMessage[])
+        // .concat(
+        //   isLoading
+        //     ? [
+        //         {
+        //           ...createMessage({
+        //             role: "assistant",
+        //             content: "……",
+        //           }),
+        //           preview: true,
+        //         },
+        //       ]
+        //     : [],
+        // )
+        .concat(
+          userInput.length > 0 && config.sendPreviewBubble
+            ? [
+                {
+                  ...createMessage({
+                    role: "user",
+                    content: userInput,
+                  }),
+                  preview: true,
+                },
+              ]
+            : [],
+        )
+    );
   }, [
     config.sendPreviewBubble,
     context,
@@ -2093,6 +2129,7 @@ function _Chat() {
                   <Markdown
                     key={message.streaming ? "loading" : "done"}
                     content={getMessageTextContent(message)}
+                    webSearchReferences={message.webSearchReferences}
                     loading={
                       (message.preview || message.streaming) &&
                       message.content.length === 0 &&
@@ -2140,9 +2177,9 @@ function _Chat() {
                   </div>
                 )}
               </div>
-              {message?.audio_url && (
+              {message?.audioUrl && (
                 <div className={styles["chat-message-audio"]}>
-                  <audio src={message.audio_url} controls />
+                  <audio src={message.audioUrl} controls />
                 </div>
               )}
 
@@ -23,6 +23,7 @@ import { useChatStore } from "../store";
 import { IconButton } from "./button";
 
 import { useAppConfig } from "../store/config";
+import { TavilySearchResponse } from "@tavily/core";
 
 export function Mermaid(props: { code: string }) {
   const ref = useRef<HTMLDivElement>(null);
@@ -273,10 +274,20 @@ function tryWrapHtmlCode(text: string) {
   );
 }
 
-function _MarkDownContent(props: { content: string }) {
+function _MarkDownContent(props: {
+  content: string;
+  webSearchReferences?: TavilySearchResponse;
+}) {
   const escapedContent = useMemo(() => {
-    return tryWrapHtmlCode(escapeBrackets(props.content));
-  }, [props.content]);
+    let content = tryWrapHtmlCode(escapeBrackets(props.content));
+    if (props.webSearchReferences?.results) {
+      content = content.replace(/\[citation:(\d+)\]/g, (match, index) => {
+        const result = props.webSearchReferences?.results[parseInt(index) - 1];
+        return result ? `[\[${index}\]](${result.url})` : match;
+      });
+    }
+    return content;
+  }, [props.content, props.webSearchReferences]);
 
   return (
     <ReactMarkdown
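The rewrite above turns inline markers such as [citation:2] into markdown links pointing at the matching Tavily result; markers without a matching result are left untouched. A self-contained sketch of the same transform, with a minimal result type in place of TavilySearchResponse and the bracket escaping of the original omitted for clarity:

// Sketch of the [citation:N] -> markdown link rewrite shown in the hunk above.
interface SearchResult {
  url: string;
  title?: string;
}

function linkCitations(markdown: string, results: SearchResult[]): string {
  return markdown.replace(/\[citation:(\d+)\]/g, (match, index) => {
    // Citation indices are 1-based; keep the raw marker when out of range.
    const result = results[parseInt(index, 10) - 1];
    return result ? `[${index}](${result.url})` : match;
  });
}

// linkCitations("Rust is memory safe [citation:1]", [{ url: "https://www.rust-lang.org" }])
//   === "Rust is memory safe [1](https://www.rust-lang.org)"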
@@ -332,6 +343,7 @@ export function Markdown(
     fontFamily?: string;
     parentRef?: RefObject<HTMLDivElement>;
     defaultShow?: boolean;
+    webSearchReferences?: TavilySearchResponse;
   } & React.DOMAttributes<HTMLDivElement>,
 ) {
   const mdRef = useRef<HTMLDivElement>(null);
@@ -351,7 +363,10 @@ export function Markdown(
       {props.loading ? (
         <LoadingIcon />
       ) : (
-        <MarkdownContent content={props.content} />
+        <MarkdownContent
+          content={props.content}
+          webSearchReferences={props.webSearchReferences}
+        />
       )}
     </div>
   );
@@ -191,11 +191,11 @@ export function RealtimeChat({
         });
       }
       if (hasAudio) {
-        // upload audio get audio_url
+        // upload audio get audioUrl
         const blob = audioHandlerRef.current?.savePlayFile();
-        uploadImage(blob!).then((audio_url) => {
-          botMessage.audio_url = audio_url;
-          // update text and audio_url
+        uploadImage(blob!).then((audioUrl) => {
+          botMessage.audioUrl = audioUrl;
+          // update text and audioUrl
           chatStore.updateTargetSession(session, (session) => {
             session.messages = session.messages.concat();
           });
@@ -215,15 +215,15 @@ export function RealtimeChat({
       chatStore.updateTargetSession(session, (session) => {
         session.messages = session.messages.concat([userMessage]);
       });
-      // save input audio_url, and update session
+      // save input audioUrl, and update session
      const { audioStartMillis, audioEndMillis } = item;
-      // upload audio get audio_url
+      // upload audio get audioUrl
       const blob = audioHandlerRef.current?.saveRecordFile(
         audioStartMillis,
         audioEndMillis,
       );
-      uploadImage(blob!).then((audio_url) => {
-        userMessage.audio_url = audio_url;
+      uploadImage(blob!).then((audioUrl) => {
+        userMessage.audioUrl = audioUrl;
         chatStore.updateTargetSession(session, (session) => {
           session.messages = session.messages.concat();
         });
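Both realtime call sites follow the same upload-then-attach pattern: the recorded blob is uploaded first, and only the returned URL is kept on the message. A hedged sketch with a generic uploader standing in for the app's uploadImage helper:

// Sketch of the pattern above; `upload` is a stand-in for uploadImage.
async function attachAudio(
  message: { audioUrl?: string },
  blob: Blob,
  upload: (blob: Blob) => Promise<string>,
): Promise<void> {
  // Persist the blob remotely, then keep only the returned URL on the message.
  message.audioUrl = await upload(blob);
}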