Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git

Merge branch 'main' into feat-drop-upload
Commit 423fbff624
@@ -13,7 +13,9 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
   if (config.disableGPT4) {
     remoteModelRes.data = remoteModelRes.data.filter(
-      (m) => !m.id.startsWith("gpt-4") || m.id.startsWith("gpt-4o-mini"),
+      (m) =>
+        !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
+        m.id.startsWith("gpt-4o-mini"),
     );
   }
@@ -203,7 +203,7 @@ export class ClaudeApi implements LLMApi {
     const [tools, funcs] = usePluginStore
       .getState()
       .getAsTools(
-        useChatStore.getState().currentSession().mask?.plugin as string[],
+        useChatStore.getState().currentSession().mask?.plugin || [],
       );
     return stream(
       path,
@@ -125,7 +125,7 @@ export class MoonshotApi implements LLMApi {
     const [tools, funcs] = usePluginStore
       .getState()
       .getAsTools(
-        useChatStore.getState().currentSession().mask?.plugin as string[],
+        useChatStore.getState().currentSession().mask?.plugin || [],
       );
     return stream(
       chatPath,
@@ -160,6 +160,7 @@ export class ChatGPTApi implements LLMApi {
     let requestPayload: RequestPayload | DalleRequestPayload;

     const isDalle3 = _isDalle3(options.config.model);
+    const isO1 = options.config.model.startsWith("o1");
     if (isDalle3) {
       const prompt = getMessageTextContent(
         options.messages.slice(-1)?.pop() as any,
@@ -181,30 +182,32 @@ export class ChatGPTApi implements LLMApi {
       const content = visionModel
         ? await preProcessImageContent(v.content)
         : getMessageTextContent(v);
-      messages.push({ role: v.role, content });
+      if (!(isO1 && v.role === "system"))
+        messages.push({ role: v.role, content });
     }

+    // o1 does not yet support images, tools (plugins in ChatGPTNextWeb), system messages, stream, logprobs, temperature, top_p, n, presence_penalty, or frequency_penalty.
     requestPayload = {
       messages,
-      stream: options.config.stream,
+      stream: !isO1 ? options.config.stream : false,
       model: modelConfig.model,
-      temperature: modelConfig.temperature,
-      presence_penalty: modelConfig.presence_penalty,
-      frequency_penalty: modelConfig.frequency_penalty,
-      top_p: modelConfig.top_p,
+      temperature: !isO1 ? modelConfig.temperature : 1,
+      presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
+      frequency_penalty: !isO1 ? modelConfig.frequency_penalty : 0,
+      top_p: !isO1 ? modelConfig.top_p : 1,
       // max_tokens: Math.max(modelConfig.max_tokens, 1024),
       // max_tokens is deliberately not sent for chat completions.
     };

     // add max_tokens to vision model
-    if (visionModel && modelConfig.model.includes("preview")) {
+    if (visionModel) {
       requestPayload["max_tokens"] = Math.max(modelConfig.max_tokens, 4000);
     }
   }

   console.log("[Request] openai payload: ", requestPayload);

-  const shouldStream = !isDalle3 && !!options.config.stream;
+  const shouldStream = !isDalle3 && !!options.config.stream && !isO1;
   const controller = new AbortController();
   options.onController?.(controller);
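The o1 handling above boils down to one rule: when the requested model is an o1 variant, force non-streaming and reset the sampling parameters to their defaults. The following is an illustrative sketch of that rule as a standalone helper; the payload shape is assumed for illustration and is not the repository's exact RequestPayload type.

// Sketch only: collapses the o1 constraints shown in the diff above into one helper.
interface ChatPayloadSketch {
  stream: boolean;
  temperature: number;
  top_p: number;
  presence_penalty: number;
  frequency_penalty: number;
}

function applyO1Constraints(
  payload: ChatPayloadSketch,
  isO1: boolean,
): ChatPayloadSketch {
  if (!isO1) return payload;
  return {
    ...payload,
    stream: false, // o1 responses cannot be streamed yet
    temperature: 1, // o1 only accepts the default sampling settings
    top_p: 1,
    presence_penalty: 0,
    frequency_penalty: 0,
  };
}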
@@ -244,7 +247,7 @@ export class ChatGPTApi implements LLMApi {
     const [tools, funcs] = usePluginStore
       .getState()
       .getAsTools(
-        useChatStore.getState().currentSession().mask?.plugin as string[],
+        useChatStore.getState().currentSession().mask?.plugin || [],
       );
     // console.log("getAsTools", tools, funcs);
     stream(
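The same fallback from "as string[]" to "|| []" appears in the Claude and Moonshot clients above, and it is paired with the "(ids || [])" guard added to getAsTools in the plugin store later in this diff. A minimal sketch of why the defensive default matters (identifiers taken from the diff):

// Sketch: a mask saved before plugins existed has no plugin list.
// `plugin as string[]` let undefined reach getAsTools, where ids.map(...) throws;
// `plugin || []` simply degrades to "no tools" instead.
const pluginIds: string[] =
  useChatStore.getState().currentSession().mask?.plugin || [];
const [tools, funcs] = usePluginStore.getState().getAsTools(pluginIds);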
@@ -313,7 +316,7 @@ export class ChatGPTApi implements LLMApi {
     // make a fetch request
     const requestTimeoutId = setTimeout(
       () => controller.abort(),
-      isDalle3 ? REQUEST_TIMEOUT_MS * 2 : REQUEST_TIMEOUT_MS, // dalle3 using b64_json is slow.
+      isDalle3 || isO1 ? REQUEST_TIMEOUT_MS * 2 : REQUEST_TIMEOUT_MS, // dalle3 using b64_json is slow.
     );

     const res = await fetch(chatPath, chatPayload);
@@ -407,7 +410,9 @@ export class ChatGPTApi implements LLMApi {
     });

     const resJson = (await res.json()) as OpenAIListModelResponse;
-    const chatModels = resJson.data?.filter((m) => m.id.startsWith("gpt-"));
+    const chatModels = resJson.data?.filter(
+      (m) => m.id.startsWith("gpt-") || m.id.startsWith("chatgpt-"),
+    );
     console.log("[Models]", chatModels);

     if (!chatModels) {
@@ -80,7 +80,7 @@ export const HTMLPreview = forwardRef<HTMLPreviewHander, HTMLPreviewProps>(
     }, [props.autoHeight, props.height, iframeHeight]);

     const srcDoc = useMemo(() => {
-      const script = `<script>new ResizeObserver((entries) => parent.postMessage({id: '${frameId}', height: entries[0].target.clientHeight}, '*')).observe(document.body)</script>`;
+      const script = `<script>window.addEventListener("DOMContentLoaded", () => new ResizeObserver((entries) => parent.postMessage({id: '${frameId}', height: entries[0].target.clientHeight}, '*')).observe(document.body))</script>`;
       if (props.code.includes("<!DOCTYPE html>")) {
         props.code.replace("<!DOCTYPE html>", "<!DOCTYPE html>" + script);
       }
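The change above defers creating the ResizeObserver until DOMContentLoaded: the script is injected right after the doctype, so at execution time document.body may not exist yet and the original call could throw. Expanded for readability, the injected script is roughly the following sketch; frameId is a placeholder for the value interpolated from the surrounding template.

// Illustrative, expanded form of the one-line injected script.
const frameId = "preview-frame"; // interpolated from the template in the real code
window.addEventListener("DOMContentLoaded", () =>
  new ResizeObserver((entries) =>
    parent.postMessage(
      { id: frameId, height: entries[0].target.clientHeight },
      "*",
    ),
  ).observe(document.body),
);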
@ -647,6 +647,53 @@
|
||||
}
|
||||
}
|
||||
|
||||
.shortcut-key-container {
|
||||
padding: 10px;
|
||||
overflow-y: auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.shortcut-key-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(350px, 1fr));
|
||||
gap: 16px;
|
||||
}
|
||||
|
||||
.shortcut-key-item {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
overflow: hidden;
|
||||
padding: 10px;
|
||||
background-color: var(--white);
|
||||
}
|
||||
|
||||
.shortcut-key-title {
|
||||
font-size: 14px;
|
||||
color: var(--black);
|
||||
}
|
||||
|
||||
.shortcut-key-keys {
|
||||
display: flex;
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.shortcut-key {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
border: var(--border-in-light);
|
||||
border-radius: 8px;
|
||||
padding: 4px;
|
||||
background-color: var(--gray);
|
||||
min-width: 32px;
|
||||
}
|
||||
|
||||
.shortcut-key span {
|
||||
font-size: 12px;
|
||||
color: var(--black);
|
||||
}
|
||||
.drag-overlay {
|
||||
display: none;
|
||||
}
|
||||
|
@ -42,6 +42,7 @@ import SizeIcon from "../icons/size.svg";
|
||||
import QualityIcon from "../icons/hd.svg";
|
||||
import StyleIcon from "../icons/palette.svg";
|
||||
import PluginIcon from "../icons/plugin.svg";
|
||||
import ShortcutkeyIcon from "../icons/shortcutkey.svg";
|
||||
import FileUploadIcon from "../icons/file-upload.svg";
|
||||
|
||||
import {
|
||||
@ -68,6 +69,7 @@ import {
|
||||
isVisionModel,
|
||||
isDalle3,
|
||||
showPlugins,
|
||||
safeLocalStorage,
|
||||
} from "../utils";
|
||||
|
||||
import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
|
||||
@ -110,6 +112,8 @@ import { getClientConfig } from "../config/client";
|
||||
import { useAllModels } from "../utils/hooks";
|
||||
import { MultimodalContent } from "../client/api";
|
||||
|
||||
const localStorage = safeLocalStorage();
|
||||
|
||||
const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
|
||||
loading: () => <LoadingIcon />,
|
||||
});
|
||||
@@ -189,7 +193,7 @@ function PromptToast(props: {

   return (
     <div className={styles["prompt-toast"]} key="prompt-toast">
-      {props.showToast && (
+      {props.showToast && context.length > 0 && (
         <div
           className={styles["prompt-toast-inner"] + " clickable"}
           role="button"
@ -438,6 +442,7 @@ export function ChatActions(props: {
|
||||
showPromptHints: () => void;
|
||||
hitBottom: boolean;
|
||||
uploading: boolean;
|
||||
setShowShortcutKeyModal: React.Dispatch<React.SetStateAction<boolean>>;
|
||||
}) {
|
||||
const config = useAppConfig();
|
||||
const navigate = useNavigate();
|
||||
@ -503,6 +508,8 @@ export function ChatActions(props: {
|
||||
const currentStyle =
|
||||
chatStore.currentSession().mask.modelConfig?.style ?? "vivid";
|
||||
|
||||
const isMobileScreen = useMobileScreen();
|
||||
|
||||
useEffect(() => {
|
||||
const show = isVisionModel(currentModel);
|
||||
setShowUploadImage(show);
|
||||
@@ -618,7 +625,7 @@ export function ChatActions(props: {
               items={models.map((m) => ({
                 title: `${m.displayName}${
                   m?.provider?.providerName
-                    ? "(" + m?.provider?.providerName + ")"
+                    ? " (" + m?.provider?.providerName + ")"
                     : ""
                 }`,
                 value: `${m.name}@${m?.provider?.providerName}`,
@ -756,6 +763,14 @@ export function ChatActions(props: {
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{!isMobileScreen && (
|
||||
<ChatAction
|
||||
onClick={() => props.setShowShortcutKeyModal(true)}
|
||||
text={Locale.Chat.ShortcutKey.Title}
|
||||
icon={<ShortcutkeyIcon />}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@ -830,6 +845,67 @@ export function DeleteImageButton(props: { deleteImage: () => void }) {
|
||||
);
|
||||
}
|
||||
|
||||
export function ShortcutKeyModal(props: { onClose: () => void }) {
|
||||
const isMac = navigator.platform.toUpperCase().indexOf("MAC") >= 0;
|
||||
const shortcuts = [
|
||||
{
|
||||
title: Locale.Chat.ShortcutKey.newChat,
|
||||
keys: isMac ? ["⌘", "Shift", "O"] : ["Ctrl", "Shift", "O"],
|
||||
},
|
||||
{ title: Locale.Chat.ShortcutKey.focusInput, keys: ["Shift", "Esc"] },
|
||||
{
|
||||
title: Locale.Chat.ShortcutKey.copyLastCode,
|
||||
keys: isMac ? ["⌘", "Shift", ";"] : ["Ctrl", "Shift", ";"],
|
||||
},
|
||||
{
|
||||
title: Locale.Chat.ShortcutKey.copyLastMessage,
|
||||
keys: isMac ? ["⌘", "Shift", "C"] : ["Ctrl", "Shift", "C"],
|
||||
},
|
||||
{
|
||||
title: Locale.Chat.ShortcutKey.showShortcutKey,
|
||||
keys: isMac ? ["⌘", "/"] : ["Ctrl", "/"],
|
||||
},
|
||||
];
|
||||
return (
|
||||
<div className="modal-mask">
|
||||
<Modal
|
||||
title={Locale.Chat.ShortcutKey.Title}
|
||||
onClose={props.onClose}
|
||||
actions={[
|
||||
<IconButton
|
||||
type="primary"
|
||||
text={Locale.UI.Confirm}
|
||||
icon={<ConfirmIcon />}
|
||||
key="ok"
|
||||
onClick={() => {
|
||||
props.onClose();
|
||||
}}
|
||||
/>,
|
||||
]}
|
||||
>
|
||||
<div className={styles["shortcut-key-container"]}>
|
||||
<div className={styles["shortcut-key-grid"]}>
|
||||
{shortcuts.map((shortcut, index) => (
|
||||
<div key={index} className={styles["shortcut-key-item"]}>
|
||||
<div className={styles["shortcut-key-title"]}>
|
||||
{shortcut.title}
|
||||
</div>
|
||||
<div className={styles["shortcut-key-keys"]}>
|
||||
{shortcut.keys.map((key, i) => (
|
||||
<div key={i} className={styles["shortcut-key"]}>
|
||||
<span>{key}</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</Modal>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function _Chat() {
|
||||
type RenderMessage = ChatMessage & { preview?: boolean };
|
||||
|
||||
@@ -942,7 +1018,7 @@ function _Chat() {
       .onUserInput(userInput, attachImages)
       .then(() => setIsLoading(false));
     setAttachImages([]);
-    localStorage.setItem(LAST_INPUT_KEY, userInput);
+    chatStore.setLastInput(userInput);
     setUserInput("");
     setPromptHints([]);
     if (!isMobileScreen) inputRef.current?.focus();

@@ -1008,7 +1084,7 @@ function _Chat() {
       userInput.length <= 0 &&
       !(e.metaKey || e.altKey || e.ctrlKey)
     ) {
-      setUserInput(localStorage.getItem(LAST_INPUT_KEY) ?? "");
+      setUserInput(chatStore.lastInput ?? "");
       e.preventDefault();
       return;
     }
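These two hunks move the "press ArrowUp to recall the last input" feature off raw localStorage and onto the chat store, which later in this diff gains a lastInput field and a setLastInput action. A usage sketch of the new store API (shapes inferred from the store changes further down):

// Sketch: lastInput now lives in the persisted chat store instead of LAST_INPUT_KEY.
chatStore.setLastInput("draft message"); // persisted via createPersistStore
const recalled = chatStore.lastInput ?? ""; // read back when ArrowUp is pressed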
@@ -1374,6 +1450,70 @@ function _Chat() {
     setAttachImages(images);
   }

+  // keyboard shortcuts
+  const [showShortcutKeyModal, setShowShortcutKeyModal] = useState(false);
+
+  useEffect(() => {
+    const handleKeyDown = (event: any) => {
+      // open a new chat: command/ctrl + shift + o
+      if (
+        (event.metaKey || event.ctrlKey) &&
+        event.shiftKey &&
+        event.key.toLowerCase() === "o"
+      ) {
+        event.preventDefault();
+        setTimeout(() => {
+          chatStore.newSession();
+          navigate(Path.Chat);
+        }, 10);
+      }
+      // focus the chat input: shift + esc
+      else if (event.shiftKey && event.key.toLowerCase() === "escape") {
+        event.preventDefault();
+        inputRef.current?.focus();
+      }
+      // copy the last code block: command/ctrl + shift + ;
+      else if (
+        (event.metaKey || event.ctrlKey) &&
+        event.shiftKey &&
+        event.code === "Semicolon"
+      ) {
+        event.preventDefault();
+        const copyCodeButton =
+          document.querySelectorAll<HTMLElement>(".copy-code-button");
+        if (copyCodeButton.length > 0) {
+          copyCodeButton[copyCodeButton.length - 1].click();
+        }
+      }
+      // copy the last reply: command/ctrl + shift + c
+      else if (
+        (event.metaKey || event.ctrlKey) &&
+        event.shiftKey &&
+        event.key.toLowerCase() === "c"
+      ) {
+        event.preventDefault();
+        const lastNonUserMessage = messages
+          .filter((message) => message.role !== "user")
+          .pop();
+        if (lastNonUserMessage) {
+          const lastMessageContent = getMessageTextContent(lastNonUserMessage);
+          copyToClipboard(lastMessageContent);
+        }
+      }
+      // show the shortcut list: command/ctrl + /
+      else if ((event.metaKey || event.ctrlKey) && event.key === "/") {
+        event.preventDefault();
+        setShowShortcutKeyModal(true);
+      }
+    };
+
+    window.addEventListener("keydown", handleKeyDown);
+
+    return () => {
+      window.removeEventListener("keydown", handleKeyDown);
+    };
+  }, [messages, chatStore, navigate]);
+
   const [showDragOverlay, setShowDragOverlay] = useState(false);

   const handleDragOver = (e: React.DragEvent<HTMLDivElement>) => {
@@ -1749,6 +1889,7 @@ function _Chat() {
               setUserInput("/");
               onSearch("");
             }}
+            setShowShortcutKeyModal={setShowShortcutKeyModal}
           />
           <label
             className={`${styles["chat-input-panel-inner"]} ${

@@ -1820,6 +1961,10 @@ function _Chat() {
           }}
         />
       )}
+
+      {showShortcutKeyModal && (
+        <ShortcutKeyModal onClose={() => setShowShortcutKeyModal(false)} />
+      )}
     </div>
   );
 }
@@ -36,7 +36,8 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
   if (props.model) {
     return (
       <div className="no-dark">
-        {props.model?.startsWith("gpt-4") ? (
+        {props.model?.startsWith("gpt-4") ||
+        props.model?.startsWith("chatgpt-4o") ? (
           <BlackBotIcon className="user-avatar" />
         ) : (
           <BotIcon className="user-avatar" />
@@ -8,6 +8,7 @@ import { ISSUE_URL } from "../constant";
 import Locale from "../locales";
 import { showConfirm } from "./ui-lib";
 import { useSyncStore } from "../store/sync";
+import { useChatStore } from "../store/chat";

 interface IErrorBoundaryState {
   hasError: boolean;

@@ -30,8 +31,7 @@ export class ErrorBoundary extends React.Component<any, IErrorBoundaryState> {
     try {
       useSyncStore.getState().export();
     } finally {
-      localStorage.clear();
-      location.reload();
+      useChatStore.getState().clearAllData();
     }
   }
@@ -237,9 +237,26 @@ function escapeBrackets(text: string) {
   );
 }

+function tryWrapHtmlCode(text: string) {
+  // wrap bare HTML output in a ```html code block (fixes HTML code blocks that contain double newlines)
+  return text
+    .replace(
+      /([`]*?)(\w*?)([\n\r]*?)(<!DOCTYPE html>)/g,
+      (match, quoteStart, lang, newLine, doctype) => {
+        return !quoteStart ? "\n```html\n" + doctype : match;
+      },
+    )
+    .replace(
+      /(<\/body>)([\r\n\s]*?)(<\/html>)([\n\r]*?)([`]*?)([\n\r]*?)/g,
+      (match, bodyEnd, space, htmlEnd, newLine, quoteEnd) => {
+        return !quoteEnd ? bodyEnd + space + htmlEnd + "\n```\n" : match;
+      },
+    );
+}
+
 function _MarkDownContent(props: { content: string }) {
   const escapedContent = useMemo(() => {
-    return escapeBrackets(escapeDollarNumber(props.content));
+    return tryWrapHtmlCode(escapeBrackets(escapeDollarNumber(props.content)));
   }, [props.content]);

   return (
@@ -426,16 +426,7 @@ export function MaskPage() {
   const maskStore = useMaskStore();
   const chatStore = useChatStore();

-  const [filterLang, setFilterLang] = useState<Lang | undefined>(
-    () => localStorage.getItem("Mask-language") as Lang | undefined,
-  );
-  useEffect(() => {
-    if (filterLang) {
-      localStorage.setItem("Mask-language", filterLang);
-    } else {
-      localStorage.removeItem("Mask-language");
-    }
-  }, [filterLang]);
+  const filterLang = maskStore.language;

   const allMasks = maskStore
     .getAll()

@@ -542,9 +533,9 @@ export function MaskPage() {
             onChange={(e) => {
               const value = e.currentTarget.value;
               if (value === Locale.Settings.Lang.All) {
-                setFilterLang(undefined);
+                maskStore.setLanguage(undefined);
               } else {
-                setFilterLang(value as Lang);
+                maskStore.setLanguage(value as Lang);
               }
             }}
           >
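The mask language filter now lives in the persisted mask store (see the language field and setLanguage action added to app/store/mask.ts later in this diff) instead of component state mirrored into localStorage. A usage sketch:

// Sketch: the filter is now plain store state, persisted by createPersistStore.
const maskStore = useMaskStore();
maskStore.setLanguage("cn"); // select a language filter
maskStore.setLanguage(undefined); // "All" clears the filter
const filterLang = maskStore.language; // readable anywhere, survives reloads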
@ -5,13 +5,19 @@ import Locale from "../locales";
|
||||
import { InputRange } from "./input-range";
|
||||
import { ListItem, Select } from "./ui-lib";
|
||||
import { useAllModels } from "../utils/hooks";
|
||||
import { groupBy } from "lodash-es";
|
||||
|
||||
export function ModelConfigList(props: {
|
||||
modelConfig: ModelConfig;
|
||||
updateConfig: (updater: (config: ModelConfig) => void) => void;
|
||||
}) {
|
||||
const allModels = useAllModels();
|
||||
const groupModels = groupBy(
|
||||
allModels.filter((v) => v.available),
|
||||
"provider.providerName",
|
||||
);
|
||||
const value = `${props.modelConfig.model}@${props.modelConfig?.providerName}`;
|
||||
const compressModelValue = `${props.modelConfig.compressModel}@${props.modelConfig?.compressProviderName}`;
|
||||
|
||||
return (
|
||||
<>
|
||||
@ -19,6 +25,7 @@ export function ModelConfigList(props: {
|
||||
<Select
|
||||
aria-label={Locale.Settings.Model}
|
||||
value={value}
|
||||
align="left"
|
||||
onChange={(e) => {
|
||||
const [model, providerName] = e.currentTarget.value.split("@");
|
||||
props.updateConfig((config) => {
|
||||
@ -27,13 +34,15 @@ export function ModelConfigList(props: {
|
||||
});
|
||||
}}
|
||||
>
|
||||
{allModels
|
||||
.filter((v) => v.available)
|
||||
.map((v, i) => (
|
||||
<option value={`${v.name}@${v.provider?.providerName}`} key={i}>
|
||||
{v.displayName}({v.provider?.providerName})
|
||||
</option>
|
||||
))}
|
||||
{Object.keys(groupModels).map((providerName, index) => (
|
||||
<optgroup label={providerName} key={index}>
|
||||
{groupModels[providerName].map((v, i) => (
|
||||
<option value={`${v.name}@${v.provider?.providerName}`} key={i}>
|
||||
{v.displayName}
|
||||
</option>
|
||||
))}
|
||||
</optgroup>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
@ -228,6 +237,30 @@ export function ModelConfigList(props: {
|
||||
}
|
||||
></input>
|
||||
</ListItem>
|
||||
<ListItem
|
||||
title={Locale.Settings.CompressModel.Title}
|
||||
subTitle={Locale.Settings.CompressModel.SubTitle}
|
||||
>
|
||||
<Select
|
||||
aria-label={Locale.Settings.CompressModel.Title}
|
||||
value={compressModelValue}
|
||||
onChange={(e) => {
|
||||
const [model, providerName] = e.currentTarget.value.split("@");
|
||||
props.updateConfig((config) => {
|
||||
config.compressModel = ModalConfigValidator.model(model);
|
||||
config.compressProviderName = providerName as ServiceProvider;
|
||||
});
|
||||
}}
|
||||
>
|
||||
{allModels
|
||||
.filter((v) => v.available)
|
||||
.map((v, i) => (
|
||||
<option value={`${v.name}@${v.provider?.providerName}`} key={i}>
|
||||
{v.displayName}({v.provider?.providerName})
|
||||
</option>
|
||||
))}
|
||||
</Select>
|
||||
</ListItem>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
@ -252,6 +252,12 @@
|
||||
position: relative;
|
||||
max-width: fit-content;
|
||||
|
||||
&.left-align-option {
|
||||
option {
|
||||
text-align: left;
|
||||
}
|
||||
}
|
||||
|
||||
.select-with-icon-select {
|
||||
height: 100%;
|
||||
border: var(--border-in-light);
|
||||
|
@ -292,13 +292,19 @@ export function PasswordInput(
|
||||
|
||||
export function Select(
|
||||
props: React.DetailedHTMLProps<
|
||||
React.SelectHTMLAttributes<HTMLSelectElement>,
|
||||
React.SelectHTMLAttributes<HTMLSelectElement> & {
|
||||
align?: "left" | "center";
|
||||
},
|
||||
HTMLSelectElement
|
||||
>,
|
||||
) {
|
||||
const { className, children, ...otherProps } = props;
|
||||
const { className, children, align, ...otherProps } = props;
|
||||
return (
|
||||
<div className={`${styles["select-with-icon"]} ${className}`}>
|
||||
<div
|
||||
className={`${styles["select-with-icon"]} ${
|
||||
align === "left" ? styles["left-align-option"] : ""
|
||||
} ${className}`}
|
||||
>
|
||||
<select className={styles["select-with-icon-select"]} {...otherProps}>
|
||||
{children}
|
||||
</select>
|
||||
|
@@ -120,12 +120,15 @@ export const getServerSideConfig = () => {
   if (disableGPT4) {
     if (customModels) customModels += ",";
     customModels += DEFAULT_MODELS.filter(
-      (m) => m.name.startsWith("gpt-4") && !m.name.startsWith("gpt-4o-mini"),
+      (m) =>
+        (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
+        !m.name.startsWith("gpt-4o-mini"),
     )
       .map((m) => "-" + m.name)
       .join(",");
     if (
-      defaultModel.startsWith("gpt-4") &&
+      (defaultModel.startsWith("gpt-4") ||
+        defaultModel.startsWith("chatgpt-4o")) &&
       !defaultModel.startsWith("gpt-4o-mini")
     )
       defaultModel = "";
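With DISABLE_GPT4 set, the server now also blocks the chatgpt-4o alias: every matching default model is appended to customModels with a leading "-", while gpt-4o-mini stays enabled. An illustrative example of the resulting fragment (the exact names depend on DEFAULT_MODELS):

// Sketch: customModels fragment produced when DISABLE_GPT4 is enabled.
const blocked = "-gpt-4,-gpt-4-turbo,-gpt-4o,-chatgpt-4o-latest";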
@ -246,9 +246,12 @@ export const KnowledgeCutOffDate: Record<string, string> = {
|
||||
"gpt-4o": "2023-10",
|
||||
"gpt-4o-2024-05-13": "2023-10",
|
||||
"gpt-4o-2024-08-06": "2023-10",
|
||||
"chatgpt-4o-latest": "2023-10",
|
||||
"gpt-4o-mini": "2023-10",
|
||||
"gpt-4o-mini-2024-07-18": "2023-10",
|
||||
"gpt-4-vision-preview": "2023-04",
|
||||
"o1-mini": "2023-10",
|
||||
"o1-preview": "2023-10",
|
||||
// After improvements,
|
||||
// it's now easier to add "KnowledgeCutOffDate" instead of stupid hardcoding it, as was done previously.
|
||||
"gemini-pro": "2023-12",
|
||||
@ -268,12 +271,15 @@ const openaiModels = [
|
||||
"gpt-4o",
|
||||
"gpt-4o-2024-05-13",
|
||||
"gpt-4o-2024-08-06",
|
||||
"chatgpt-4o-latest",
|
||||
"gpt-4o-mini",
|
||||
"gpt-4o-mini-2024-07-18",
|
||||
"gpt-4-vision-preview",
|
||||
"gpt-4-turbo-2024-04-09",
|
||||
"gpt-4-1106-preview",
|
||||
"dall-e-3",
|
||||
"o1-mini",
|
||||
"o1-preview",
|
||||
];
|
||||
|
||||
const googleModels = [
|
||||
|
app/icons/shortcutkey.svg (new file, 1 line)
@@ -0,0 +1 @@
+<?xml version="1.0" encoding="UTF-8"?><svg width="16" height="16" viewBox="0 0 48 48" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M42 7H6C4.89543 7 4 7.89543 4 9V37C4 38.1046 4.89543 39 6 39H42C43.1046 39 44 38.1046 44 37V9C44 7.89543 43.1046 7 42 7Z" fill="none" stroke="#000" stroke-width="3" stroke-linejoin="round"/><path d="M12 19H14" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/><path d="M21 19H23" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/><path d="M29 19H36" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/><path d="M12 28H36" stroke="#000" stroke-width="3" stroke-linecap="round" stroke-linejoin="round"/></svg>
@ -41,7 +41,11 @@ export default function RootLayout({
|
||||
name="viewport"
|
||||
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no"
|
||||
/>
|
||||
<link rel="manifest" href="/site.webmanifest"></link>
|
||||
<link
|
||||
rel="manifest"
|
||||
href="/site.webmanifest"
|
||||
crossOrigin="use-credentials"
|
||||
></link>
|
||||
<script src="/serviceWorkerRegister.js" defer></script>
|
||||
</head>
|
||||
<body>
|
||||
|
@ -404,6 +404,10 @@ const ar: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "النموذج",
|
||||
CompressModel: {
|
||||
Title: "نموذج الضغط",
|
||||
SubTitle: "النموذج المستخدم لضغط السجل التاريخي",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "العشوائية (temperature)",
|
||||
SubTitle: "كلما زادت القيمة، زادت العشوائية في الردود",
|
||||
|
@ -411,6 +411,10 @@ const bn: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "মডেল (model)",
|
||||
CompressModel: {
|
||||
Title: "সংকোচন মডেল",
|
||||
SubTitle: "ইতিহাস সংকুচিত করার জন্য ব্যবহৃত মডেল",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "যাদুকরিতা (temperature)",
|
||||
SubTitle: "মান বাড়ালে উত্তর বেশি এলোমেলো হবে",
|
||||
|
@ -1,3 +1,4 @@
|
||||
import { ShortcutKeyModal } from "../components/chat";
|
||||
import { getClientConfig } from "../config/client";
|
||||
import { SubmitKey } from "../store/config";
|
||||
|
||||
@ -83,6 +84,14 @@ const cn = {
|
||||
SaveAs: "存为面具",
|
||||
},
|
||||
IsContext: "预设提示词",
|
||||
ShortcutKey: {
|
||||
Title: "键盘快捷方式",
|
||||
newChat: "打开新聊天",
|
||||
focusInput: "聚焦输入框",
|
||||
copyLastMessage: "复制最后一个回复",
|
||||
copyLastCode: "复制最后一个代码块",
|
||||
showShortcutKey: "显示快捷方式",
|
||||
},
|
||||
},
|
||||
Export: {
|
||||
Title: "分享聊天记录",
|
||||
@ -463,6 +472,10 @@ const cn = {
|
||||
},
|
||||
|
||||
Model: "模型 (model)",
|
||||
CompressModel: {
|
||||
Title: "压缩模型",
|
||||
SubTitle: "用于压缩历史记录的模型",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "随机性 (temperature)",
|
||||
SubTitle: "值越大,回复越随机",
|
||||
@ -497,8 +510,8 @@ const cn = {
|
||||
},
|
||||
},
|
||||
Copy: {
|
||||
Success: "已写入剪切板",
|
||||
Failed: "复制失败,请赋予剪切板权限",
|
||||
Success: "已写入剪贴板",
|
||||
Failed: "复制失败,请赋予剪贴板权限",
|
||||
},
|
||||
Download: {
|
||||
Success: "内容已下载到您的目录。",
|
||||
|
@ -410,6 +410,10 @@ const cs: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Model (model)",
|
||||
CompressModel: {
|
||||
Title: "Kompresní model",
|
||||
SubTitle: "Model používaný pro kompresi historie",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Náhodnost (temperature)",
|
||||
SubTitle: "Čím vyšší hodnota, tím náhodnější odpovědi",
|
||||
|
@ -421,6 +421,10 @@ const de: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Modell",
|
||||
CompressModel: {
|
||||
Title: "Kompressionsmodell",
|
||||
SubTitle: "Modell zur Komprimierung des Verlaufs",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Zufälligkeit (temperature)",
|
||||
SubTitle: "Je höher der Wert, desto zufälliger die Antwort",
|
||||
|
@ -85,6 +85,14 @@ const en: LocaleType = {
|
||||
SaveAs: "Save as Mask",
|
||||
},
|
||||
IsContext: "Contextual Prompt",
|
||||
ShortcutKey: {
|
||||
Title: "Keyboard Shortcuts",
|
||||
newChat: "Open New Chat",
|
||||
focusInput: "Focus Input Field",
|
||||
copyLastMessage: "Copy Last Reply",
|
||||
copyLastCode: "Copy Last Code Block",
|
||||
showShortcutKey: "Show Shortcuts",
|
||||
},
|
||||
},
|
||||
Export: {
|
||||
Title: "Export Messages",
|
||||
@ -468,6 +476,10 @@ const en: LocaleType = {
|
||||
},
|
||||
|
||||
Model: "Model",
|
||||
CompressModel: {
|
||||
Title: "Compression Model",
|
||||
SubTitle: "Model used to compress history",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Temperature",
|
||||
SubTitle: "A larger value makes the more random output",
|
||||
|
@ -423,6 +423,10 @@ const es: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Modelo (model)",
|
||||
CompressModel: {
|
||||
Title: "Modelo de compresión",
|
||||
SubTitle: "Modelo utilizado para comprimir el historial",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Aleatoriedad (temperature)",
|
||||
SubTitle: "Cuanto mayor sea el valor, más aleatorio será el resultado",
|
||||
|
@ -422,6 +422,10 @@ const fr: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Modèle",
|
||||
CompressModel: {
|
||||
Title: "Modèle de compression",
|
||||
SubTitle: "Modèle utilisé pour compresser l'historique",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Aléatoire (temperature)",
|
||||
SubTitle: "Plus la valeur est élevée, plus les réponses sont aléatoires",
|
||||
|
@ -411,6 +411,10 @@ const id: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Model",
|
||||
CompressModel: {
|
||||
Title: "Model Kompresi",
|
||||
SubTitle: "Model yang digunakan untuk mengompres riwayat",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Randomness (temperature)",
|
||||
SubTitle: "Semakin tinggi nilainya, semakin acak responsnya",
|
||||
|
@ -18,10 +18,13 @@ import ar from "./ar";
|
||||
import bn from "./bn";
|
||||
import sk from "./sk";
|
||||
import { merge } from "../utils/merge";
|
||||
import { safeLocalStorage } from "@/app/utils";
|
||||
|
||||
import type { LocaleType } from "./cn";
|
||||
export type { LocaleType, PartialLocaleType } from "./cn";
|
||||
|
||||
const localStorage = safeLocalStorage();
|
||||
|
||||
const ALL_LANGS = {
|
||||
cn,
|
||||
en,
|
||||
@ -82,17 +85,11 @@ merge(fallbackLang, targetLang);
|
||||
export default fallbackLang as LocaleType;
|
||||
|
||||
function getItem(key: string) {
|
||||
try {
|
||||
return localStorage.getItem(key);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
return localStorage.getItem(key);
|
||||
}
|
||||
|
||||
function setItem(key: string, value: string) {
|
||||
try {
|
||||
localStorage.setItem(key, value);
|
||||
} catch {}
|
||||
localStorage.setItem(key, value);
|
||||
}
|
||||
|
||||
function getLanguage() {
|
||||
|
@ -423,6 +423,10 @@ const it: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Modello (model)",
|
||||
CompressModel: {
|
||||
Title: "Modello di compressione",
|
||||
SubTitle: "Modello utilizzato per comprimere la cronologia",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Casualità (temperature)",
|
||||
SubTitle: "Valore più alto, risposte più casuali",
|
||||
|
@ -407,6 +407,10 @@ const jp: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "モデル (model)",
|
||||
CompressModel: {
|
||||
Title: "圧縮モデル",
|
||||
SubTitle: "履歴を圧縮するために使用されるモデル",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "ランダム性 (temperature)",
|
||||
SubTitle: "値が大きいほど応答がランダムになります",
|
||||
|
@ -404,6 +404,10 @@ const ko: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "모델 (model)",
|
||||
CompressModel: {
|
||||
Title: "압축 모델",
|
||||
SubTitle: "기록을 압축하는 데 사용되는 모델",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "무작위성 (temperature)",
|
||||
SubTitle: "값이 클수록 응답이 더 무작위적",
|
||||
|
@ -415,6 +415,10 @@ const no: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Modell",
|
||||
CompressModel: {
|
||||
Title: "Komprimeringsmodell",
|
||||
SubTitle: "Modell brukt for å komprimere historikken",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Tilfeldighet (temperature)",
|
||||
SubTitle: "Høyere verdi gir mer tilfeldige svar",
|
||||
|
@ -346,6 +346,10 @@ const pt: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Modelo",
|
||||
CompressModel: {
|
||||
Title: "Modelo de Compressão",
|
||||
SubTitle: "Modelo usado para comprimir o histórico",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Temperatura",
|
||||
SubTitle: "Um valor maior torna a saída mais aleatória",
|
||||
|
@ -414,6 +414,10 @@ const ru: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Модель",
|
||||
CompressModel: {
|
||||
Title: "Модель сжатия",
|
||||
SubTitle: "Модель, используемая для сжатия истории",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Случайность (temperature)",
|
||||
SubTitle: "Чем больше значение, тем более случайные ответы",
|
||||
|
@ -365,6 +365,10 @@ const sk: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Model",
|
||||
CompressModel: {
|
||||
Title: "Kompresný model",
|
||||
SubTitle: "Model používaný na kompresiu histórie",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Teplota",
|
||||
SubTitle: "Vyššia hodnota robí výstup náhodnejším",
|
||||
|
@ -414,6 +414,10 @@ const tr: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Model (model)",
|
||||
CompressModel: {
|
||||
Title: "Sıkıştırma Modeli",
|
||||
SubTitle: "Geçmişi sıkıştırmak için kullanılan model",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Rastgelelik (temperature)",
|
||||
SubTitle: "Değer arttıkça yanıt daha rastgele olur",
|
||||
|
@ -83,6 +83,14 @@ const tw = {
|
||||
SaveAs: "另存新檔",
|
||||
},
|
||||
IsContext: "預設提示詞",
|
||||
ShortcutKey: {
|
||||
Title: "鍵盤快捷方式",
|
||||
newChat: "打開新聊天",
|
||||
focusInput: "聚焦輸入框",
|
||||
copyLastMessage: "複製最後一個回覆",
|
||||
copyLastCode: "複製最後一個代碼塊",
|
||||
showShortcutKey: "顯示快捷方式",
|
||||
},
|
||||
},
|
||||
Export: {
|
||||
Title: "將聊天記錄匯出為 Markdown",
|
||||
@ -362,6 +370,10 @@ const tw = {
|
||||
},
|
||||
|
||||
Model: "模型 (model)",
|
||||
CompressModel: {
|
||||
Title: "壓縮模型",
|
||||
SubTitle: "用於壓縮歷史記錄的模型",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "隨機性 (temperature)",
|
||||
SubTitle: "值越大,回應越隨機",
|
||||
|
@ -410,6 +410,10 @@ const vi: PartialLocaleType = {
|
||||
},
|
||||
|
||||
Model: "Mô hình (model)",
|
||||
CompressModel: {
|
||||
Title: "Mô hình nén",
|
||||
SubTitle: "Mô hình được sử dụng để nén lịch sử",
|
||||
},
|
||||
Temperature: {
|
||||
Title: "Độ ngẫu nhiên (temperature)",
|
||||
SubTitle: "Giá trị càng lớn, câu trả lời càng ngẫu nhiên",
|
||||
|
@ -1,33 +1,31 @@
|
||||
import { trimTopic, getMessageTextContent } from "../utils";
|
||||
import { getMessageTextContent, trimTopic } from "../utils";
|
||||
|
||||
import Locale, { getLang } from "../locales";
|
||||
import { indexedDBStorage } from "@/app/utils/indexedDB-storage";
|
||||
import { nanoid } from "nanoid";
|
||||
import type {
|
||||
ClientApi,
|
||||
MultimodalContent,
|
||||
RequestMessage,
|
||||
} from "../client/api";
|
||||
import { getClientApi } from "../client/api";
|
||||
import { ChatControllerPool } from "../client/controller";
|
||||
import { showToast } from "../components/ui-lib";
|
||||
import { ModelConfig, ModelType, useAppConfig } from "./config";
|
||||
import { createEmptyMask, Mask } from "./mask";
|
||||
import {
|
||||
DEFAULT_INPUT_TEMPLATE,
|
||||
DEFAULT_MODELS,
|
||||
DEFAULT_SYSTEM_TEMPLATE,
|
||||
KnowledgeCutOffDate,
|
||||
StoreKey,
|
||||
SUMMARIZE_MODEL,
|
||||
GEMINI_SUMMARIZE_MODEL,
|
||||
} from "../constant";
|
||||
import { getClientApi } from "../client/api";
|
||||
import type {
|
||||
ClientApi,
|
||||
RequestMessage,
|
||||
MultimodalContent,
|
||||
} from "../client/api";
|
||||
import { ChatControllerPool } from "../client/controller";
|
||||
import Locale, { getLang } from "../locales";
|
||||
import { isDalle3, safeLocalStorage } from "../utils";
|
||||
import { prettyObject } from "../utils/format";
|
||||
import { estimateTokenLength } from "../utils/token";
|
||||
import { nanoid } from "nanoid";
|
||||
import { createPersistStore } from "../utils/store";
|
||||
import { collectModelsWithDefaultModel } from "../utils/model";
|
||||
import { useAccessStore } from "./access";
|
||||
import { isDalle3 } from "../utils";
|
||||
import { indexedDBStorage } from "@/app/utils/indexedDB-storage";
|
||||
import { estimateTokenLength } from "../utils/token";
|
||||
import { ModelConfig, ModelType, useAppConfig } from "./config";
|
||||
import { createEmptyMask, Mask } from "./mask";
|
||||
|
||||
const localStorage = safeLocalStorage();
|
||||
|
||||
export type ChatMessageTool = {
|
||||
id: string;
|
||||
@ -104,27 +102,6 @@ function createEmptySession(): ChatSession {
|
||||
};
|
||||
}
|
||||
|
||||
function getSummarizeModel(currentModel: string) {
|
||||
// if it is using gpt-* models, force to use 4o-mini to summarize
|
||||
if (currentModel.startsWith("gpt")) {
|
||||
const configStore = useAppConfig.getState();
|
||||
const accessStore = useAccessStore.getState();
|
||||
const allModel = collectModelsWithDefaultModel(
|
||||
configStore.models,
|
||||
[configStore.customModels, accessStore.customModels].join(","),
|
||||
accessStore.defaultModel,
|
||||
);
|
||||
const summarizeModel = allModel.find(
|
||||
(m) => m.name === SUMMARIZE_MODEL && m.available,
|
||||
);
|
||||
return summarizeModel?.name ?? currentModel;
|
||||
}
|
||||
if (currentModel.startsWith("gemini")) {
|
||||
return GEMINI_SUMMARIZE_MODEL;
|
||||
}
|
||||
return currentModel;
|
||||
}
|
||||
|
||||
function countMessages(msgs: ChatMessage[]) {
|
||||
return msgs.reduce(
|
||||
(pre, cur) => pre + estimateTokenLength(getMessageTextContent(cur)),
|
||||
@ -179,6 +156,7 @@ function fillTemplateWith(input: string, modelConfig: ModelConfig) {
|
||||
const DEFAULT_CHAT_STATE = {
|
||||
sessions: [createEmptySession()],
|
||||
currentSessionIndex: 0,
|
||||
lastInput: "",
|
||||
};
|
||||
|
||||
export const useChatStore = createPersistStore(
|
||||
@ -476,7 +454,8 @@ export const useChatStore = createPersistStore(
|
||||
// system prompts, to get close to OpenAI Web ChatGPT
|
||||
const shouldInjectSystemPrompts =
|
||||
modelConfig.enableInjectSystemPrompts &&
|
||||
session.mask.modelConfig.model.startsWith("gpt-");
|
||||
(session.mask.modelConfig.model.startsWith("gpt-") ||
|
||||
session.mask.modelConfig.model.startsWith("chatgpt-"));
|
||||
|
||||
var systemPrompts: ChatMessage[] = [];
|
||||
systemPrompts = shouldInjectSystemPrompts
|
||||
@ -577,7 +556,7 @@ export const useChatStore = createPersistStore(
|
||||
return;
|
||||
}
|
||||
|
||||
const providerName = modelConfig.providerName;
|
||||
const providerName = modelConfig.compressProviderName;
|
||||
const api: ClientApi = getClientApi(providerName);
|
||||
|
||||
// remove error messages if any
|
||||
@ -599,7 +578,7 @@ export const useChatStore = createPersistStore(
|
||||
api.llm.chat({
|
||||
messages: topicMessages,
|
||||
config: {
|
||||
model: getSummarizeModel(session.mask.modelConfig.model),
|
||||
model: modelConfig.compressModel,
|
||||
stream: false,
|
||||
providerName,
|
||||
},
|
||||
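Topic and memory summarization now use the user-configurable compressModel (with its own compressProviderName) instead of the removed getSummarizeModel() heuristic that hard-coded gpt-* to the summarize model and gemini-* to GEMINI_SUMMARIZE_MODEL. A sketch of how a summarization call is now assembled (field names taken from this diff; handlers omitted):

// Sketch: summarization reads the compress model straight from the session's model config.
const modelConfig = session.mask.modelConfig;
api.llm.chat({
  messages: topicMessages,
  config: {
    model: modelConfig.compressModel, // "gpt-4o-mini" by default in this diff
    providerName: modelConfig.compressProviderName, // "OpenAI" by default
    stream: false,
  },
  // onFinish handler omitted
});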
@ -662,7 +641,7 @@ export const useChatStore = createPersistStore(
|
||||
config: {
|
||||
...modelcfg,
|
||||
stream: true,
|
||||
model: getSummarizeModel(session.mask.modelConfig.model),
|
||||
model: modelConfig.compressModel,
|
||||
},
|
||||
onUpdate(message) {
|
||||
session.memoryPrompt = message;
|
||||
@ -700,13 +679,18 @@ export const useChatStore = createPersistStore(
|
||||
localStorage.clear();
|
||||
location.reload();
|
||||
},
|
||||
setLastInput(lastInput: string) {
|
||||
set({
|
||||
lastInput,
|
||||
});
|
||||
},
|
||||
};
|
||||
|
||||
return methods;
|
||||
},
|
||||
{
|
||||
name: StoreKey.Chat,
|
||||
version: 3.1,
|
||||
version: 3.2,
|
||||
migrate(persistedState, version) {
|
||||
const state = persistedState as any;
|
||||
const newState = JSON.parse(
|
||||
@ -753,6 +737,16 @@ export const useChatStore = createPersistStore(
|
||||
});
|
||||
}
|
||||
|
||||
// add default summarize model for every session
|
||||
if (version < 3.2) {
|
||||
newState.sessions.forEach((s) => {
|
||||
const config = useAppConfig.getState();
|
||||
s.mask.modelConfig.compressModel = config.modelConfig.compressModel;
|
||||
s.mask.modelConfig.compressProviderName =
|
||||
config.modelConfig.compressProviderName;
|
||||
});
|
||||
}
|
||||
|
||||
return newState as any;
|
||||
},
|
||||
},
|
||||
|
@ -50,7 +50,7 @@ export const DEFAULT_CONFIG = {
|
||||
models: DEFAULT_MODELS as any as LLMModel[],
|
||||
|
||||
modelConfig: {
|
||||
model: "gpt-3.5-turbo" as ModelType,
|
||||
model: "gpt-4o-mini" as ModelType,
|
||||
providerName: "OpenAI" as ServiceProvider,
|
||||
temperature: 0.5,
|
||||
top_p: 1,
|
||||
@ -60,6 +60,8 @@ export const DEFAULT_CONFIG = {
|
||||
sendMemory: true,
|
||||
historyMessageCount: 4,
|
||||
compressMessageLengthThreshold: 1000,
|
||||
compressModel: "gpt-4o-mini" as ModelType,
|
||||
compressProviderName: "OpenAI" as ServiceProvider,
|
||||
enableInjectSystemPrompts: true,
|
||||
template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
|
||||
size: "1024x1024" as DalleSize,
|
||||
@ -140,7 +142,7 @@ export const useAppConfig = createPersistStore(
|
||||
}),
|
||||
{
|
||||
name: StoreKey.Config,
|
||||
version: 3.9,
|
||||
version: 4,
|
||||
migrate(persistedState, version) {
|
||||
const state = persistedState as ChatConfig;
|
||||
|
||||
@ -178,6 +180,13 @@ export const useAppConfig = createPersistStore(
|
||||
: config?.template ?? DEFAULT_INPUT_TEMPLATE;
|
||||
}
|
||||
|
||||
if (version < 4) {
|
||||
state.modelConfig.compressModel =
|
||||
DEFAULT_CONFIG.modelConfig.compressModel;
|
||||
state.modelConfig.compressProviderName =
|
||||
DEFAULT_CONFIG.modelConfig.compressProviderName;
|
||||
}
|
||||
|
||||
return state as any;
|
||||
},
|
||||
},
|
||||
|
@ -23,9 +23,12 @@ export type Mask = {
|
||||
|
||||
export const DEFAULT_MASK_STATE = {
|
||||
masks: {} as Record<string, Mask>,
|
||||
language: undefined as Lang | undefined,
|
||||
};
|
||||
|
||||
export type MaskState = typeof DEFAULT_MASK_STATE;
|
||||
export type MaskState = typeof DEFAULT_MASK_STATE & {
|
||||
language?: Lang | undefined;
|
||||
};
|
||||
|
||||
export const DEFAULT_MASK_AVATAR = "gpt-bot";
|
||||
export const createEmptyMask = () =>
|
||||
@ -102,6 +105,11 @@ export const useMaskStore = createPersistStore(
|
||||
search(text: string) {
|
||||
return Object.values(get().masks);
|
||||
},
|
||||
setLanguage(language: Lang | undefined) {
|
||||
set({
|
||||
language,
|
||||
});
|
||||
},
|
||||
}),
|
||||
{
|
||||
name: StoreKey.Mask,
|
||||
|
@@ -199,7 +199,7 @@ export const usePluginStore = createPersistStore(

     getAsTools(ids: string[]) {
       const plugins = get().plugins;
-      const selected = ids
+      const selected = (ids || [])
         .map((id) => plugins[id])
         .filter((i) => i)
         .map((p) => FunctionToolService.add(p));
app/utils.ts (60 changed lines)
@ -318,3 +318,63 @@ export function adapter(config: Record<string, unknown>) {
|
||||
: path;
|
||||
return fetch(fetchUrl as string, { ...rest, responseType: "text" });
|
||||
}
|
||||
|
||||
export function safeLocalStorage(): {
|
||||
getItem: (key: string) => string | null;
|
||||
setItem: (key: string, value: string) => void;
|
||||
removeItem: (key: string) => void;
|
||||
clear: () => void;
|
||||
} {
|
||||
let storage: Storage | null;
|
||||
|
||||
try {
|
||||
if (typeof window !== "undefined" && window.localStorage) {
|
||||
storage = window.localStorage;
|
||||
} else {
|
||||
storage = null;
|
||||
}
|
||||
} catch (e) {
|
||||
console.error("localStorage is not available:", e);
|
||||
storage = null;
|
||||
}
|
||||
|
||||
return {
|
||||
getItem(key: string): string | null {
|
||||
if (storage) {
|
||||
return storage.getItem(key);
|
||||
} else {
|
||||
console.warn(
|
||||
`Attempted to get item "${key}" from localStorage, but localStorage is not available.`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
},
|
||||
setItem(key: string, value: string): void {
|
||||
if (storage) {
|
||||
storage.setItem(key, value);
|
||||
} else {
|
||||
console.warn(
|
||||
`Attempted to set item "${key}" in localStorage, but localStorage is not available.`,
|
||||
);
|
||||
}
|
||||
},
|
||||
removeItem(key: string): void {
|
||||
if (storage) {
|
||||
storage.removeItem(key);
|
||||
} else {
|
||||
console.warn(
|
||||
`Attempted to remove item "${key}" from localStorage, but localStorage is not available.`,
|
||||
);
|
||||
}
|
||||
},
|
||||
clear(): void {
|
||||
if (storage) {
|
||||
storage.clear();
|
||||
} else {
|
||||
console.warn(
|
||||
"Attempted to clear localStorage, but localStorage is not available.",
|
||||
);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
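The new safeLocalStorage() helper above is what the chat, locale, and mask modules now import instead of touching window.localStorage directly, so server-side rendering and storage-blocked browsers degrade to console warnings instead of throwing. A usage sketch mirroring the call sites in this diff (the storage key is illustrative):

// Sketch of the call-site pattern used throughout this commit.
import { safeLocalStorage } from "@/app/utils";

const localStorage = safeLocalStorage(); // safe on the server and in sandboxed contexts
localStorage.setItem("last-input", "hello"); // warns and no-ops when storage is unavailable
const value = localStorage.getItem("last-input"); // null when storage is unavailable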
@ -1,5 +1,8 @@
|
||||
import { StateStorage } from "zustand/middleware";
|
||||
import { get, set, del, clear } from "idb-keyval";
|
||||
import { safeLocalStorage } from "@/app/utils";
|
||||
|
||||
const localStorage = safeLocalStorage();
|
||||
|
||||
class IndexedDBStorage implements StateStorage {
|
||||
public async getItem(name: string): Promise<string | null> {
|
||||
|
@@ -9,7 +9,7 @@
   },
   "package": {
     "productName": "NextChat",
-    "version": "2.15.0"
+    "version": "2.15.2"
   },
   "tauri": {
     "allowlist": {