mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git
init
@@ -27,13 +27,15 @@ const DEFAULT_ACCESS_STATE = {
  // azure
  azureUrl: "",
  azureApiKey: "",
  azureApiVersion: "2023-08-01-preview",
  azureApiVersion: "2023-05-15",

  // server config
  needCode: true,
  hideUserApiKey: false,
  hideBalanceQuery: false,
  disableGPT4: false,
  midjourneyProxyUrl: "",
  useMjImgSelfProxy: false,
  disableFastLink: false,
  customModels: "",
};
@@ -53,7 +55,8 @@ export const useAccessStore = createPersistStore(
    },

    isValidAzure() {
      return ensure(get(), ["azureUrl", "azureApiKey", "azureApiVersion"]);
      return true;
      // return ensure(get(), ["azureUrl", "azureApiKey", "azureApiVersion"]);
    },

    isAuthorized() {
@@ -70,24 +73,15 @@ export const useAccessStore = createPersistStore(
    fetch() {
      if (fetchState > 0 || getClientConfig()?.buildMode === "export") return;
      fetchState = 1;
      fetch("/api/config", {
        method: "post",
        body: null,
        headers: {
          ...getHeaders(),
        },
      })
        .then((res) => res.json())
        .then((res: DangerConfig) => {
          console.log("[Config] got config from server", res);
          set(() => ({ ...res }));
        })
        .catch(() => {
          console.error("[Config] failed to fetch config");
        })
        .finally(() => {
          fetchState = 2;
        });

      const res = {
        needCode: false,
        hideUserApiKey: true,
        disableGPT4: false,
        hideBalanceQuery: true,
      };
      set(() => ({ ...res }));
      fetchState = 2; // set fetchState to "fetch completed"
    },
  }),
  {
@@ -101,7 +95,7 @@ export const useAccessStore = createPersistStore(
        azureApiVersion: string;
      };
      state.openaiApiKey = state.token;
      state.azureApiVersion = "2023-08-01-preview";
      state.azureApiVersion = "2023-05-15";
    }

    return persistedState as any;

@@ -11,7 +11,12 @@ import {
  StoreKey,
  SUMMARIZE_MODEL,
} from "../constant";
import { api, RequestMessage } from "../client/api";
import {
  api,
  getHeaders,
  useGetMidjourneySelfProxyUrl,
  RequestMessage,
} from "../client/api";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format";
import { estimateTokenLength } from "../utils/token";
@@ -24,6 +29,7 @@ export type ChatMessage = RequestMessage & {
  isError?: boolean;
  id: string;
  model?: ModelType;
  attr?: any;
};

export function createMessage(override: Partial<ChatMessage>): ChatMessage {
@@ -80,6 +86,8 @@ function createEmptySession(): ChatSession {
  };
}

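// Pending Midjourney status-poll timers, keyed by task id (see fetchMidjourneyStatus below).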
const ChatFetchTaskPool: Record<string, any> = {};

function getSummarizeModel(currentModel: string) {
  // if it is using gpt-* models, force to use 3.5 to summarize
  return currentModel.startsWith("gpt") ? SUMMARIZE_MODEL : currentModel;
@@ -266,12 +274,143 @@ export const useChatStore = createPersistStore(
      get().summarizeSession();
    },

    async onUserInput(content: string) {
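    // Poll the Midjourney proxy every 3 seconds for this task's status and update the bot message until it reaches SUCCESS or FAILURE.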
    fetchMidjourneyStatus(botMessage: ChatMessage, extAttr?: any) {
      const taskId = botMessage?.attr?.taskId;
      if (
        !taskId ||
        ["SUCCESS", "FAILURE"].includes(botMessage?.attr?.status) ||
        ChatFetchTaskPool[taskId]
      )
        return;
      ChatFetchTaskPool[taskId] = setTimeout(async () => {
        ChatFetchTaskPool[taskId] = null;
        const statusRes = await fetch(
          `/api/midjourney/mj/task/${taskId}/fetch`,
          {
            method: "GET",
            headers: getHeaders(),
          },
        );
        const statusResJson = await statusRes.json();
        if (statusRes.status < 200 || statusRes.status >= 300) {
          botMessage.content =
            Locale.Midjourney.TaskStatusFetchFail +
              ": " +
              (statusResJson?.error || statusResJson?.description) ||
            Locale.Midjourney.UnknownReason;
console.log("【mid】状态码不对");
|
||||
        } else {
          let isFinished = false;
          let content;
          const prefixContent = Locale.Midjourney.TaskPrefix(
            statusResJson.prompt,
            taskId,
          );
console.log("【mid】请求成功了", statusResJson);
|
||||
          switch (statusResJson?.status) {
            case "SUCCESS":
              console.log("[mid] SUCCESS", statusResJson);

              content = statusResJson.imageUrl;
              isFinished = true;
              if (statusResJson.imageUrl) {
                let imgUrl = useGetMidjourneySelfProxyUrl(
                  statusResJson.imageUrl,
                );
                botMessage.attr.imgUrl = imgUrl;
                botMessage.content =
                  prefixContent + `[](${imgUrl})`;
              }
              if (
                statusResJson.action === "DESCRIBE" &&
                statusResJson.prompt
              ) {
                botMessage.content += `\n${statusResJson.prompt}`;
              }
              break;
            case "FAILURE":
              console.log("[mid] FAILURE", statusResJson);
              content =
                statusResJson.failReason || Locale.Midjourney.UnknownReason;
              isFinished = true;
              botMessage.content =
                prefixContent +
                `**${
                  Locale.Midjourney.TaskStatus
                }:** [${new Date().toLocaleString()}] - ${content}`;
              break;
            case "NOT_START":
              content = Locale.Midjourney.TaskNotStart;
              break;
            case "IN_PROGRESS":
              console.log("[mid] ", statusResJson);
              content = Locale.Midjourney.TaskProgressTip(
                statusResJson.progress,
              );
              break;
            case "SUBMITTED":
              content = Locale.Midjourney.TaskRemoteSubmit;
              break;
            default:
              console.log("[mid] ", statusResJson);
              content = statusResJson.status;
          }
          botMessage.attr.status = statusResJson.status;
          if (isFinished) {
            botMessage.attr.finished = true;
          } else {
            botMessage.content =
              prefixContent +
              `**${
                Locale.Midjourney.TaskStatus
              }:** [${new Date().toLocaleString()}] - ${content}`;
            if (
              statusResJson.status === "IN_PROGRESS" &&
              statusResJson.imageUrl
            ) {
              let imgUrl = useGetMidjourneySelfProxyUrl(
                statusResJson.imageUrl,
              );
              botMessage.attr.imgUrl = imgUrl;
              botMessage.content += `\n[](${imgUrl})`;
            }
            this.fetchMidjourneyStatus(botMessage, extAttr);
          }
          set(() => ({}));
          if (isFinished) {
            extAttr?.setAutoScroll(true);
          }
        }
      }, 3000);
    },

    async onUserInput(content: string, extAttr?: any) {
      const session = get().currentSession();
      const modelConfig = session.mask.modelConfig;
      let userContent: string = "";
      if (
        extAttr?.mjImageMode &&
        (extAttr?.useImages?.length ?? 0) > 0 &&
        extAttr.mjImageMode !== "IMAGINE"
      ) {
        if (
          extAttr.mjImageMode === "BLEND" &&
          (extAttr.useImages.length < 2 || extAttr.useImages.length > 5)
        ) {
          alert(Locale.Midjourney.BlendMinImg(2, 5));
          return new Promise((resolve: any, reject) => {
            resolve(false);
          });
        }
        userContent = `/mj ${extAttr?.mjImageMode}`;
        extAttr.useImages.forEach((img: any, index: number) => {
          userContent += `::[${index + 1}]${img.filename}`;
        });
      } else {
        userContent = fillTemplateWith(content, modelConfig);
      }

      const userContent = fillTemplateWith(content, modelConfig);
      console.log("[User Input] after template: ", userContent);
      // console.log("[User Input] after template: ", userContent);

      const userMessage: ChatMessage = createMessage({
        role: "user",
@@ -282,6 +421,7 @@ export const useChatStore = createPersistStore(
        role: "assistant",
        streaming: true,
        model: modelConfig.model,
        attr: {},
      });

      // get recent messages
@@ -301,57 +441,222 @@ export const useChatStore = createPersistStore(
        ]);
      });

      // make request
      api.llm.chat({
        messages: sendMessages,
        config: { ...modelConfig, stream: true },
        onUpdate(message) {
          botMessage.streaming = true;
          if (message) {
            botMessage.content = message;
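      // Midjourney branch: commands of the form "/mj ACTION::args" are submitted to the Midjourney proxy and polled via fetchMidjourneyStatus instead of being sent to the LLM.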
      if (
        content.toLowerCase().startsWith("/mj") ||
        content.toLowerCase().startsWith("/MJ")
      ) {
        botMessage.model = "midjourney";
        const startFn = async () => {
          const prompt = content.substring(3).trim();
          let action: string = "IMAGINE";
          const firstSplitIndex = prompt.indexOf("::");
          if (firstSplitIndex > 0) {
            action = prompt.substring(0, firstSplitIndex);
          }
          get().updateCurrentSession((session) => {
            session.messages = session.messages.concat();
          });
        },
        onFinish(message) {
          botMessage.streaming = false;
          if (message) {
            botMessage.content = message;
            get().onNewMessage(botMessage);
          if (
            ![
              "UPSCALE",
              "VARIATION",
              "IMAGINE",
              "DESCRIBE",
              "BLEND",
              "REROLL",
            ].includes(action)
          ) {
            botMessage.content = Locale.Midjourney.TaskErrUnknownType;
            botMessage.streaming = false;
            return;
          }
          ChatControllerPool.remove(session.id, botMessage.id);
        },
        onError(error) {
          const isAborted = error.message.includes("aborted");
          botMessage.content +=
            "\n\n" +
            prettyObject({
              error: true,
              message: error.message,
          console.log("[action] ", action);
          botMessage.attr.action = action;
          let actionIndex: any = null;
          let actionUseTaskId: any = null;
          if (
            action === "VARIATION" ||
            action == "UPSCALE" ||
            action == "REROLL"
          ) {
            actionIndex = parseInt(
              prompt.substring(firstSplitIndex + 2, firstSplitIndex + 3),
            );
            actionUseTaskId = prompt.substring(firstSplitIndex + 5);
          }
          try {
            let res = null;
            const reqFn = (path: string, method: string, body?: any) => {
              return fetch("/api/midjourney/mj/" + path, {
                method: method,
                headers: getHeaders(),
                body: body,
              });
            };
            switch (action) {
              case "IMAGINE": {
                res = await reqFn(
                  "submit/imagine",
                  "POST",
                  JSON.stringify({
                    prompt: prompt,
                    // base64Array: extAttr?.useImages?.[0]?.base64 ?? null,
                    base64Array: extAttr?.useImages?.[0]?.base64
                      ? [extAttr?.useImages?.[0]?.base64]
                      : null,
                  }),
                );
                break;
              }
              case "DESCRIBE": {
                res = await reqFn(
                  "submit/describe",
                  "POST",
                  JSON.stringify({
                    base64: extAttr.useImages[0].base64,
                  }),
                );
                break;
              }
              case "BLEND": {
                const base64Array = extAttr.useImages.map(
                  (ui: any) => ui.base64,
                );
                res = await reqFn(
                  "submit/blend",
                  "POST",
                  JSON.stringify({ base64Array }),
                );
                break;
              }
case "UPSCALE":
|
||||
case "VARIATION":
|
||||
case "REROLL": {
|
||||
res = await reqFn(
|
||||
"submit/change",
|
||||
"POST",
|
||||
JSON.stringify({
|
||||
action: action,
|
||||
index: actionIndex,
|
||||
taskId: actionUseTaskId,
|
||||
}),
|
||||
);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
}
|
||||
if (res == null) {
|
||||
botMessage.content =
|
||||
Locale.Midjourney.TaskErrNotSupportType(action);
|
||||
botMessage.streaming = false;
|
||||
return;
|
||||
}
|
||||
if (!res.ok) {
|
||||
const text = await res.text();
|
||||
throw new Error(
|
||||
`\n${Locale.Midjourney.StatusCode(
|
||||
res.status,
|
||||
)}\n${Locale.Midjourney.RespBody(
|
||||
text || Locale.Midjourney.None,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
const resJson = await res.json();
|
||||
if (
|
||||
res.status < 200 ||
|
||||
res.status >= 300 ||
|
||||
(resJson.code != 1 && resJson.code != 22)
|
||||
) {
|
||||
botMessage.content = Locale.Midjourney.TaskSubmitErr(
|
||||
resJson?.msg ||
|
||||
resJson?.error ||
|
||||
resJson?.description ||
|
||||
Locale.Midjourney.UnknownError,
|
||||
);
|
||||
} else {
|
||||
const taskId: string = resJson.result;
|
||||
const prefixContent = Locale.Midjourney.TaskPrefix(
|
||||
prompt,
|
||||
taskId,
|
||||
);
|
||||
botMessage.content =
|
||||
prefixContent +
|
||||
`[${new Date().toLocaleString()}] - ${
|
||||
Locale.Midjourney.TaskSubmitOk
|
||||
}: ` +
|
||||
resJson?.description || Locale.Midjourney.PleaseWait;
|
||||
botMessage.attr.taskId = taskId;
|
||||
botMessage.attr.status = resJson.status;
|
||||
this.fetchMidjourneyStatus(botMessage, extAttr);
|
||||
}
|
||||
} catch (e: any) {
|
||||
console.error(e);
|
||||
botMessage.content = Locale.Midjourney.TaskSubmitErr(
|
||||
e?.error || e?.message || Locale.Midjourney.UnknownError,
|
||||
);
|
||||
} finally {
|
||||
ChatControllerPool.remove(
|
||||
session.id,
|
||||
botMessage.id ?? messageIndex,
|
||||
);
|
||||
botMessage.streaming = false;
|
||||
}
|
||||
};
|
||||
await startFn();
|
||||
get().onNewMessage(botMessage);
|
||||
set(() => ({}));
|
||||
extAttr?.setAutoScroll(true);
|
||||
} else {
|
||||
// make request
|
||||
api.llm.chat({
|
||||
messages: sendMessages,
|
||||
config: { ...modelConfig, stream: true },
|
||||
onUpdate(message) {
|
||||
botMessage.streaming = true;
|
||||
if (message) {
|
||||
botMessage.content = message;
|
||||
}
|
||||
get().updateCurrentSession((session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
botMessage.streaming = false;
|
||||
userMessage.isError = !isAborted;
|
||||
botMessage.isError = !isAborted;
|
||||
get().updateCurrentSession((session) => {
|
||||
session.messages = session.messages.concat();
|
||||
});
|
||||
ChatControllerPool.remove(
|
||||
session.id,
|
||||
botMessage.id ?? messageIndex,
|
||||
);
|
||||
},
|
||||
onFinish(message) {
|
||||
botMessage.streaming = false;
|
||||
if (message) {
|
||||
botMessage.content = message;
|
||||
get().onNewMessage(botMessage);
|
||||
}
|
||||
ChatControllerPool.remove(session.id, botMessage.id);
|
||||
},
|
||||
onError(error) {
|
||||
const isAborted = error.message.includes("aborted");
|
||||
botMessage.content =
|
||||
"\n\n" +
|
||||
prettyObject({
|
||||
error: true,
|
||||
message: error.message,
|
||||
                message2: "Try the refresh button above.",
              });
            botMessage.streaming = false;
            userMessage.isError = !isAborted;
            botMessage.isError = !isAborted;
            get().updateCurrentSession((session) => {
              session.messages = session.messages.concat();
            });
            ChatControllerPool.remove(
              session.id,
              botMessage.id ?? messageIndex,
            );

            console.error("[Chat] failed ", error);
          },
          onController(controller) {
            // collect controller for stop/retry
            ChatControllerPool.addController(
              session.id,
              botMessage.id ?? messageIndex,
              controller,
            );
          },
        });
          console.error("[Chat] failed ", error);
        },
        onController(controller) {
          // collect controller for stop/retry
          ChatControllerPool.addController(
            session.id,
            botMessage.id ?? messageIndex,
            controller,
          );
        },
      });
      }
    },

    getMemoryPrompt() {
@@ -528,12 +833,12 @@ export const useChatStore = createPersistStore(

      const lastSummarizeIndex = session.messages.length;

      console.log(
        "[Chat History] ",
        toBeSummarizedMsgs,
        historyMsgLength,
        modelConfig.compressMessageLengthThreshold,
      );
      // console.log(
      //   "[Chat History] ",
      //   toBeSummarizedMsgs,
      //   historyMsgLength,
      //   modelConfig.compressMessageLengthThreshold,
      // );

      if (
        historyMsgLength > modelConfig.compressMessageLengthThreshold &&

@@ -5,9 +5,11 @@ import {
  DEFAULT_INPUT_TEMPLATE,
  DEFAULT_MODELS,
  DEFAULT_SIDEBAR_WIDTH,
  DISABLE_MODELS,
  StoreKey,
} from "../constant";
import { createPersistStore } from "../utils/store";
import { get } from "immutable";

export type ModelType = (typeof DEFAULT_MODELS)[number]["name"];

@@ -26,35 +28,38 @@ export enum Theme {
}

export const DEFAULT_CONFIG = {
  submitKey: SubmitKey.Enter as SubmitKey,
  lastUpdate: Date.now(), // timestamp, to merge state

  submitKey: isMacOS() ? SubmitKey.MetaEnter : SubmitKey.CtrlEnter,
  // submitKey: isMacOS() ? SubmitKey.MetaEnter : SubmitKey.CtrlEnter,
  avatar: "1f603",
  fontSize: 14,
  theme: Theme.Auto as Theme,
  tightBorder: !!getClientConfig()?.isApp,
  sendPreviewBubble: true,
  sendPreviewBubble: false,
  enableAutoGenerateTitle: true,
  sidebarWidth: DEFAULT_SIDEBAR_WIDTH,

  disablePromptHint: false,

  dontShowMaskSplashScreen: false, // dont show splash screen when create chat
  hideBuiltinMasks: false, // dont add builtin masks
  dontShowMaskSplashScreen: true,
  hideBuiltinMasks: false, // don't add builtin masks

  customModels: "",
  models: DEFAULT_MODELS as any as LLMModel[],

  dontUseModel: DISABLE_MODELS,

  modelConfig: {
    model: "gpt-3.5-turbo" as ModelType,
    temperature: 0.5,
    model: "gpt-3.5-turbo-1106" as ModelType,
    temperature: 0.8,
    top_p: 1,
    max_tokens: 4000,
    max_tokens: 2000,
    presence_penalty: 0,
    frequency_penalty: 0,
    sendMemory: true,
    historyMessageCount: 4,
    compressMessageLengthThreshold: 1000,
    compressMessageLengthThreshold: 2000,
    enableInjectSystemPrompts: true,
    template: DEFAULT_INPUT_TEMPLATE,
  },
@@ -132,7 +137,7 @@ export const useAppConfig = createPersistStore(
  }),
  {
    name: StoreKey.Config,
    version: 3.8,
    version: 3.88,
    migrate(persistedState, version) {
      const state = persistedState as ChatConfig;

@@ -148,7 +153,8 @@ export const useAppConfig = createPersistStore(
      }

      if (version < 3.5) {
        state.customModels = "claude,claude-100k";
        // state.customModels = "claude,claude-100k";
        state.customModels = "";
      }

      if (version < 3.6) {
@@ -162,6 +168,10 @@ export const useAppConfig = createPersistStore(
      if (version < 3.8) {
        state.lastUpdate = Date.now();
      }
      if (version < 3.88) {
        state.lastUpdate = Date.now();
        return { ...DEFAULT_CONFIG };
      }

      return state as any;
    },

@@ -37,7 +37,7 @@ export const createEmptyMask = () =>
    lang: getLang(),
    builtin: false,
    createdAt: Date.now(),
  }) as Mask;
  } as Mask);

export const useMaskStore = createPersistStore(
  { ...DEFAULT_MASK_STATE },
@@ -92,7 +92,7 @@ export const useMaskStore = createPersistStore(
          ...config.modelConfig,
          ...m.modelConfig,
        },
      }) as Mask,
      } as Mask),
    );
    return userMasks.concat(buildinMasks);
  },

@@ -42,6 +42,9 @@ export const SearchService = {
  },

  search(text: string) {
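    // Skip prompt suggestions for Midjourney commands.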
    if (text.startsWith("mj ")) {
      return [];
    }
    const userResults = this.userEngine.search(text);
    const builtinResults = this.builtinEngine.search(text);
    return userResults.concat(builtinResults).map((v) => v.item);
@@ -147,7 +150,7 @@ export const usePromptStore = createPersistStore(
    },

    onRehydrateStorage(state) {
      const PROMPT_URL = "./prompts.json";
      const PROMPT_URL = "https://cos.xiaosi.cc/next/public/prompts.json";

      type PromptList = Array<[string, string]>;


@@ -60,8 +60,10 @@ export const useSyncStore = createPersistStore(
    export() {
      const state = getLocalAppState();
      const datePart = isApp
        ? `${new Date().toLocaleDateString().replace(/\//g, '_')} ${new Date().toLocaleTimeString().replace(/:/g, '_')}`
        : new Date().toLocaleString();
        ? `${new Date().toLocaleDateString().replace(/\//g, "_")} ${new Date()
            .toLocaleTimeString()
            .replace(/:/g, "_")}`
        : new Date().toLocaleString();

      const fileName = `Backup-${datePart}.json`;
      downloadAs(JSON.stringify(state), fileName);

@@ -85,35 +85,40 @@ export const useUpdateStore = createPersistStore(
      }));
      if (window.__TAURI__?.notification && isApp) {
        // Check if notification permission is granted
        await window.__TAURI__?.notification.isPermissionGranted().then((granted) => {
          if (!granted) {
            return;
          } else {
            // Request permission to show notifications
            window.__TAURI__?.notification.requestPermission().then((permission) => {
              if (permission === 'granted') {
                if (version === remoteId) {
                  // Show a notification using Tauri
                  window.__TAURI__?.notification.sendNotification({
                    title: "ChatGPT Next Web",
                    body: `${Locale.Settings.Update.IsLatest}`,
                    icon: `${ChatGptIcon.src}`,
                    sound: "Default"
                  });
                } else {
                  const updateMessage = Locale.Settings.Update.FoundUpdate(`${remoteId}`);
                  // Show a notification for the new version using Tauri
                  window.__TAURI__?.notification.sendNotification({
                    title: "ChatGPT Next Web",
                    body: updateMessage,
                    icon: `${ChatGptIcon.src}`,
                    sound: "Default"
                  });
                }
              }
            });
          }
        });
        await window.__TAURI__?.notification
          .isPermissionGranted()
          .then((granted) => {
            if (!granted) {
              return;
            } else {
              // Request permission to show notifications
              window.__TAURI__?.notification
                .requestPermission()
                .then((permission) => {
                  if (permission === "granted") {
                    if (version === remoteId) {
                      // Show a notification using Tauri
                      window.__TAURI__?.notification.sendNotification({
                        title: "ChatGPT Next Web",
                        body: `${Locale.Settings.Update.IsLatest}`,
                        icon: `${ChatGptIcon.src}`,
                        sound: "Default",
                      });
                    } else {
                      const updateMessage =
                        Locale.Settings.Update.FoundUpdate(`${remoteId}`);
                      // Show a notification for the new version using Tauri
                      window.__TAURI__?.notification.sendNotification({
                        title: "ChatGPT Next Web",
                        body: updateMessage,
                        icon: `${ChatGptIcon.src}`,
                        sound: "Default",
                      });
                    }
                  }
                });
            }
          });
      }
      console.log("[Got Upstream] ", remoteId);
    } catch (error) {