Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-10-10 20:16:37 +08:00)

Commit d9338ac2c7
Merge branch 'main' of https://github.com/Yidadaa/ChatGPT-Next-Web
@@ -218,7 +218,7 @@
   overscroll-behavior: none;
 }
 
-.chat-body-title {
+.chat-body-main-title {
   cursor: pointer;
 
   &:hover {
@@ -226,6 +226,12 @@
   }
 }
 
+@media only screen and (max-width: 600px) {
+  .chat-body-title {
+    text-align: center;
+  }
+}
+
 .chat-message {
   display: flex;
   flex-direction: row;
@@ -808,9 +808,9 @@ export function Chat() {
           </div>
         )}
 
-        <div className="window-header-title">
+        <div className={`window-header-title ${styles["chat-body-title"]}`}>
           <div
-            className={`window-header-main-title " ${styles["chat-body-title"]}`}
+            className={`window-header-main-title ${styles["chat-body-main-title"]}`}
            onClickCapture={renameSession}
          >
            {!session.topic ? DEFAULT_TOPIC : session.topic}
@@ -37,7 +37,7 @@ function useHotKey() {
 
   useEffect(() => {
     const onKeyDown = (e: KeyboardEvent) => {
-      if (e.metaKey || e.altKey || e.ctrlKey) {
+      if (e.altKey || e.ctrlKey) {
        if (e.key === "ArrowUp") {
          chatStore.nextSession(-1);
        } else if (e.key === "ArrowDown") {
@@ -54,3 +54,8 @@ export const OpenaiPath = {
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
+export const DEFAULT_SYSTEM_TEMPLATE = `
+You are ChatGPT, a large language model trained by OpenAI.
+Knowledge cutoff: 2021-09
+Current model: {{model}}
+Current time: {{time}}`;
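The templates above rely on `{{...}}` placeholders. As a rough illustration only: the actual substitution is done by fillTemplateWith in the chat store (touched later in this diff) and may differ in detail, but a minimal sketch of how those placeholders could be expanded looks like this:

```ts
// Sketch, not the repository's implementation: expand {{input}}, {{model}},
// and {{time}} in a template string by plain substring replacement.
function fillTemplateSketch(
  input: string,
  vars: { model: string; template?: string },
): string {
  const template = vars.template ?? DEFAULT_INPUT_TEMPLATE;
  const replacements: Record<string, string> = {
    "{{input}}": input,
    "{{model}}": vars.model,
    "{{time}}": new Date().toLocaleString(),
  };
  let output = template;
  for (const [placeholder, value] of Object.entries(replacements)) {
    output = output.replaceAll(placeholder, value);
  }
  return output;
}

// e.g. fillTemplateSketch("", { model: "gpt-3.5-turbo", template: DEFAULT_SYSTEM_TEMPLATE })
// would yield the system prompt with the model name and current time filled in.
```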
@@ -7,7 +7,11 @@ import Locale, { getLang } from "../locales";
 import { showToast } from "../components/ui-lib";
 import { ModelConfig, ModelType, useAppConfig } from "./config";
 import { createEmptyMask, Mask } from "./mask";
-import { DEFAULT_INPUT_TEMPLATE, StoreKey } from "../constant";
+import {
+  DEFAULT_INPUT_TEMPLATE,
+  DEFAULT_SYSTEM_TEMPLATE,
+  StoreKey,
+} from "../constant";
 import { api, RequestMessage } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";
@@ -279,7 +283,7 @@ export const useChatStore = create<ChatStore>()(
       const modelConfig = session.mask.modelConfig;
 
       const userContent = fillTemplateWith(content, modelConfig);
-      console.log("[User Input] fill with template: ", userContent);
+      console.log("[User Input] after template: ", userContent);
 
       const userMessage: ChatMessage = createMessage({
         role: "user",
@@ -312,7 +316,6 @@ export const useChatStore = create<ChatStore>()(
       });
 
       // make request
-      console.log("[User Input] ", sendMessages);
       api.llm.chat({
         messages: sendMessages,
         config: { ...modelConfig, stream: true },
@@ -391,6 +394,27 @@ export const useChatStore = create<ChatStore>()(
       // in-context prompts
       const contextPrompts = session.mask.context.slice();
 
+      // system prompts, to get close to OpenAI Web ChatGPT
+      // only will be injected if user does not use a mask or set none context prompts
+      const shouldInjectSystemPrompts = contextPrompts.length === 0;
+      const systemPrompts = shouldInjectSystemPrompts
+        ? [
+            createMessage({
+              role: "system",
+              content: fillTemplateWith("", {
+                ...modelConfig,
+                template: DEFAULT_SYSTEM_TEMPLATE,
+              }),
+            }),
+          ]
+        : [];
+      if (shouldInjectSystemPrompts) {
+        console.log(
+          "[Global System Prompt] ",
+          systemPrompts.at(0)?.content ?? "empty",
+        );
+      }
+
       // long term memory
       const shouldSendLongTermMemory =
         modelConfig.sendMemory &&
@@ -409,6 +433,7 @@ export const useChatStore = create<ChatStore>()(
       );
 
       // lets concat send messages, including 4 parts:
+      // 0. system prompt: to get close to OpenAI Web ChatGPT
       // 1. long term memory: summarized memory messages
       // 2. pre-defined in-context prompts
       // 3. short term memory: latest n messages
@@ -435,6 +460,7 @@ export const useChatStore = create<ChatStore>()(
 
       // concat all messages
       const recentMessages = [
+        ...systemPrompts,
        ...longTermMemoryPrompts,
        ...contextPrompts,
        ...reversedRecentMessages.reverse(),
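Taken together, the changes above mean a session with no mask context prompts now sends a leading system message. A hedged sketch of what api.llm.chat roughly receives in that case (the values are illustrative, not captured from a real run):

```ts
// Illustrative only: approximate request for a fresh session without mask
// context prompts, after this commit. Handlers (onUpdate/onFinish/onError)
// and the rest of modelConfig are omitted for brevity.
api.llm.chat({
  messages: [
    {
      role: "system",
      content:
        "You are ChatGPT, a large language model trained by OpenAI.\n" +
        "Knowledge cutoff: 2021-09\n" +
        "Current model: gpt-3.5-turbo\n" +
        "Current time: 6/30/2023, 10:00:00 AM",
    },
    { role: "user", content: "Hello!" },
  ],
  config: { ...modelConfig, stream: true },
});
```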
@@ -177,9 +177,9 @@ export const useAppConfig = create<ChatConfigStore>()(
     }),
     {
       name: StoreKey.Config,
-      version: 3.1,
+      version: 3.2,
       migrate(persistedState, version) {
-        if (version === 3.1) return persistedState as any;
+        if (version === 3.2) return persistedState as any;
 
         const state = persistedState as ChatConfig;
         state.modelConfig.sendMemory = true;
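For context on the version bump above: zustand's persist middleware calls migrate(persistedState, version) when a stored state's version differs from the configured one, so configs persisted before this change still pass through the fix-ups below the early return. A minimal sketch of that path, assuming the standard persist contract:

```ts
// Sketch of the migration behavior, mirroring the diff above (not a copy of
// the repository's full migrate function).
const migrateSketch = (persistedState: unknown, version: number) => {
  // stored state already at the current schema version: keep it untouched
  if (version === 3.2) return persistedState;

  // anything older falls through to explicit fix-ups, e.g. forcing the
  // sendMemory default onto configs persisted before the field existed
  const state = persistedState as { modelConfig: { sendMemory: boolean } };
  state.modelConfig.sendMemory = true;
  return state;
};
```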
@@ -25,10 +25,6 @@
   .window-header-sub-title {
     font-size: 14px;
   }
-
-  @media screen and (max-width: 600px) {
-    text-align: center;
-  }
 }
 
 .window-actions {