Mirror of https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web.git (synced 2025-10-11 04:26:37 +08:00)

Commit d9338ac2c7: Merge branch 'main' of https://github.com/Yidadaa/ChatGPT-Next-Web

@@ -218,7 +218,7 @@
   overscroll-behavior: none;
 }
 
-.chat-body-title {
+.chat-body-main-title {
   cursor: pointer;
 
   &:hover {

@@ -226,6 +226,12 @@
   }
 }
 
+@media only screen and (max-width: 600px) {
+  .chat-body-title {
+    text-align: center;
+  }
+}
+
 .chat-message {
   display: flex;
   flex-direction: row;

@@ -808,9 +808,9 @@ export function Chat() {
       </div>
     )}
 
-    <div className="window-header-title">
+    <div className={`window-header-title ${styles["chat-body-title"]}`}>
       <div
-        className={`window-header-main-title " ${styles["chat-body-title"]}`}
+        className={`window-header-main-title ${styles["chat-body-main-title"]}`}
         onClickCapture={renameSession}
       >
         {!session.topic ? DEFAULT_TOPIC : session.topic}

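For context on the className change above: the old template literal carried a stray double quote, so a literal " ended up inside the rendered class attribute. A minimal sketch of the difference, using hypothetical hashed class names in place of the real CSS-module mapping:

    // Hypothetical stand-in for the CSS-module `styles` object:
    const styles = {
      "chat-body-title": "chat-body-title-abc123",
      "chat-body-main-title": "chat-body-main-title-abc123",
    };

    // Old: the stray quote leaks into the class string.
    const before = `window-header-main-title " ${styles["chat-body-title"]}`;
    // => 'window-header-main-title " chat-body-title-abc123'

    // New: a clean class list, pointing at the renamed SCSS class.
    const after = `window-header-main-title ${styles["chat-body-main-title"]}`;
    // => 'window-header-main-title chat-body-main-title-abc123'
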
@@ -37,7 +37,7 @@ function useHotKey() {
 
   useEffect(() => {
     const onKeyDown = (e: KeyboardEvent) => {
-      if (e.metaKey || e.altKey || e.ctrlKey) {
+      if (e.altKey || e.ctrlKey) {
         if (e.key === "ArrowUp") {
           chatStore.nextSession(-1);
         } else if (e.key === "ArrowDown") {

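A sketch of the hook after this change; the listener registration, cleanup, and the ArrowDown argument are assumed from surrounding code that this hunk does not show. Dropping e.metaKey means the session-switching arrows now respond only to Alt/Ctrl combinations, so Cmd+Arrow on macOS no longer switches sessions.

    useEffect(() => {
      const onKeyDown = (e: KeyboardEvent) => {
        if (e.altKey || e.ctrlKey) {
          if (e.key === "ArrowUp") {
            chatStore.nextSession(-1); // previous session
          } else if (e.key === "ArrowDown") {
            chatStore.nextSession(1); // next session (assumed symmetric branch)
          }
        }
      };
      window.addEventListener("keydown", onKeyDown);
      return () => window.removeEventListener("keydown", onKeyDown);
    }, []);
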
@@ -54,3 +54,8 @@ export const OpenaiPath = {
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
+export const DEFAULT_SYSTEM_TEMPLATE = `
+You are ChatGPT, a large language model trained by OpenAI.
+Knowledge cutoff: 2021-09
+Current model: {{model}}
+Current time: {{time}}`;

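The new template relies on {{model}} and {{time}} placeholders. A hypothetical sketch of how such placeholders could be expanded; the real fillTemplateWith referenced in the store hunks below is not part of this diff:

    // Replace each {{name}} placeholder with its value; unknown placeholders stay as-is.
    function fillTemplate(template: string, vars: Record<string, string>): string {
      return Object.entries(vars).reduce(
        (output, [name, value]) => output.replaceAll(`{{${name}}}`, value),
        template,
      );
    }

    // Example usage with the new constant (values are illustrative):
    // fillTemplate(DEFAULT_SYSTEM_TEMPLATE, {
    //   model: "gpt-3.5-turbo",
    //   time: new Date().toLocaleString(),
    // });
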
@@ -7,7 +7,11 @@ import Locale, { getLang } from "../locales";
 import { showToast } from "../components/ui-lib";
 import { ModelConfig, ModelType, useAppConfig } from "./config";
 import { createEmptyMask, Mask } from "./mask";
-import { DEFAULT_INPUT_TEMPLATE, StoreKey } from "../constant";
+import {
+  DEFAULT_INPUT_TEMPLATE,
+  DEFAULT_SYSTEM_TEMPLATE,
+  StoreKey,
+} from "../constant";
 import { api, RequestMessage } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";

@@ -279,7 +283,7 @@ export const useChatStore = create<ChatStore>()(
   const modelConfig = session.mask.modelConfig;
 
   const userContent = fillTemplateWith(content, modelConfig);
-  console.log("[User Input] fill with template: ", userContent);
+  console.log("[User Input] after template: ", userContent);
 
   const userMessage: ChatMessage = createMessage({
     role: "user",

@@ -312,7 +316,6 @@ export const useChatStore = create<ChatStore>()(
   });
 
   // make request
-  console.log("[User Input] ", sendMessages);
   api.llm.chat({
     messages: sendMessages,
     config: { ...modelConfig, stream: true },

@@ -391,6 +394,27 @@ export const useChatStore = create<ChatStore>()(
   // in-context prompts
   const contextPrompts = session.mask.context.slice();
 
+  // system prompts, to get close to OpenAI Web ChatGPT
+  // only will be injected if user does not use a mask or set none context prompts
+  const shouldInjectSystemPrompts = contextPrompts.length === 0;
+  const systemPrompts = shouldInjectSystemPrompts
+    ? [
+        createMessage({
+          role: "system",
+          content: fillTemplateWith("", {
+            ...modelConfig,
+            template: DEFAULT_SYSTEM_TEMPLATE,
+          }),
+        }),
+      ]
+    : [];
+  if (shouldInjectSystemPrompts) {
+    console.log(
+      "[Global System Prompt] ",
+      systemPrompts.at(0)?.content ?? "empty",
+    );
+  }
+
   // long term memory
   const shouldSendLongTermMemory =
     modelConfig.sendMemory &&

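An illustration of what the injected message carries once fillTemplateWith has run; the model name and time below are hypothetical example values, not output recorded in this commit:

    // Only present when the mask defines no context prompts of its own.
    const injected = systemPrompts.at(0);
    console.log(injected?.content ?? "empty");
    // You are ChatGPT, a large language model trained by OpenAI.
    // Knowledge cutoff: 2021-09
    // Current model: gpt-3.5-turbo
    // Current time: 6/1/2023, 12:00:00 PM
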
@@ -409,6 +433,7 @@ export const useChatStore = create<ChatStore>()(
   );
 
   // lets concat send messages, including 4 parts:
+  // 0. system prompt: to get close to OpenAI Web ChatGPT
   // 1. long term memory: summarized memory messages
   // 2. pre-defined in-context prompts
   // 3. short term memory: latest n messages

@@ -435,6 +460,7 @@ export const useChatStore = create<ChatStore>()(
 
   // concat all messages
   const recentMessages = [
+    ...systemPrompts,
     ...longTermMemoryPrompts,
     ...contextPrompts,
     ...reversedRecentMessages.reverse(),

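The resulting request order, restated with annotations that map each spread onto the four-part comment above (no behavior beyond the diff is implied):

    const recentMessages = [
      ...systemPrompts,                    // 0. global system prompt (only when the mask has no context)
      ...longTermMemoryPrompts,            // 1. summarized long-term memory
      ...contextPrompts,                   // 2. mask-defined in-context prompts
      ...reversedRecentMessages.reverse(), // 3. latest n messages, back in chronological order
    ];
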
@@ -177,9 +177,9 @@ export const useAppConfig = create<ChatConfigStore>()(
   }),
   {
     name: StoreKey.Config,
-    version: 3.1,
+    version: 3.2,
     migrate(persistedState, version) {
-      if (version === 3.1) return persistedState as any;
+      if (version === 3.2) return persistedState as any;
 
       const state = persistedState as ChatConfig;
       state.modelConfig.sendMemory = true;

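A sketch of how the persisted-config migration gate behaves after this bump (zustand's persist middleware; only the lines visible in the hunk are taken from the commit, the trailing return is assumed):

    migrate(persistedState, version) {
      // Snapshots already written at 3.2 pass through untouched.
      if (version === 3.2) return persistedState as any;

      // Older snapshots (3.1 and below) now fall through again, pick up the new
      // default, and are re-persisted under version 3.2.
      const state = persistedState as ChatConfig;
      state.modelConfig.sendMemory = true;
      return state as any;
    },
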
@@ -25,10 +25,6 @@
   .window-header-sub-title {
     font-size: 14px;
   }
-
-  @media screen and (max-width: 600px) {
-    text-align: center;
-  }
 }
 
 .window-actions {