Merge pull request #86 from sijinhui/dev

Dev
sijinhui 2024-05-07 16:47:21 +08:00 committed by GitHub
commit 8fd87dd383
14 changed files with 325 additions and 80 deletions


@@ -54,7 +54,7 @@ typings
 .env*.local

 # Next.js build output
-.next
+.next/

 # out
 # Nuxt.js build output


@@ -8,9 +8,8 @@ on:
 jobs:
   build:
     name: build test image to aly
-    # runs-on: thinkpad
-    runs-on: self
-    # runs-on: self-hosted
+    runs-on: ubuntu-latest
+    #runs-on: self
     steps:
       - name: Check out the repo
         uses: actions/checkout@v4
@@ -23,7 +22,7 @@ jobs:
           echo ${{ secrets.ALY_DOCKER_PASSWORD }} | docker login registry.cn-hangzhou.aliyuncs.com -u ${{ secrets.ALY_DOCKER_USERNAME }} --password-stdin
           echo "${{ secrets.DOCKER_ENV }}" > .env
           echo "COMPOSE_PROJECT_NAME=test-chatgpt-web" >> .env
-          bash ./start.sh
+          #bash ./start.sh
          # Swap in the test image
           sed -i 's@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web:test@g' docker-compose.yml
           sed -i 's|23000:|23001:|g' docker-compose.yml


@@ -0,0 +1,109 @@
name: DEV DEPLOY TO TX
on:
  workflow_dispatch:
  # push:
  #   branches:
  #     - dev
jobs:
  build:
    name: build test image to aly
    # runs-on: thinkpad
    runs-on: self
    # runs-on: self-hosted
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4
        with:
          ref: 'dev'
          clean: false
          github-server-url: 'https://gh.siji.ci'
      - name: build and deploy to Docker Hub
        run: |
          echo ${{ secrets.ALY_DOCKER_PASSWORD }} | docker login registry.cn-hangzhou.aliyuncs.com -u ${{ secrets.ALY_DOCKER_USERNAME }} --password-stdin
          echo "${{ secrets.DOCKER_ENV }}" > .env
          echo "COMPOSE_PROJECT_NAME=test-chatgpt-web" >> .env
          bash ./start.sh
          # Swap in the test image
          sed -i 's@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web:test@g' docker-compose.yml
          sed -i 's|23000:|23001:|g' docker-compose.yml
          docker-compose build
          docker-compose push
          yes | docker system prune --filter "until=168h"
  deploy:
    name: deploy to the dev server
    runs-on: self
    needs: build
    steps:
      - name: Check out the repo
        uses: actions/checkout@v4
        with:
          clean: true
          ref: 'dev'
          github-server-url: 'https://gh.siji.ci'
      - name: Set up SSH key
        uses: webfactory/ssh-agent@v0.9.0
        with:
          ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
      - name: Sync repository to tx
        run: |
          yes | docker image prune
          ssh -o StrictHostKeyChecking=no -p${{ secrets.SSH_PORT }} root@${{ secrets.TX_SSH_IP }} 'mkdir -p /data/test/ChatGPT-Next-Web'
          rsync -az -e 'ssh -o StrictHostKeyChecking=no -p${{ secrets.SSH_PORT }}' --delete $GITHUB_WORKSPACE/ root@tx.xiaosi.cc:/data/test/ChatGPT-Next-Web
      - name: deploy-to-tx
        uses: appleboy/ssh-action@master
        env:
          SERVER_WORKDIR: ${{ secrets.SERVER_WORKDIR }} # pass the working-directory variable
        with:
          host: ${{ secrets.TX_SSH_IP }} # server address
          username: root # username
          port: ${{ secrets.SSH_PORT }}
          key: ${{ secrets.SSH_PRIVATE_KEY }} # private key; always pass sensitive values as variables!!!
          envs: SERVER_WORKDIR,ALY_DOCKER_PASSWORD,ALY_DOCKER_USERNAME,DOCKER_ENV # use the working-directory variable
          script: |
            cd /data/test/ChatGPT-Next-Web
            echo "${{ secrets.DOCKER_ENV }}" > .env
            # test branch
            echo "COMPOSE_PROJECT_NAME=test-chatgpt-web" >> .env
            sed -i 's@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web:test@g' docker-compose.yml
            echo ${{ secrets.ALY_DOCKER_PASSWORD }} | docker login registry.cn-hangzhou.aliyuncs.com -u ${{ secrets.ALY_DOCKER_USERNAME }} --password-stdin
            sed -i 's|23000:|23001:|g' docker-compose.yml
            sed -i 's|container_name:|#container_name:|g' docker-compose.yml
            docker network ls | grep -qw chatgpt-ns || docker network create chatgpt-ns
            docker-compose pull && docker-compose up -d
            yes | docker image prune
            rm -rf /www/server/nginx/proxy_cache_dir/* || true
            rm -rf /www/server/nginx/proxy_temp_dir/* || true
      - name: Sync repository to xy-nm
        run: |
          yes | docker image prune
          ssh -o StrictHostKeyChecking=no -p${{ secrets.SSH_PORT }} root@${{ secrets.NM_SSH_IP }} 'mkdir -p /data/test/ChatGPT-Next-Web'
          rsync -az -e 'ssh -o StrictHostKeyChecking=no -p${{ secrets.SSH_PORT }}' --delete $GITHUB_WORKSPACE/ root@xy-nm.xiaosi.cc:/data/test/ChatGPT-Next-Web
      - name: deploy-to-xy-nm
        uses: appleboy/ssh-action@master
        env:
          SERVER_WORKDIR: ${{ secrets.SERVER_WORKDIR }} # pass the working-directory variable
        with:
          host: ${{ secrets.NM_SSH_IP }} # server address
          username: root # username
          port: ${{ secrets.SSH_PORT }}
          key: ${{ secrets.SSH_PRIVATE_KEY }} # private key; always pass sensitive values as variables!!!
          envs: SERVER_WORKDIR,ALY_DOCKER_PASSWORD,ALY_DOCKER_USERNAME,DOCKER_ENV # use the working-directory variable
          script: |
            cd /data/test/ChatGPT-Next-Web
            echo "${{ secrets.DOCKER_ENV }}" > .env
            # test branch
            echo "COMPOSE_PROJECT_NAME=test-chatgpt-web" >> .env
            sed -i 's@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web@image: registry.cn-hangzhou.aliyuncs.com/si-private/chatgpt-next-web:test@g' docker-compose.yml
            echo ${{ secrets.ALY_DOCKER_PASSWORD }} | docker login registry.cn-hangzhou.aliyuncs.com -u ${{ secrets.ALY_DOCKER_USERNAME }} --password-stdin
            sed -i 's|23000:|23001:|g' docker-compose.yml
            sed -i 's|container_name:|#container_name:|g' docker-compose.yml
            docker network ls | grep -qw chatgpt-ns || docker network create chatgpt-ns
            docker-compose pull && docker-compose up -d
            yes | docker image prune
            rm -rf /www/server/nginx/proxy_cache_dir/* || true
            rm -rf /www/server/nginx/proxy_temp_dir/* || true


@@ -1,59 +1,66 @@
 #FROM registry.cn-hangzhou.aliyuncs.com/sijinhui/node:18-alpine AS base
-FROM hub.siji.ci/library/node:20-alpine AS base
+FROM hub.siji.ci/library/node:22.1-alpine AS base
 RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.ustc.edu.cn/g' /etc/apk/repositories
 RUN apk update && apk add --no-cache git tzdata
+RUN apk add --no-cache \
+    vips-dev \
+    fftw-dev \
+    glib-dev \
+    glib \
+    expat-dev
 # Set the timezone environment variable
 ENV TZ=Asia/Chongqing
 # Update and install the timezone tooling
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

-#FROM base AS deps
-#RUN apk add --no-cache libc6-compat g++ make
-#
-#WORKDIR /app
-#
-#COPY package.json ./
-#
+FROM base AS deps
+RUN apk add --no-cache libc6-compat g++ make
+
+WORKDIR /app
+
+COPY package.json ./
+
 #RUN yarn config set registry 'https://registry.npmmirror.com/'
 #RUN yarn config set sharp_binary_host "https://npm.taobao.org/mirrors/sharp"
 #RUN yarn config set sharp_libvips_binary_host "https://npm.taobao.org/mirrors/sharp-libvips"
 #RUN # clean up leftover caches
-#RUN yarn cache clean
-#RUN yarn install
-#
-## Avoid the error below
-#RUN mkdir -p "/app/node_modules/tiktoken"
-#
-#FROM base AS builder
-#
-#ENV OPENAI_API_KEY=""
-#ENV GOOGLE_API_KEY=""
-#ENV CODE=""
-#
-#WORKDIR /app
-#COPY . .
-#COPY --from=deps /app/node_modules ./node_modules
-#
-#RUN yarn build
-#
+RUN yarn cache clean
+RUN yarn install
+
+FROM base AS builder
+
+ENV OPENAI_API_KEY=""
+ENV GOOGLE_API_KEY=""
+ENV CODE=""
+
+WORKDIR /app
+COPY . .
+COPY --from=deps /app/node_modules ./node_modules
+# Avoid the error below
+RUN mkdir -p "/app/node_modules/tiktoken"
+RUN mkdir -p "/app/node_modules/sharp"
+
+RUN yarn build
+
 FROM base AS runner
 WORKDIR /app
-#
-#RUN apk add proxychains-ng
-#
-#ENV PROXY_URL=""
-#ENV OPENAI_API_KEY=""
-#ENV GOOGLE_API_KEY=""
-#ENV CODE=""
-#
-#COPY --from=builder /app/public ./public
-#COPY --from=builder /app/.next/standalone ./
-#COPY --from=builder /app/.next/static ./.next/static
-#COPY --from=builder /app/.next/server ./.next/server
-#
-## One plugin keeps having problems.
-#COPY --from=deps /app/node_modules/tiktoken ./node_modules/tiktoken
-COPY out/ .
+
+RUN apk add proxychains-ng
+
+ENV PROXY_URL=""
+ENV OPENAI_API_KEY=""
+ENV GOOGLE_API_KEY=""
+ENV CODE=""
+
+COPY --from=builder /app/public ./public
+COPY --from=builder /app/.next/standalone ./
+COPY --from=builder /app/.next/static ./.next/static
+COPY --from=builder /app/.next/server ./.next/server
+
+# One plugin keeps having problems.
+COPY --from=builder /app/node_modules/tiktoken ./node_modules/tiktoken
+COPY --from=builder /app/node_modules/sharp ./node_modules/sharp
+#COPY out/ .
 RUN rm -f .env

Dockerfile.linux-build (new file, 83 lines)

@@ -0,0 +1,83 @@
#FROM registry.cn-hangzhou.aliyuncs.com/sijinhui/node:18-alpine AS base
FROM hub.siji.ci/library/node:20-alpine AS base
RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.ustc.edu.cn/g' /etc/apk/repositories
RUN apk update && apk add --no-cache git tzdata
# Set the timezone environment variable
ENV TZ=Asia/Chongqing
# Update and install the timezone tooling
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
#FROM base AS deps
#RUN apk add --no-cache libc6-compat g++ make
#
#WORKDIR /app
#
#COPY package.json ./
#
#RUN yarn config set registry 'https://registry.npmmirror.com/'
#RUN yarn config set sharp_binary_host "https://npm.taobao.org/mirrors/sharp"
#RUN yarn config set sharp_libvips_binary_host "https://npm.taobao.org/mirrors/sharp-libvips"
#RUN # clean up leftover caches
#RUN yarn cache clean
#RUN yarn install
#
## Avoid the error below
#RUN mkdir -p "/app/node_modules/tiktoken"
#
#FROM base AS builder
#
#ENV OPENAI_API_KEY=""
#ENV GOOGLE_API_KEY=""
#ENV CODE=""
#
#WORKDIR /app
#COPY . .
#COPY --from=deps /app/node_modules ./node_modules
#
#RUN yarn build
#
FROM base AS runner
WORKDIR /app
#
#RUN apk add proxychains-ng
#
#ENV PROXY_URL=""
#ENV OPENAI_API_KEY=""
#ENV GOOGLE_API_KEY=""
#ENV CODE=""
#
#COPY --from=builder /app/public ./public
#COPY --from=builder /app/.next/standalone ./
#COPY --from=builder /app/.next/static ./.next/static
#COPY --from=builder /app/.next/server ./.next/server
#
## One plugin keeps having problems.
#COPY --from=deps /app/node_modules/tiktoken ./node_modules/tiktoken
COPY out/ .
RUN rm -f .env
EXPOSE 3000
ENV KEEP_ALIVE_TIMEOUT=30
ENV HOSTNAME=""
CMD if [ -n "$PROXY_URL" ]; then \
export HOSTNAME="127.0.0.1"; \
protocol=$(echo $PROXY_URL | cut -d: -f1); \
host=$(echo $PROXY_URL | cut -d/ -f3 | cut -d: -f1); \
port=$(echo $PROXY_URL | cut -d: -f3); \
conf=/etc/proxychains.conf; \
echo "strict_chain" > $conf; \
echo "proxy_dns" >> $conf; \
echo "remote_dns_subnet 224" >> $conf; \
echo "tcp_read_time_out 15000" >> $conf; \
echo "tcp_connect_time_out 8000" >> $conf; \
echo "localnet 127.0.0.0/255.0.0.0" >> $conf; \
echo "localnet ::1/128" >> $conf; \
echo "[ProxyList]" >> $conf; \
echo "$protocol $host $port" >> $conf; \
cat /etc/proxychains.conf; \
proxychains -f $conf node server.js; \
else \
node server.js; \
fi


@@ -1,12 +1,12 @@
 import { NextRequest, NextResponse } from "next/server";
-import { STORAGE_KEY, internalWhiteWebDavEndpoints } from "../../../constant";
+import { STORAGE_KEY, internalAllowedWebDavEndpoints } from "../../../constant";
 import { getServerSideConfig } from "@/app/config/server";

 const config = getServerSideConfig();

-const mergedWhiteWebDavEndpoints = [
-  ...internalWhiteWebDavEndpoints,
-  ...config.whiteWebDevEndpoints,
+const mergedAllowedWebDavEndpoints = [
+  ...internalAllowedWebDavEndpoints,
+  ...config.allowedWebDevEndpoints,
 ].filter((domain) => Boolean(domain.trim()));

 async function handle(
@@ -24,7 +24,9 @@ async function handle(
   // Validate the endpoint to prevent potential SSRF attacks
   if (
-    !mergedWhiteWebDavEndpoints.some((white) => endpoint?.startsWith(white))
+    !mergedAllowedWebDavEndpoints.some((allowedEndpoint) =>
+      endpoint?.startsWith(allowedEndpoint),
+    )
   ) {
     return NextResponse.json(
       {
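Note: the renamed guard is a plain prefix allowlist; the proxied request is rejected unless the target endpoint starts with one of the merged allowed WebDAV endpoints. A minimal self-contained sketch of that check follows, with illustrative endpoint values and helper name (not repo code):

// Illustrative prefix-based allowlist, mirroring the intent of the renamed check.
const allowedWebDavEndpoints = [
  "https://dav.jianguoyun.com/dav/",
  "https://dav.box.com/dav",
];

function isAllowedWebDavEndpoint(endpoint?: string): boolean {
  return allowedWebDavEndpoints.some(
    (allowed) => endpoint?.startsWith(allowed) ?? false,
  );
}

console.log(isAllowedWebDavEndpoint("https://dav.jianguoyun.com/dav/notes/")); // true
console.log(isAllowedWebDavEndpoint("https://attacker.example/dav/")); // false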


@@ -161,6 +161,13 @@ export class ClaudeApi implements LLMApi {
       };
     });

+    if (prompt[0]?.role === "assistant") {
+      prompt.unshift({
+        role: "user",
+        content: ";",
+      });
+    }
+
     const requestBody: AnthropicChatRequest = {
       messages: prompt,
       stream: shouldStream,
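Note: this guard exists because the Anthropic Messages API expects a conversation to open with a user turn; when the converted history would start with an assistant message, the commit prepends a near-empty ";" user message. A standalone sketch of the same normalization, with an illustrative message type and function name:

// Ensure a message list starts with a user turn before sending it to Claude.
// The Msg type, function name, and sample data are illustrative, not repo code.
type Msg = { role: "user" | "assistant"; content: string };

function ensureLeadingUserMessage(messages: Msg[]): Msg[] {
  if (messages[0]?.role === "assistant") {
    // Same trick as the commit: prepend a near-empty user turn.
    return [{ role: "user", content: ";" }, ...messages];
  }
  return messages;
}

console.log(ensureLeadingUserMessage([{ role: "assistant", content: "Hi" }]));
// -> [{ role: "user", content: ";" }, { role: "assistant", content: "Hi" }]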


@@ -1145,6 +1145,7 @@ function _Chat() {
         if (payload.url) {
           accessStore.update((access) => (access.openaiUrl = payload.url!));
         }
+        accessStore.update((access) => (access.useCustomConfig = true));
       });
     }
   } catch {


@@ -53,6 +53,22 @@ const ACCESS_CODES = (function getAccessCodes(): Set<string> {
   }
 })();

+function getApiKey(keys?: string) {
+  const apiKeyEnvVar = keys ?? "";
+  const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
+  const randomIndex = Math.floor(Math.random() * apiKeys.length);
+  const apiKey = apiKeys[randomIndex];
+  if (apiKey) {
+    // console.log(
+    //   `[Server Config] using ${randomIndex + 1} of ${
+    //     apiKeys.length
+    //   } api key - ${apiKey}`,
+    // );
+  }
+
+  return apiKey;
+}
+
 export const getServerSideConfig = () => {
   if (typeof process === "undefined") {
     throw Error(
@@ -80,36 +96,35 @@ export const getServerSideConfig = () => {
   // const isAzure = !!process.env.AZURE_URL;
   const hasAzure = !!process.env.AZURE_URL;

-  const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
-  const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
-  const randomIndex = Math.floor(Math.random() * apiKeys.length);
-  const apiKey = apiKeys[randomIndex];
+  // const apiKeyEnvVar = process.env.OPENAI_API_KEY ?? "";
+  // const apiKeys = apiKeyEnvVar.split(",").map((v) => v.trim());
+  // const randomIndex = Math.floor(Math.random() * apiKeys.length);
+  // const apiKey = apiKeys[randomIndex];
   // console.log(
   //   `[Server Config] using ${randomIndex + 1} of ${apiKeys.length} api key`,
   // );

-  const whiteWebDevEndpoints = (process.env.WHITE_WEBDEV_ENDPOINTS ?? "").split(
-    ",",
-  );
+  const allowedWebDevEndpoints = (
+    process.env.WHITE_WEBDEV_ENDPOINTS ?? ""
+  ).split(",");

   return {
     baseUrl: process.env.BASE_URL,
-    apiKey,
+    apiKey: getApiKey(process.env.OPENAI_API_KEY),
     openaiOrgId: process.env.OPENAI_ORG_ID,
-    // isAzure,
-    // hasAzure,
     azureUrl: process.env.AZURE_URL ?? "",
-    azureApiKey: process.env.AZURE_API_KEY ?? "",
+    azureApiKey: getApiKey(process.env.AZURE_API_KEY) ?? "",
     azureApiVersion: process.env.AZURE_API_VERSION ?? "",
     azureVoiceKey: process.env.AZURE_VOICE_KEY ?? "",
     isGoogle,
-    googleApiKey: process.env.GOOGLE_API_KEY,
+    googleApiKey: getApiKey(process.env.GOOGLE_API_KEY),
     googleUrl: process.env.GOOGLE_URL,
     isAnthropic,
-    anthropicApiKey: process.env.ANTHROPIC_API_KEY,
+    anthropicApiKey: getApiKey(process.env.ANTHROPIC_API_KEY),
     anthropicApiVersion: process.env.ANTHROPIC_API_VERSION,
     anthropicUrl: process.env.ANTHROPIC_URL,
@@ -128,6 +143,6 @@ export const getServerSideConfig = () => {
     disableFastLink: !!process.env.DISABLE_FAST_LINK,
     customModels,
     defaultModel,
-    whiteWebDevEndpoints,
+    allowedWebDevEndpoints,
   };
 };
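Note: with this change every provider key (OpenAI, Azure, Google, Anthropic) can be supplied as a comma-separated list in its environment variable, and getApiKey picks one entry at random per call, which spreads requests across keys. A minimal sketch of the same idea (helper name and sample keys are placeholders; unlike the committed helper, this sketch also drops empty entries):

// Pick one key at random from a comma-separated environment value.
function pickRandomKey(keys?: string): string | undefined {
  const candidates = (keys ?? "")
    .split(",")
    .map((k) => k.trim())
    .filter((k) => k.length > 0);
  if (candidates.length === 0) return undefined;
  return candidates[Math.floor(Math.random() * candidates.length)];
}

console.log(pickRandomKey("sk-aaa, sk-bbb,sk-ccc")); // one of the three keys, chosen at random
console.log(pickRandomKey(undefined)); // undefined when nothing is configured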


@@ -368,7 +368,7 @@ export const CHAT_PAGE_SIZE = 15;
 export const MAX_RENDER_MSG_COUNT = 45;

 // some famous webdav endpoints
-export const internalWhiteWebDavEndpoints = [
+export const internalAllowedWebDavEndpoints = [
   "https://dav.jianguoyun.com/dav/",
   "https://dav.dropdav.com/",
   "https://dav.box.com/dav",


@@ -27,6 +27,8 @@ import { estimateTokenLength } from "../utils/token";
 import { nanoid } from "nanoid";
 import { createPersistStore } from "../utils/store";
 import { identifyDefaultClaudeModel } from "../utils/checkers";
+import { collectModelsWithDefaultModel } from "../utils/model";
+import { useAccessStore } from "./access";

 export type ChatMessage = RequestMessage & {
   date: string;
@@ -96,9 +98,19 @@ const ChatFetchTaskPool: Record<string, any> = {};
 function getSummarizeModel(currentModel: string) {
   // if it is using gpt-* models, force to use 3.5 to summarize
   if (currentModel.startsWith("gpt")) {
-    return SUMMARIZE_MODEL;
+    const configStore = useAppConfig.getState();
+    const accessStore = useAccessStore.getState();
+    const allModel = collectModelsWithDefaultModel(
+      configStore.models,
+      [configStore.customModels, accessStore.customModels].join(","),
+      accessStore.defaultModel,
+    );
+    const summarizeModel = allModel.find(
+      (m) => m.name === SUMMARIZE_MODEL && m.available,
+    );
+    return summarizeModel?.name ?? currentModel;
   }
-  if (currentModel.startsWith("gemini-pro")) {
+  if (currentModel.startsWith("gemini")) {
     return GEMINI_SUMMARIZE_MODEL;
   }
   return currentModel;
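Note: the summarize-model choice is now availability-aware: a gpt-* chat only switches to the summarize model when that model exists and is marked available in the merged model table, falling back to the current model otherwise, and the Gemini branch broadens from "gemini-pro" to any "gemini*" model. A reduced, self-contained sketch of that fallback (the model names, table shape, and defaults are illustrative; the repo reads them from its constants and stores):

// Availability-aware choice of a cheaper summarization model, with fallback.
type ModelEntry = { name: string; available: boolean };

function pickSummarizeModel(
  currentModel: string,
  allModels: ModelEntry[],
  summarizeModel = "gpt-3.5-turbo", // assumed stand-in for SUMMARIZE_MODEL
  geminiSummarizeModel = "gemini-pro", // assumed stand-in for GEMINI_SUMMARIZE_MODEL
): string {
  if (currentModel.startsWith("gpt")) {
    const candidate = allModels.find(
      (m) => m.name === summarizeModel && m.available,
    );
    return candidate?.name ?? currentModel; // fall back when unavailable
  }
  if (currentModel.startsWith("gemini")) {
    return geminiSummarizeModel;
  }
  return currentModel;
}

console.log(pickSummarizeModel("gpt-4", [{ name: "gpt-3.5-turbo", available: false }])); // "gpt-4"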


@@ -66,14 +66,13 @@ export function collectModelTableWithDefaultModel(
 ) {
   let modelTable = collectModelTable(models, customModels);
   if (defaultModel && defaultModel !== "") {
-    delete modelTable[defaultModel];
     modelTable[defaultModel] = {
+      ...modelTable[defaultModel],
       name: defaultModel,
-      displayName: defaultModel,
       available: true,
       describe: "默认模型",
-      provider:
-        modelTable[defaultModel]?.provider ?? customProvider(defaultModel),
+      // provider:
+      //   modelTable[defaultModel]?.provider ?? customProvider(defaultModel),
       isDefault: true,
     };
   }
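Note: instead of deleting and rebuilding the default model's entry, the table entry is now merged in place, so fields that were already collected (for example the provider) survive while available and isDefault are forced on. A toy illustration of that merge-versus-replace difference (the entry shape and values are simplified examples):

// Merging into an existing record preserves fields that a plain replacement would drop.
type Entry = { name: string; available: boolean; provider?: string; isDefault?: boolean };

const table: Record<string, Entry> = {
  "gpt-4": { name: "gpt-4", available: false, provider: "openai" },
};

const defaultModel = "gpt-4";
table[defaultModel] = {
  ...table[defaultModel], // keep provider and any other previously collected fields
  name: defaultModel,
  available: true,
  isDefault: true,
};

console.log(table["gpt-4"]); // { name: "gpt-4", available: true, provider: "openai", isDefault: true }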


@@ -1,4 +1,12 @@
 import webpack from "webpack";
+// debug build
+// import { createRequire } from 'module';
+// const require = createRequire(import.meta.url);
+// const withBundleAnalyzer = require('@next/bundle-analyzer')({
+//   enabled: process.env.ANALYZE === 'true',
+// });
+
 // import CssMinimizerPlugin from "css-minimizer-webpack-plugin";

 const mode = process.env.BUILD_MODE ?? "standalone";
 console.log("[Next] build mode", mode);
@@ -81,6 +89,9 @@ const nextConfig = {
   experimental: {
     forceSwcTransforms: true,
   },
+  // externals: {
+  //   'sharp': 'commonjs sharp'
+  // },
   swcMinify: true,
 };


@@ -20,8 +20,8 @@
     "@fortaine/fetch-event-source": "^3.0.6",
     "@hello-pangea/dnd": "^16.5.0",
     "@next-auth/prisma-adapter": "^1.0.7",
-    "@next/third-parties": "^14.2.1",
-    "@prisma/client": "5.13.0",
+    "@next/third-parties": "14.2.3",
+    "@prisma/client": "^5.13.0",
     "@svgr/webpack": "^8.1.0",
     "@vercel/analytics": "^1.1.2",
     "@vercel/speed-insights": "^1.0.9",
@@ -35,7 +35,7 @@
     "mermaid": "^10.7.0",
     "microsoft-cognitiveservices-speech-sdk": "^1.36.0",
     "nanoid": "^5.0.3",
-    "next": "^14.2.1",
+    "next": "14.2.3",
     "next-auth": "^4.24.7",
     "node-fetch": "^3.3.1",
     "nodemailer": "^6.9.13",
@@ -53,7 +53,7 @@
     "sharp": "^0.33.3",
     "spark-md5": "^3.0.2",
     "tailwind-merge": "^2.2.1",
-    "tiktoken": "^1.0.13",
+    "tiktoken": "1.0.14",
     "tiny-pinyin": "^1.3.2",
     "use-debounce": "^10.0.0",
     "zustand": "^4.5.0"
@@ -62,8 +62,8 @@
     "@tauri-apps/cli": "1.5.11",
     "@types/bcryptjs": "^2.4.6",
     "@types/cookie": "^0.6.0",
-    "@types/node": "^20.11.30",
-    "@types/nodemailer": "^6.4.14",
+    "@types/node": "20.12.8",
+    "@types/nodemailer": "6.4.15",
     "@types/react": "^18.2.70",
     "@types/react-dom": "^18.2.7",
     "@types/react-highlight-words": "^0.16.7",
@@ -73,7 +73,7 @@
     "cross-env": "^7.0.3",
     "date-fns": "^3.6.0",
     "eslint": "^8.55.0",
-    "eslint-config-next": "^14.2.1",
+    "eslint-config-next": "^14.2.3",
     "eslint-config-prettier": "^9.1.0",
     "eslint-plugin-prettier": "^5.1.3",
     "husky": "^9.0.7",