From cb62cd241181aebd772514ddbedd5741a979a15c Mon Sep 17 00:00:00 2001 From: Diviner Date: Sat, 5 Jul 2025 18:16:35 +0800 Subject: [PATCH 1/3] Update new models for gemini and claude --- app/constant.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/app/constant.ts b/app/constant.ts index d789cbaf7..99749eee9 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -553,7 +553,7 @@ const googleModels = [ "gemini-2.0-flash-thinking-exp-01-21", "gemini-2.0-pro-exp", "gemini-2.0-pro-exp-02-05", - "gemini-2.5-pro-preview-06-05", + "gemini-2.5-pro", ]; const anthropicModels = [ @@ -571,6 +571,8 @@ const anthropicModels = [ "claude-3-5-sonnet-latest", "claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest", + "claude-opus-4-20250514", + "claude-sonnet-4-20250514", ]; const baiduModels = [ From be24b73e2f5f3fa6fae0613d8006a5c420559437 Mon Sep 17 00:00:00 2001 From: Diviner Date: Sat, 5 Jul 2025 19:52:12 +0800 Subject: [PATCH 2/3] Update OpenAI Gemini Claude model list --- app/constant.ts | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/app/constant.ts b/app/constant.ts index 99749eee9..47283fee7 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -527,10 +527,11 @@ const openaiModels = [ "gpt-4-turbo-2024-04-09", "gpt-4-1106-preview", "dall-e-3", - "o1-mini", - "o1-preview", + "o1", + "o1-pro", "o3-mini", "o3", + "o3-pro", "o4-mini", ]; @@ -553,15 +554,11 @@ const googleModels = [ "gemini-2.0-flash-thinking-exp-01-21", "gemini-2.0-pro-exp", "gemini-2.0-pro-exp-02-05", + "gemini-2.5-flash", "gemini-2.5-pro", ]; const anthropicModels = [ - "claude-instant-1.2", - "claude-2.0", - "claude-2.1", - "claude-3-sonnet-20240229", - "claude-3-opus-20240229", "claude-3-opus-latest", "claude-3-haiku-20240307", "claude-3-5-haiku-20241022", From 175f04816a6ca54b4382f8c6771a220d50e15f72 Mon Sep 17 00:00:00 2001 From: Diviner Date: Sat, 5 Jul 2025 21:56:24 +0800 Subject: [PATCH 3/3] Upload a new script allowing build locally and push to 
the remote server and running using Docker --- README.md | 4 +++ scripts/deploy-local-build.sh | 53 +++++++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) create mode 100755 scripts/deploy-local-build.sh diff --git a/README.md b/README.md index aaf212d45..68687da76 100644 --- a/README.md +++ b/README.md @@ -443,6 +443,10 @@ docker run -d -p 3000:3000 \ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh) ``` +### Build locally, push to a remote server, and run with Docker + +Use the deploy-local-build.sh script in the scripts folder as a reference. Be sure to update the environment variables and server-related information before running it. + ## Synchronizing Chat Records (UpStash) | [简体中文](./docs/synchronise-chat-logs-cn.md) | [English](./docs/synchronise-chat-logs-en.md) | [Italiano](./docs/synchronise-chat-logs-es.md) | [日本語](./docs/synchronise-chat-logs-ja.md) | [한국어](./docs/synchronise-chat-logs-ko.md) diff --git a/scripts/deploy-local-build.sh b/scripts/deploy-local-build.sh new file mode 100755 index 000000000..5a97293b9 --- /dev/null +++ b/scripts/deploy-local-build.sh @@ -0,0 +1,53 @@ + # Configuration +SERVER_USER="YOUR_USERNAME" # Replace with your server's username +SERVER_IP="YOUR_SERVER_IP" # Replace with your server's IP address +IMAGE_NAME="nextchat" +TAG="latest" +TAR_FILE="nextchat-image.tar" + +echo "Building NextChat Docker image locally..." + +# Build the Docker image locally for AMD64 platform, change as needed +docker build --platform linux/amd64 -t ${IMAGE_NAME}:${TAG} . + +if [ $? -ne 0 ]; then + echo "Docker build failed!" + exit 1 +fi + +echo "Saving Docker image to tar file..." +# Save the image to a tar file +docker save -o ${TAR_FILE} ${IMAGE_NAME}:${TAG} + +echo "Transferring image to server..." +# Transfer the tar file to server +scp ${TAR_FILE} ${SERVER_USER}@${SERVER_IP}:/tmp/ + +echo "Loading image on server and running container..." 
+# SSH to server and load the image, then run it, change the environment variables as needed +ssh ${SERVER_USER}@${SERVER_IP} << EOF +# Load the Docker image +docker load -i /tmp/${TAR_FILE} + +# Stop existing container if running +docker stop nextchat 2>/dev/null || true +docker rm nextchat 2>/dev/null || true + +# Run the new container +docker run -d -p 3000:3000 \\ + --name nextchat \\ + -e OPENAI_API_KEY=sk-xxxx \\ + -e CODE=your-password \\ + ${IMAGE_NAME}:${TAG} + +# Clean up the tar file +rm -f /tmp/${TAR_FILE} + +echo "NextChat is now running on port 3000!" +echo "You can access it at: http://${SERVER_IP}:3000" +EOF + +# Clean up local tar file +rm -f ${TAR_FILE} + +echo "Deployment complete!"