diff --git a/README.md b/README.md index aaf212d4565..68687da7610 100644 --- a/README.md +++ b/README.md @@ -443,6 +443,10 @@ docker run -d -p 3000:3000 \ bash <(curl -s https://raw.githubusercontent.com/Yidadaa/ChatGPT-Next-Web/main/scripts/setup.sh) ``` +### Build locally, push to the remote server, and run using Docker + +To run the script, use the deploy-local-build.sh file located in the scripts folder as a reference. Be sure to update the environment variables and server-related information as needed. + ## Synchronizing Chat Records (UpStash) | [简体中文](./docs/synchronise-chat-logs-cn.md) | [English](./docs/synchronise-chat-logs-en.md) | [Italiano](./docs/synchronise-chat-logs-es.md) | [日本語](./docs/synchronise-chat-logs-ja.md) | [한국어](./docs/synchronise-chat-logs-ko.md) diff --git a/app/constant.ts b/app/constant.ts index d789cbaf704..47283fee717 100644 --- a/app/constant.ts +++ b/app/constant.ts @@ -527,10 +527,11 @@ const openaiModels = [ "gpt-4-turbo-2024-04-09", "gpt-4-1106-preview", "dall-e-3", - "o1-mini", - "o1-preview", + "o1", + "o1-pro", "o3-mini", "o3", + "o3-pro", "o4-mini", ]; @@ -553,15 +554,11 @@ const googleModels = [ "gemini-2.0-flash-thinking-exp-01-21", "gemini-2.0-pro-exp", "gemini-2.0-pro-exp-02-05", - "gemini-2.5-pro-preview-06-05", + "gemini-2.5-flash", + "gemini-2.5-pro", ]; const anthropicModels = [ - "claude-instant-1.2", - "claude-2.0", - "claude-2.1", - "claude-3-sonnet-20240229", - "claude-3-opus-20240229", "claude-3-opus-latest", "claude-3-haiku-20240307", "claude-3-5-haiku-20241022", @@ -571,6 +568,8 @@ const anthropicModels = [ "claude-3-5-sonnet-latest", "claude-3-7-sonnet-20250219", "claude-3-7-sonnet-latest", + "claude-opus-4-20250514", + "claude-sonnet-4-20250514", ]; const baiduModels = [ diff --git a/scripts/deploy-local-build.sh b/scripts/deploy-local-build.sh new file mode 100755 index 00000000000..5a97293b9cd --- /dev/null +++ b/scripts/deploy-local-build.sh @@ -0,0 +1,53 @@ +# Configuration 
+SERVER_USER="YOUR_USERNAME" # Replace with your server's username +SERVER_IP="YOUR_SERVER_IP" # Replace with your server's IP address +IMAGE_NAME="nextchat" +TAG="latest" +TAR_FILE="nextchat-image.tar" + +echo "Building NextChat Docker image locally..." + +# Build the Docker image locally for AMD64 platform, change as needed +docker build --platform linux/amd64 -t ${IMAGE_NAME}:${TAG} . + +if [ $? -ne 0 ]; then + echo "Docker build failed!" + exit 1 +fi + +echo "Saving Docker image to tar file..." +# Save the image to a tar file +docker save -o ${TAR_FILE} ${IMAGE_NAME}:${TAG} + +echo "Transferring image to server..." +# Transfer the tar file to server +scp ${TAR_FILE} ${SERVER_USER}@${SERVER_IP}:/tmp/ + +echo "Loading image on server and running container..." +# SSH to server and load the image, then run it, change the environment variables as needed +ssh ${SERVER_USER}@${SERVER_IP} << EOF +# Load the Docker image +docker load -i /tmp/${TAR_FILE} + +# Stop existing container if running +docker stop nextchat 2>/dev/null || true +docker rm nextchat 2>/dev/null || true + +# Run the new container +docker run -d -p 3000:3000 \\ + --name nextchat \\ + -e OPENAI_API_KEY=sk-xxxx \\ + -e CODE=your-password \\ + ${IMAGE_NAME}:${TAG} + +# Clean up the tar file +rm -f /tmp/${TAR_FILE} + +echo "NextChat is now running on port 3000!" +echo "You can access it at: http://${SERVER_IP}:3000" +EOF + +# Clean up local tar file +rm -f ${TAR_FILE} + +echo "Deployment complete!"