Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/Cloud-CV/EvalAI
Browse files Browse the repository at this point in the history
  • Loading branch information
Suryansh5545 committed Oct 3, 2023
2 parents c2a4206 + 5cedd72 commit 3b6b31f
Show file tree
Hide file tree
Showing 3 changed files with 46 additions and 17 deletions.
12 changes: 6 additions & 6 deletions apps/challenges/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -3506,9 +3506,9 @@ def create_ec2_instance_by_challenge_pk(request, challenge_pk):
Response object -- Response object with appropriate response code (200/400/403/404)
"""
if request.method == "PUT":
ec2_storage = request.data.get("ec2_storage")
worker_instance_type = request.data.get("worker_instance_type")
worker_image_url = request.data.get("worker_image_url")
ec2_storage = request.data.get("ec2_storage", None)
worker_instance_type = request.data.get("worker_instance_type", None)
worker_image_url = request.data.get("worker_image_url", None)
if not request.user.is_staff:
response_data = {
"error": "Sorry, you are not authorized for access EC2 operations."
Expand All @@ -3523,19 +3523,19 @@ def create_ec2_instance_by_challenge_pk(request, challenge_pk):
}
return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)

if not isinstance(ec2_storage, int):
if ec2_storage and not isinstance(ec2_storage, int):
response_data = {
"error": "Passed value of EC2 storage should be integer."
}
return Response(response_data, status=status.HTTP_400_BAD_REQUEST)

if not isinstance(worker_instance_type, str):
if worker_instance_type and not isinstance(worker_instance_type, str):
response_data = {
"error": "Passed value of worker instance type should be string."
}
return Response(response_data, status=status.HTTP_400_BAD_REQUEST)

if not isinstance(worker_image_url, str):
if worker_image_url and not isinstance(worker_image_url, str):
response_data = {
"error": "Passed value of worker image URL should be string."
}
Expand Down
23 changes: 23 additions & 0 deletions scripts/deployment/delete_old_submissions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
import os
import shutil
import time

# Default directory scanned for stale temporary submission folders.
TMP_DIRECTORY = '/tmp'
# Default maximum age (in seconds) a tmp* folder may reach before deletion.
MAX_AGE_SECONDS = 60


def delete_old_tmp_folders(tmp_directory=TMP_DIRECTORY, max_age_seconds=MAX_AGE_SECONDS):
    """Delete directories named ``tmp*`` under *tmp_directory* older than *max_age_seconds*.

    Intended to be run periodically (e.g. from cron inside the worker
    container) to clear out leftover submission scratch directories.

    Args:
        tmp_directory: Directory to scan (default ``/tmp``).
        max_age_seconds: Age threshold in seconds; folders whose creation
            time (``os.path.getctime``) is older than this are removed.

    Returns:
        list[str]: Paths of the folders that were successfully deleted.
    """
    cutoff_time = time.time() - max_age_seconds
    deleted = []

    for entry in os.listdir(tmp_directory):
        folder_path = os.path.join(tmp_directory, entry)
        # Only temporary submission folders (tmp* directories) are candidates.
        if not (entry.startswith('tmp') and os.path.isdir(folder_path)):
            continue
        try:
            # Check if the folder was created more than max_age_seconds ago.
            folder_creation_time = os.path.getctime(folder_path)
            if folder_creation_time < cutoff_time:
                shutil.rmtree(folder_path)
                print(f"Deleted folder: {folder_path}")
                deleted.append(folder_path)
            else:
                print(f"Skipped folder: {folder_path} (created less than {max_age_seconds} seconds ago)")
        except OSError as err:
            # Another process may remove the folder between the listdir and
            # the stat/rmtree above; don't let one race abort the whole sweep.
            print(f"Skipped folder: {folder_path} ({err})")

    return deleted


if __name__ == "__main__":
    delete_old_tmp_folders()
28 changes: 17 additions & 11 deletions scripts/deployment/deploy_ec2_worker.sh
Original file line number Diff line number Diff line change
@@ -1,32 +1,32 @@
#!/bin/bash

# Step 1: Updating package repository
echo "Step 1/10: Updating package repository"
echo "Step 1/11: Updating package repository"
sudo apt-get update

# Step 2: Cloning EvalAI repository at /home/ubuntu
echo "Step 2/10: Cloning EvalAI repository"
echo "Step 2/11: Cloning EvalAI repository"
cd /home/ubuntu
git clone https://github.com/Cloud-CV/EvalAI.git
cd EvalAI

# Step 3: Installing awscli
echo "Step 3/10: Installing awscli"
echo "Step 3/11: Installing awscli"
sudo apt install awscli -y

# Step 4: Installing docker-compose
echo "Step 4/10: Installing docker-compose"
echo "Step 4/11: Installing docker-compose"
sudo apt install docker-compose -y
sudo groupadd docker

# Step 5: Adding user to docker group
echo "Step 5/10: Adding user to docker group"
echo "Step 5/11: Adding user to docker group"
sudo apt -V install gnupg2 pass -y
sudo usermod -aG docker $USER
newgrp docker

# Step 6: Configuring AWS credentials
echo "Step 6/10: Configuring AWS credentials"
echo "Step 6/11: Configuring AWS credentials"
aws configure set aws_access_key_id ${AWS_ACCESS_KEY_ID}
aws configure set aws_secret_access_key ${AWS_SECRET_ACCESS_KEY}
aws configure set default.region ${AWS_REGION}
Expand All @@ -37,22 +37,23 @@ export TRAVIS_BRANCH=${ENVIRONMENT}
eval $(aws ecr get-login --no-include-email)

# Step 7: Copying Docker environment file
echo "Step 7/10: Copying Docker environment file"
echo "Step 7/11: Copying Docker environment file"
aws s3 cp s3://cloudcv-secrets/evalai/${ENVIRONMENT}/docker_${ENVIRONMENT}.env ./docker/prod/docker_${ENVIRONMENT}.env

if [ "${CUSTOM_WORKER_IMAGE}" = "" ];
then
# Step 8: Pulling worker Docker image
echo "Step 8/10: Pulling worker Docker image"
echo "Step 8/11: Pulling worker Docker image"
docker-compose -f docker-compose-${ENVIRONMENT}.yml pull worker
else
# if using custom image from worker_image_url
echo "Step 8/11: Pulling worker Docker image"
echo "Using custom worker image: ${CUSTOM_WORKER_IMAGE}"
docker pull ${CUSTOM_WORKER_IMAGE}
fi

# Step 9: Running worker Docker container
echo "Step 9/10: Running worker Docker container"
echo "Step 9/11: Running worker Docker container"
if [ "${CUSTOM_WORKER_IMAGE}" = "" ];
then
# If using default image from Step 8
Expand All @@ -62,9 +63,14 @@ else
docker run --name=worker_${QUEUE} -e CHALLENGE_QUEUE=${QUEUE} -e CHALLENGE_PK=${PK} -d ${CUSTOM_WORKER_IMAGE}
fi

# Step 10: Setting up crontab
echo "Step 10/10: Setting up crontab"
# Step 10: Add submission clearing script
echo "Step 10/11: Add submission clearing script to docker container"
docker cp "scripts/deployment/delete_old_submissions.py" "worker_${QUEUE}:/code/delete_old_submissions.py"

# Step 11: Setting up crontab
echo "Step 11/11: Setting up crontab"
echo "@reboot docker restart worker_${QUEUE}" >> workercron
echo "@reboot docker exec -it worker_${QUEUE} python delete_old_submissions.py" >> workercron
crontab workercron
rm workercron

0 comments on commit 3b6b31f

Please sign in to comment.