#!/bin/bash
# pgsql_csv_export_to_s3.sh: export every public PostgreSQL table to a
# gzipped CSV and upload the results to an S3 bucket.
set -euo pipefail  # abort on errors, unset variables, and failed pipelines
# Required environment variables: abort early with a clear message if any is missing
: "${AWS_ACCESS_KEY_ID:?AWS_ACCESS_KEY_ID must be set}"
: "${AWS_SECRET_ACCESS_KEY:?AWS_SECRET_ACCESS_KEY must be set}"
: "${AWS_DEFAULT_REGION:?AWS_DEFAULT_REGION must be set}"
: "${AWS_S3_BUCKET:?AWS_S3_BUCKET must be set}"
: "${DATABASE_URL:?DATABASE_URL must be set}"
# Constants
AWS_CLI_URL="https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip"
EXPORT_DIR="/app/.exports"
TMP_DOWNLOAD_DIR="/tmp"
INSTALL_DIR="/app/.awscli"
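# Usage sketch (every value below is a placeholder, not a default of this
# script; substitute your own credentials, bucket, and connection string):
#   AWS_ACCESS_KEY_ID=AKIA... AWS_SECRET_ACCESS_KEY=... \
#   AWS_DEFAULT_REGION=us-east-1 AWS_S3_BUCKET=my-export-bucket \
#   DATABASE_URL=postgres://user:pass@host:5432/dbname \
#   ./pgsql_csv_export_to_s3.sh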
# Ensure the export directory exists
mkdir -p "$EXPORT_DIR"
# Check whether a command is available on PATH
command_exists () {
  command -v "$1" &> /dev/null
}
# Install psql if it's not already present (assumes a Debian/Ubuntu base with sudo)
if ! command_exists psql ; then
  echo "psql not found, installing..."
  sudo apt-get update
  sudo apt-get install -y postgresql-client
fi
# Install the AWS CLI if it's not already present
if ! command_exists aws ; then
  echo "AWS CLI not found, installing..."
  mkdir -p "$TMP_DOWNLOAD_DIR"
  curl --silent --show-error -o "$TMP_DOWNLOAD_DIR/awscliv2.zip" "$AWS_CLI_URL"
  unzip -qq -d "$INSTALL_DIR" "$TMP_DOWNLOAD_DIR/awscliv2.zip"
  rm "$TMP_DOWNLOAD_DIR/awscliv2.zip"
  "$INSTALL_DIR/aws/install" -i "$INSTALL_DIR" -b "$INSTALL_DIR/bin" --update
  export PATH="$INSTALL_DIR/bin:$PATH"
  aws configure set aws_access_key_id "$AWS_ACCESS_KEY_ID"
  aws configure set aws_secret_access_key "$AWS_SECRET_ACCESS_KEY"
  aws configure set default.region "$AWS_DEFAULT_REGION"
  aws --version
fi
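# Note: the AWS CLI also reads AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and
# AWS_DEFAULT_REGION directly from the environment, so the `aws configure set`
# calls above mainly matter if the CLI runs later without these variables set.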
# Variant A (disabled): export an explicit list of tables
# echo "Exporting tables to CSV..."
# declare -a tables=("users" "companies" "products" "plans" "contacts" "calls" "text_messages")
# for TABLE_NAME in "${tables[@]}"; do
#   psql "$DATABASE_URL" -c "\copy (SELECT * FROM ${TABLE_NAME}) TO '${EXPORT_DIR}/${TABLE_NAME}.csv' WITH CSV HEADER"
#   gzip "${EXPORT_DIR}/${TABLE_NAME}.csv"
# done
# Variant B (active): export every public table except some excluded ones
echo "Exporting and compressing tables to CSV..."
declare -a exclude_tables=("ar_internal_metadata")  # Rails' internal metadata table; adjust to your schema
# -t -A prints bare table names only; grep -vFxf drops exact matches against the exclude list
tables=$(psql "$DATABASE_URL" -t -A -c "SELECT tablename FROM pg_tables WHERE schemaname = 'public'" | grep -vFxf <(printf "%s\n" "${exclude_tables[@]}"))
for TABLE_NAME in $tables; do
  psql "$DATABASE_URL" -c "\copy (SELECT * FROM ${TABLE_NAME}) TO '${EXPORT_DIR}/${TABLE_NAME}.csv' WITH CSV HEADER"
  gzip "${EXPORT_DIR}/${TABLE_NAME}.csv"
done
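# Restore sketch (an assumption, not part of this pipeline): a gzipped export
# can be loaded back into an existing table with a matching schema. "users"
# below is only an example table name.
#   gunzip -c "${EXPORT_DIR}/users.csv.gz" | \
#     psql "$DATABASE_URL" -c "\copy users FROM STDIN WITH CSV HEADER"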
# Upload to S3
echo "Uploading CSV files to S3..."
aws s3 cp "$EXPORT_DIR" "s3://$AWS_S3_BUCKET" --recursive
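# Optional sanity check: list what landed in the bucket (disabled by default)
# aws s3 ls "s3://$AWS_S3_BUCKET/"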
# Clean up local files if you don't need to keep them
echo "Cleaning up local files..."
rm -rf "$EXPORT_DIR"
echo "Done. 🍻"