Commit 32ca122

Merge pull request #122 from ursais/max
[IMP] backup: Support different platform for filestore, backup and remote.
2 parents: 762dfa7 + 052a26a

2 files changed (+159, -37)

backup/entrypoint.sh (+91, -27)

@@ -7,7 +7,6 @@ set -e
 
 # Environment variables
 : ${RUNNING_ENV:='dev'}
-: ${PLATFORM:='do'}
 # PostgreSQL
 : ${PGHOST:='localhost'}
 : ${PGPORT:=5432}
@@ -16,18 +15,45 @@ set -e
 : ${PGPASSWORD:='postgres'}
 : ${PGDEFAULTDB:='postgres'}
 : ${PGSSLMODE:='prefer'}
-# AWS / DO
-: ${AWS_HOST:='false'}
-: ${AWS_REGION:='false'}
-: ${AWS_ACCESS_KEY_ID:='false'}
-: ${AWS_SECRET_ACCESS_KEY:='false'}
-: ${AWS_BUCKETNAME:='false'}
-# Azure
-: ${AZURE_STORAGE_ACCOUNT_URL:='false'}
+# Filestore
+: ${FILESTORE_PLATFORM:='aws'}
+## AWS / DO
+: ${FILESTORE_AWS_HOST:='false'}
+: ${FILESTORE_AWS_REGION:='false'}
+: ${FILESTORE_AWS_ACCESS_KEY_ID:='false'}
+: ${FILESTORE_AWS_SECRET_ACCESS_KEY:='false'}
+: ${FILESTORE_AWS_BUCKETNAME:='false'}
+## Azure
+: ${FILESTORE_AZURE_STORAGE_ACCOUNT_URL:='false'}
+# BACKUP
+: ${BACKUP_PLATFORM:='aws'}
+## AWS / DO
+: ${BACKUP_AWS_HOST:='false'}
+: ${BACKUP_AWS_REGION:='false'}
+: ${BACKUP_AWS_ACCESS_KEY_ID:='false'}
+: ${BACKUP_AWS_SECRET_ACCESS_KEY:='false'}
+: ${BACKUP_AWS_BUCKETNAME:='false'}
+## Azure
+: ${BACKUP_AZURE_STORAGE_ACCOUNT_URL:='false'}
+# REMOTE
+: ${REMOTE_ENABLED:='false'}
+: ${REMOTE_PLATFORM:='aws'}
+## AWS / DO
+: ${REMOTE_AWS_HOST:='false'}
+: ${REMOTE_AWS_REGION:='false'}
+: ${REMOTE_AWS_ACCESS_KEY_ID:='false'}
+: ${REMOTE_AWS_SECRET_ACCESS_KEY:='false'}
+: ${REMOTE_AWS_BUCKETNAME:='false'}
+## Azure
+: ${REMOTE_AZURE_STORAGE_ACCOUNT_URL:='false'}
+
+export FILESTORE_BUCKET=`echo $FILESTORE_AWS_BUCKETNAME | sed -e "s/{db}/$PGDATABASE/g"`
+export BACKUP_BUCKET=`echo $BACKUP_AWS_BUCKETNAME | sed -e "s/{db}/$PGDATABASE/g"`
+[ "$REMOTE_ENABLED" == "true" ] && export REMOTE_BUCKET=`echo $REMOTE_AWS_BUCKETNAME | sed -e "s/{db}/$PGDATABASE/g"`
 # Date in UTC
 export TODAY=$(date -u +%Y%m%d)
 export YESTERDAY=$(date -d "1 day ago" -u +%Y%m%d)
-export LASTWEEK=$(date -d "1 week ago" -u +%Y%m%d)
+export LASTMONTH=$(date -d "1 month ago" -u +%Y%m%d)
 # For dockerize
 export TEMPLATES=/templates
 
@@ -36,21 +62,51 @@ function config_rclone() {
     echo "Configure rclone"
     mkdir -p $HOME/.config/rclone
    dockerize -template $TEMPLATES/rclone.conf.tmpl:$HOME/.config/rclone/rclone.conf
-    case "$PLATFORM" in
+    [ "$DEBUG" == "1" ] && echo "Rclone configuration file:" && cat $HOME/.config/rclone/rclone.conf
+    # FILESTORE
+    case "$FILESTORE_PLATFORM" in
+        "aws")
+            ;;
+        "azure")
+            ;;
+        "do")
+            export FILESTORE_SPACE=`echo $FILESTORE_AWS_HOST | sed -e "s/.$FILESTORE_AWS_REGION.*$//"`
+            ;;
+        *)
+            echo "I don't know how to configure rclone for $FILESTORE_PLATFORM."
+            exit 1
+            ;;
+    esac
+    # BACKUP
+    case "$BACKUP_PLATFORM" in
         "aws")
-            export SPACE=""
             ;;
         "azure")
-            export SPACE=""
             ;;
         "do")
-            export SPACE=`echo $AWS_HOST | sed -e "s/.$AWS_REGION.*$//"`
+            export BACKUP_SPACE=`echo $BACKUP_AWS_HOST | sed -e "s/.$BACKUP_AWS_REGION.*$//"`
             ;;
         *)
-            echo "I don't know how to configure rclone for $PLATFORM."
+            echo "I don't know how to configure rclone for $BACKUP_PLATFORM."
             exit 1
             ;;
     esac
+    # REMOTE
+    if [ "$REMOTE_ENABLED" == "true" ]; then
+        case "$REMOTE_PLATFORM" in
+            "aws")
+                ;;
+            "azure")
+                ;;
+            "do")
+                export REMOTE_SPACE=`echo $REMOTE_AWS_HOST | sed -e "s/.$REMOTE_AWS_REGION.*$//"`
+                ;;
+            *)
+                echo "I don't know how to configure rclone for $REMOTE_PLATFORM."
+                exit 1
+                ;;
+        esac
+    fi
 }
 
 # Common functions
@@ -62,6 +118,7 @@ function restore_odoo_database() {
         UPDATE ir_cron SET active = 'f';
         UPDATE ir_mail_server SET active = 'f';
         UPDATE fetchmail_server SET active = 'f';
+        UPDATE ir_config_parameter SET value = 'null' WHERE key = 'database.uuid';
     " $PGDATABASE
 }
 
@@ -71,13 +128,20 @@ function backup() {
         "odoo")
            echo "Dump the database $PGDATABASE"
            pg_dump --clean $PGDATABASE | gzip > /tmp/$RUNNING_ENV-$PGDATABASE-$TODAY.sql.gz
-            echo "Push it to the container"
-            rclone copy /tmp/$RUNNING_ENV-$PGDATABASE-$TODAY.sql.gz remote:/$SPACE/backup/
-            echo "Duplicate the filestore"
-            rclone sync remote:/$SPACE/$RUNNING_ENV-$PGDATABASE/ remote:/$SPACE/backup/$RUNNING_ENV-$PGDATABASE-$TODAY/
-            echo "Cleanup last week backup"
-            rclone purge remote:/$SPACE/backup/$RUNNING_ENV-$PGDATABASE-$LASTWEEK/
-            rclone purge remote:/$SPACE/backup/$RUNNING_ENV-$PGDATABASE-$LASTWEEK.sql.gz
+            echo "Push it to backup"
+            rclone copy /tmp/$RUNNING_ENV-$PGDATABASE-$TODAY.sql.gz backup:/$BACKUP_SPACE/$BACKUP_BUCKET/
+            echo "Sync the filestore to backup"
+            rclone sync filestore:/$FILESTORE_SPACE/$FILESTORE_BUCKET/ backup:/$BACKUP_SPACE/$BACKUP_BUCKET/$RUNNING_ENV-$PGDATABASE-$TODAY/
+            echo "Cleanup last month copy on backup"
+            rclone purge backup:/$BACKUP_SPACE/$BACKUP_BUCKET/$RUNNING_ENV-$PGDATABASE-$LASTMONTH/
+            rclone purge backup:/$BACKUP_SPACE/$BACKUP_BUCKET/$RUNNING_ENV-$PGDATABASE-$LASTMONTH.sql.gz
+            if [ $REMOTE_ENABLED == 'true' ]; then
+                echo "Push, sync and cleanup to/on remote"
+                rclone copy /tmp/$RUNNING_ENV-$PGDATABASE-$TODAY.sql.gz remote:/$REMOTE_SPACE/$REMOTE_BUCKET/
+                rclone sync filestore:/$FILESTORE_SPACE/$FILESTORE_BUCKET/ remote:/$REMOTE_SPACE/$REMOTE_BUCKET/$RUNNING_ENV-$PGDATABASE-$TODAY/
+                rclone purge remote:/$REMOTE_SPACE/$REMOTE_BUCKET/$RUNNING_ENV-$PGDATABASE-$LASTMONTH/
+                rclone purge remote:/$REMOTE_SPACE/$REMOTE_BUCKET/$RUNNING_ENV-$PGDATABASE-$LASTMONTH.sql.gz
+            fi
             ;;
         *)
             echo "Backup profile does not exist. I don't know how to backup $1."
@@ -93,12 +157,10 @@ function restore() {
            dropdb --if-exists $PGDATABASE
            echo "Create $PGDATABASE database"
            createdb $PGDATABASE
-            echo "Delete current $PGDATABASE filestore"
-            rclone purge remote:/$SPACE/$RUNNING_ENV-$PGDATABASE/
            echo "Download yesterday's backup"
-            rclone copy remote:/$SPACE/backup/production-master-$YESTERDAY.sql.gz /tmp/
-            echo "Copy the filestore"
-            rclone sync remote:/$SPACE/backup/production-master-$YESTERDAY/ remote:/$SPACE/$RUNNING_ENV-$PGDATABASE/
+            rclone copy backup:/$BACKUP_SPACE/$BACKUP_BUCKET/production-master-$YESTERDAY.sql.gz /tmp/
+            echo "Sync the filestore"
+            rclone sync backup:/$BACKUP_SPACE/$BACKUP_BUCKET/production-master-$YESTERDAY/ filestore:/$FILESTORE_SPACE/$FILESTORE_BUCKET/
            echo "Restore database dump"
            restore_odoo_database
            ;;
@@ -109,6 +171,8 @@
    esac
 }
 
+[ "$DEBUG" == "1" ] && env | sort
+
 config_rclone
 
 $1 $2
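
To make the new path construction concrete, here is a minimal sketch of how the entrypoint derives the rclone targets, using the sed expressions from the diff above; the database, host, region, and bucket values are hypothetical, invented for illustration only:

    # Hypothetical configuration, for illustration only.
    PGDATABASE="master"
    BACKUP_AWS_HOST="mycompany-backups.nyc3.digitaloceanspaces.com"
    BACKUP_AWS_REGION="nyc3"
    BACKUP_AWS_BUCKETNAME="backup-{db}"

    # Same substitutions as entrypoint.sh: "{db}" in the bucket name is replaced
    # by the database name, and for DigitalOcean the "space" is whatever precedes
    # ".$REGION..." in the host.
    BACKUP_BUCKET=$(echo $BACKUP_AWS_BUCKETNAME | sed -e "s/{db}/$PGDATABASE/g")   # backup-master
    BACKUP_SPACE=$(echo $BACKUP_AWS_HOST | sed -e "s/.$BACKUP_AWS_REGION.*$//")    # mycompany-backups

    echo "backup:/$BACKUP_SPACE/$BACKUP_BUCKET/"   # -> backup:/mycompany-backups/backup-master/

The filestore and remote targets are derived the same way from their FILESTORE_* and REMOTE_* counterparts.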

backup/templates/rclone.conf.tmpl (+68, -10)

@@ -1,24 +1,82 @@
-{{ if contains "aws" .Env.PLATFORM }}
-[remote]
+{{ if contains "aws" .Env.FILESTORE_PLATFORM }}
+[filestore]
+type = s3
+provider = AWS
+env_auth = false
+access_key_id = {{ default "" .Env.FILESTORE_AWS_ACCESS_KEY_ID }}
+secret_access_key = {{ default "" .Env.FILESTORE_AWS_SECRET_ACCESS_KEY }}
+region = {{ default "" .Env.FILESTORE_AWS_REGION }}
+location_constraint = {{ default "" .Env.FILESTORE_AWS_REGION }}
+acl = private
+{{ end }}
+{{ if contains "azure" .Env.FILESTORE_PLATFORM }}
+[filestore]
+type = azureblob
+sas_url = {{ default "" .Env.FILESTORE_AZURE_STORAGE_ACCOUNT_URL }}
+{{ end }}
+{{ if contains "do" .Env.FILESTORE_PLATFORM }}
+[filestore]
+type = s3
+provider = DigitalOcean
+env_auth = true
+access_key_id = {{ default "" .Env.FILESTORE_AWS_ACCESS_KEY_ID }}
+secret_access_key = {{ default "" .Env.FILESTORE_AWS_SECRET_ACCESS_KEY }}
+endpoint = {{ default "" .Env.FILESTORE_AWS_REGION }}.digitaloceanspaces.com
+acl = private
+{{ end }}
+
+{{ if contains "aws" .Env.BACKUP_PLATFORM }}
+[backup]
 type = s3
 provider = AWS
+env_auth = false
+access_key_id = {{ default "" .Env.BACKUP_AWS_ACCESS_KEY_ID }}
+secret_access_key = {{ default "" .Env.BACKUP_AWS_SECRET_ACCESS_KEY }}
+region = {{ default "" .Env.BACKUP_AWS_REGION }}
+location_constraint = {{ default "" .Env.BACKUP_AWS_REGION }}
+acl = private
+{{ end }}
+{{ if contains "azure" .Env.BACKUP_PLATFORM }}
+[backup]
+type = azureblob
+sas_url = {{ default "" .Env.BACKUP_AZURE_STORAGE_ACCOUNT_URL }}
+{{ end }}
+{{ if contains "do" .Env.BACKUP_PLATFORM }}
+[backup]
+type = s3
+provider = DigitalOcean
 env_auth = true
-access_key_id = {{ default "" .Env.AWS_ACCESS_KEY_ID }}
-secret_access_key = {{ default "" .Env.AWS_SECRET_ACCESS_KEY }}
+access_key_id = {{ default "" .Env.BACKUP_AWS_ACCESS_KEY_ID }}
+secret_access_key = {{ default "" .Env.BACKUP_AWS_SECRET_ACCESS_KEY }}
+endpoint = {{ default "" .Env.BACKUP_AWS_REGION }}.digitaloceanspaces.com
 acl = private
 {{ end }}
-{{ if contains "azure" .Env.PLATFORM }}
+
+{{ if .Env.REMOTE_ENABLED }}
+{{ if contains "aws" .Env.REMOTE_PLATFORM }}
+[remote]
+type = s3
+provider = AWS
+env_auth = false
+access_key_id = {{ default "" .Env.REMOTE_AWS_ACCESS_KEY_ID }}
+secret_access_key = {{ default "" .Env.REMOTE_AWS_SECRET_ACCESS_KEY }}
+region = {{ default "" .Env.REMOTE_AWS_REGION }}
+location_constraint = {{ default "" .Env.REMOTE_AWS_REGION }}
+acl = private
+{{ end }}
+{{ if contains "azure" .Env.REMOTE_PLATFORM }}
 [remote]
 type = azureblob
-sas_url = {{ default "" .Env.AZURE_STORAGE_ACCOUNT_URL }}
+sas_url = {{ default "" .Env.REMOTE_AZURE_STORAGE_ACCOUNT_URL }}
 {{ end }}
-{{ if contains "do" .Env.PLATFORM }}
+{{ if contains "do" .Env.REMOTE_PLATFORM }}
 [remote]
 type = s3
 provider = DigitalOcean
 env_auth = true
-access_key_id = {{ default "" .Env.AWS_ACCESS_KEY_ID }}
-secret_access_key = {{ default "" .Env.AWS_SECRET_ACCESS_KEY }}
-endpoint = {{ default "" .Env.AWS_REGION }}.digitaloceanspaces.com
+access_key_id = {{ default "" .Env.REMOTE_AWS_ACCESS_KEY_ID }}
+secret_access_key = {{ default "" .Env.REMOTE_AWS_SECRET_ACCESS_KEY }}
+endpoint = {{ default "" .Env.REMOTE_AWS_REGION }}.digitaloceanspaces.com
 acl = private
 {{ end }}
+{{ end }}
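
As a rough usage sketch (hypothetical credentials and region; the dockerize call is the one entrypoint.sh already performs), rendering this template with a DigitalOcean backup target would produce a [backup] remote along these lines:

    # Hypothetical environment, for illustration only.
    export BACKUP_PLATFORM=do
    export BACKUP_AWS_ACCESS_KEY_ID=DO00EXAMPLEKEY
    export BACKUP_AWS_SECRET_ACCESS_KEY=examplesecret
    export BACKUP_AWS_REGION=nyc3

    # Same call as in entrypoint.sh: render the template into rclone's config file.
    dockerize -template /templates/rclone.conf.tmpl:$HOME/.config/rclone/rclone.conf

    # The "do" branch of the template should then render a section roughly like:
    #   [backup]
    #   type = s3
    #   provider = DigitalOcean
    #   env_auth = true
    #   access_key_id = DO00EXAMPLEKEY
    #   secret_access_key = examplesecret
    #   endpoint = nyc3.digitaloceanspaces.com
    #   acl = private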
