#!/bin/sh

# setup app parameters
APP_NAME="${APP_NAME:-predict}"
NAMESPACE="${NAMESPACE:-edge-failure-prediction}"

# setup database parameters
DB_APP_NAME="${DB_APP_NAME:-predict-db}"
DB_HOSTNAME="${DB_HOSTNAME:-${DB_APP_NAME}.${NAMESPACE}.svc.cluster.local}"
DB_DATABASE="${DB_DATABASE:-predict-db}"
DB_USERNAME="${DB_USERNAME:-predict-db}"
DB_PASSWORD="${DB_PASSWORD:-failureislame}"
DB_PORT="${DB_PORT:-5432}"
DB_DATA_PATH="${DB_DATA_PATH:-/var/lib/pgsql/data}"
DB_TABLE="${DB_TABLE:-waterpump}"

# setup kafka parameters
KAFKA_HOSTNAME="${KAFKA_HOSTNAME:-kafka-cluster-kafka-bootstrap.edge-kafka.svc.cluster.local}"

# other parameters
GIT_URL="https://github.com/Enterprise-Neurosystem/edge-failure-prediction.git"
GIT_BRANCH="main"
DB_PATH="data"
APP_LABEL="app.kubernetes.io/part-of=${APP_NAME}"
CONTEXT_DIR="src"
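
# Usage: run this file directly for the OpenShift workshop setup, or source it
# (". bootstrap.sh") to call individual functions such as container_setup_db.
# Any parameter above can be overridden via the environment, e.g. (hypothetical
# values): NAMESPACE=my-project NON_INTERACTIVE=1 sh bootstrap.sh
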
ocp_init(){
  # bail out if there is no active oc login
  oc whoami || exit 0

  echo "NAMESPACE: ${NAMESPACE}"
  echo "Press Ctrl + C if this is not correct"
  sleep 5

  # update openshift context to project
  oc project ${NAMESPACE} || oc new-project ${NAMESPACE}
}
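
# Returns 0 when this file is being sourced and 1 when it is executed directly;
# used at the bottom of the file so "main" only runs on direct execution.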
is_sourced() {
  if [ -n "$ZSH_VERSION" ]; then
    case $ZSH_EVAL_CONTEXT in *:file:*) return 0;; esac
  else # Add additional POSIX-compatible shell names here, if needed.
    case ${0##*/} in dash|-dash|bash|-bash|ksh|-ksh|sh|-sh) return 0;; esac
  fi
  return 1 # NOT sourced.
}
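
# Deploy postgres from the postgresql:12-el8 image stream, set its credentials,
# and back ${DB_DATA_PATH} with a 1G PVC so the data survives pod restarts.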
ocp_setup_db_instance(){
  # setup postgres
  oc new-app \
    --name ${DB_APP_NAME} \
    -n ${NAMESPACE} \
    -l ${APP_LABEL} \
    --image-stream=postgresql:12-el8

  # setup postgres env
  oc set env \
    deployment/${DB_APP_NAME} \
    -n ${NAMESPACE} \
    -e POSTGRESQL_DATABASE=${DB_DATABASE} \
    -e POSTGRESQL_USER=${DB_USERNAME} \
    -e POSTGRESQL_PASSWORD=${DB_PASSWORD}

  # make db persistent
  oc set volume \
    deployment/${DB_APP_NAME} \
    --add \
    --name=${DB_APP_NAME} \
    --mount-path=${DB_DATA_PATH} \
    -t pvc \
    --claim-size=1G \
    --claim-name=${DB_APP_NAME} \
    --overwrite
}
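
# Wait for the database pod to respond, copy the SQL dump and zipped sensor data
# into it, then load both into ${DB_DATABASE} with psql inside the pod.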
ocp_setup_db_data(){
  # SQL_EXISTS=$(printf '\dt "%s"' "${DB_TABLE}")
  # psql -d "${DB_DATABASE}" -c "${SQL_EXISTS}"

  # wait until psql responds inside the db pod
  until oc -n "${NAMESPACE}" exec deployment/"${DB_APP_NAME}" -- psql --version >/dev/null 2>&1
  do
    sleep 10
  done

  POD=$(oc -n "${NAMESPACE}" get pod -l deployment="${DB_APP_NAME}" -o name | sed 's#pod/##')

  echo "copying data to database container..."
  echo "POD: ${POD}"

  oc -n "${NAMESPACE}" cp "${DB_PATH}"/db.sql "${POD}":/tmp
  oc -n "${NAMESPACE}" cp "${DB_PATH}"/sensor.csv.zip "${POD}":/tmp

  cat << COMMAND | oc -n "${NAMESPACE}" exec "${POD}" -- sh -c "$(cat -)"
# you can run the following w/ oc rsh
# this hack just saves you time
cd /tmp
unzip -o sensor.csv.zip
echo 'GRANT ALL ON TABLE waterpump TO "'"${DB_USERNAME}"'" ;' >> db.sql
psql -d ${DB_DATABASE} -f db.sql
COMMAND
}

ocp_print_db_info(){
  # print db hostname for workshop
  echo "The web app requires a running postgres db to function"
  echo "The following hostname is for the database inside OpenShift"
  echo "DB_HOSTNAME: ${DB_HOSTNAME}"
}
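
# Optionally deploy the sensor database; set NON_INTERACTIVE to any non-empty
# value to skip the prompt and answer yes automatically.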
ocp_setup_db(){
  [ -n "${NON_INTERACTIVE}" ] && input=yes

  if [ -z "$input" ]; then
    echo "If you are a workshop participant, the expected answer is: No"
    read -r -p "Setup sensor database in OpenShift? [y/N] " input
  fi

  case $input in
    [yY][eE][sS]|[yY])
      ocp_setup_db_instance
      ocp_setup_db_data
      ocp_print_db_info
      ;;
    [nN][oO]|[nN])
      echo
      ;;
    *)
      echo
      ;;
  esac
}
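
# Build and deploy the prediction web app from source (python:3.8-ubi8 s2i build
# of ${CONTEXT_DIR}), wire it to the database and kafka via env vars, and expose
# it behind an edge-terminated TLS route.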
ocp_setup_app(){
  # setup prediction app
  oc new-app \
    ${GIT_URL}#${GIT_BRANCH} \
    --name ${APP_NAME} \
    -l ${APP_LABEL} \
    -n ${NAMESPACE} \
    --image-stream=python:3.8-ubi8 \
    --context-dir ${CONTEXT_DIR}

  # pass database connection parameters to the app
  oc set env \
    deployment/${APP_NAME} \
    -n ${NAMESPACE} \
    -e DB_HOSTNAME=${DB_HOSTNAME} \
    -e DB_DATABASE=${DB_DATABASE} \
    -e DB_USERNAME=${DB_USERNAME} \
    -e DB_PASSWORD=${DB_PASSWORD}

  # pass kafka bootstrap hostname to the app
  oc set env \
    deployment/${APP_NAME} \
    -n ${NAMESPACE} \
    -e KAFKA_HOSTNAME=${KAFKA_HOSTNAME}

  # create route
  oc expose service \
    ${APP_NAME} \
    -n ${NAMESPACE} \
    -l ${APP_LABEL} \
    --overrides='{"spec":{"tls":{"termination":"edge","insecureEdgeTerminationPolicy":"Redirect"}}}'

  # kludge - some oc versions ignore the --overrides above, so patch TLS onto the route explicitly
  oc patch route \
    ${APP_NAME} \
    -n ${NAMESPACE} \
    --type=merge \
    -p '{"spec":{"tls":{"termination":"edge","insecureEdgeTerminationPolicy":"Redirect"}}}'

  # kludge - raise the route timeout so long-running requests are not cut off
  oc annotate route \
    ${APP_NAME} \
    -n ${NAMESPACE} \
    haproxy.router.openshift.io/timeout=5m \
    --overwrite
}
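
# Apply the kafka kustomization, retrying until it succeeds (the apply may fail
# at first, e.g. while required CRDs are still being registered).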
ocp_setup_kafka(){
  until oc apply -k gitops/kafka ; do : ; done
}
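
# Local (non-OpenShift) alternative: run the database in a container via podman
# or docker, mounting the current directory so the data files are reachable inside.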
container_setup_db_instance(){
  # prefer podman if available, else docker
  PODMAN_CMD=docker
  command -v podman >/dev/null 2>&1 && PODMAN_CMD=podman
  # add the :z volume label on SELinux systems
  command -v getenforce >/dev/null 2>&1 && SELINUX=":z"

  # stop (and, because of --rm, remove) any previous container
  ${PODMAN_CMD} stop "${DB_APP_NAME}"
  sleep 1

  # run db; remove on stop
  # alternative image (requires a registry.redhat.io login):
  #   registry.redhat.io/rhel8/postgresql-12:latest
  ${PODMAN_CMD} run \
    --name "${DB_APP_NAME}" \
    -d --rm \
    -p "${DB_PORT}":5432 \
    -v "$(pwd):/opt/app-root/src${SELINUX}" \
    -e POSTGRESQL_DATABASE="${DB_DATABASE}" \
    -e POSTGRESQL_PASSWORD="${DB_PASSWORD}" \
    -e POSTGRESQL_USER="${DB_USERNAME}" \
    quay.io/sclorg/postgresql-12-c8s:latest

  # wait for container to start
  sleep 10

  # run db data setup inside the container
  ${PODMAN_CMD} exec \
    -it \
    "${DB_APP_NAME}" \
    /bin/bash -c ". scripts/bootstrap.sh; container_setup_db_data"
}
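
# Load the sample sensor data into postgres; intended to run inside the database
# container (see the exec call in container_setup_db_instance above).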
container_setup_db_data(){
  cd "${DB_PATH}"
  cp sensor.csv.zip db.sql /tmp
  cd /tmp
  unzip -o sensor.csv.zip
  echo 'GRANT ALL ON TABLE waterpump TO "'"${DB_USERNAME}"'" ;' >> db.sql
  psql -d "${DB_DATABASE}" -f db.sql
}

container_setup_db(){
  container_setup_db_instance

  # the container was started with --rm, so stopping it also removes it
  echo "To stop the container and clean up run:
podman stop ${DB_APP_NAME}
"
}

main(){
  ocp_init
  ocp_setup_db
  ocp_setup_app
  ocp_setup_kafka
}

is_sourced || main