Skip to content

Commit

Permalink
Update etcd backup to use swift s3 endpoint (rackerlabs#549)
Browse files Browse the repository at this point in the history
Updates the etcd backup script and the docs to reflect
the changes. This switches to the newer boto3 library and
uses the Swift S3 endpoint for storing etcd backups.

JIRA:OSPC-368
  • Loading branch information
sulochan authored Nov 18, 2024
1 parent b733d5e commit 9f923a6
Show file tree
Hide file tree
Showing 4 changed files with 103 additions and 53 deletions.
6 changes: 3 additions & 3 deletions Containerfiles/etcd-backup/Containerfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@ ENV ETCDCTL_ENDPOINTS "https://127.0.0.1:2379"
ENV ETCDCTL_CACERT "/etc/ssl/etcd/ssl/ca.crt"
ENV ETCDCTL_KEY "/etc/ssl/etcd/ssl/healthcheck-client.key"
ENV ETCDCTL_CERT "/etc/ssl/etcd/ssl/healthcheck-client.crt"
ENV S3_ENDPOINT "10.74.8.135"
ENV S3_PORT "8081"
ENV S3_HOST "https://swift.api.sjc3.rackspacecloud.com"
ENV S3_REGION "SJC3"
ENV S3_ACCESS_KEY "abcd"
ENV S3_SECRET_KEY "abcd"

RUN apk add --update --no-cache bash ca-certificates tzdata openssl
RUN pip install boto
RUN pip install boto3 botocore

RUN wget https://github.com/etcd-io/etcd/releases/download/${ETCD_VERSION}/etcd-${ETCD_VERSION}-linux-amd64.tar.gz \
&& tar xzf etcd-${ETCD_VERSION}-linux-amd64.tar.gz \
Expand Down
136 changes: 93 additions & 43 deletions Containerfiles/etcd-backup/backup.py
Original file line number Diff line number Diff line change
@@ -1,73 +1,123 @@
import os
import sys
import boto
import boto.s3.connection
import boto3
from botocore.client import Config
from botocore.exceptions import NoCredentialsError, PartialCredentialsError, ClientError


def get_env_variables():
"""Get environment variables."""
access_key = os.getenv("ACCESS_KEY")
secret_key = os.getenv("SECRET_KEY")
host = os.getenv("S3_HOST")
region = os.getenv("S3_REGION", "SJC3")

port_str = os.getenv("S3_PORT", "8081")
try:
port = int(port_str)
except ValueError:
raise ValueError(
f"Environment variable 'S3_PORT' has an invalid value: {port_str}"
if not all([access_key, secret_key, host]):
print(
"Error: Missing one or more environment variables: ACCESS_KEY, SECRET_KEY, S3_HOST"
)
sys.exit(1)

# Properly convert the 'S3_HOST_SSL' environment variable to a boolean
secure_str = os.getenv("S3_HOST_SSL", "false").lower()
secure = secure_str in ["true", "1", "t", "y", "yes"]

return access_key, secret_key, host, port, secure
return access_key, secret_key, host, region


def create_s3_connection(access_key, secret_key, host, port, secure):
"""Create S3 connection."""
conn = boto.connect_s3(
aws_access_key_id=access_key,
aws_secret_access_key=secret_key,
host=host,
port=port,
is_secure=secure,
calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)
return conn
def create_s3_connection(access_key, secret_key, host, region):
    """Build a Boto3 S3 client for the Swift S3 endpoint and verify it works.

    Args:
        access_key: S3 access key.
        secret_key: S3 secret key.
        host: full endpoint URL (scheme included), passed as ``endpoint_url``.
        region: region name for request signing.

    Returns:
        A ready-to-use S3 client, or ``None`` if construction or the
        connectivity probe fails (the error is printed, never raised).
    """
    client = None
    try:
        client = boto3.client(
            "s3",
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            endpoint_url=host,
            region_name=region,
            # SigV4 signing: commonly required by Swift's S3 middleware.
            config=Config(signature_version="s3v4"),
        )
        # Cheap round-trip to confirm credentials and endpoint are usable.
        client.list_buckets()
    except NoCredentialsError:
        print("Error: No credentials provided.")
    except PartialCredentialsError:
        print("Error: Incomplete credentials provided.")
    except ClientError as e:
        print(f"Client error while creating connection: {e}")
    except Exception as e:
        print(f"Unexpected error creating S3 connection: {e}")
    else:
        return client

    return None


def upload_file_to_bucket(conn, file_to_upload, bucket_name):
"""Upload a file to a specific S3 bucket using Boto3."""
try:
# Check if the bucket exists
conn.head_bucket(Bucket=bucket_name)
except ClientError as e:
print(f"Bucket '{bucket_name}' does not exist or is not accessible.")
return

key = os.path.basename(file_to_upload)

def upload_file_to_bucket(conn, file_to_upload):
"""Upload a file to a specific S3 bucket."""
bucket_name = "etcd-backup-bucket"
bucket = conn.get_bucket(bucket_name)
key = bucket.new_key(os.path.basename(file_to_upload))
key.set_contents_from_filename(file_to_upload)
try:
conn.upload_file(file_to_upload, bucket_name, key)
print(f"File '{file_to_upload}' uploaded successfully to {bucket_name}/{key}.")
except FileNotFoundError:
print(f"Error: File '{file_to_upload}' not found.")
except ClientError as e:
print(f"Client error during upload: {e}")
except Exception as e:
print(f"Unexpected error uploading file: {e}")


def list_all_buckets(conn):
"""List all buckets."""
for bucket in conn.get_all_buckets():
print(
"{name}\t{created}".format(
name=bucket.name,
created=bucket.creation_date,
)
)
"""List all buckets using Boto3."""
try:
response = conn.list_buckets()
for bucket in response["Buckets"]:
print(f"{bucket['Name']}\t{bucket['CreationDate']}")
except ClientError as e:
print(f"Client error listing buckets: {e}")
except Exception as e:
print(f"Unexpected error listing buckets: {e}")


def create_bucket_if_not_exists(conn, bucket_name):
    """Ensure *bucket_name* exists, creating it if the HEAD probe says it is missing.

    Args:
        conn: Boto3 S3 client.
        bucket_name: name of the bucket to check / create.

    All failures are printed to stdout; the function never raises, so the
    caller can proceed to the upload attempt regardless.
    """
    try:
        conn.head_bucket(Bucket=bucket_name)
    except ClientError as e:
        error_code = e.response["Error"]["Code"]
        # Some S3-compatible backends (e.g. Swift's S3 middleware) report a
        # missing bucket as "NoSuchBucket" instead of a bare "404" status
        # string — treat both as "bucket absent" so it actually gets created.
        if error_code in ("404", "NoSuchBucket"):
            try:
                conn.create_bucket(Bucket=bucket_name)
                print(f"Bucket '{bucket_name}' created successfully.")
            except ClientError as create_error:
                print(f"Client error creating bucket: {create_error}")
            except Exception as create_error:
                print(f"Unexpected error creating bucket: {create_error}")
        else:
            # Bucket exists but is inaccessible (e.g. 403) or another error.
            print(f"Error accessing bucket '{bucket_name}': {e}")
    except Exception as e:
        print(f"Unexpected error checking bucket: {e}")


def main():
"""Main function."""
if len(sys.argv) != 2:
print("Usage: python your_script.py <file_to_upload>")
print("Usage: python backup.py <file_to_upload>")
sys.exit(1)

file_to_upload = sys.argv[1]
access_key, secret_key, host, port, secure = get_env_variables()
conn = create_s3_connection(access_key, secret_key, host, port, secure)
upload_file_to_bucket(conn, file_to_upload)
list_all_buckets(conn)
access_key, secret_key, host, region = get_env_variables()
conn = create_s3_connection(access_key, secret_key, host, region)

if not conn:
print("Failed to create S3 connection.")
sys.exit(1)

create_bucket_if_not_exists(conn, "etcd-backups")
upload_file_to_bucket(conn, file_to_upload, "etcd-backups")
# list_all_buckets(conn)


if __name__ == "__main__":
Expand Down
12 changes: 6 additions & 6 deletions base-kustomize/backups/etcd/etcd-backup.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ kind: CronJob
metadata:
name: etcd-backup
spec:
schedule: "0 */3 * * *"
schedule: "0 1 * * *"
successfulJobsHistoryLimit: 1
failedJobsHistoryLimit: 1
concurrencyPolicy: Allow
Expand All @@ -13,7 +13,7 @@ spec:
spec:
initContainers:
- name: etcd-backup
image: csengteam/etcd-backup:v0.0.4
image: csengteam/etcd-backup:v0.0.5
env:
- name: ETCDCTL_API
valueFrom:
Expand Down Expand Up @@ -48,10 +48,10 @@ spec:
readOnly: true
- mountPath: /data/etcd-backup
name: etcd-backup
restartPolicy: OnFailure
restartPolicy: Never
containers:
- name: backup-to-s3
image: csengteam/etcd-backup:v0.0.4
image: csengteam/etcd-backup:v0.0.5
env:
- name: ACCESS_KEY
valueFrom:
Expand All @@ -68,11 +68,11 @@ spec:
secretKeyRef:
name: etcd-backup-secrets
key: S3_HOST
- name: S3_PORT
- name: S3_REGION
valueFrom:
secretKeyRef:
name: etcd-backup-secrets
key: S3_PORT
key: S3_REGION
command: ["/bin/bash", "-c"]
args: ["python /backup.py /data/etcd-backup/etcd-snapshot-$(date +%Y-%m-%d).db"]
volumeMounts:
Expand Down
2 changes: 1 addition & 1 deletion docs/etcd-backup.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ kubectl --namespace openstack \
--from-literal=ACCESS_KEY="sadbq4bcva2392dasflkdsp" \
--from-literal=SECRET_KEY="aldskflkjpoq32ibdsfko23bnalkfdao2" \
--from-literal=S3_HOST="https://swift.api.sjc3.rackspacecloud.com" \
--from-literal=S3_PORT="8081" \
--from-literal=S3_REGION="SJC3" \
--from-literal=ETCDCTL_API="3" \
--from-literal=ETCDCTL_ENDPOINTS="https://127.0.0.1:2379" \
--from-literal=ETCDCTL_CACERT="/etc/ssl/etcd/ssl/ca.pem" \
Expand Down

0 comments on commit 9f923a6

Please sign in to comment.