Skip to content

Instantly share code, notes, and snippets.

@chirag-chhajed
Created July 26, 2024 06:52
Show Gist options
  • Save chirag-chhajed/898106b597bd7f157a363f5c53b9f240 to your computer and use it in GitHub Desktop.
Sample bash script that backs up your MySQL databases and uploads them to AWS S3
#!/bin/bash
# MySQL backup script: dumps each database, gzips it, and uploads it to S3.
#
# Prerequisites:
#   - The AWS CLI must be installed and configured first.
#     https://sst.dev/chapters/create-an-iam-user.html
#     (the link above walks through creating an IAM user and configuring the AWS CLI)
#   - This script assumes the MySQL server runs on the same machine (mysqldump's
#     default). To back up a remote server, add host and port to the mysqldump
#     command, e.g.:
#       mysqldump -h hostname -P port -u "$DB_USER" -p"$DB_PASS" "$db_name" | gzip > "$backup_file"

# Fail on unset variables; make a pipeline's exit status reflect any failing stage
# (so a mysqldump failure is not masked by gzip succeeding).
set -u -o pipefail

# MySQL credentials.
# NOTE(review): passing the password via -p on the command line exposes it to
# other local users via `ps`; prefer ~/.my.cnf or --defaults-extra-file in production.
DB_USER="your_db_user"
DB_PASS="your_db_password"
DB_NAME_1="DB_NAME_1"
DB_NAME_2="DB_NAME_2"
DB_NAME_3="DB_NAME_3"

# Local directory where dumps are written before upload.
BACKUP_DIR="/storage/backup"

# AWS S3 details.
S3_BUCKET="your_aws_s3_bucket_name"
S3_PREFIX="mysql_backups" # Optional: S3 prefix for organizing backups

# Create the backup directory if it doesn't exist.
mkdir -p "$BACKUP_DIR"

# One timestamp shared by every backup file produced in this run.
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
#######################################
# Back up one database to a gzipped SQL file and upload it to S3.
# Globals:   DB_USER, DB_PASS, BACKUP_DIR, TIMESTAMP, S3_BUCKET, S3_PREFIX (read)
# Arguments: $1 - name of the database to back up
# Outputs:   progress and status messages to stdout
# Returns:   0 on success (or upload-only failure, matching original behavior),
#            1 if the dump itself fails
#######################################
backup_and_upload() {
  local db_name=$1
  local backup_file="$BACKUP_DIR/${db_name}_${TIMESTAMP}.sql.gz"

  echo "Backing up $db_name..."
  mysqldump -u "$DB_USER" -p"$DB_PASS" "$db_name" | gzip > "$backup_file"
  # $? after the pipeline is gzip's status, which almost always succeeds even
  # when mysqldump fails — check mysqldump's own status via PIPESTATUS so we
  # never upload an empty/truncated archive and call it a success.
  if [ "${PIPESTATUS[0]}" -ne 0 ]; then
    echo "Error backing up $db_name"
    rm -f -- "$backup_file"
    return 1
  fi

  echo "Uploading $db_name backup to S3..."
  if aws s3 cp "$backup_file" "s3://${S3_BUCKET}/${S3_PREFIX}/${db_name}_${TIMESTAMP}.sql.gz"; then
    echo "Successfully backed up and uploaded $db_name"
  else
    echo "Error backing up or uploading $db_name"
  fi
}
# Backup and upload each database.
backup_and_upload "$DB_NAME_1"
backup_and_upload "$DB_NAME_2"
backup_and_upload "$DB_NAME_3"

# Delete local backups older than 7 days.
echo "Cleaning up local backups older than 7 days..."
find "$BACKUP_DIR" -name "*.sql.gz" -type f -mtime +7 -delete

# Delete S3 objects older than 7 days.
# NOTE(review): `date -d` is GNU-specific; on BSD/macOS this needs `date -j -f`.
echo "Cleaning up S3 backups older than 7 days..."
# The cutoff is loop-invariant, so compute it once instead of per line.
olderThan=$(date -d "-7 days" +%s)
# Each `aws s3 ls` line looks like: "2024-07-26 06:52:00     12345 name.sql.gz"
aws s3 ls "s3://${S3_BUCKET}/${S3_PREFIX}/" | while read -r line; do
  createDate=$(echo "$line" | awk '{print $1" "$2}')
  createDate=$(date -d "$createDate" +%s)
  if [[ $createDate -lt $olderThan ]]; then
    fileName=$(echo "$line" | awk '{print $4}')
    if [[ -n $fileName ]]; then
      aws s3 rm "s3://${S3_BUCKET}/${S3_PREFIX}/$fileName"
      echo "Deleted old S3 backup: $fileName"
    fi
  fi
done

echo "Backup process completed"
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment