Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/bin/bash
# backup.sh — dump all MySQL databases, archive them together with the
# webserver files, encrypt the archive with a GPG public key, upload it
# to an S3 bucket, then remove all local copies.
#
# Uses: tar, gpg, mysql/mysqldump, and the aws cli (s3).
#
# ** IMPORTANT **
# ** You need a generated GPG key for $ENCRYPTION_RECIPIENT — otherwise
#    comment out the encryption part.
# ** Execute this script with proper rights: if the webserver dir contains
#    files readable only by root, run it with sudo.
#
# CRONTAB example — back up daily at 04:00 AM:
#   0 4 * * * /usr/local/bin/backup.sh >/dev/null 2>&1

# Abort on any failed step so a broken dump/archive/upload never falls
# through to the cleanup deletes below.
set -euo pipefail

# CREDENTIALS
DB_USER="ENTER_VALUE"
# NOTE(review): --password on the command line is visible in `ps`; prefer a
# ~/.my.cnf / --defaults-extra-file once available.
DB_PASSWORD="ENTER_VALUE"
ENCRYPTION_RECIPIENT="you@gpg-key.com"

# PATHS
BUCKET="ENTER_VALUE"            # name of the bucket without s3://
OUTPUT_PATH="/srv/backup"
TEMP_PATH="$OUTPUT_PATH/temp"   # where dumps live before archiving/encrypting
WWW_PATH="/srv/www"

# OTHER DATA
DATE=$(date +%Y-%m-%d)
ARCHIVE="$OUTPUT_PATH/$DATE-webserver.tar.gz"

# log MESSAGE... — progress line with a per-call timestamp (the old LOG_DATE
# variable was computed once, so every line showed the script's start time).
log() {
  printf '[%s] %s\n' "$(date +%Y-%m-%d:%H:%M:%S)" "$*"
}

# cleanup — remove plaintext dumps and archives for security. Registered on
# EXIT so they are cleared even when an intermediate step fails. The :? guard
# aborts rather than glob-deleting from / if a path variable is ever empty.
cleanup() {
  rm -f -- "${TEMP_PATH:?}"/*.sql \
           "${OUTPUT_PATH:?}"/*.tar.gz \
           "${OUTPUT_PATH:?}"/*.tar.gz.asc 2>/dev/null || true
}
trap cleanup EXIT

# CREATE directories if they do not exist
mkdir -p "$TEMP_PATH"

# GET database names (strip the table border and the "Database" header row)
databases=$(mysql --user="$DB_USER" --password="$DB_PASSWORD" \
  -e "SHOW DATABASES;" | tr -d "| " | grep -v Database)

log "DUMPING DATABASE TABLES"
for db in $databases; do
  # Ignore MySQL/phpMyAdmin system schemas
  if [[ "$db" != "information_schema" && "$db" != _* \
     && "$db" != mysql* && "$db" != "performance_schema" ]]; then
    name="$TEMP_PATH/$DATE-$db"
    log "Dumped: $name"
    mysqldump --force --opt --user="$DB_USER" --password="$DB_PASSWORD" \
      --databases "$db" > "$name.sql"
  fi
done
log "DONE!"

# COMPRESS AND ARCHIVE the dumped SQL files and the webserver tree
log "Archiving data"
tar -czf "$ARCHIVE" -C "$TEMP_PATH" . -C "$WWW_PATH" .
log "DONE!"

# ENCRYPT the archive (-a armor produces the .asc file uploaded below)
log "ENCRYPTING WEBSERVER"
/usr/bin/gpg --yes -e -a -r "$ENCRYPTION_RECIPIENT" "$ARCHIVE"
log "DONE!"

# SEND to bucket; local plaintext/archives are removed by the EXIT trap
aws s3 cp "$ARCHIVE.asc" "s3://$BUCKET"
Add Comment
Please, Sign In to add comment