#!/bin/bash

# Usage:
# bash s3BackUp.sh YOUR_BACKUP_DIRECTORY BACKUP_NAME YOUR_BUCKET MAIL_FROM MAIL_TO (OPTIONAL: S3_FOLDER PROFILE)
# bash s3BackUp.sh /var/www/webdisk/example.com/ example my_bucket info@example.com soporte@example.com backup default
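#
# Prerequisites (assumptions, inferred from the commands used below): the
# aws-cli is installed and configured for the chosen profile, the credentials
# may call s3api and ses, and MAIL_FROM is an SES-verified sender address.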

# Arguments:
readonly BACKUP_PATH_NO_REMOVE=$1
readonly BACKUP_NAME=$2
readonly S3_BUCKET_NAME=$3
readonly MAIL_FROM=$4
readonly MAIL_TO=$5
readonly S3_FOLDER=${6-backup}  # optional, defaults to "backup"
readonly PROFILE=${7-default}   # optional, defaults to the "default" aws-cli profile

# Default:
readonly PREFIX=backup_
readonly DATE=$(date +%d-%m-%Y)
readonly BACKUP_FILE_NAME=${PREFIX}${BACKUP_NAME}_${DATE}.tgz
readonly BACKUP_PATH_FILE=${HOME}/${BACKUP_FILE_NAME}
readonly S3_BUCKET_BACKUP=s3://${S3_BUCKET_NAME}/${S3_FOLDER}/
readonly S3_OUTPUT_BACKUP=${S3_FOLDER}/${BACKUP_FILE_NAME}
readonly TEMP_PARTS=${HOME}/temp-parts
readonly TEMP_EXTRACT=${HOME}/temp-extract

#####
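# finish() removes every temporary artifact; the EXIT trap below runs it on
# any exit path, so a failed run does not leave parts or extracted files behind.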
function finish() {
    rm -r "${TEMP_PARTS}" 2> /dev/null
    rm -r "${TEMP_EXTRACT}" 2> /dev/null
    rm "${BACKUP_PATH_FILE}" 2> /dev/null
}
trap finish EXIT
#####
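# s3-send-mail wraps `aws ses send-email` using the CLI's shorthand --message
# syntax. SES rejects mail from unverified identities, and a sandboxed SES
# account must also verify the recipient address.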
function s3-send-mail() {
    local from=$1
    local to=$2
    local subject=$3
    local text=$4
    aws ses send-email \
        --from "$from" \
        --destination "ToAddresses=${to}" \
        --message "Subject={Data=${subject},Charset=utf8},Body={Text={Data=${text},Charset=utf8},Html={Data=${text},Charset=utf8}}"
}
#####
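# Multipart upload flow: create-multipart-upload returns an UploadId, every
# chunk is sent with upload-part (each returning a per-part ETag), and
# complete-multipart-upload assembles the object from a JSON manifest of
# {ETag, PartNumber} pairs.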
function s3-multipart-upload() {
    # jq is needed to parse the aws-cli JSON responses; install only if missing
    command -v jq > /dev/null || sudo apt-get install -y -qq jq
    local filePath=$1   # file to upload
    local bucket=$2     # name of S3 bucket
    local s3Folder=$3   # destination key of the file in S3
    local dirParts=$4   # local folder where the parts of the file are created
    local profile=${5-default} # configuration profile of aws-cli

    # Part size: 90 MB per part. AWS recommends multipart uploads for objects
    # over 100 MB; every part except the last must be at least 5 MB.
    mbSplitSize=90
    ((partSize = mbSplitSize * 1000000))

    # Get main file size
    echo "Preparing $filePath for multipart upload"
    fileSize=$(wc -c < "$filePath")
    ((parts = (fileSize + partSize - 1) / partSize))

    # Get main file hash
    mainMd5Hash=$(openssl md5 -binary "$filePath" | base64)

    # Make directory to store temporary parts
    echo "Splitting $filePath into $parts temporary parts"
    rm -r "${dirParts}" 2> /dev/null
    mkdir -p "${dirParts}"
    cd "${dirParts}" || exit 1
    # filePath must be absolute, since the working directory just changed
    split -b "$partSize" "$filePath"

    # Create multipart upload
    echo "Initiating multipart upload for $filePath"
    uploadId=$(aws s3api create-multipart-upload --bucket "$bucket" --key "$s3Folder" --metadata md5="$mainMd5Hash" --profile "$profile" | jq -r '.UploadId')

    # Generate the fileparts.json file that will be used at the end of the multipart upload
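    # The manifest ends up as {"Parts":[{"ETag":"<part-md5>","PartNumber":1},...]};
    # the quotes around each ETag value come from S3 itself, which returns
    # ETags already wrapped in double quotes.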
    jsonData="{\"Parts\":["
    local index=0
    for file in *
    do
        ((index++))
        echo "Uploading part $index of $parts..."
        eTag=$(aws s3api upload-part --bucket "$bucket" --key "$s3Folder" --part-number "$index" --body "$file" --upload-id "$uploadId" --profile "$profile" | jq -r '.ETag')
        jsonData+="{\"ETag\":$eTag,\"PartNumber\":$index}"

        if (( index == parts ))
        then
            jsonData+="]}"
        else
            jsonData+=","
        fi
    done
    jq -n "$jsonData" > fileparts.json
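
    # Note: the final ETag of a multipart object is not the MD5 of the whole
    # file; S3 derives it from the MD5s of the individual parts, so the check
    # below only verifies that complete-multipart-upload returned an ETag.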
    # Complete multipart upload, check the ETag to verify success
    mainEtag=$(aws s3api complete-multipart-upload --multipart-upload file://fileparts.json --bucket "$bucket" --key "$s3Folder" --upload-id "$uploadId" --profile "$profile" | jq -r '.ETag')
    if [[ -n "$mainEtag" && "$mainEtag" != "null" ]]
    then
        echo "Successfully uploaded: $filePath to S3 bucket: $bucket"
    else
        echo "Something went wrong! $filePath was not uploaded to S3 bucket: $bucket"
        # SEND FAULT REPORT
        s3-send-mail "${MAIL_FROM}" "${MAIL_TO}" "ALERT BACKUP FAULT - ${BACKUP_NAME}" "Verify the sending of file parts to the AWS S3 service"
        exit 1
    fi

    # Clean up the temporary parts (leave the directory before removing it)
    cd ..
    rm -r "${dirParts}"
}
#####
function main() {
    # Remove any leftover archive from a previous run
    rm "${BACKUP_PATH_FILE}" 2> /dev/null

    # CREATE BACKUP ARCHIVE
    tar czvf "${BACKUP_PATH_FILE}" "${BACKUP_PATH_NO_REMOVE}"
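    # The v flag makes tar list every file; for quiet cron runs it can be
    # dropped ("tar czf").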

    # VERIFY BACKUP ARCHIVE
    rm -r "${TEMP_EXTRACT}" 2> /dev/null
    mkdir -p "${TEMP_EXTRACT}"

    if ! tar xzf "${BACKUP_PATH_FILE}" --directory "${TEMP_EXTRACT}" > /dev/null
    then
        echo "File is corrupted ... ${BACKUP_PATH_FILE}"
        # SEND FAULT REPORT
        s3-send-mail "${MAIL_FROM}" "${MAIL_TO}" "ALERT BACKUP FAULT - ${BACKUP_NAME}" "Check backup compression"
        exit 1
    fi

    rm -r "${TEMP_EXTRACT}"

    # SEND NEW BACKUP TO S3
    s3-multipart-upload "${BACKUP_PATH_FILE}" "${S3_BUCKET_NAME}" "${S3_OUTPUT_BACKUP}" "${TEMP_PARTS}" "${PROFILE}"

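    # `aws s3 ls` prints one object per line as "date time size key"; the loop
    # below parses columns 1-2 (the timestamp) and column 4 (the key) to drop
    # backups older than seven days.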
    # DELETE OLD BACKUPS IN S3 (reusing the same aws-cli profile as the upload)
    aws s3 ls "${S3_BUCKET_BACKUP}" --profile "${PROFILE}" | while read -r line
    do
        strCreateDate=$(echo "$line" | awk '{print $1" "$2}')
        if date -d "${strCreateDate}" > /dev/null 2>&1
        then
            createDate=$(date --date "$strCreateDate" +%s)
            olderThan=$(date --date "7 days ago" +%s)
            if [[ $createDate -lt $olderThan ]]
            then
                filePath=$(echo "$line" | awk '{print $4}')

                if [[ $filePath != "" ]]
                then
                    aws s3 rm "${S3_BUCKET_BACKUP}${filePath}" --profile "${PROFILE}"
                fi
            fi
        fi
    done
}
#####

main
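
# Example scheduling (an assumption, not part of the original paste): run the
# backup nightly at 03:00 via cron, adjusting the path to where the script lives.
# 0 3 * * * bash /usr/local/bin/s3BackUp.sh /var/www/webdisk/example.com/ example my_bucket info@example.com soporte@example.com backup default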