Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import backup_prop, logging.handlers, os, subprocess, shutil, sys, re, ftplib, tarfile
- from smtplib import SMTP_SSL as SMTP
- from email.mime.text import MIMEText
- from subprocess import CalledProcessError
- from datetime import datetime
- from argparse import ArgumentParser
- from ftplib import FTP
############## Definitions #####################
LOG_DIR = 'backup_logs'                                  # directory for this script's rotating logs
BACKUP_OPT_DIR = 'backup_utils'                          # state directory for incremental backups
BACKUP_OPT_FILE = 'backup_utils/backup_opt'              # stores "<last level>\n<backup tag>"
TAR_BACKUP_META = 'backup_utils/backup_meta.snar'        # tar incremental snapshot (main file store)
KP_TAR_BACKUP_META = 'backup_utils/kp_backup_meta.snar'  # tar incremental snapshot (kipod-proxy)
BACKUP_INPROGRESS = '.in_progress'                       # lock file guarding against concurrent runs
DATE_PATTERN = '%Y-%m-%d_%H-%M'                          # timestamp format used as the backup tag
# BUGFIX: raw string literal — '\d' inside a plain string is an invalid
# escape sequence (DeprecationWarning, later SyntaxError). Pattern parses
# the FTP login banner: group(1) = used Kbytes, group(2) = total Kbytes.
FTP_SIZES_REG_PATTERN = r'.* (\d*) Kbytes used .* (\d*) Kb'
- class BackupFailedException(Exception):pass
- emailSent = False;
def sendMail(msg_body="Backup has failed!"):
    """Send a notification e-mail over SMTP-SSL.

    Connection settings (server, credentials, addresses) come from the
    backup_prop config module.  Sets the module-level ``emailSent`` flag
    on success so the top-level handler skips its duplicate notification.
    """
    # BUGFIX: without the global declaration the assignment at the bottom
    # created a function-local variable, so the module flag checked by the
    # top-level error handler stayed False forever.
    global emailSent
    msg = MIMEText(msg_body)
    msg['Subject'] = 'Cassiopeia Central server backup has failed!'
    msg['From'] = backup_prop.from_addr
    smtp = SMTP()
    smtp.set_debuglevel(True)
    smtp.connect(backup_prop.smtp_server)
    try:
        if backup_prop.is_smtp_authorization:
            smtp.login(backup_prop.smtp_login, backup_prop.smtp_pass)
        smtp.sendmail(backup_prop.from_addr, backup_prop.smtp_send_to, msg.as_string())
    finally:
        # BUGFIX: close the connection even if login/sendmail raises.
        smtp.close()
    emailSent = True
def removeFiles(fileList):
    """Best-effort removal of the given files.

    Paths that are not regular files are skipped; removal errors are
    logged and never propagated to the caller.
    """
    for f in fileList:
        if os.path.isfile(f):
            try:
                logger.debug('Removing file: %s' % f)
                os.remove(f)
            except OSError:
                # BUGFIX: was a bare `except:` that also swallowed
                # SystemExit/KeyboardInterrupt; only filesystem errors
                # should be tolerated here.
                logger.error('Error while removing file %s' % f)
def removeOldBackup(ftp):
    """Delete the oldest backup directory under /backups on the FTP server.

    Directory names sort lexicographically by their timestamp tag, so the
    first entry after sorting is the oldest backup.
    """
    logger.debug('Removing old backup')
    ftp.cwd('/backups')
    files = []
    try:
        files = ftp.nlst()
        files.sort()
    except ftplib.error_perm as resp:
        # BUGFIX: `except X, resp` is Python-2-only syntax; use `as`.
        if str(resp) == "550 No files found":
            print("no files in this directory")
        else:
            raise
    # BUGFIX: when the listing failed (or was empty), `files` used to be
    # undefined/empty and `files[0]` below crashed with NameError/IndexError.
    if not files:
        logger.debug('No old backups found to remove')
        return
    logger.debug('Backup %s will be removed', files[0])
    removeDir(ftp, files[0])
def removeDir(ftp, dirName):
    # Recursively delete a remote FTP directory and everything in it.
    # Works by parsing `ftp.dir()` (LIST) output: first whitespace column is
    # the permission string ('d...' marks a directory), last column the name.
    # NOTE(review): file names containing spaces would be truncated by
    # split() — confirm backup names never contain spaces.
    ftp.cwd(dirName)
    lines = []  # each level has own lines
    ftp.dir(lines.append)  # list current remote dir
    for line in lines:
        parsed = line.split()  # split on whitespace
        permiss = parsed[0]  # assume 'drw... ... filename'
        fname = parsed[-1]
        if fname in ('.', '..'):  # some include cwd and parent
            continue
        elif permiss[0] != 'd':  # simple file: delete
            print('file', fname)
            ftp.delete(fname)
        else:  # directory: recur, del
            print('directory', fname)
            removeDir(ftp, fname)  # clean subdirectory
            print('directory exited')
    # Back out of the (now empty) directory and remove it itself.
    ftp.cwd('..')
    ftp.rmd(dirName)
def unlockFs():
    """Remove the .read-only marker from the remote File Store via ssh.

    On failure, sends a notification mail and raises BackupFailedException.
    """
    logger.debug("Unlocking File Store")
    remote_cmd = 'rm -f ' + backup_prop.remote_fs_path + '/.read-only'
    try:
        subprocess.check_call(['ssh', backup_prop.remote_user_host, remote_cmd])
    except CalledProcessError as cpe:
        logger.error('Error while unlocking File Store: %s' % cpe)
        sendMail("Backup failed! Was unable to unlock File Store!")
        raise BackupFailedException('Error while unlocking File Store!')
    logger.debug("File Store has been unlocked")
def uploadToFtp(requiredSize, backupDir, backupTag, backupName):
    """Upload every file in ``backupDir`` to the backup FTP server.

    First frees space: parses the server's login banner (used/total Kbytes)
    and removes the oldest remote backup until ``requiredSize`` plus a 3 GiB
    safety margin fits.  Files land under /backups/<backupTag>/<backupName>/.

    :param requiredSize: size of the new backup in bytes
    :param backupDir: local directory holding the backup files (ends with '/')
    :param backupTag: timestamp tag of the backup set
    :param backupName: name of this backup (full or level_N)
    """
    # add extra 3Gb to required size as a safe buffer to avoid ftp overload
    safetyMargin = 3221225472
    freeSpace = 0
    logger.debug('Uploading to ftp')
    logger.debug('Start checking free space')
    # BUGFIX: the loop condition used `<=` while the removal branch below
    # used `<`; when freeSpace was exactly equal to the threshold the loop
    # never removed anything and spun forever.
    while freeSpace < (requiredSize + safetyMargin):
        logger.debug('Connecting to FTP to get login message')
        ftp = FTP(backup_prop.ftp)
        welcomeMes = ftp.login(ftp_user, ftp_pass)
        sizes = re.search(FTP_SIZES_REG_PATTERN, welcomeMes)
        if sizes is None:
            # BUGFIX: previously an unparseable banner crashed with a bare
            # AttributeError on sizes.group(); fail with a clear error.
            logger.error('Could not parse quota from FTP login message: %s', welcomeMes)
            ftp.quit()
            raise BackupFailedException('Unable to determine FTP free space!')
        logger.debug('Used space %s Kb', sizes.group(1))
        logger.debug('All space %s Kb', sizes.group(2))
        freeSpace = (int(sizes.group(2)) - int(sizes.group(1))) * 1024
        logger.debug('Available space %s bytes', freeSpace)
        if freeSpace < (requiredSize + safetyMargin):
            logger.debug('Not enough space for new backup: available - %s, required - %s', freeSpace, requiredSize)
            removeOldBackup(ftp)
        logger.debug('Disconnecting FTP')
        ftp.quit()
    logger.debug('Checking free space finished')
    logger.debug('Connecting to ftp to upload backup files')
    ftp = FTP(backup_prop.ftp)
    ftp.login(ftp_user, ftp_pass)
    # Ensure /backups/<tag>/<name> exists, creating levels as needed.
    try:
        ftp.cwd('backups')
    except ftplib.error_perm:
        ftp.mkd('backups')
        ftp.cwd('backups')
    try:
        ftp.mkd(backupTag)
    except ftplib.error_perm:
        # tag directory already exists (incremental runs share it)
        pass
    ftp.cwd(backupTag)
    ftp.mkd(backupName)
    ftp.cwd(backupName)
    for f in os.listdir(backupDir):
        fileName = backupDir + f
        logger.debug('Sending file %s' % fileName)
        # BUGFIX: archives must be opened in binary mode ('rb') for
        # storbinary; text mode can corrupt the data.  `with` also closes
        # the handle even if the transfer fails.
        with open(fileName, 'rb') as sendFile:
            ftp.storbinary('STOR %s' % f, sendFile)
    logger.debug('Disconnecting from ftp')
    ftp.quit()
# Convenience aliases for credentials read from the backup_prop config module.
ftp_user = backup_prop.ftp_user
ftp_pass = backup_prop.ftp_pass
# NOTE(review): db_user/db_pass appear unused below — the mysql/mysqldump
# calls read backup_prop.db_user/db_pass directly; confirm and drop.
db_user = backup_prop.db_user
db_pass = backup_prop.db_pass
############## Configure logging #####################
# File handler: backup_logs/backup.log, rotated daily, 30 old files kept.
if not os.path.isdir(LOG_DIR):
    os.makedirs(LOG_DIR)
logHandler = logging.handlers.TimedRotatingFileHandler(LOG_DIR + '/backup.log', 'd', 1, backupCount=30)
logFormatter = logging.Formatter('%(levelname)s:%(asctime)s - %(message)s')
logHandler.setFormatter(logFormatter)
# basicConfig additionally wires a console handler at DEBUG level.
logging.basicConfig(format='%(levelname)s:%(asctime)s - %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
logger.addHandler(logHandler)
# Force a rollover so every run starts with a fresh log file.
logHandler.doRollover()
################# Check if another backup is in progress #####################
# A lock file guards against two backup runs overlapping; it is removed by
# the top-level `finally` cleanup at the end of the script.
# (Removed stray debug leftover `print os.path` — Python-2-only statement
# that only polluted stdout.)
if os.path.isfile(BACKUP_INPROGRESS):
    logger.error('Another backup process is in progress! Skipping')
    sendMail("Backup skipped: another backup process in progress!")
    sys.exit(1)
# Create the (empty) lock marker file.
open(BACKUP_INPROGRESS, 'w').close()
################# Locking main File Store on remote machine ####################
# Main backup pipeline.  Overall flow:
#   1. lock the remote File Store (.read-only marker) and rsync it locally
#   2. dump MySQL (via paused slave) and kipod-proxy PostgreSQL
#   3. run the helper shell scripts to build a full/incremental tar backup
#   4. prune old local backups; optionally upload the new one to FTP
# Any failure mails a notification and restores the .snar metadata from the
# .bak copies; the `finally` clause always cleans up temp and lock files.
try:
    logger.debug("Locking FIle Store")
    # Create a .read-only marker on the remote host so writers stay away
    # while we rsync a consistent copy.
    try:
        subprocess.check_call(['ssh', backup_prop.remote_user_host, 'touch '+backup_prop.remote_fs_path+'/.read-only'])
    except CalledProcessError as cpe:
        logger.error('Error while locking File Store: %s'%cpe)
        sendMail("Backup failed! Was unable to lock File Store!")
        raise BackupFailedException('Error while locking File Store!')
    logger.debug("File Store has been locked")
    logger.debug("Performing File Store sync")
    # Mirror the remote file store into the local staging copy
    # (--delete keeps the local copy an exact mirror).
    try:
        subprocess.check_call(['rsync', '--delete', '-ae', 'ssh', backup_prop.remote_user_host + ':' + backup_prop.remote_fs_path+'/', backup_prop.file_store_path+'/'])
    except CalledProcessError as cpe:
        logger.error('File Store sync failed: %s'%cpe)
        sendMail("Backup failed! Was unable to sync the File Store! Check if the File Store was unlocked!")
        unlockFs()
        raise BackupFailedException("Error during file store sync!")
    logger.debug("File Store sync completed")
    logger.debug("Dumping MySQL slave")
    # Pause the slave SQL thread so the dump is consistent; the file store
    # can be unlocked as soon as replication is paused.
    try:
        removeFiles(['fulldb.sql'])
        logger.debug("Stopping MySQL slave")
        subprocess.check_call(['mysql', '--user='+backup_prop.db_user, '--password='+backup_prop.db_pass, '-e', 'STOP SLAVE SQL_THREAD;'])
        logger.debug("MySQL slave was stopped, File Store can be unlocked")
        unlockFs()
        logger.debug("Performing databases dump")
        subprocess.check_call([ 'mysqldump', '--all-databases', '--user='+backup_prop.db_user, '--password='+backup_prop.db_pass, '--result-file=fulldb.sql'])
        logger.debug("Dump completed, trying to start MySQL slave")
        subprocess.check_call(['mysql', '--user='+backup_prop.db_user, '--password='+backup_prop.db_pass, '-e', 'START SLAVE SQL_THREAD;'])
        logger.debug("MySQL slave successfully started")
    except CalledProcessError as cpe:
        logger.error('MySQL dump failed: %s'%cpe)
        sendMail("Backup failed! Was unable to dump MySQL! Check if the File Store was unlocked and MySQL slave is up'n'running!")
        logger.debug("Trying to unlock File Store after failure")
        unlockFs()
        # NOTE(review): logger.debug() with no message raises TypeError,
        # which would mask the intended exception below — confirm and fix.
        logger.debug()
        raise BackupFailedException("Error during MySQL dump!")
    logger.debug("Dumping MySQL slave compleded!")
    ################### KIPOD-PROXY BACKUPING PSQL ############################
    # Dump the kipod-proxy PostgreSQL database (one table excluded).
    logger.debug("Perfoming kipod-porxy psql databases dump")
    try:
        removeFiles(['kp_fulldb.sql'])
        subprocess.check_call(['pg_dump',
            '--dbname=postgresql://' + backup_prop.kp_db_user + ':' + backup_prop.kp_db_pass + '@127.0.0.1:5432/' + backup_prop.kp_db_name,
            '--exclude-table=' + backup_prop.kp_exluded_table,
            '--file=kp_fulldb.sql'])
        logger.debug("Dumping PSQL kipod-proxy dadabases completed!")
    except CalledProcessError as cpe:
        logger.error('PSQL dump failed: %s' % cpe)
        # sendMail("Backup failed! Was unable to dump PSQL!")
        # NOTE(review): same no-argument logger.debug() issue as above.
        logger.debug()
        raise BackupFailedException("Error during PSQL dump!")
    ################# Checking backup level #####################
    # Full backup on Sundays (weekday == 6) or whenever any incremental
    # state file is missing; otherwise bump the level stored in backup_opt.
    logger.debug("Start new back up")
    logger.debug("Checking backup level")
    now = datetime.today()
    weekday = now.weekday()
    if weekday == 6 or not os.path.isfile(BACKUP_OPT_FILE) or not os.path.isfile(TAR_BACKUP_META) \
            or not os.path.isfile(KP_TAR_BACKUP_META):
        logger.debug("Full backup will be made this time")
        incremental = False
    else:
        try:
            # backup_opt layout: line 0 = last level, line 1 = backup tag.
            backupOptFile = open(BACKUP_OPT_FILE, 'r')
            backupOpts = backupOptFile.readlines()
            currentBackupLevel = int(backupOpts[0]) + 1
            incremental = True
            backupTag = backupOpts[1]
            backupOptFile.close()
            # Keep .bak copies of the tar snapshot metadata so it can be
            # rolled back if this run fails (see the except handler below).
            shutil.copy(TAR_BACKUP_META, TAR_BACKUP_META + '.bak')
            shutil.copy(KP_TAR_BACKUP_META, KP_TAR_BACKUP_META + '.bak')
        except:
            backupOptFile.close()
            logger.error('Error while parsing backup options file: %s. Try to remove [%s] directory and start backup again '
                '(full backup will be performed)', BACKUP_OPT_FILE, BACKUP_OPT_DIR)
            sendMail("Backup failed! Error while parsing backup options file!")
            raise BackupFailedException("Error while parsing backup options file!")
    if not incremental:
        # Fresh full backup: level 0, new timestamp tag, reset state dir.
        currentBackupLevel = 0
        backupTag = now.strftime(DATE_PATTERN)
        backupName = '%s_full' % backupTag
        shutil.rmtree(BACKUP_OPT_DIR, True)
        os.mkdir(BACKUP_OPT_DIR)
    else:
        backupName = '%s_level_%s' % (backupTag, currentBackupLevel)
    logger.debug("Backup level: %s", currentBackupLevel)
    logger.debug("Backup tag(timestamp of the last full backup): %s", backupTag)
    logger.debug("Back up file name: %s", backupName)
    localBackupPath = backup_prop.local_backup_path+'/'+backupTag
    try:
        os.makedirs(localBackupPath)
    except:
        # directory may already exist (incremental runs share the tag dir)
        pass
    logger.debug("Starting backup process")
    logger.debug("Starting backup cas.synesis.ru files")
    # The tar work is delegated to a shell script whose output is captured
    # in its own log file; the rotating handler's stream is repositioned to
    # the end afterwards.
    logFile = open(LOG_DIR + '/tar_mysql.log', 'w')
    try:
        out = subprocess.check_call(
            ['./backup_script.sh', backup_prop.file_store_path, localBackupPath, backupName, TAR_BACKUP_META],
            stdout=logFile, stderr=logFile)
    except CalledProcessError as cpe:
        logHandler.stream.seek(0, 2)
        logger.error('Backup script process exit with error: %s.' % cpe)
        sendMail("Backup failed! Error while creating tar backup!")
        raise BackupFailedException('Error while creating tar backup!')
    finally:
        logFile.close()
        logHandler.stream.seek(0, 2)
    logger.debug("Starting backup kipod-proxy files")
    # Same procedure for the kipod-proxy file store.
    kp_logFile = open(LOG_DIR + '/kp_tar_mysql.log', 'w')
    try:
        out = subprocess.check_call(
            ['./kp_backup_script.sh', backup_prop.kp_file_store_path, localBackupPath, backupName, KP_TAR_BACKUP_META],
            stdout=kp_logFile, stderr=kp_logFile)
    except CalledProcessError as cpe:
        logHandler.stream.seek(0, 2)
        logger.error('Backup script process exit with error: %s.' % cpe)
        sendMail("Backup failed! Error while creating tar backup!")
        raise BackupFailedException('Error while creating tar backup!')
    finally:
        kp_logFile.close()
        logHandler.stream.seek(0, 2)
    logger.debug("Removing old backups if any")
    backupList = os.listdir(backup_prop.local_backup_path)
    # soring reverse so the old backups in the end of the list
    backupList.sort(reverse=True)
    if len(backupList) > 2:
        # Keep only the two newest backup sets locally.
        logger.debug("There are more than 2 backups, removing old ones")
        try:
            for i in range(2, len(backupList)):
                logger.debug("Removing backup - " + backupList[i])
                shutil.rmtree(backup_prop.local_backup_path+'/'+backupList[i])
        except:
            logger.error("Backup completed but failed to remove old bakup files!")
            sendMail("Backup completed but failed to remove old bakup files!")
            raise BackupFailedException("Backup completed but failed to remove old bakup files!")
    logger.debug("Backup process finished")
    ################# Calculating backup size #########################
    # Sum the sizes of all files in the new backup directory — used by
    # uploadToFtp to decide how much remote space must be freed.
    logger.debug("Calculating backup size")
    backupDir = '%s/%s/' % (localBackupPath, backupName)
    totalSize = 0
    for f in os.listdir(backupDir):
        size = os.path.getsize(backupDir + f)
        totalSize += size
    logger.debug('Total backup size: %s bytes', totalSize)
    ################ Connecting to ftp #######################
    if backup_prop.is_ftp_upload:
        uploadToFtp(totalSize,backupDir,backupTag,backupName)
    # Refreshing last backup level value
    backupOptFile = open(BACKUP_OPT_FILE, 'w')
    backupOptFile.writelines([str(currentBackupLevel) + "\n", backupTag])
    backupOptFile.close()
    logger.debug("Backup completed!")
    #sendMail("Backup completed!")
except Exception as e:
    # Single failure funnel: notify (unless a mail already went out) and
    # roll the tar snapshot metadata back from the .bak copies.
    logger.error('Backup failed: %s'%e)
    if not emailSent:
        # NOTE(review): verify that sendMail actually sets the module-level
        # emailSent flag — as reviewed it assigns a local instead.
        sendMail("Backup failed!")
    if os.path.isfile(TAR_BACKUP_META + '.bak'):
        shutil.copy(TAR_BACKUP_META + '.bak', TAR_BACKUP_META)
    if os.path.isfile(KP_TAR_BACKUP_META + '.bak'):
        shutil.copy(KP_TAR_BACKUP_META + '.bak', KP_TAR_BACKUP_META)
finally:
    # Always drop the lock file and temp dumps, success or failure.
    logger.debug("Cleaning temp files")
    removeFiles((TAR_BACKUP_META + '.bak','stderr','stdout',BACKUP_INPROGRESS,'fulldb.sql',KP_TAR_BACKUP_META + '.bak',
        'kp_fulldb.sql'))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement