import logging
import os
from datetime import datetime, timedelta

from hw_cpbackup.util.alert import send_alert
from hw_cpbackup.util.s3 import S3
from hw_cpbackup.util.whm.filesystem import latest_log, package_account, tail_latest_log, latest_user_archive
from hw_cpbackup.util.whm.whmapi import WhmApi

logger = logging.getLogger(__name__)

S3_EXTENSION = datetime.today().strftime("%m.%d.tar.gz")  # e.g. "05.04.tar.gz"; evaluated once at import time


class ActiveBackup:
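    """Run and sanity-check the daily cPanel-to-S3 backup for a single user.

    ``user`` is a dict carrying the cPanel username, domain, per-user S3
    credentials, and a retention count, as unpacked in ``__init__`` below.
    """
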
    def __init__(self, user, dry_run, alert):
        self.user = user['user']
        self.domain = user['domain']
        self.s3_id = user['s3']['s3id']
        self.s3_key = user['s3']['s3key']
        self.s3_endpoint = user['s3']['s3endpoint']
        self.retention = user['retention']

        self.alert = alert
        self.dry_run = dry_run

        self.s3 = S3(
            access_key=self.s3_id,
            secret_key=self.s3_key,
            endpoint=self.s3_endpoint
        )
        self.whm = WhmApi()

    @property
    def s3_bucket_name(self):
        # Prefer the underscore-named bucket if it already exists; otherwise
        # fall back to the hyphenated name.
        if self.s3['shared_backups']:
            return 'shared_backups'
        return 'shared-backups'

    def num_backups(self):
        return len(self.s3.bucket_keys(bucket=self.s3_bucket_name, username=self.user))

    def get_backup_timestamps(self):
        """Map each backup key to the date portion of its last-modified timestamp."""
        backup_timestamps = {}
        for key in self.s3.bucket_keys(bucket=self.s3_bucket_name, username=self.user):
            # last_modified is an ISO-8601 string such as '2021-05-04T12:00:00.000Z';
            # only the date portion is kept.
            backup_timestamps[key] = datetime.strptime(key.last_modified[:10], '%Y-%m-%d')
        return backup_timestamps

    def get_latest_backup(self, timestamps=None):
        """Return the key of the newest backup, or None if none can be determined."""
        try:
            if not timestamps:
                timestamps = self.get_backup_timestamps()
            return max(timestamps, key=timestamps.get)
        except Exception:
            logger.debug('Failed to determine latest backup', exc_info=True)
        return None

    def get_oldest_backup(self, timestamps=None):
        """Return the key of the oldest backup, or None if none can be determined."""
        try:
            if not timestamps:
                timestamps = self.get_backup_timestamps()
            return min(timestamps, key=timestamps.get)
        except Exception:
            logger.debug('Failed to determine oldest backup', exc_info=True)
        return None

    def does_backup_exist_for_today(self, timestamps=None):
        """Check whether the newest backup is dated today (local or UTC)."""
        try:
            if not timestamps:
                timestamps = self.get_backup_timestamps()
            latest = datetime.strftime(max(timestamps.values()), '%Y-%m-%d')
            today_local = datetime.strftime(datetime.today(), '%Y-%m-%d')
            today_utc = datetime.strftime(datetime.utcnow(), '%Y-%m-%d')
            return latest in (today_local, today_utc)
        except Exception:
            return False

    def has_backup_ran(self):
        """Check the tail of the latest backup log for this user's COMPLETE marker."""
        try:
            if '{user} = COMPLETE\n'.format(user=self.user) in tail_latest_log(lines=50):
                return True
        except Exception:
            pass
        return False

    def ensure_backups_bucket(self):
        """Ensure the backups bucket is created in S3"""
        if not self.s3[self.s3_bucket_name]:
            logger.debug('404 bucket not found: "{bucket}"'.format(bucket=self.s3_bucket_name))
            if not self.dry_run:
                logger.debug('Creating new backups bucket: "%s"...', self.s3_bucket_name)
                try:
                    self.s3.create_bucket(self.s3_bucket_name)
                    logger.debug('Bucket created')
                except Exception as e:
                    logger.debug(e)
                    logger.error('Failed to create backups bucket: %s', e)
                    if self.alert:
                        send_alert(username=self.user, message='Failed to create "{bucket}" bucket!'.format(bucket=self.s3_bucket_name))
                    raise Exception('Backup failure - failed to create backups bucket')
            else:
                logger.warning('Dry run: backups bucket does not exist and will not be created')
                raise Exception('Backup failure - bucket does not exist')

    def run(self):
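        """Enable cPanel backups if needed, run today's backup once, then sanity-check it."""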
        logger.info('USER: [%s]', self.user)

        try:
            whm_acct_info = self.whm.get_user_info(user=self.user)
            if 'data' in whm_acct_info:
                whm_acct_info = whm_acct_info['data']['acct'][0]
            else:
                logger.warning('User no longer exists [%s] - skipping...', self.user)
                return

            if whm_acct_info.get('backup', 0) == 0:
                self.whm.enable_user_backups(user=self.user)
        except Exception:
            logger.warning('Failed to enable cpanel backups for user [%s]', self.user)
            if self.alert:
                send_alert(username=self.user, message="Failed to enable backups in cPanel for user!")

        self.s3.attempt_connection()
        logger.info('has backup run: %s', self.has_backup_ran())
        logger.info('does backup exist: %s', self.does_backup_exist_for_today())
        if not self.has_backup_ran() and not self.does_backup_exist_for_today():
            try:
                logger.info('[BEGIN BACKUP: %s]', self.user)
                self.do_backup()
                logger.info('[END BACKUP: %s]', self.user)
            except Exception as e:
                logger.error('ERROR: %s', e)
                logger.debug(e, exc_info=True)
                return
        logger.info('[BEGIN SANITY CHECK: %s]', self.user)
        self.do_sanity_check()
        logger.info('[END SANITY CHECK: %s]', self.user)

    def do_backup(self):
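        """Package the account if no usable archive exists, then upload it to the backups bucket."""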
        self.ensure_backups_bucket()

        # The archive may be missing, or zero bytes when packaging hit permissions errors.
        archive = latest_user_archive(user=self.user)
        if not archive or os.stat(archive).st_size == 0:
            logger.warning('Backup archive corrupted or not found')
            try:
                logger.info('Manually packaging account...')
                package_account(username=self.user)
                logger.info('Account packaged')
            except Exception as e:
                logger.error('Failed to package account: %s', e)
                if self.alert:
                    send_alert(username=self.user, message='Backup archive does not exist on the server!')
                raise Exception('Backup failure')

        object_upload = latest_user_archive(user=self.user)
        upload_format = "%s/%s.%s" % (self.user, self.user, S3_EXTENSION)
        logger.info('New object upload: %s', object_upload)
        if not self.dry_run:
            logger.info('Uploading object...')
            try:
                self.s3.upload_to_bucket(upload_bucket=self.s3_bucket_name,
                                         upload_object_path=object_upload,
                                         upload_format=upload_format)
            except Exception as e:
                logger.error('Upload failed: %s', e)
                if self.alert:
                    send_alert(username=self.user, message='Failed object storage upload for current date!')
                raise Exception('Backup failure')
            logger.info('Upload complete')

            with open(latest_log(), 'a') as f:  # append assertion string to log for user
                f.write('%s = COMPLETE\n' % self.user)

    def do_sanity_check(self):
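        """Verify today's backup reached S3, enforce the retention limit, and alert on stale or absent backups."""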
        self.ensure_backups_bucket()

        if not self.does_backup_exist_for_today():
            if not self.dry_run:
                logger.info('Uploading absent object for user [%s]...', self.user)
                self.s3.upload_to_bucket(upload_bucket=self.s3_bucket_name,
                                         upload_object_path=latest_user_archive(user=self.user),
                                         upload_format="%s/%s.%s" % (self.user, self.user, S3_EXTENSION))

        if not self.s3.bucket_keys(bucket=self.s3_bucket_name, username=self.user):
            logger.warning('Zero backup entries exist for user [%s]', self.user)
            if self.alert:
                send_alert(username=self.user, message="Zero backups exist in object storage!")
            return

        timestamps = self.get_backup_timestamps()
        latest_backup = self.get_latest_backup(timestamps=timestamps)
        if self.does_backup_exist_for_today(timestamps=timestamps) and latest_backup is not None and latest_backup.size == 0:
            logger.warning('Current backup size is 0 for user [%s]', self.user)
            if self.alert:
                send_alert(username=self.user, message="Current backup size detected as 0 bytes!")
            return

        if self.num_backups() > self.retention:
            logger.warning('Retention limit exceeded for user [%s]', self.user)
            if not self.dry_run:
                logger.info('Enforcing retention limit for user [%s]...', self.user)
                while self.num_backups() > self.retention:
                    try:
                        self.s3.delete_key(key=self.get_oldest_backup().name, bucket=self.s3_bucket_name)
                    except Exception as e:
                        logger.warning('Failed to delete key for user [%s]: %s', self.user, str(e))
                        break  # stop retrying so a persistent failure cannot loop forever
        else:
            logger.info('Retention limit not exceeded for user [%s]', self.user)

        timestamps = self.get_backup_timestamps()
        if timestamps:
            # Flag a latest backup that is three or more days old (72 hours at date granularity).
            if (datetime.today() - max(timestamps.values())).days >= 3:
                logger.warning('Latest backup exceeded 72 hours for user [%s]', self.user)
                if self.alert:
                    send_alert(username=self.user, message="Latest backup timestamp exceeded 72 hours!")
                return

        if not self.does_backup_exist_for_today():
            logger.warning('Detected failed object upload for user [%s]', self.user)
            if self.alert:
                send_alert(username=self.user, message="Failed object storage upload for current date!")
            return

        dates_absent = []
        date_range = min(3, self.num_backups())
        backup_dates = [datetime.strftime(date, '%Y-%m-%d') for date in self.get_backup_timestamps().values()]
        for day in range(date_range):
            # A date only counts as absent if neither its local nor its UTC form is present.
            date_local = datetime.strftime(datetime.today() - timedelta(days=day), '%Y-%m-%d')
            date_utc = datetime.strftime(datetime.utcnow() - timedelta(days=day), '%Y-%m-%d')
            if date_local not in backup_dates and date_utc not in backup_dates:
                dates_absent.append(date_local)
        if dates_absent:
            logger.warning('Absent objects detected for user [%s]: %s', self.user, dates_absent)
            return

        logger.info('All sanity assertions passed for user [%s]', self.user)
