# NOTE(review): removed webshell defacement banner ("Hacked By AnonymousFox" /
# upload-form residue) that was injected above the module docstring; it was not
# Python and broke the module at import time.

"""
Interact with Ceph S3 Object stores.
Given specific userdata, build an overall picture of the S3 bucket status.
Fetch information and data related to S3 bucket and designated keys.
Manipulate S3 by creating absent buckets and enforcing retention limits.
"""
import logging
import math
import os
from socket import error as SocketError
from socket import timeout as SocketTimeout
from ssl import SSLError

from boto import connect_s3
from boto.s3.connection import OrdinaryCallingFormat
from boto.s3.key import Key
from filechunkio import FileChunkIO

logger = logging.getLogger(__name__)


class S3AuthorizationError(Exception):
    """Raised when S3 rejects the supplied credentials (e.g. HTTP 403)."""


class S3ConnectionError(Exception):
    """Raised when the S3 endpoint cannot be reached or returns an HTTP/SSL/socket error."""


class S3:
    """
    Handle S3 related operations against a Ceph S3 object store.

    Lazily opens a boto S3 connection, exposes dict-like access to buckets
    and their keys, and provides bucket/key create, delete and multipart
    upload helpers.
    """

    # 5 GiB -- the S3 maximum multipart part size, used as the upload chunk size.
    UPLOAD_CHUNK_SIZE = 5368709120

    def __init__(self, access_key, secret_key, endpoint, validate_certs=True):
        """
        :param access_key: string - S3 access key id
        :param secret_key: string - S3 secret access key
        :param endpoint: string - S3 endpoint host
        :param validate_certs: bool - validate the endpoint TLS certificate
        """
        self._s3_access_key = access_key
        self._s3_secret_key = secret_key
        self._s3_endpoint = endpoint
        self._validate_certs = validate_certs

        # Lazily initialized by the s3_session property.
        self._s3_session = None

    def __getitem__(self, item):
        """
        Look up *item* first as a bucket name, then as a key name within
        any bucket.
        :param item: string - bucket or key name
        :return: bucket, key, or None when neither matches
        """
        # Snapshot once: every access to the buckets property re-lists all
        # buckets over the network.
        buckets = self.buckets
        if buckets.get(item):
            return buckets[item]
        for bucket in buckets.values():
            if item in bucket:
                return bucket.get_key(item)
        return None

    def __iter__(self):
        """Yield every bucket visible on the connection."""
        for bucket in self.buckets.values():
            yield bucket

    @property
    def s3_session(self):
        """
        Initialize (once) and return the S3 session.
        :return: boto.s3.connection.S3Connection
        """
        if self._s3_session is None:
            logger.debug('establishing S3 session...')

            self._s3_session = connect_s3(
                aws_access_key_id=self._s3_access_key,
                aws_secret_access_key=self._s3_secret_key,
                host=self._s3_endpoint,
                calling_format=OrdinaryCallingFormat(),
                # BUGFIX: honor the validate_certs flag; previously it was
                # stored in __init__ but never passed to the connection.
                validate_certs=self._validate_certs
            )

            # 3 retries with a 300-second connection timeout.
            self._s3_session.num_retries = 3
            self._s3_session.http_connection_kwargs = dict(timeout=300.0)

            logger.debug('S3 session established')

        return self._s3_session

    @property
    def buckets(self):
        """
        Enumerated buckets from S3 connection.
        NOTE: each access performs a full bucket listing; callers that need
        the mapping more than once should keep it in a local variable.
        :return: dict - bucket name -> bucket object
        """
        return {b.name: b for b in self.s3_session}

    def attempt_connection(self):
        """
        Attempt a connection to S3 and translate failures into typed errors.
        :raises S3AuthorizationError: on HTTP 403
        :raises S3ConnectionError: on SSL, socket, or other HTTP failures
        :return: None
        """
        logger.debug("Attempting S3 connection...")

        try:
            resp = self.s3_session.make_request("HEAD", headers=None)
        except Exception as e:
            logger.debug(e, exc_info=True)
            logger.debug("Attempted S3 connection failed")

            # Map low-level transport errors onto the module's exception types.
            if isinstance(e, SSLError):
                raise S3ConnectionError("S3 connection SSL error")
            elif isinstance(e, SocketTimeout):
                raise S3ConnectionError("S3 connection timeout")
            elif isinstance(e, SocketError):
                raise S3ConnectionError("S3 connection I/O error")

            raise S3ConnectionError("S3 connection unexpected error")
        else:
            logger.debug("Attempted S3 connection response: %s", resp.status)
            if resp.status == 200:
                logger.debug("Connected S3")
            elif resp.status == 403:
                raise S3AuthorizationError("S3 connection unauthorized")
            else:
                raise S3ConnectionError("S3 connection HTTP error: %s" % resp.status)

    def bucket_keys(self, bucket, username=None):
        """
        Enumerate keys in designated bucket.
        :param bucket: string - name of bucket to enumerate
        :param username: string - optional substring to match against key names
        :return: list - matching keys, [] when the bucket is absent
        """
        target = self.buckets.get(bucket)
        if not target:
            return []
        if not username:
            return [k for k in target]
        return [k for k in target if username in k.name]

    def create_bucket(self, name):
        """
        Create an S3 bucket if it does not already exist.
        :param name: string - name of the bucket to create
        :return: None
        """
        if not self.buckets.get(name):
            self.s3_session.create_bucket(bucket_name=name)

    def delete_bucket(self, name):
        """
        Delete an S3 bucket if it exists.
        :param name: string - name of the bucket to delete
        :return: None
        """
        if self.buckets.get(name):
            self.s3_session.delete_bucket(name)

    def create_key(self, key, bucket):
        """
        Create a key in a designated bucket.
        :param key: string - name of the key to create
        :param bucket: string - name of the bucket to create within
        :raises Exception: when the bucket does not exist
        :return: None
        """
        target = self.buckets.get(bucket)
        if target:
            target.new_key(key_name=key)
            return
        raise Exception('Creation forbidden in absent bucket: %s' % bucket)

    def delete_key(self, key, bucket):
        """
        Delete a key from a designated bucket.
        (BUGFIX: docstring previously said "Create keys" - copy-paste error.)
        :param key: string - name of the key to delete
        :param bucket: string - name of the bucket to delete from
        :raises Exception: when the bucket or the key does not exist
        :return: None
        """
        target = self.buckets.get(bucket)
        if target:
            if key in [k.name for k in target]:
                target.delete_key(key_name=key)
                return
            raise Exception('Deletion forbidden for absent key: %s' % key)
        raise Exception('Deletion forbidden in absent bucket: %s' % bucket)

    def upload_to_bucket(self, upload_bucket, upload_object_path, upload_format):
        """
        Upload WHM backup archive to S3 via multipart upload.
        :param upload_bucket: string - bucket name targeted for upload
        :param upload_object_path: string - filesystem path used as upload object
        :param upload_format: string - name format of the newly created key
        :raises Exception: when the target bucket does not exist
        :return: None
        """
        upload_target = self.buckets.get(upload_bucket)
        if not upload_target:
            raise Exception('Upload forbidden to absent bucket - create it first: %s' % upload_bucket)

        upload_size = os.stat(str(upload_object_path)).st_size
        # BUGFIX: at least one part -- an empty file previously produced zero
        # parts and complete_upload() would fail with no parts uploaded.
        chunk_count = max(1, int(math.ceil(upload_size / float(self.UPLOAD_CHUNK_SIZE))))

        mp_upload = upload_target.initiate_multipart_upload(key_name=upload_format)
        try:
            for part in range(chunk_count):
                offset = self.UPLOAD_CHUNK_SIZE * part
                stream_bytes = min(self.UPLOAD_CHUNK_SIZE, upload_size - offset)
                with FileChunkIO(str(upload_object_path), 'r', offset=offset, bytes=stream_bytes) as fp:
                    mp_upload.upload_part_from_file(fp, part_num=part + 1)
            mp_upload.complete_upload()
        except Exception:
            # BUGFIX: abort the multipart upload on failure so incomplete
            # parts are not leaked on the object store. Previously a failed
            # part upload left the multipart upload dangling forever.
            mp_upload.cancel_upload()
            raise

# NOTE(review): removed webshell defacement footer and PHP unlink() warning
# residue appended after the module code; it was not Python and broke the
# module at import time.