Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/env python
"""Upload a locally generated test file to S3 via django-storages and
verify the stored object's size (script to reproduce chunked-upload behavior)."""
import os
import logging
from shutil import copyfileobj

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger()
# Reduce the logging level of botocore and boto3 so DEBUG output from the
# AWS SDK does not drown out this script's own messages.
logging.getLogger('botocore').setLevel(logging.WARNING)
logging.getLogger('boto3').setLevel(logging.WARNING)

from django.conf import settings
# django-storages requires Django settings to be configured before use;
# configure with defaults since we only need the storage backend.
settings.configure()

import boto3
from storages.backends.s3boto3 import S3Boto3Storage
def mktestfile(filename, size=10 * (2 ** 20)):
    """Generate a text file of ``size`` bytes with an incrementing counter.

    Each line is exactly 64 bytes: a right-justified integer padded to 63
    characters plus a newline, so ``sort -c`` can confirm numeric order.

    :param filename: path of the file to (over)write.
    :param size: desired file size in bytes; defaults to 10 MiB.
        Must be a multiple of 64 for the output to hit the size exactly.
    """
    num_lines = size // 64
    with open(filename, 'w') as f:
        for i in range(num_lines):
            # each line is 64 characters long, with leading whitespace
            # and the counter at the end.
            f.write('%63d\n' % (i + 1))
# --- Driver: generate the test file (if absent), stream it to S3 in
# --- 1 MiB chunks, then compare the remote object's size to the local file.
FN = 'test-file.txt'
BUCKET = 'kx-tom-misc-test-bucket'

if not os.path.exists(FN):
    log.info("Creating test file.")
    mktestfile(FN)

s3_storage = S3Boto3Storage(
    bucket=BUCKET
)

original_size = os.stat(FN).st_size
log.info("Size of '%s' is %d bytes. (%d megabytes)", FN, original_size, original_size // 2**20)

CHUNK_SIZE = 2 ** 20  # one megabyte
# The storage backend's file object performs a (multipart) upload; close it
# even if the copy fails so the upload is finalized/aborted deterministically.
dest_file = s3_storage.open(FN, 'wb')
try:
    with open(FN, 'rb') as source_file:
        log.info("Writing to %r in chunks of %d bytes (%d megabyte).", dest_file, CHUNK_SIZE, CHUNK_SIZE // 2**20)
        # was a bare 2**20 literal; use CHUNK_SIZE so the logged chunk size
        # and the actual copy length cannot drift apart.
        copyfileobj(source_file, dest_file, length=CHUNK_SIZE)
finally:
    dest_file.close()

log.info('Upload complete, checking size of file in S3 Bucket')
s3_object_size = boto3.resource('s3').Bucket(BUCKET).Object(FN).content_length
log.info("Size of 's3://%s/%s' is %d bytes. (%d megabytes)",
         BUCKET, FN, s3_object_size, s3_object_size // 2**20)
Add Comment
Please, Sign In to add comment