Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import sys, StringIO
- import argparse
- from boto.s3.connection import S3Connection
def read_in_chunks(file_object, chunk_size=1024):
    """Yield successive chunks read from *file_object*.

    Reads ``chunk_size`` units at a time (default 1024) and stops as
    soon as a read returns an empty result (end of stream).
    """
    chunk = file_object.read(chunk_size)
    while chunk:
        yield chunk
        chunk = file_object.read(chunk_size)
def upload(bucket_name, key_name):
    """Stream stdin to S3 as a multipart upload.

    bucket_name: bucket to upload into (created if it does not exist)
    key_name: destination key within the bucket

    Reads stdin in 5 MiB chunks — 5242880 bytes is the S3 minimum part
    size for all parts except the last.  On any failure the multipart
    upload is cancelled so orphaned parts don't accrue storage charges,
    then the error is re-raised.
    """
    conn = S3Connection()
    b = conn.create_bucket(bucket_name)
    mp = b.initiate_multipart_upload(key_name)
    try:
        # S3 part numbers are 1-based, hence start=1.
        for index, piece in enumerate(read_in_chunks(sys.stdin, 5242880), start=1):
            fp = StringIO.StringIO(piece)
            mp.upload_part_from_file(fp, index)
    except Exception:
        # Without this, a failed upload leaves an incomplete multipart
        # upload on S3 that is billed until explicitly aborted.
        mp.cancel_upload()
        raise
    else:
        mp.complete_upload()
def main(argv=None):
    """Parse command-line arguments for the stream-to-S3 uploader.

    argv: optional list of argument strings; when None (the default,
        and the behavior of all existing callers) argparse falls back
        to ``sys.argv[1:]``.  Accepting an explicit list makes the
        parser unit-testable without touching the process argv.

    Returns the parsed ``argparse.Namespace`` with ``bucket_name``
    and ``key_name`` attributes.
    """
    parser = argparse.ArgumentParser(description='Process streams to s3')
    parser.add_argument('bucket_name', help='bucket to upload to')
    parser.add_argument('key_name', help='key to upload to')
    return parser.parse_args(argv)
# Script entry point: parse CLI args, then stream stdin up to S3.
if __name__ == "__main__":
    cli_args = main()
    upload(cli_args.bucket_name, cli_args.key_name)
Add Comment
Please, Sign In to add comment