Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- import hashlib
- from io import DEFAULT_BUFFER_SIZE
def hash_md5_sha(file):
    """Compute the MD5 and SHA-1 digests of a file in one pass.

    The file is read in DEFAULT_BUFFER_SIZE chunks so arbitrarily large
    files are hashed with constant memory.

    :param file: path to the file to hash
    :return: tuple ``(md5_hexdigest, sha1_hexdigest)``
    :raises OSError: if the file cannot be opened or read
    """
    md5 = hashlib.md5()
    sha = hashlib.sha1()
    with open(file, mode='rb') as fl:
        # iter(callable, sentinel) replaces the duplicated
        # read-before/read-at-end-of-loop pattern: it calls fl.read()
        # until it returns b'' (EOF).
        for chunk in iter(lambda: fl.read(DEFAULT_BUFFER_SIZE), b''):
            md5.update(chunk)
            sha.update(chunk)
    return md5.hexdigest(), sha.hexdigest()
def read_file(file):
    """Read a file to the end in buffered chunks, discarding the data.

    Used as an I/O-only baseline so the hashing benchmark can separate
    disk-read cost from digest cost. Returns None.
    """
    with open(file, mode='rb') as handle:
        buf = handle.read(DEFAULT_BUFFER_SIZE)
        while buf:
            buf = handle.read(DEFAULT_BUFFER_SIZE)
- files = ['sample{}'.format(i) for i in range(1, 41)] * 8
- from timeit import timeit
def time_single_core(func):
    """Time one sequential pass of *func* over every entry in ``files``.

    :param func: the *name* of a module-level function taking a filename
    :side effect: prints the summed wall-clock time

    NOTE(review): assumes the files named in ``files`` exist in the
    current working directory — confirm before running.
    """
    # Resolve the name once; timeit accepts a callable, which avoids
    # building a source string with interpolated quotes (fragile for
    # any filename containing a quote character).
    fn = globals()[func]
    total = 0
    for filename in files:
        total += timeit(stmt=lambda: fn(filename), number=1)
    print('Total using {}: {}'.format(func, total))
- from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
- time_single_core('read_file')
- time_single_core('hash_md5_sha')
def time_multi_thread(func):
    """Time one pass of *func* over ``files`` using 4 worker threads.

    :param func: the *name* of a module-level function taking a filename
    :side effect: prints the total wall-clock time
    """
    fn = globals()[func]

    def job():
        # 'with' guarantees shutdown() even on error; draining the map
        # iterator with list() makes worker exceptions propagate instead
        # of being silently dropped with the lazy iterator.
        with ThreadPoolExecutor(4) as executor:
            list(executor.map(fn, files))

    # timeit accepts a callable directly — no need to inject 'job' into
    # globals() and interpolate a source string (the original also mixed
    # globs/globals() inconsistently with time_multi_core).
    total = timeit(stmt=job, number=1)
    print('Total (multi-thread) using {}: {}'.format(func, total))
- time_multi_thread('read_file')
- time_multi_thread('hash_md5_sha')
def time_multi_core(func):
    """Time one pass of *func* over ``files`` using 4 worker processes.

    :param func: the *name* of a module-level function taking a filename
    :side effect: prints the total wall-clock time

    NOTE(review): ProcessPoolExecutor must pickle the target — fine for
    module-level functions — and under the 'spawn' start method the call
    site needs an ``if __name__ == '__main__'`` guard; confirm on the
    target platform.
    """
    fn = globals()[func]

    def job():
        # 'with' guarantees shutdown() even on error; draining the map
        # iterator with list() makes worker exceptions propagate instead
        # of being silently dropped with the lazy iterator.
        with ProcessPoolExecutor(4) as executor:
            list(executor.map(fn, files))

    # Pass the callable to timeit instead of registering 'job' in
    # globals() and eval'ing an interpolated source string.
    total = timeit(stmt=job, number=1)
    print('Total (multi-core) using {}: {}'.format(func, total))
- time_multi_core('read_file')
- time_multi_core('hash_md5_sha')
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement