Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- import time
- import asyncio
- import aiohttp
- import json
- from datetime import datetime
async def fetch_url(url):
    """Fetch a single URL and return its result.

    Args:
        url: a dict with at least the keys ``'url'`` (the address to GET)
            and ``'title'`` (a human-readable label used for logging).

    Returns:
        A dict with keys ``'title'``, ``'status'`` (HTTP status code) and
        ``'content'`` (the decoded response body).

    NOTE(review): a fresh ClientSession is opened per request; for many
    URLs, sharing one session across calls would reuse connections.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url['url']) as resp:
            content = await resp.text()
            print('{}: {}'.format(url['title'], resp.status))
            # Bug fix: this return was commented out, so callers gathering
            # these coroutines received only None and the fetched data was
            # silently discarded.
            return {'title': url['title'], 'status': resp.status, 'content': content}
- def chunks(l, n):
- """Yield successive n-sized chunks from l."""
- for i in range(0, len(l), n):
- yield l[i:i + n]
if __name__ == '__main__':
    # Load the list of URL descriptors ({'url': ..., 'title': ...} dicts —
    # see fetch_url) from disk.
    with open('urls.json', 'r') as f:
        urls = json.load(f)

    async def _main():
        """Fetch all URLs in batches of 5 concurrent requests."""
        # Batching caps concurrency: at most 5 requests are in flight at
        # once; each batch completes fully before the next one starts.
        for batch in chunks(urls, 5):
            await asyncio.gather(*[fetch_url(url) for url in batch])

    start = int(time.time())
    # asyncio.run creates, runs and closes the event loop for us; the
    # previous get_event_loop()/run_until_complete pattern is deprecated
    # since Python 3.10 and left the loop unclosed.
    asyncio.run(_main())
    end = int(time.time())
    print('took time: {} seconds'.format(end - start))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement