Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
async def crawler(loop=None):
    """Download every URL in *urls*, decode each body as an image, and save it.

    Failures for an individual URL are logged and skipped so one bad link
    does not abort the whole crawl.

    Args:
        loop: Kept only for backward compatibility with old callers that
            passed an event loop; modern aiohttp binds to the running loop
            automatically, so it is unused.
    """
    import logging  # local import so this block stays self-contained

    urls = ...  # TODO: populate with the image URLs to fetch

    # One session for the whole crawl: reuses connections instead of
    # opening a fresh one per request. No `loop=` argument — that
    # parameter is deprecated/removed in current aiohttp.
    async with aiohttp.ClientSession() as session:
        for url in urls:
            try:
                async with session.get(url) as response:
                    # Fail fast on 4xx/5xx instead of trying to decode an
                    # HTML error page as image bytes.
                    response.raise_for_status()
                    payload = await response.read()
                image = Image.open(io.BytesIO(payload))
                image.save(...)  # TODO: choose a destination path/format
            except (aiohttp.ClientError, OSError) as exc:
                # Best-effort crawl: narrow, logged skip. The original
                # `except Exception: pass` silently hid *every* error,
                # including programming bugs. Pillow decode errors
                # (UnidentifiedImageError) are OSError subclasses, so
                # bad payloads are covered too.
                logging.warning("failed to fetch %s: %s", url, exc)


if __name__ == "__main__":
    # asyncio.run() replaces the deprecated
    # get_event_loop()/run_until_complete() pattern and guarantees the
    # loop is closed on exit. The guard also stops the crawl from firing
    # as a side effect of merely importing this module.
    asyncio.run(crawler())
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement