Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import aiohttp
- import asyncio
- import time
async def fetch_page(session, url):
    """Fetch a single page and return its HTML body as text.

    Args:
        session: an open aiohttp.ClientSession used to issue the request.
        url: the absolute URL to GET.

    Returns:
        The decoded response body as a string.
    """
    # The context manager guarantees the response is released back to the pool.
    async with session.get(url) as response:
        html = await response.text()
        return html
async def main():
    """Fetch several shop pages concurrently, print their HTML, and report timing.

    Creates one aiohttp session shared by all requests, fans the URLs out as
    concurrent tasks via asyncio.gather, then prints each page's content and
    the total elapsed wall time.
    """
    # Pages to scrape; order is preserved in the results below.
    urls = [
        "https://scrapeme.live/shop/",
        "https://scrapeme.live/shop/page/2/",
        "https://scrapeme.live/shop/page/3/",
    ]
    # perf_counter is monotonic and high-resolution — unlike time.time(), it
    # cannot jump if the system clock is adjusted, so it is the right tool
    # for measuring an elapsed interval.
    start_time = time.perf_counter()
    # One session is reused for every request (connection pooling).
    async with aiohttp.ClientSession() as session:
        # One coroutine per URL; gather schedules them concurrently and
        # returns the results in the same order as `urls`.
        tasks = [fetch_page(session, url) for url in urls]
        htmls = await asyncio.gather(*tasks)
    # Time Tracking: End Time
    end_time = time.perf_counter()
    # Print or process the fetched HTML content
    for url, html in zip(urls, htmls):
        print(f"Content from {url}:\n{html}\n")
    # Calculate and print the time taken
    print(f"Time taken: {end_time - start_time} seconds")
# Guard the entry point so importing this module does not immediately start
# firing network requests; the scrape only runs when executed as a script.
if __name__ == "__main__":
    asyncio.run(main())
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement