dereksir

Untitled

Nov 23rd, 2023 (edited)
import aiohttp
import asyncio
import time

async def fetch_page(session, url):
    # Make a GET request using the shared session
    async with session.get(url) as response:
        # Return the HTML content
        return await response.text()

async def main():
    # Initialize a list of URLs
    urls = ["https://scrapeme.live/shop/", "https://scrapeme.live/shop/page/2/", "https://scrapeme.live/shop/page/3/"]

    # Time tracking: start time
    start_time = time.time()

    # Create an AIOHTTP session
    async with aiohttp.ClientSession() as session:

        # Initialize the tasks list
        tasks = []

        # Loop through the URLs and append a coroutine for each page
        for url in urls:
            tasks.append(fetch_page(session, url))

        # Group and execute the tasks concurrently
        htmls = await asyncio.gather(*tasks)

    # Time tracking: end time
    end_time = time.time()

    # Print or process the fetched HTML content
    for url, html in zip(urls, htmls):
        print(f"Content from {url}:\n{html}\n")

    # Calculate and print the time taken
    print(f"Time taken: {end_time - start_time} seconds")

# Run the main function
asyncio.run(main())
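
For comparison, here is a minimal sequential sketch (not part of the original paste) that awaits each request one at a time instead of gathering them. Under the same assumptions, the asyncio.gather version above should finish in roughly the time of the slowest single request, while this one takes about the sum of all three.

import aiohttp
import asyncio
import time

async def fetch_page(session, url):
    # Same helper as in the paste above
    async with session.get(url) as response:
        return await response.text()

async def main_sequential():
    urls = ["https://scrapeme.live/shop/", "https://scrapeme.live/shop/page/2/", "https://scrapeme.live/shop/page/3/"]

    start_time = time.time()

    async with aiohttp.ClientSession() as session:
        # Await each request before starting the next one,
        # so total time is roughly the sum of all requests
        htmls = [await fetch_page(session, url) for url in urls]

    end_time = time.time()
    print(f"Time taken: {end_time - start_time} seconds")

asyncio.run(main_sequential())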