Thrist00w

q3_solution

May 2nd, 2025 (edited)
import requests
from bs4 import BeautifulSoup
import os
import json

url = 'https://en.wikipedia.org/wiki/ASEAN'
filename = 'urban_areas.json'

# Fetch the page and fail fast on HTTP errors before parsing.
response = requests.get(url)
response.raise_for_status()
soup = BeautifulSoup(response.text, 'html.parser')

# The urban-areas table on the ASEAN article carries exactly this class string.
table = soup.find('table', class_='sortable wikitable plainrowheaders')

countries_dictionary = {}

# Skip the header row; each data row has the city name in its <th> cell
# and population, area, and country in its first three <td> cells.
for row in table.find_all('tr')[1:]:
    cols = row.find_all('td')
    city = row.find('th').text.strip()

    population = int(cols[0].text.strip().replace(',', ''))
    area = float(cols[1].text.strip().replace(',', ''))
    country = cols[2].text.strip()

    city_data = {
        'City': city,
        'Population': population,
        'Area (km²)': area,
        'Population Density': round(population / area, 2)
    }

    # Group cities under their country, creating the list on first use.
    countries_dictionary.setdefault(country, []).append(city_data)

# Load any previously saved data so the file is only rewritten when it changes.
if os.path.exists(filename):
    with open(filename, 'r') as file:
        existing_data = json.load(file)
else:
    existing_data = {}

if existing_data != countries_dictionary:
    with open(filename, 'w') as file:
        json.dump(countries_dictionary, file, indent=1)
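For reference, the file groups city records under their country name. A minimal sketch of the shape, using placeholder values rather than figures scraped from the live Wikipedia table (note that json.dump escapes the '²' in the key to \u00b2 on disk, since ensure_ascii is True by default):

{
 "Indonesia": [
  {
   "City": "Jakarta",
   "Population": 10000000,
   "Area (km²)": 660.0,
   "Population Density": 15151.52
  }
 ]
}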