import json

import requests
from bs4 import BeautifulSoup


def get_file_data(file_path):
    # Load and parse a JSON file; utf-8-sig strips a BOM if one is present.
    try:
        with open(file_path, 'r', encoding="utf-8-sig") as f:
            return json.load(f)
    except Exception:
        # Re-raise unchanged so the original exception type and traceback survive.
        raise


def get_site_info(file_path, site_name):
    # Return the entry matching site_name from the "available" list in the config file.
    available_site_data = get_file_data(file_path)["available"]
    for site in available_site_data:
        if site["name"] == site_name:
            return site
    return None


def get_request(site):
    # Fetch the page and fail loudly on HTTP errors.
    r = requests.get(site)
    r.raise_for_status()
    return r


if __name__ == '__main__':
    path = "../config/documentation.json"
    site_data = get_site_info(path, "Disnake")
    if site_data is None:
        raise SystemExit(f"No entry named 'Disnake' in {path}")

    site_name = site_data["name"]
    site_link = site_data["link"]

    req = get_request(site_link)
    soup = BeautifulSoup(req.content, 'html.parser')
    print(soup)
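
The script assumes ../config/documentation.json holds an "available" list of objects with "name" and "link" keys; that shape is inferred from how get_site_info reads the file. The snippet below is a minimal sketch that writes a placeholder config of that shape so the script can be tried out; the URL is a hypothetical stand-in, not the real Disnake documentation link.

import json
import os

# Hypothetical sample config matching the keys the script looks up.
sample = {
    "available": [
        {"name": "Disnake", "link": "https://example.com/disnake-docs"}
    ]
}

# Write it to the path the script expects, relative to the working directory.
os.makedirs("../config", exist_ok=True)
with open("../config/documentation.json", "w", encoding="utf-8") as f:
    json.dump(sample, f, indent=4)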