Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/env python3
"""Bulk-download every image in a Danbooru tag gallery.

Usage: script.py TAG [EXTRA_TAGS ...] [-d DIR]

Walks each result page of the gallery for TAG (plus optional extra tags),
and saves every <article>'s full-size file as <md5>.<ext> into DIR (created
if missing) or the current directory. Files already present are skipped.
"""
import argparse
import os

import bs4
import requests

# NOTE(review): bare host with no /posts path and plain http matches the
# original script; confirm this URL still resolves before relying on it.
BASE_URL = "http://danbooru.donmai.us?tags="


def _parse_args():
    """Parse the CLI: one required tag, optional extra tags, optional dir."""
    parser = argparse.ArgumentParser()
    parser.add_argument("tag", help="Tag of danbooru gallery")
    parser.add_argument("extra_tags", nargs="*", help="Optional other tags")
    parser.add_argument("-d", help="Directory to download to")
    return parser.parse_args()


def _gallery_url(tag, extra_tags):
    """Join the primary tag and any extra tags into the gallery query URL."""
    return BASE_URL + "+".join([tag] + list(extra_tags))


def _max_page(soup):
    """Return the highest page number in the pager, or 1 if none is shown.

    FIX: the original called max() unconditionally and crashed with
    ValueError on single-page galleries that render no numeric <li> items.
    FIX: the original used a bare `except:`, which also swallows
    KeyboardInterrupt/SystemExit; only non-numeric strings are expected here.
    """
    pages = []
    for item in soup.find_all("li"):
        try:
            pages.append(int(item.string))
        except (TypeError, ValueError):
            # item.string is None for nested tags, or non-numeric text.
            pass
    return max(pages, default=1)


def main():
    """Entry point: resolve the target directory, then crawl every page."""
    args = _parse_args()
    if args.d:
        # exist_ok replaces the original's explicit os.path.exists() check.
        os.makedirs(args.d, exist_ok=True)
        os.chdir(args.d)

    base_url = _gallery_url(args.tag, args.extra_tags)
    # FIX: timeouts added throughout; the original could hang forever on a
    # stalled connection.
    html = requests.get(base_url, timeout=30).text
    soup = bs4.BeautifulSoup(html, "html.parser")

    for page_number in range(1, _max_page(soup) + 1):
        page = requests.get(base_url + "&page=" + str(page_number), timeout=30)
        page_soup = bs4.BeautifulSoup(page.text, "html.parser")
        for article in page_soup.find_all("article"):
            link = article.get("data-file-url")
            md5 = article.get("data-md5")
            ext = article.get("data-file-ext")
            # FIX: the original concatenated these unchecked; a missing
            # attribute returned None and raised TypeError mid-crawl.
            if not (link and md5 and ext):
                continue
            filename = md5 + "." + ext
            if not os.path.exists(filename):
                raw = requests.get(link, timeout=60)
                with open(filename, "wb") as fh:
                    fh.write(raw.content)


if __name__ == "__main__":
    main()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement