import requests
import os
from multiprocessing.pool import Pool

# Directory that downloaded images are written to.
root_directory = 'C:\\Projects\\Python\\Json\\rarity\\'

def download_shit(url):
    # The image filename is everything after the eighth '/' in the CDN URL.
    name = url.split('/', 8)[8]
    with open(root_directory + name, "wb") as f:
        f.write(requests.get(url).content)

def check_path(name):
    if not os.path.exists(name):
        os.makedirs(name)

if __name__ == "__main__":
    # Make sure the download directory exists before the workers write to it.
    check_path(root_directory)
    for i in range(1, 350):
        print('getting images for page: ', i)
        # verify=False skips TLS certificate verification.
        data = requests.get('https://derpibooru.org/search.json?q=rarity&page=' + str(i), verify=False).json()
        # Image paths come back protocol-relative ('//...'), so strip the
        # leading slashes and prepend a scheme.
        images = ["http://" + result['image'][2:] for result in data['search']]
        print('downloading images for page: ', i)
        # Fetch the page's images in parallel across 15 worker processes.
        with Pool(15) as p:
            p.map(download_shit, images)
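
A minimal open-ended pagination sketch, assuming the search endpoint returns an empty 'search' list once past the last result page (the hardcoded 350-page cap above is otherwise arbitrary):

    # Hypothetical variant: walk pages until the API returns no results.
    page = 1
    while True:
        data = requests.get('https://derpibooru.org/search.json?q=rarity&page=' + str(page), verify=False).json()
        if not data['search']:
            break  # no results past the last page, so stop
        images = ["http://" + result['image'][2:] for result in data['search']]
        with Pool(15) as p:
            p.map(download_shit, images)
        page += 1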