Guest User

Untitled

a guest
Mar 7th, 2016
81
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 0.86 KB | None | 0 0
  1. import requests
  2. import os
  3. from multiprocessing.pool import Pool
  4.  
  5.  
# Absolute Windows path where downloaded images are written.
# NOTE(review): trailing backslash is relied on by download_shit(), which
# builds paths via plain string concatenation — do not remove it.
root_directory = 'C:\\Projects\\Python\\Json\\rarity\\'
  7.  
  8. def download_shit(url):
  9. name = url.split('/', 8)[8]
  10. f = open(root_directory + name, "wb")
  11. f.write(requests.get(url).content)
  12. f.close()
  13.  
  14. def check_path(name):
  15. if not os.path.exists(name):
  16. os.makedirs(name)
  17.  
  18.  
  19. check_path('rarity')
  20.  
  21. if __name__ == "__main__":
  22.  
  23. for i in range(1, 350):
  24. print('getting images for page: ', i)
  25. data = requests.get('https://derpibooru.org/search.json?q=rarity&page=' + str(i), verify=False).json()
  26. images = []
  27.  
  28. for j in range(0, len(data['search'])):
  29. images.append("http://" + (data['search'][j]['image'][2:]))
  30.  
  31. print('downloading images for page: ', i)
  32. with Pool(15) as p:
  33. p.map(download_shit, images)
Advertisement
Add Comment
Please, Sign In to add comment