Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #!/usr/bin/python
import os
import shutil
import subprocess
import sys
import urllib
import urllib.request  # bare `import urllib` does NOT expose urllib.request in Python 3

import wget
from bs4 import BeautifulSoup
def add_http_prefix(src):
    """Return *src* as an absolute URL.

    Thread pages link images with protocol-relative hrefs ("//host/path");
    those get "http:" prepended. URLs that already carry an http(s) scheme
    are returned unchanged.
    """
    # The old substring test (`"http:" not in src`) never matched an
    # https:// URL, so those were mangled into "http:https://...".
    # Checking the scheme prefix explicitly fixes that while keeping the
    # original behavior for protocol-relative and plain-http inputs.
    if src.startswith(("http://", "https://")):
        return src
    return "http:" + src
def clear_folder(folder):
    """Delete *folder* and everything inside it, then recreate it empty.

    Removal is best-effort: a folder that does not exist yet is not an
    error (matching the original intent, but without a bare ``except``
    that also hid permission errors and other real failures).
    """
    # ignore_errors=True covers the not-yet-existing case explicitly.
    shutil.rmtree(folder, ignore_errors=True)
    # makedirs also creates any missing parent directories, where the
    # original os.mkdir would have raised.
    os.makedirs(folder)
# --- Script body: fetch a thread page, download its images, view them ---
if len(sys.argv) != 2:
    print("Expecting URL as CLI parameter.")
    # sys.exit instead of the site-module `exit` builtin, which is not
    # guaranteed to exist in every runtime context.
    sys.exit(-1)

folder = "/tmp/images/"
url = sys.argv[1]

# Some hosts reject urllib's default User-Agent, so spoof a browser.
req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
html = urllib.request.urlopen(req).read()

soup = BeautifulSoup(html, features='lxml')
# First .thread container on the page; its fileThumb anchors link the
# full-size images.
thread = soup.findAll("div", {"class": "thread"})[0]
image_list = thread.findAll("a", {"class": "fileThumb"})
print("Found {} images, downloading them to: {}".format(len(image_list), folder))

clear_folder(folder)
for img in image_list:
    src = add_http_prefix(img['href'])
    wget.download(src, out=folder)

# Open the downloads in sxiv thumbnail mode. Passing an argument list
# (shell=False) avoids the fragile string-split command construction and
# waits for the viewer to finish, like the old Popen+communicate did.
subprocess.run(["sxiv", "-t", "-a", folder], stdout=subprocess.PIPE)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement