# anonfiles downloader v0.2
import subprocess
import sys
import urllib.parse
import urllib.request

import bs4


def main():
    if len(sys.argv) != 2:
        print(f"{sys.argv[0]} <url>")
        sys.exit(1)
    url = sys.argv[1]

    # Fetch the anonfiles page with a custom User-Agent and parse the HTML.
    request = urllib.request.Request(url, headers={'User-Agent': "blacks"})
    bs = bs4.BeautifulSoup(urllib.request.urlopen(request).read(), features="lxml")

    # The direct download link lives in the element with id="download-url".
    urlelement = bs.find(id="download-url")
    if urlelement is None:
        print("invalid url")
    else:
        url = urlelement["href"]
        print(url)
        path = urllib.parse.urlparse(urlelement["href"]).path

        # Write the same path on a range of CDN hosts as tab-separated URIs;
        # aria2c treats tab-separated URIs on one line as mirrors of one file.
        with open("temp_urls.txt", "w") as temp_urls:
            for index in range(101, 140):
                temp_urls.write(f"https://cdn-{index}.anonfiles.com{path}\t")

        # Hand the mirror list to aria2c for a segmented, resumable download.
        subprocess.run(["aria2c",
                        "--max-concurrent-downloads=20",
                        "--max-connection-per-server=16",
                        "--split=32",
                        "--continue=true",
                        "--input-file=temp_urls.txt"])


if __name__ == '__main__':
    main()
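
# Minimal invocation sketch, assuming the paste is saved as anonfiles_dl.py
# (a hypothetical filename) and aria2c is available on PATH:
#
#   python3 anonfiles_dl.py https://anonfiles.com/<file-id>
#
# The script prints the resolved download URL, writes the mirror list to
# temp_urls.txt, then launches aria2c to perform the download.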