Auto Dorking Bypassing Sorry Filter
Sweetening
Apr 24th, 2024

#!/usr/bin/env python3

import argparse
import os
import random
import re
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from urllib.parse import quote_plus

import requests
from bs4 import BeautifulSoup
from termcolor import colored

def get_proxies():
    # Load cached proxies, or fetch a fresh list from ProxyScrape on first run.
    if not os.path.exists("proxies.txt"):
        url = (
            "https://api.proxyscrape.com/v2/?request=getproxies&protocol=http"
            "&timeout=10000&country=all&ssl=all&anonymity=all&limit=5000"
        )
        proxies = requests.get(url, timeout=10).text.split("\n")
        with open("proxies.txt", "w") as f:
            f.write("\n".join(proxies))
    else:
        with open("proxies.txt", "r") as f:
            proxies = f.read().split("\n")
    # Drop blank lines so an empty trailing entry never gets used as a proxy.
    return [p.strip() for p in proxies if p.strip()]

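# For reference, proxies.txt is plain text with one host:port entry per line,
# e.g. "203.0.113.5:8080" (an RFC 5737 documentation address used here as a
# placeholder; the original paste does not include sample entries).
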
def test_proxy(proxy, user_agent, verbose):
    # A proxy counts as working if it can fetch Bing within 3 seconds.
    test_url = "https://bing.com"
    headers = {"User-Agent": user_agent}
    try:
        proxies = {"http": f"http://{proxy}", "https": f"http://{proxy}"}
        response = requests.get(test_url, headers=headers, proxies=proxies, timeout=3)
        if response.status_code == 200:
            print(colored(f"Good proxy found: {proxy}", "green"))
            return True
    except requests.exceptions.ConnectTimeout:
        if verbose:
            print(colored(f"Connection timeout for proxy: {proxy}", "red"))
    except requests.exceptions.ProxyError:
        if verbose:
            print(colored(f"Proxy error for proxy: {proxy}", "red"))
    except requests.exceptions.RequestException as e:
        if verbose:
            print(colored(f"Request exception for proxy: {proxy}, error: {e}", "red"))
    return False

def filter_working_proxies(proxies, user_agents, verbose):
    # Test every candidate concurrently and keep only the responsive ones.
    # (The status message lives here rather than in test_proxy, so it prints
    # once instead of once per proxy.)
    print(colored("Scraping good proxies...", "blue"))
    working_proxies = []
    user_agent = random.choice(user_agents)
    with ThreadPoolExecutor(max_workers=50) as executor:
        futures_to_proxies = {
            executor.submit(test_proxy, proxy, user_agent, verbose): proxy
            for proxy in proxies
        }
        for future in as_completed(futures_to_proxies):
            if future.result():
                working_proxies.append(futures_to_proxies[future])
    return working_proxies

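# Optional sketch, not wired into the script above: re-testing thousands of
# proxies on every run is slow, so one could cache the verified set for reuse.
# "working_proxies.txt" is an invented filename, not part of the original.
def save_working_proxies(working_proxies, path="working_proxies.txt"):
    with open(path, "w") as f:
        f.write("\n".join(working_proxies))
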
def get_user_agents():
    # One User-Agent string per line in useragents.txt; skip blank lines.
    with open("useragents.txt", "r") as f:
        return [ua.strip() for ua in f if ua.strip()]

def google_search(query, user_agent, proxy):
    # URL-encode the dork; raw spaces, quotes, and colons would break the request.
    url = f"https://www.google.com/search?q={quote_plus(query)}"
    headers = {"User-Agent": user_agent}
    proxies = {"http": f"http://{proxy}", "https": f"http://{proxy}"}
    response = requests.get(url, headers=headers, proxies=proxies, timeout=10)
    soup = BeautifulSoup(response.text, "html.parser")
    return [result["href"] for result in soup.select(".yuRUbf a")]

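# The ".yuRUbf a" selector is tied to Google's current desktop result markup
# and can silently break when the layout changes. A looser fallback sketch
# (an assumption about Google's "/url?q=..." redirect anchors, not part of
# the original script) is to pull the target URL out of every such link:
def extract_links_fallback(html):
    # Imported locally so this optional sketch stays self-contained.
    from urllib.parse import parse_qs, urlparse
    soup = BeautifulSoup(html, "html.parser")
    links = []
    for a in soup.select("a[href^='/url?']"):
        qs = parse_qs(urlparse(a["href"]).query)
        links.extend(qs.get("q", []))
    return links
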
def search_dork(dork, proxies, user_agents, verbose, max_retries=3, backoff_factor=1.0):
    print(colored(f"Searching for dork: {dork}", "yellow"))

    def try_search_dork(dork, proxy, user_agent):
        try:
            return google_search(dork, user_agent, proxy)
        except requests.exceptions.RequestException as e:
            if verbose:
                print(colored(f"Error with proxy {proxy}: {e}, rotating proxy...", "magenta"))
            return None

    retries = 0
    while retries <= max_retries:
        proxy = random.choice(proxies)
        user_agent = random.choice(user_agents)
        results = try_search_dork(dork, proxy, user_agent)

        if results is not None:
            if results:
                # Dorks contain characters that are illegal in filenames
                # (slashes, colons, quotes), so sanitize before writing.
                safe_name = re.sub(r"[^\w.-]+", "_", dork)
                with open(f"results/{safe_name}_results.txt", "w") as f:
                    f.write("\n".join(results[:20]))
                print(colored(f"Saved top 20 results for dork '{dork}'", "green"))
            else:
                print(colored(f"No results found for dork '{dork}'", "red"))
            break

        retries += 1
        # Exponential backoff with jitter before retrying through a new proxy.
        time.sleep(backoff_factor * (2 ** (retries - 1)) + random.uniform(1, 5))

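# The "sorry filter" in the title refers to Google's rate-limit interstitial
# at google.com/sorry/, which the script dodges indirectly by rotating proxies
# and user agents with jittered backoff. A sketch for spotting it explicitly,
# given the requests response object, might look like:
def hit_sorry_page(response):
    # Rate-limited clients get redirected to /sorry/ or answered with HTTP 429.
    return "/sorry/" in response.url or response.status_code == 429
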
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--verbose", help="Display errors with proxies.", action="store_true")
    args = parser.parse_args()

    with open("dorks.txt", "r") as f:
        dorks = [d.strip() for d in f if d.strip()]

    user_agents = get_user_agents()
    proxies = filter_working_proxies(get_proxies(), user_agents, args.verbose)
    if not proxies:
        # random.choice() would crash on an empty list, so bail out early.
        print(colored("No working proxies found, exiting.", "red"))
        return

    os.makedirs("results", exist_ok=True)

    with ThreadPoolExecutor(max_workers=20) as executor:
        futures = {
            executor.submit(search_dork, dork, proxies, user_agents, args.verbose): dork
            for dork in dorks
        }
        for future in as_completed(futures):
            future.result()


if __name__ == "__main__":
    main()

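# Usage sketch: put dorks.txt and useragents.txt next to the script
# (proxies.txt is fetched and cached automatically on the first run), then:
#
#   python3 autodork.py -v
#
# "autodork.py" is a placeholder filename; the paste does not name the file.
# Results land in results/<dork>_results.txt (unsafe filename characters
# replaced with underscores), at most 20 links per dork.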