Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import requests
- import time
- import argparse
- from concurrent.futures import ThreadPoolExecutor
- from colorama import Fore, init
# Initialize Colorama: enables ANSI color codes on Windows terminals and,
# with autoreset=True, resets the color after every print so one colored
# line cannot bleed into the next.
init(autoreset=True)
def send_request(url, filter_sizes, line_filters, match_code, timeout=10):
    """Fetch *url* and print a colored one-line summary unless filtered out.

    Args:
        url: Full URL to request.
        filter_sizes: Optional list of response byte-sizes to suppress.
        line_filters: Optional list of response line-counts to suppress.
        match_code: Optional HTTP status code; when set, only responses
            with exactly that code are printed.
        timeout: Per-request timeout in seconds. Without one, a single
            unresponsive host would hang its worker thread forever.

    Network failures (DNS errors, refused connections, timeouts) are
    deliberately swallowed so a fuzzing run keeps going.
    """
    try:
        response = requests.get(url, timeout=timeout)
    except requests.exceptions.RequestException:
        # Best-effort: skip unreachable hosts silently.
        return

    size = len(response.content)  # Response body size in bytes
    lines_count = len(response.text.splitlines())  # Line count of the body

    # Suppress responses whose size or line count was filtered out.
    if filter_sizes is not None and size in filter_sizes:
        return
    if line_filters is not None and lines_count in line_filters:
        return
    # When a match code is given, print only responses bearing that code.
    if match_code is not None and response.status_code != match_code:
        return

    status_message = f"Status Code: {response.status_code} - Size: {size} bytes - Lines: {lines_count}"
    # Green highlights 200 OK hits; everything else prints in yellow.
    color = Fore.GREEN if response.status_code == 200 else Fore.YELLOW
    print(color + f"{status_message} - {url}")
def fuzz_subdomains(domain, file, delay=0, filter_sizes=None, line_filters=None, match_code=None, thread_count=10):
    """Substitute each wordlist entry for 'FUZZ' in *domain* and probe it.

    Args:
        domain: Target URL template containing the literal marker 'FUZZ'.
        file: Path to a wordlist file, one candidate per line.
        delay: Seconds to wait between requests; a positive delay forces
            sequential mode, otherwise requests run concurrently.
        filter_sizes: Optional list of response sizes to suppress.
        line_filters: Optional list of response line counts to suppress.
        match_code: Optional status code; only matching responses print.
        thread_count: Worker threads used in concurrent mode.
    """
    if "FUZZ" not in domain:
        print(Fore.RED + "Error: 'FUZZ' not found in the domain.")
        return

    # Keep the try narrow: only open() can raise FileNotFoundError.
    try:
        with open(file) as f:
            # Strip and drop blank lines — they would otherwise produce
            # bogus URLs (the bare template with 'FUZZ' replaced by '').
            words = [line.strip() for line in f]
    except FileNotFoundError:
        print(Fore.RED + f"Error: File '{file}' not found.")
        return
    urls = [domain.replace("FUZZ", word) for word in words if word]

    if delay > 0:
        # Sequential mode: honor the requested pause between requests.
        for url in urls:
            send_request(url, filter_sizes, line_filters, match_code)
            time.sleep(delay)
    else:
        # Concurrent mode: fan requests out over a thread pool.
        with ThreadPoolExecutor(max_workers=thread_count) as executor:
            executor.map(lambda u: send_request(u, filter_sizes, line_filters, match_code), urls)
if __name__ == "__main__":
    # Command-line interface for the fuzzer.
    parser = argparse.ArgumentParser(description="Fuzzing tool for subdomains")
    parser.add_argument("-u", "--url", required=True, help="Target URL with 'FUZZ'")
    parser.add_argument("-f", "--file", required=True, help="Wordlist file")
    parser.add_argument("-t", "--delay", type=int, default=0, help="Delay between requests (seconds)")
    parser.add_argument("-fs", "--filter-size", type=str, help="Filter out responses of these sizes, comma-separated")
    parser.add_argument("-li", "--line-filter", type=str, help="Filter out responses with these line counts, comma-separated")
    parser.add_argument("-mc", "--match-code", type=int, help="Only show pages with the specified status code")
    parser.add_argument("-th", "--threads", type=int, default=10, help="Number of concurrent threads")
    args = parser.parse_args()

    def _csv_ints(raw):
        # Turn a "100,200"-style option value into [100, 200]; None when absent.
        return [int(piece) for piece in raw.split(',')] if raw else None

    # Kick off the fuzzing run with the parsed options.
    fuzz_subdomains(
        args.url,
        args.file,
        args.delay,
        _csv_ints(args.filter_size),
        _csv_ints(args.line_filter),
        args.match_code,
        args.threads,
    )
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement