MikiSoft

GitHub dangling commit bruteforcer - by softzer0

Feb 6th, 2024 (edited)
Python | 7.97 KB | Source Code
import argparse
import asyncio
import aiohttp
from datetime import timedelta
import itertools
import string
import time
from tqdm import tqdm

DEFAULTS = {
    "retries": 5,
    "delay": 1,
    "timeout": 15,
    "max_workers": 20,
    "proxy_type": "socks5"
}

HEADERS = {'Accept': 'application/vnd.github.v3+json'}
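# GitHub's REST API allows roughly 60 unauthenticated requests per hour per IP,
# versus 5,000 per hour with a personal access token, so --token and/or a proxy
# list is effectively required for a search space this large.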

async def check_url(session, progress, proxy_obj, repository_path, hex_string, retries=None, delay=None):
    progress.set_description(f"Current: {hex_string}")

    url = f"https://api.github.com/repos/{repository_path}/commits/{hex_string}"

    # Build the proxy URL from the shared rotation state; None when no proxies were given.
    # Note: aiohttp's proxy= argument natively supports HTTP proxies only, so the SOCKS
    # types would additionally need a connector such as the one from aiohttp-socks.
    get_proxy = lambda: proxy_obj['type'] + '://' + proxy_obj['proxies'][proxy_obj['index']] if proxy_obj['proxies'] else None

    attempt = 0
    while retries == -1 or attempt < retries:
        try:
            async with session.head(url, proxy=get_proxy()) as response:
                if response.status == 200:
                    url = f"https://github.com/{repository_path}/commit/{hex_string}"
                    async with session.get(url, proxy=get_proxy()) as response:
                        text = await response.text()
                        if "This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository." in text:
                            return f"Found: {url}"
                    # The commit exists but is reachable from a branch, i.e. not dangling
                    break
                elif response.headers.get('X-RateLimit-Remaining', '0') == '0':
                    # Rotate proxy
                    proxy_obj['index'] = (proxy_obj['index'] + 1) % len(proxy_obj['proxies']) if proxy_obj['proxies'] else 0
                    # If we've gone through all proxies, wait until the rate limit resets
                    if proxy_obj['index'] == 0:
                        reset_time = int(response.headers.get('X-RateLimit-Reset', 0))
                        sleep_time = int(max(reset_time - time.time(), 0))
                        while sleep_time > 0:
                            progress.set_description(f"Proxy and rate limit reached, sleeping for {timedelta(seconds=sleep_time)}")
                            await asyncio.sleep(1)
                            sleep_time -= 1
                else:
                    # Any other status (e.g. 404) means the short hash doesn't resolve
                    break
        except (aiohttp.ClientError, asyncio.TimeoutError):
            # Connection error or timeout: optionally back off, then retry
            if delay:
                await asyncio.sleep(delay)
            attempt += 1

    if attempt == retries:
        return f"Failed to connect to {url} - skipping."

async def worker(queue, semaphore, session, progress, proxy_type, proxies, repository_path, retries, delay, output_file):
    # Each worker keeps its own rotation index over the shared proxy list
    proxy_obj = {'proxies': proxies, 'index': 0, 'type': proxy_type}
    while True:
        async with semaphore:
            hex_string = await queue.get()
            if hex_string is None:
                # Sentinel: no more work
                queue.task_done()
                break
            result = await check_url(session, progress, proxy_obj, repository_path, hex_string, retries, delay)
            if result is not None:
                tqdm.write(result)
                # Write the result to the output file
                if output_file is not None:
                    output_file.write(result + "\n")
            # Update the progress bar
            progress.update(1)
            queue.task_done()

async def main(args, hex_combinations, total_combinations, cookie_jar, proxies, output_file):
    # Create a session; cookie_jar may be None, in which case aiohttp uses its default jar
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=args.timeout or None), cookie_jar=cookie_jar, headers=HEADERS) as session:
        # Create a progress bar
        progress = tqdm(total=total_combinations, desc="Starting", bar_format='{l_bar}{bar}| {n_fmt}/{total_fmt}')

        # Create a semaphore
        semaphore = asyncio.Semaphore(args.max_workers)

        # Create a bounded queue with a maximum size equal to the number of workers
        queue = asyncio.Queue(maxsize=args.max_workers)

        # Create workers
        workers = [asyncio.create_task(worker(queue, semaphore, session, progress, args.proxy_type, proxies, args.repository_path, args.retries, args.delay, output_file)) for _ in range(args.max_workers)]

        # Enqueue tasks; put() blocks while the queue is full, so the bounded
        # queue applies backpressure instead of materializing every candidate
        for hex_string in hex_combinations:
            await queue.put(''.join(hex_string))

        # Wait until every enqueued item has been processed
        await queue.join()

        # Indicate that there are no more tasks to process
        for _ in range(args.max_workers):
            await queue.put(None)

        # Wait for all workers to finish processing tasks
        await asyncio.gather(*workers, return_exceptions=True)

        # Close the progress bar
        progress.close()

        # Close the output file
        if output_file is not None:
            output_file.close()

def custom_combinations(chars, repeat):
    # Sort so letters come before digits, then yield candidates that use many
    # distinct characters before highly repetitive ones
    chars = sorted(chars, key=lambda c: (c.isdigit(), c))
    for r in range(repeat, 0, -1):
        for combination in itertools.combinations(chars, r):
            for fill in itertools.product(chars, repeat=repeat-r):
                yield from set(itertools.permutations(combination + fill, repeat))

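# For illustration: custom_combinations("ab1", 2) first yields the fully distinct
# pairs ("ab"/"ba", then "a1"/"1a", then "b1"/"1b") and only afterwards repeats
# such as "aa", "bb" and "11". The r < repeat passes re-yield some earlier
# strings, so the generator produces occasional duplicates.
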
if __name__ == "__main__":
    # Create an argument parser
    parser = argparse.ArgumentParser(description="GitHub dangling commit bruteforcer. by softzer0 & ChatGPT")

    # Add arguments
    parser.add_argument("repository_path", type=str, help="Path to the repository, in format: owner/repository")
    parser.add_argument("--proxies_file", type=str, default=None, help="File with proxies to use for requests, one per line, in format IP:port.")
    parser.add_argument("--proxy_type", choices=['http', 'https', 'socks5', 'socks4'], default=DEFAULTS['proxy_type'], help=f"The type of the proxy. Default is {DEFAULTS['proxy_type']}.")
    parser.add_argument('--token', type=str, help="GitHub personal access token to use in API requests.")
    parser.add_argument("--cookies_file", type=str, default=None, help="File containing cookies to use for requests.")
    parser.add_argument("--log_file", type=str, default=None, help="File to write the results to. If not specified, they won't be written to a file.")
    parser.add_argument("--timeout", type=int, default=DEFAULTS['timeout'], help=f"Timeout in seconds for each request. Default is {DEFAULTS['timeout']}. Set to 0 for no timeout.")
    parser.add_argument("--retries", type=int, default=DEFAULTS['retries'], help=f"Number of attempts for each URL. Default is {DEFAULTS['retries']}. Set to -1 for unlimited.")
    parser.add_argument("--delay", type=float, default=DEFAULTS['delay'], help=f"Delay in seconds between each retry. Default is {DEFAULTS['delay']}. Set to 0 for no delay.")
    parser.add_argument("--max_workers", type=int, default=DEFAULTS['max_workers'], help=f"Number of worker instances. Default is {DEFAULTS['max_workers']}.")

    # Parse the arguments
    args = parser.parse_args()

    output_file = None

    # Load cookies into a jar if a cookies file was given; CookieJar.load()
    # mutates the jar in place (and returns None), so the jar must be kept
    cookies = None
    if args.cookies_file:
        cookies = aiohttp.CookieJar()
        cookies.load(args.cookies_file)

    if args.token:
        HEADERS['Authorization'] = f"token {args.token}"

    proxies = None
    if args.proxies_file:
        with open(args.proxies_file, "r") as f:
            # Strip newlines and skip blank lines
            proxies = [line.strip() for line in f if line.strip()]

    # Set up logging
    if args.log_file:
        output_file = open(args.log_file, "w")

    # Generate candidate 7-character hex strings; string.hexdigits contains both
    # cases ("0123456789abcdefABCDEF"), so slice it to avoid duplicate characters
    hex_chars = string.hexdigits[:16]  # "0123456789abcdef"
    hex_combinations = custom_combinations(hex_chars, 7)

    # Every candidate is exactly 7 characters long, so there are 16**7 unique
    # values; the generator's occasional duplicates make this an estimate
    total_combinations = len(hex_chars) ** 7

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(main(args, hex_combinations, total_combinations, cookies, proxies, output_file))
    except KeyboardInterrupt:
        tqdm.write("Stopping...")
        # Cancel any still-running tasks before shutting the loop down
        tasks = asyncio.all_tasks(loop)
        for task in tasks:
            task.cancel()
        loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
    finally:
        loop.close()
        tqdm.write("Stopped.")
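
A minimal usage sketch (the script filename, repository path, and token value
below are placeholder examples; requires the aiohttp and tqdm packages):

    pip install aiohttp tqdm
    python dangling_bruteforcer.py someuser/somerepo --token ghp_exampletoken --log_file found.txt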