Advertisement
Guest User

Fork of TheNewBoston(Bucky) Website scanner.

a guest
Oct 17th, 2015
418
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 3.52 KB | None | 0 0
  1. #coding: utf-8;
  2. #Credit: Creuilcreuil;
  3.  
  4. from tld import get_tld;
  5. import urllib.request;
  6. import argparse;
  7. import io;
  8. import os;
  9.  
# Root output folder: one sub-directory per scanned domain is created here.
ROOT_DIR = 'Websites';
  11.  
  12. def create_dir(directory, quiet=False):
  13.     if not os.path.exists( directory ):
  14.         if not quiet:
  15.             print( "Creating new directory, '{}'.".format( directory ) );
  16.  
  17.         os.makedirs( directory );
  18.  
  19.     else:
  20.         if not quiet:
  21.             print( "Directory allready exists, '{}'.".format( directory ) );
  22.  
  23. def write_file(path, data, quiet=False):
  24.     if not quiet:
  25.         print( "Writing '{}'.".format( path ) );
  26.  
  27.     with open( path, 'w' ) as file:
  28.         file.write( data );
  29.  
  30. def get_domain_name(url, quiet=False):
  31.     if not quiet:
  32.         print( "Getting domain name,'{}'.".format( url ) );
  33.  
  34.     return get_tld( url );
  35.  
  36. def get_ip_address(url, quiet=False):
  37.     if not quiet:
  38.         print( "Getting ip address from '{}'.".format( url ) );
  39.  
  40.     process = os.popen( 'host {}'.format( url ) );
  41.     result = str( process.read() );
  42.     marker = result.find( 'has address' ) + 12;
  43.  
  44.     return result[marker:].splitlines()[0];
  45.  
  46. def get_nmap(options, ip, quiet=False):
  47.     if not quiet:
  48.         print( "Performing 'nmap' scan on {}".format( ip ) );
  49.  
  50.     process = os.popen( "nmap {0} {1}".format( options, ip ) );
  51.     result = str( process.read() );
  52.  
  53.     return result;
  54.  
  55. def get_robots_txt(url, quiet=False):
  56.     if not quiet:
  57.         print( "Downloading 'robot.txt' from '{}'".format( url ) );
  58.  
  59.     if not url.endswith( '/' ):
  60.         url += '/';
  61.  
  62.     request = urllib.request.urlopen( '{}robots.txt'.format( url ), data=None );
  63.     data = io.TextIOWrapper( request, encoding='utf-8' );
  64.  
  65.     return data.read();
  66.  
  67. def get_whois(domain_name, quiet=False):
  68.     if not quiet:
  69.         print( "Getting 'whois' info from '{}'.".format( domain_name ) );
  70.  
  71.     process = os.popen( 'whois {}'.format( domain_name ) );
  72.     result = str( process.read() );
  73.  
  74.     return result;
  75.  
  76. def gather_info(url, quiet=False):
  77.  
  78.     print( "Gathering info from '{}'.".format( url ) );
  79.  
  80.     domain_name = get_domain_name( url, quiet );
  81.     ip_address = get_ip_address( domain_name, quiet );
  82.     nmap = get_nmap( '-F', ip_address, quiet );
  83.     robots_txt = get_robots_txt( url, quiet );
  84.     whois = get_whois( domain_name, quiet );
  85.  
  86.     data = {
  87.         'domain_name':domain_name, 'ip_address':ip_address, 'nmap':nmap,
  88.         'robots_txt':robots_txt, 'whois':whois,
  89.     };
  90.  
  91.     create_report( data, quiet );
  92.  
  93. def create_report( data, quiet ):
  94.     project_dir = '{0}/{1}'.format( ROOT_DIR, data['domain_name'] );
  95.     create_dir( project_dir, quiet );
  96.  
  97.     print( "Savign report in '{}'.".format( project_dir ) );
  98.  
  99.     for key, value in data.items():
  100.         file = '{0}/{1}.txt'.format( project_dir, key );
  101.  
  102.         if not quiet:
  103.             print( 'Savign {}'.format( file ) );
  104.  
  105.         write_file( file, value, quiet );
  106.  
  107.  
  108.     print( "Done with '{}'.\n".format( data['domain_name'] ) );
  109.  
  110. def Main():
  111.     create_dir( ROOT_DIR );
  112.  
  113.     parser = argparse.ArgumentParser();
  114.  
  115.     parser.add_argument('url_list', help='website to gather info.', type=str );
  116.     parser.add_argument('-l', '--list', help='<url> is website list.', action='store_true' );
  117.     parser.add_argument('-q', '--quiet', help='silent mode.', action='store_true' );
  118.  
  119.     args = parser.parse_args();
  120.  
  121.     if args.url_list and not args.list:
  122.         gather_info( args.url, args.quiet );
  123.  
  124.     elif args.url_list and args.list:
  125.         print( "Loading website list '{}'".format( args.url_list ) );
  126.  
  127.         with open( args.url_list, 'r' ) as file:
  128.             data = file.read().split('\n')[:-1];
  129.  
  130.             for line in data:
  131.                 gather_info( line, args.quiet );
  132.  
  133.         print( "Done with website list '{}'.\n".format( args.url_list ) );
  134.  
# Run the scanner only when executed as a script (not on import).
if __name__ == '__main__':
    Main();
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement