
Robots_file v0.1

a guest
Jun 28th, 2010
#!/usr/bin/python
# script coded by datahack from pentest101.blogspot.com
# Robots_file: a small script to find the allowed directories, disallowed
# directories and sitemaps listed in a site's robots.txt
# we are not responsible for any bad usage
# have fun

import re
import sys
import urllib


def usage():
    print "#### script coded by Pentest101 Team ####"
    print "#### information gathering           ####"
    print "[+] usage : ./robots_file.py [option] [site] (without trailing /)"
    print "[+] options:"
    print "         -A : Allowed"
    print "         -D : Disallowed"
    print "         -S : Sitemap"
    sys.exit()


def allow(site):
    # Download robots.txt and print every Allow rule as a full URL.
    urllib.urlretrieve(site + '/robots.txt', '/tmp/robots.txt')
    f = open('/tmp/robots.txt')
    print "--- Allowed directories :", site
    print ""
    for line in f.readlines():
        # startswith() avoids matching Disallow lines, which also contain 'Allow'
        if line.startswith('Allow'):
            match = re.search(r'/\S+', line)
            if match:
                print site + match.group()
    f.close()
    print "coded by Pentest101 team"


def disallow(site):
    # Download robots.txt and print every Disallow rule as a full URL.
    urllib.urlretrieve(site + '/robots.txt', '/tmp/robots.txt')
    f = open('/tmp/robots.txt')
    print "--- Disallowed directories :", site
    print ""
    for line in f.readlines():
        if line.startswith('Disallow'):
            match = re.search(r'/\S+', line)
            # skip empty rules like 'Disallow:' with no path
            if match:
                print site + match.group()
    f.close()
    print "coded by Pentest101 team"


def sitemap(site):
    # Download robots.txt and print every Sitemap URL it declares.
    urllib.urlretrieve(site + '/robots.txt', '/tmp/robots.txt')
    f = open('/tmp/robots.txt')
    print "--- Sitemap :", site
    print ""
    for line in f.readlines():
        if line.startswith('Sitemap'):
            match = re.search(r'http://\S+', line)
            if match:
                print match.group()
    f.close()
    print "coded by Pentest101 team"


def main():
    try:
        if sys.argv[1] == "-A":
            allow(sys.argv[2])
        elif sys.argv[1] == "-D":
            disallow(sys.argv[2])
        elif sys.argv[1] == "-S":
            sitemap(sys.argv[2])
        else:
            usage()
    except IndexError:
        # too few arguments on the command line
        usage()


# good to go
main()
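
As an example run: given a hypothetical target http://example.com whose robots.txt contains the lines below, the -D option prints each Disallow path as a full URL (the domain, paths, and output here are made up for illustration, not taken from a real site):

User-agent: *
Disallow: /admin/
Disallow: /tmp/
Sitemap: http://example.com/sitemap.xml

$ ./robots_file.py -D http://example.com
--- Disallowed directories : http://example.com

http://example.com/admin/
http://example.com/tmp/
coded by Pentest101 team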
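
The script above is Python 2 only (print statements, urllib.urlretrieve). A minimal Python 3 sketch of the same idea, assuming urllib.request and reading robots.txt into memory instead of a /tmp file; this is an illustration, not part of the original paste, and the sitemap option is left out to keep it short:

#!/usr/bin/env python3
# Python 3 sketch: fetch robots.txt and print matching rules as full URLs.
import re
import sys
import urllib.request


def print_rules(site, keyword):
    # Fetch robots.txt straight into memory (no temporary file needed).
    with urllib.request.urlopen(site + '/robots.txt') as resp:
        text = resp.read().decode('utf-8', errors='replace')
    for line in text.splitlines():
        # startswith() keeps Allow and Disallow rules apart, as above
        if line.startswith(keyword):
            match = re.search(r'/\S+', line)
            if match:
                print(site + match.group())


if __name__ == '__main__':
    # map the original -A / -D flags onto the same logic
    options = {'-A': 'Allow', '-D': 'Disallow'}
    if len(sys.argv) != 3 or sys.argv[1] not in options:
        sys.exit('usage: ./robots_file.py -A|-D site')
    print_rules(sys.argv[2], options[sys.argv[1]])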