# Data hosted with love by Pastebin.com - Download Raw - See Original
import base64
import getopt
import json
import os
import sys
import urllib2

import pycurl
  8. if len(sys.argv) < 3:
  9.     print "Search2Audit 1.0 by @YJesus\n"
  10.     print "www.securitybydefault.com\n"
  11.     print "Usage search2audit.py <API KEY> <URL>\n"
  12.     sys.exit(0)
  13.  
  14.  
  15. siteurl= sys.argv[2]
  16. key= sys.argv[1]
  17.  
  18.  
  19. c = pycurl.Curl()
  20. c.setopt(c.PROXY, '127.0.0.1:8080')
  21. c.setopt(pycurl.WRITEFUNCTION, lambda x: None)
  22. c.setopt(pycurl.SSL_VERIFYPEER, 0)
  23. c.setopt(pycurl.SSL_VERIFYHOST, 0)
  24.  
  25. c.setopt(pycurl.USERAGENT, 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:8.0) Gecko/20100101 Firefox/8.0')
  26.  
  27. c.setopt(pycurl.HTTPHEADER, ["Proxy-Connection:"])
  28.  
  29.  
  30. user_agent = 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; FDM; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 1.1.4322)'
  31. creds = (':%s' % key).encode('base64')[:-1]
  32. auth = 'Basic %s' % creds
  33.  
  34. request = urllib2.Request('https://api.datamarket.azure.com/Data.ashx/Bing/Search/Composite?Sources=%27web%27&Query=%27site:'+siteurl+'%27&$format=json')
  35.  
  36. request.add_header('Authorization', auth)
  37. request.add_header('User-Agent', user_agent)
  38.  
  39. requestor = urllib2.build_opener()
  40. result = requestor.open(request)
  41.  
  42. search_results = result.read()
  43.    
  44. results = json.loads(search_results)
  45.  
  46. total = results['d']['results'][0]["WebTotal"]
  47.  
  48. print "Num total links", int(total)
  49.  
  50. offset = int(total)/50
  51. offset=offset+1
  52.  
  53. index = 0
  54.  
  55.  
  56. for i in range(offset):
  57.  
  58.     request = urllib2.Request('https://api.datamarket.azure.com/Data.ashx/Bing/Search/Web?Query=%27site:'+siteurl+'%27&$skip='+str(index)+'&$format=json')
  59.     request.add_header('Authorization', auth)
  60.     request.add_header('User-Agent', user_agent)
  61.  
  62.     requestor = urllib2.build_opener()
  63.     result = requestor.open(request)
  64.  
  65.     search_results = result.read()
  66.    
  67.     results = json.loads(search_results)
  68.    
  69.    
  70.     for result in results['d']['results']:
  71.        
  72.         urltoadd= result.get(u'Url')
  73.        
  74.         print urltoadd
  75.        
  76.         urltoadd=str(urltoadd)
  77.        
  78.         c.setopt(c.URL, urltoadd)
  79.         c.perform()
  80.    
  81.     index+=50