Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# gdns.pl
# Indexes all subdomains of a domain without using zone transfers or DNS resolving.
# Intended use: enumerate every subdomain of a given domain via Google's index.
# Author: DiabloHorn
# Date: 16-02-2008
# Version: 0.3
# Changes:
#   - fixed the "-" bug
#   - fixed the hang that occurred on reaching the end of the Google results
#
### TODO ###
# implement rotating proxy support
### POSSIBLE BUGS ###
# you may need Ctrl+C if there are fewer than 100 results
use LWP::UserAgent;
use HTML::LinkExtor;
use URI::URL;

# Require exactly one argument: the base domain to enumerate.
unless (@ARGV == 1) {
    print "gdns.pl <base domain>\n";
    print "Example: gdns.pl kd-team.com\n";
    print "Should find all subdomains indexed by google.";
    exit(1);
}

# Deliberately NOT quotemeta'd: escaping the domain breaks the Google query.
$searchitem = $ARGV[0];

# Base search engine URL; the result-parsing regexes depend on this host.
my $baseSEngine = "http://www.google.com";

# Initial query URL: site:<domain>, English interface.
$url = URI->new("$baseSEngine/search?hl=en&q=site%3A$searchitem");

$ua = LWP::UserAgent->new;
# Present a plausible browser user-agent to reduce the chance of being blocked.
$ua->agent('Opera/9.20 (Windows NT 6.0; U; en)');

print "[*] starting subdomain search on $searchitem\n";

# Deduplicating set of every subdomain URL found so far.
my %allurls = ();
# Deduplicating set of Google "next page" links; values flip to "visited"
# once a page has been fetched.
my %nexturls = ();
# callback: invoked by HTML::LinkExtor for every link-bearing tag of a page.
#
# Arguments: $tag (tag name), %attr (attribute-name => URL pairs).
# Side effects: records newly seen subdomain URLs in %allurls (printing each
# one once) and Google "next page" links in %nexturls. Returns nothing useful.
sub callback {
    my ($tag, %attr) = @_;

    # For this PoC only <a href> tags are of interest.
    return if $tag ne 'a';

    for my $link (values %attr) {
        # Collect absolute URLs whose host ends in the target base domain.
        # \Q...\E escapes regex metacharacters in the domain so that the dots
        # are literal — without it "kd-team.com" would also match hosts like
        # "kd-teamXcom". (This is separate from the search-query string, which
        # must stay unescaped.)
        if ($link =~ m!(^(http://|https://|ftp://|irc://)(([a-zA-Z0-9\-\.]*)(\.+))*\Q$searchitem\E)!io) {
            if (!exists $allurls{$1}) {
                $allurls{$1} = $1;
                print "$1\n";
            }
        }
        # Collect Google's "next page" result links so the main loop can walk
        # every result page.
        if ($link =~ m!/search\?q=site:\Q$searchitem\E&hl=\w+&start=\d+&sa=\w!io) {
            if (!exists $nexturls{$link}) {
                $nexturls{$link} = $link;
            }
        }
    }
}
# Wire the link extractor to our callback; each fetched page is parsed
# chunk-by-chunk as it arrives.
$p = HTML::LinkExtor->new(\&callback);

# Fetch and parse the first result page — this seeds %nexturls.
$res = $ua->request(HTTP::Request->new(GET => $url), sub { $p->parse($_[0]) });

# Sweep %nexturls repeatedly until a full pass makes no new requests.
# Pages fetched during a pass can add fresh "next" links via the callback;
# those are picked up on the following pass (keys are snapshotted per pass).
$visitedGURLS = 0;
my $did_work = 1;
while ($did_work) {
    $did_work = 0;
    for my $page (sort keys %nexturls) {
        # Never fetch the same result page twice.
        next if $nexturls{$page} eq "visited";
        my $target = URI->new($baseSEngine . $nexturls{$page});
        # Uncomment for verbose progress output:
        #print "[*] searching next page $target\n";
        $res = $ua->request(HTTP::Request->new(GET => $target), sub { $p->parse($_[0]) });
        $nexturls{$page} = "visited";
        $visitedGURLS++;
        $did_work = 1;
        sleep 3; # throttle to avoid getting blocked by google
    }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement