Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #!/usr/bin/perl -w
- #Code by s4m3l0
- use strict;
- use warnings;
- use threads;
- use threads::shared;
- use Thread::Queue;
- use Thread::Semaphore;
- use Bloom::Filter;
- use URI;
- use URI::URL;
- use Web::Scraper;
- use LWP::UserAgent;
- use HTTP::Cookies;
- use IO::Socket;
- use String::Diff;
- use String::Diff qw(diff_fully diff diff_merge diff_regexp);
- use URI::Split qw(uri_split uri_join);
# Shared LWP user agent used by sqli() below: spoofed Firefox 5 UA string,
# 10-second request timeout.
# NOTE(review): assigned without `my` even though `use strict` is loaded
# above — as pasted this does not compile; left untouched in this doc pass.
$ua = LWP::UserAgent->new(
    agent => 'Mozilla/5.0 (X11; Linux i686; rv:5.0.1) Gecko/20100101 Firefox/5.0.1',
    timeout => 10,
);
# --- Interactive driver -----------------------------------------------------
# Clears the console, then runs one of two modes:
#   * reverse-IP mode: reverseip() fills the global @webtarget with domains
#     co-hosted on the target's IP, then optionally runs sqli() on each;
#   * single-site mode: prompts for one URL and runs sqli() on it.
# NOTE(review): $chreverse/$chsqli/$urlsqli/@webtarget are undeclared package
# globals — under the `use strict` above this paste does not compile.
# NOTE(review): $addr printed here is always undef — it only exists as a
# `my` lexical inside reverseip().
if ($^O =~ /MSwin/) {system("cls");} else {system("clear");}
print " Do you want to do reverseip? (y/n)>";
$chreverse = <STDIN>;
if ($chreverse =~ /y/) {
    reverseip();
    print " Do you want to do sqli? (y/n)>";
    $chsqli = <STDIN>;
    if ($chsqli =~ /y/) {
        print "Scanning for vulnerabilities in --------------> $addr";
        # sqli() reads its target from the global $urlsqli set by this loop.
        foreach $urlsqli (@webtarget) {
            sqli();
        }
    }
} else {
    print "Website you want to scan..\? > ";
    $urlsqli=<STDIN>;
    # NOTE(review): $urlsqli is never chomp()ed here — the trailing newline
    # stays in the URL handed to sqli(); flagged, not fixed, in this pass.
    print "Scanning for vulnerabilities in --------------> $addr";
    sqli();
}
sub reverseip {
    # Resolve the user-supplied host to an IPv4 address, then scrape four
    # public reverse-IP services for domains reported as co-hosted on that
    # address.  Results accumulate in the global @webtarget and are also
    # appended (sorted, de-duplicated) to a file named after the raw input.
    # NOTE(review): this is unauthorized-recon tooling — documented here for
    # review purposes only, not repaired or improved.
    # NOTE(review): apart from $addr, every variable is an undeclared package
    # global; under `use strict` this paste does not compile.
    if ($^O =~ /MSwin/) {system("cls");} else {system("clear");}
    print "IP/Website you want to reverse..\? > ";
    $website=<STDIN>;
    chomp($website);
    # $weblist is the next free index of @webtarget; the rest are
    # per-service hit counters used only for the progress printouts.
    $weblist = 0;
    $step = 0;
    $rob = 0;
    $same = 0;
    $youg = 0;
    # Normalise the input: strip a leading "http://" and a trailing "/".
    if ($website =~/http:\/\//) { $website =~ s/http:\/\///;}
    if ($website =~/\/$/g) { $website =~ s/\/$//;}
    # (gethostbyname)[4] is the first packed address; inet_ntoa comes via
    # IO::Socket.  No error check: an unresolvable host dies right here.
    my($addr)=inet_ntoa((gethostbyname($website))[4]);
    print "IP target : $addr <-------------------------------------------------------------\n";
    # Reassigns the file-level global $ua with a longer (30 s) timeout.
    $ua = LWP::UserAgent->new(
        agent => 'Mozilla/5.0 (X11; Linux i686; rv:5.0.1) Gecko/20100101 Firefox/5.0.1',
        timeout => 30,
    );
    #--------------------Stephack.com----------------------#
    # One domain per "http://<domain><br" fragment in the HTML response.
    print "Stephack.com <-------------------------------------\n";
    $res = $ua->get('http://stephack.com/re_ip/'.$addr);
    $ct = $res->content();
    while ($ct =~ /http:\/\/(.*)<br/g) {
        $webtarget[$weblist] = $1;
        ++$weblist;
        ++$step;
    }
    print $step." list for stephack.com\n";
    #--------------------Stephack.com----------------------#
    #--------------------Robtex.com------------------------#
    # Domains appear as /dns/<name>.html anchors; strip common prefixes
    # (mail., *., a bare leading dot, www.) so variants collapse together.
    print "Robtex.com <---------------------------------------\n";
    $res = $ua->get('http://www.robtex.com/ip/'.$addr.'.html');
    $ct = $res->content();
    while ($ct =~ /<span id=\"dns\d+"><a href=\"\/dns\/(.+?)\.html\"/g) {
        $check = $1;
        $check =~ s/^mail\.//;
        $check =~ s/^\*\.//;
        $check =~ s/^\.//;
        $check =~ s/^www\.//;
        $webtarget[$weblist] = $check;
        ++$weblist;
        ++$rob;
    }
    print $rob." list for robtex.com\n";
    #---------------------Robtex.com------------------------#
    #---------------------sameip.org------------------------#
    # The domain is the anchor text of each nofollow "visit ..." link.
    print "sameip.org <---------------------------------------\n";
    $res = $ua->get('http://www.sameip.org/ip/'.$addr);
    $ct = $res->content();
    while ($ct =~ /<a href=\"http:\/\/www\..+?\" rel=\'nofollow\' title=\"visit .+?\" target=\"_blank\">(.+?)<\/a>/g) {
        $webtarget[$weblist] = $1;
        ++$weblist;
        ++$same;
    }
    print $same." list for sameip.org\n";
    #---------------------sameip.org------------------------#
    #---------------------yougetsignal.com------------------------#
    # JSON endpoint; each domain is the first element of a ["name", ...] pair.
    print "yougetsignal.com <---------------------------------\n";
    my $res = $ua->post('http://www.yougetsignal.com/tools/web-sites-on-web-server/php/get-web-sites-on-web-server-json-data.php', {
        remoteAddress => $addr,
    });
    $ct = $res->content();
    if ($ct !~ /Daily reverse IP check limit reached/g) {
        while ($ct =~ /\[\"(.+?)\"\,/g) {
            $checky = $1;
            $checky =~ s/^www\.//;
            $webtarget[$weblist] = $checky;
            ++$weblist;
            ++$youg;
        }
        print $youg." list for yougetsignal.com\n";
    } else { print 'Daily reverse IP check limit reached'; }
    #---------------------yougetsignal.com------------------------#
    # De-duplicate (dul() returns hash keys, i.e. unspecified order) and sort.
    @webtarget = dul(@webtarget);
    @webtarget = sort (@webtarget);
    print "\n"."-------------------------------------------------------------------------------"."\n";
    foreach my $list (@webtarget) {
        # if ($list !~ /^www\./) { $list = 'www.'.$list; }
        # if ($list !~ /^http:\/\//) { $list = 'http://'.$list; }
        print $list."\n";
        # NOTE(review): bareword handle, 2-arg open, no error check, and the
        # file is re-opened once per domain; flagged, left untouched here.
        open (weblist, ">>$website");
        print weblist "$list\r\n";
        close(weblist);
    }
    # Trailer line with the total count, appended to the same output file.
    $numweb = scalar(@webtarget);
    open (weblist, ">>$website");
    print weblist "\r\nTotal results of $addr is : $numweb site (sorted)\r\nPowered By s4m3l0\r\n";
    close(weblist);
    print "-------------------------------------------------------------------------------"."\n";
    print "Total ".scalar(@webtarget)." website\n";
}
# Return the unique elements of the argument list.  Like the original
# hash-keys one-liner, the result order is unspecified (hash order);
# callers sort afterwards.
sub dul {
    my %seen;
    @seen{@_} = ();
    return keys %seen;
}
#crawling with signed cookie
# --- Threaded-crawler configuration (top-level) -----------------------------
# A second, cookie-carrying user agent used only by ProcessUrl() below.
my $cookie_jar = './cookie.lwp';   # NOTE(review): unused; path is repeated literally in cookie_jar() below.
my $tmp_ua = LWP::UserAgent->new;
$tmp_ua->timeout(15);
$tmp_ua->protocols_allowed(['http','https']);
$tmp_ua->agent("Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 2.0.50727;.NET CLR 3.0.04506.30; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)");
$tmp_ua->cookie_jar(HTTP::Cookies->new('file'=>'./cookie.lwp','autosave'=>1));
# Also follow redirects on POST (default is GET/HEAD only).
push @{$tmp_ua->requests_redirectable}, 'POST';
my $max_threads = 15;              # upper bound on concurrent worker threads
#my $base_url = $ARGV[0] || 'http://china.ccsafe.com/';
my $base_url = $ARGV[0] || 'http://ccsafe.com';
# Crawl scope: ProcessUrl keeps links whose host matches /$host/ (substring).
my $host = URI::URL->new($base_url)->host;
print $host."\n";
my $queue = Thread::Queue->new( );                       # URLs awaiting a worker
my $semaphore = Thread::Semaphore->new( $max_threads );  # caps live workers
my $mutex = Thread::Semaphore->new( 1 );                 # guards $filter and @tmp_url
#my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime;
#my $logfile = "crawler".($year+1900).($mon+1).$mday.$hour.$min.$sec.".log";
#open(BANLOG,">>$logfile") or die("can't open logfile:$!\n");
# Bloom filter of already-seen URL "shapes" (see EscapeUrl), shared across threads.
my $filter = shared_clone( Bloom::Filter->new(capacity => 1000000, error_rate => 0.001) );
# Seed both the work queue and the seen-filter with the start URL.
$queue->enqueue( $base_url );
$filter->add( $base_url );
# Every URL enqueued so far, consulted by DiffUrl in ProcessUrl.
# NOTE(review): not declared :shared — each thread appears to get its own
# copy at spawn time, so cross-thread dedupe via this list is partial; confirm intended.
my @tmp_url = ();
push(@tmp_url,$base_url);
# --- Scheduler loop ---------------------------------------------------------
# Repeatedly: reap finished workers; stop when the queue is empty AND no
# worker is still running; otherwise spawn a worker (bounded by $semaphore).
while( 1 ) {
    # join all threads which can be joined
    #my $joined = 0;
    foreach ( threads->list(threads::joinable) )
    {
        #$joined ++;
        $_->join( );
    }
    #print $joined, " joinedn";
    # if there are no url need process.
    my $item = $queue->pending();
    if( $item == 0 )
    {
        # threads->list in scalar context: count of still-running workers.
        my $active = threads->list(threads::running);
        # there are no active thread, we finish the job
        if( $active == 0 )
        {
            print "All done!\n";
            last;
        }
        # we will get some more url if there are some active threads, just wait for them
        else
        {
            #print "[MAIN] 0 URL, but $active active threadn";
            sleep 1;
            next;
        }
    }
    # if there are some url need process
    #print "[MAIN] $item URLn";
    $semaphore->down;   # blocks once $max_threads workers are alive
    #print "[MAIN]Create thread.n";
    threads->create( \&ProcessUrl );
}
# join all threads which can be joined
# Final reap: wait for any workers that finished after the loop exited.
foreach ( threads->list() )
{
    $_->join( );
}
sub ProcessUrl
{
    # Worker-thread body: drain the shared $queue, scrape each page's <a>
    # links, and enqueue in-scope links not yet seen by the Bloom filter.
    # Exits — releasing one $semaphore slot — as soon as the queue is
    # momentarily empty; the scheduler loop spawns replacements as needed.
    my $scraper = scraper
    {
        # Collect the href attribute of every anchor into ->{links}.
        process '//a', 'links[]' => '@href';
    };
    my $res;
    my $link;
    # dequeue_nb returns undef when the queue is empty -> loop ends.
    while( my $url = $queue->dequeue_nb() )
    {
        # Network/parse failures must not kill the thread: trap and skip.
        eval
        {
            $scraper->user_agent($tmp_ua);
            $res = $scraper->scrape( URI->new($url) )->{'links'};
        };
        if( $@ )
        {
            warn "$@\n";
            next;
        }
        next if (! defined $res );
        #print "there are ".scalar(threads->list(threads::running))." threads, ", $queue->pending(), " urls need process.n";
        foreach( @{$res} )
        {
            # Resolve the href relative to the page it came from.
            $link = $_->as_string;
            $link = URI::URL->new($link, $url);
            # not http and not https?
            next if( $link->scheme ne 'http' && $link->scheme ne 'https' );
            # another domain?
            #next if( $link->host ne $host );
            #search for the sub domain
            # NOTE(review): unanchored, unquoted match — any host merely
            # *containing* $host passes, and regex metachars in $host apply.
            next if(!($link->host =~ /$host/));
            $link = $link->abs->as_string;
            # Drop the fragment ("#...") part, if any.
            if( $link =~ /(.*?)#(.*)/ )
            {
                $link = $1;
            }
            # Skip binary-ish resources by extension.  NOTE(review): the
            # leading "." is unescaped, so it matches any char before the suffix.
            next if( $link =~ /.(jpg|png|bmp|mp3|wma|wmv|gz|zip|rar|iso|pdf)$/i );
            print "test:$link\n";
            #EscapeUrl,skip query form values
            # Canonical "shape" (query values stripped) keys the Bloom filter,
            # so /p?id=1 and /p?id=2 hash identically.
            my $tmp_link = &EscapeUrl($link);
            #print "Escape:".$tmp_link."\n";
            # Critical section: $filter and @tmp_url are shared with other workers.
            $mutex->down();
            my $tmp_mark = 0;   # set to 2 when a numeric-twin URL is already queued
            #print "test start:$link\n";
            if( ! $filter->check($tmp_link) )
            {
                #print "Test filter ok:$tmp_link\n";
                #DiffUrl,diff $link from queue with number
                # Second dedupe layer: skip URLs that differ from an already-
                # enqueued one only by digits (e.g. page=1 vs page=2).
                foreach(@tmp_url)
                {
                    #print "Test Queue:".$tmpurl."\n";
                    #print "test-1:$_\ntest-2:$tmp_link\n";
                    if(&DiffUrl($_,$link))
                    {
                        $tmp_mark = 2;
                        last;
                    }
                }
                if( $tmp_mark != 2 )
                {
                    # Genuinely new: queue it, remember its shape and full form.
                    $queue->enqueue($link);
                    #print "add queue:$link\n";
                    $filter->add($tmp_link);
                    print "add filter:$tmp_link\n";
                    #print BANLOG $filter->key_count(), " ", $link, "\n";
                    #print $filter->key_count(), " ", $link, "\n";
                    push(@tmp_url,$link);
                }
                else
                {
                    print "pass:$link\n";
                }
            }
            #print "pass:$link\n";
            $mutex->up();
            undef $link;
        }
        undef $res;
    }
    undef $scraper;
    # Free our concurrency slot so the scheduler may start another worker.
    $semaphore->up( );
}
# Canonicalise a URL by stripping the *values* from its query string while
# keeping scheme, authority, path and the bare parameter names, e.g.
#   http://h/p?a=1&b=2  ->  http://h/p?a=&b=
# This "shape" is what the crawler's Bloom filter deduplicates on.
sub EscapeUrl
{
    my $original = shift;

    # Rebuild the URL without query or fragment.
    my ($scheme, $authority, $path, $query, $fragment) = uri_split($original);
    my $canonical = uri_join($scheme, $authority, $path);

    # query_form() flattens the query into (name, value, name, value, ...);
    # keep only the names, preserving their order and duplicates.
    my $uri = URI->new($original);
    my @form = $uri->query_form();
    my @names;
    while (@form) {
        my ($name, $value) = splice(@form, 0, 2);
        push @names, $name;
    }

    # Re-attach "name=&name=&...&name=" when there was any parameter at all.
    if (@names) {
        $canonical .= '?' . join('=&', @names) . '=';
    }

    undef $uri;
    return $canonical;
}
# Heuristic twin test: do the two URLs differ *only* by a run of digits
# (e.g. page=1 vs page=2, /news3 vs /news4)?  String::Diff::diff marks the
# deleted span as [..] and the inserted span as {..}; when both marked
# spans are purely numeric, the candidate is treated as a duplicate page.
# Returns 1 (numeric-only difference) or 0.
sub DiffUrl
{
    my ($known_url, $candidate_url) = @_;

    # Compare the query-value-stripped shapes, not the raw URLs.
    my $known_shape     = EscapeUrl($known_url);
    my $candidate_shape = EscapeUrl($candidate_url);

    my ($marked_known, $marked_candidate) =
        String::Diff::diff($known_shape, $candidate_shape);
    #my($old,$new) = String::Diff::diff($urlold,$urlnew);

    # Numeric-only deletion on one side plus numeric-only insertion on the
    # other means the URLs differ just by a number.
    if (($marked_known =~ m/(\[\d+\])/i) && ($marked_candidate =~ m/{\d+}/i))
    {
        return 1;
    }
    else
    {
        return 0;
    }
}
sub sqli {
    # Fetch the site named by the global $urlsqli (prefixing "http://www."
    # when no scheme is present) and, if the page body mentions ".php",
    # print every <a href> target that itself contains ".php".
    # NOTE(review): despite its name, in this paste the sub only *lists*
    # candidate PHP links — the actual injection probe is the disabled
    # =bin..=cut POD block further down, which never runs.
    # NOTE(review): $urlsqli/$res/$ct/$checkphp are undeclared globals;
    # under `use strict` this does not compile.  Flagged, not fixed.
    if($urlsqli !~/^http:\/\//) {
        $urlsqli = 'http://www.' . $urlsqli;
    }
    $res = $ua->get($urlsqli);
    $ct = $res->content();
    if ($ct =~ /\.php/g) {
        print "--------------------->" . $urlsqli . "<---------------------\n";
        # Enumerate every anchor and echo those that look like PHP pages.
        while ($ct =~ /<a href=\"(.*?)\"/g) {
            $checkphp = $1;
            if ($checkphp =~ /\.php/g) {
                print $checkphp."\n";
            }
        }
        print "--------------------->"."End"."<---------------------\n";
    }
}
- =bin foreach $path(@imgs) {
- chomp($path);
- $webcl=$path;
- $webcl = trim($webcl);
- $url = $webcl;
- if($url=~/facebook/ | $url=~/mailto/) {
- next loop;
- }
- if($url=~/=/) { } else { next loop; }
- $incount=$incount+1;
- if($incount=~/36/) {
- next loop2;
- }
- if ($url =~ m/=/sim) {
- $url =~ s/=/='/g;
- }
- print "\n $url";
- my $req = HTTP::Request->new( GET => $url );
- my $response = $ua->request( $req );
- if( $response->content =~ /SQL/ || $response->content =~ /\/var\/www\//) {
- open OUTFILE, ">>", "scanned.txt" or die $!;
- print OUTFILE "$url \n";
- if(($count+1)%2) {
- print HTML "\t\t\n<tr><td><a href=\"$url\"><font color=\"#66FF66\"><strong>$url</strong></font></a> \n </br> \n </br> \n";
- } else {
- print HTML "\t\t\n<tr><td><a href=\"$url\"><font color=\"#66FF66\"><strong>$url</strong></font></a> \n </br> \n </br> \n";
- }
- $count++;
- print "\n [+]", $url, "(",$count,"/",$incount,")";
- close OUTFILE;
- }
- }
- print "\n\n $count vulnerable links found in $linds. Extracted link count: $linkdo \n";
- }
- =cut
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement