

Gdns

Tags: windows, bug, dns, proxy, hash, stealth

#1 Guest_DiabloHorn_* (Guests)

Posted 17 February 2008 - 07:22 AM

Use Google's index to find stuff like:
admin.domain.com
proxy.domain.com

This can be used when you want absolute stealth:
- no zone transfer
- no DNS queries

Yes, this will NOT grab subdomains that Google hasn't indexed... but it's a start :)
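
For reference, the script below just walks the normal Google result pages for a site: query. The first request and the "next page" links it follows look roughly like this (kd-team.com is only the example domain from the usage text):

http://www.google.com/search?hl=en&q=site%3Akd-team.com
http://www.google.com/search?q=site:kd-team.com&hl=en&start=10&sa=N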

# gdns.pl 
# Index all subdomains from a domain without using zone transfer or dns resolving.
# Intended use for this script is to enumerate all subdomains for a given domain.
# Author:  DiabloHorn (KD-Team)
# Greetz: paradox, pointdxtr
# Date: 16-02-2008
# Version: 0.1

###TODO###
# implement rotating proxy support (rough sketch after the script)
# implement proper spidering



use LWP::UserAgent;
use HTML::LinkExtor;
use URI::URL;

$numArgs = $#ARGV + 1;

if($numArgs != 1){
	print "gdns.pl <base domain>\n";
	print "Example: gdns.pl kd-team.com\n";
	print "Should find all subdomains indexed by google.";
	exit(1);
}
#intentionally not using quotemeta, because it messes up the search query
$searchitem = $ARGV[0];

#if you change this....change the regexes.
my $baseSEngine = "http://www.google.com";

#start url for searching
$url = URI->new("$baseSEngine/search?hl=en&q=site%3A$searchitem");
$ua = LWP::UserAgent->new;
$ua->agent('Opera/9.25 (Windows NT 6.0; U; en)'); #this should help us a little to fool google.

print "[*] starting subdomain search on $searchitem\n";

#hash containing all found sub domains
my %allurls = ();
#hash containing all the "next" urls from google
my %nexturls = ();
my $i = 0;

#callback for each request to parse the page
sub callback {
 my($tag, %attr) = @_;
 #for this poc we are only interested in the <a href> tags
 return if $tag ne 'a';
 my @links = values %attr;
 foreach $link(@links){
	#extract all urls that contain the base domain
	#allow hyphens in the subdomain labels as well
	if($link =~ m!(^(http://|https://|ftp://|irc://)(([\w-]+)(\.+))*$searchitem)!io){
		if (!exists $allurls{$1}){
			$allurls{$1} = $1;
			print "$1\n";
		}
	}
	
	#extract the google next urls
	if($link =~ m!/search\?q=site:$searchitem&hl=\w+&start=\d+&sa=\w!io){
		if (!exists $nexturls{$link}){
			$nexturls{$link} = $link;
		}
	}
 }
 
}

#setup the callback
$p = HTML::LinkExtor->new(\&callback);

# Request document and parse it as it arrives
$res = $ua->request(HTTP::Request->new(GET => $url),sub {$p->parse($_[0])});

#for the time being, assume at most 100 result pages.
while($i <= 100){
	my $visited = 0;
	foreach $nurl(sort keys(%nexturls)){
		my $value = $nexturls{$nurl};
		#prevent parsing pages twice
		if($value ne "visited"){
			my $temp = URI->new($baseSEngine.$value);
			#you can comment this out if you only want clean finds.
			print "[*] searching next page $temp\n";
			$res = $ua->request(HTTP::Request->new(GET => $temp),sub {$p->parse($_[0])});
			$nexturls{$nurl} = "visited";
			$visited++;
			sleep 3; #try and prevent getting blocked by google
		}
	}
	#stop once every known result page has been visited
	last if $visited == 0;
	$i++;
}
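
An example run would look something like this (the hostnames in the output are made up, just to show the format):

$ perl gdns.pl kd-team.com
[*] starting subdomain search on kd-team.com
http://www.kd-team.com
http://forum.kd-team.com
[*] searching next page http://www.google.com/search?q=site:kd-team.com&hl=en&start=10&sa=N
...

About the rotating proxy item on the TODO list, here is a rough sketch of how it could be bolted on with LWP::UserAgent's proxy() method. The proxy addresses are placeholders, use your own list:

#rough sketch for the rotating proxy TODO, not wired into the script above
#the addresses below are placeholders
my @proxies = ('http://127.0.0.1:8118/', 'http://127.0.0.1:8123/');
my $pindex  = 0;

sub rotate_proxy {
	my ($agent) = @_;
	#point the useragent at the next proxy in the list
	$agent->proxy(['http', 'https'], $proxies[$pindex]);
	$pindex = ($pindex + 1) % scalar(@proxies);
}

#call rotate_proxy($ua); right before each $ua->request(...) in the script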





