Here is a script that takes a URL (or a list of URLs) and finds the real server behind each one. It follows 301, 302, and 303 redirects (and pages that answer 200 with a meta refresh), and drops any 401, 402, 403, or 404 pages.
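The heavy lifting is done by urllib2: urlopen() follows 3xx redirects on its own and raises HTTPError for 4xx responses, so response.geturl() ends up holding the final landing URL. A minimal sketch of that idea (the hostname is just a placeholder):

import urllib2
response = urllib2.urlopen('http://www.example.com/')
print response.geturl()   # the URL after any redirects have been followed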
Below is the script:
#!/usr/bin/python
import sys, argparse
import httplib
import urllib2, cookielib, ConfigParser, os
from re import search
import re
from urllib2 import Request, urlopen, URLError, HTTPError
import socket
desc = 'To get the REAL URL from FQDN or list of FQDN.'
vers = '%(prog)s 0.2'
debug = 1
finallist = []
def web_check_on(host, uri):
    # Request the given path (default '/') on the host.
    server = 'http://' + host + uri
    headers = {'User-Agent': 'RedTeam (Jet)'}
    req = Request(server, None, headers)
    try:
        response = urlopen(req)
    except HTTPError as e:
        # The server couldn't fulfill the request (e.g. 401/402/403/404).
        return "[FAILED." + str(e.code) + "] " + server
    except URLError as e:
        if hasattr(e, 'reason'):
            # We failed to reach a server at all.
            return "[FAILED]"
        elif hasattr(e, 'code'):
            return "[FAILED]" + str(e.code)
    else:
        # Everything is fine: geturl() gives the URL after any redirects.
        final_server = response.geturl()
        response.close()
        return final_server
def getipaddrs(hostname):
    # Resolve the hostname to all of its IP addresses.
    result = socket.getaddrinfo(hostname, None, 0, socket.SOCK_STREAM)
    return [x[4][0] for x in result]
def net_check_on(hostname):
    # Check DNS resolution first, then whether TCP port 80 answers.
    try:
        hostips = getipaddrs(hostname)
    except socket.gaierror:
        return "[FAILED.dns]"
    tcpport = 80
    for hostip in hostips:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(0.2)
        port = sock.connect_ex((hostip, tcpport))
        sock.close()
        if port == 0:
            return hostip  # return the 1st IP that answers, for web_check_on
    return "[FAILED.tcpport] " + ','.join(hostips)

def check_unique(seq):
    # Preserve order while dropping duplicate URLs.
    seen = set()
    seen_add = seen.add
    return [x for x in seq if not (x in seen or seen_add(x))]
if __name__ == "__main__":
    #parser = argparse.ArgumentParser(description=desc, version=vers)
    parser = argparse.ArgumentParser(description=desc)

    group1 = parser.add_argument_group('Input FQDN (compulsory)')
    mgroup = group1.add_mutually_exclusive_group(required=True)
    mgroup.add_argument('-d', dest='fqdn', help='FQDN')
    mgroup.add_argument('-f', dest='ifile', help='Input file with list of FQDN')

    group2 = parser.add_argument_group('Input URI (optional)')
    group2.add_argument('-p', action='store', dest='uri', default='/', help='wwwroot path. [/]')
    group2.add_argument('-u', action='store_true', dest='uniqueonly', default=True, help='Print summary that shows unique URLs only, in x/y/z format (x=unique, y=connected, z=total).')
    group2.add_argument('-D', action='store_true', dest='debug', default=False, help='Debug mode.')
    group2.add_argument('--version', action='version', version='%(prog)s 1.0')
    args = parser.parse_args()

    debug = args.debug
    host = args.fqdn
    uri = args.uri

    if args.fqdn:
        fullurl = '"' + host + uri + '"'
        realserver = web_check_on(host, uri)
        if debug:
            print fullurl, realserver
        else:
            print realserver
    if args.ifile:
        num = 0
        with open(args.ifile, 'r') as f:
            lines = [line.rstrip('\n') for line in f]  # strip newline chars
        for line in lines:
            num = num + 1
            # Only do the HTTP check if DNS resolves and port 80 answers.
            line2 = net_check_on(line)
            if "FAILED" not in line2:
                realserver = web_check_on(line, uri)
                finallist.append(realserver)
            else:
                realserver = ""
            if debug:
                print line + uri + " [" + line2 + "] ", realserver
        if args.uniqueonly:
            finallist2 = check_unique(finallist)
            print '\n====='
            print '\n'.join(finallist2)
            print '----' + str(len(finallist2)) + '/' + str(len(finallist)) + '/' + str(num) + '----'
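To use it, save the script (the name realurl.py below is just an example) and pass either a single FQDN with -d or a file with one FQDN per line with -f; -D turns on debug output and -p changes the path that is requested:

python realurl.py -d www.example.com
python realurl.py -f hosts.txt -D

With a file as input, the summary at the end lists the unique final URLs followed by the x/y/z counters (unique/connected/total).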