diff --git a/.gitignore b/.gitignore
index c0de4e10..25605ca9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+.DS_Store
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/datasploit.py b/datasploit.py
index 813e11c9..325dc5c1 100755
--- a/datasploit.py
+++ b/datasploit.py
@@ -11,9 +11,10 @@
 parser = optparse.OptionParser()
 parser.add_option('-a', '--active', action="store", dest="domain", help="Launches Active Scans (work in progress)", default="spam")
-parser.add_option('-o', '--output', action="store", dest="output", help="Save output in either JSON or HTML")
+parser.add_option('--json', action="store_true", dest="output", help="Save output in JSON")
+parser.add_option("-f", "--file", dest="filename", help="File listing of domains, IP addresses, emails, and/or usernames", default=None, metavar="FILE")
 options, args = parser.parse_args()
-print options, args
+


 def printart():
     print "\t "
@@ -29,8 +30,13 @@ def printart():


 def main(user_input, output = None):
-    printart()
-    print "User Input: %s" % user_input
+    if not options.filename:
+        printart()
+        print "User Input: %s" % user_input
+    else:
+        print "============================================================="
+        print "User Input: %s" % user_input
+        print "============================================================="

     if re.match('[^@]+@[^@]+\.[^@]+', user_input):
         print "Looks like an EMAIL, running emailOsint...\n"
@@ -47,9 +53,21 @@


 if __name__ == "__main__":
-    try:
-        user_input = args[0]
-    except:
-        print "\n[-] Invalid Input. Exiting now..\n"
-        sys.exit(0)
-    main(user_input, options.output)
+    output = "JSON" if options.output else None
+    if options.filename:
+        printart()
+        with open(options.filename, "r") as infile:
+            for line in infile:
+                try:
+                    user_input = line.replace("\r","").replace("\n","").strip()
+                except:
+                    print "\n[-] Invalid Input. Exiting now..\n"
+                    sys.exit(0)
+                main(user_input, output)
+    else:
+        try:
+            user_input = sys.argv[1]
+        except:
+            print "\n[-] Invalid Input. Exiting now..\n"
+            sys.exit(0)
+        main(user_input, output)
diff --git a/domain/domain_GooglePDF.py b/domain/domain_GooglePDF.py
index 27271ab0..735cbf49 100755
--- a/domain/domain_GooglePDF.py
+++ b/domain/domain_GooglePDF.py
@@ -24,6 +24,7 @@ def googlesearch(query, ext):
           'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
           'Accept-Encoding': 'none',
           'Accept-Language': 'en-US,en;q=0.8',
+          'Referer': 'www.datasploit.info/hello',
           'Connection': 'keep-alive'
     }
     req = urllib2.Request(getrequrl, headers=hdr)
diff --git a/domain/domain_censys.py b/domain/domain_censys.py
index 53e30852..080f2f9a 100755
--- a/domain/domain_censys.py
+++ b/domain/domain_censys.py
@@ -3,7 +3,7 @@
 import re, sys, json, time, requests
 import config as cfg

-ENABLED = False
+ENABLED = True


 def censys_search(domain):
@@ -32,6 +32,7 @@
             temp_dict["ip"] = ip
             temp_dict["protocols"] = protoList

+            print temp_dict
             if '80' in protoList:
                 new_dict = view(ip, temp_dict)
diff --git a/domain/domain_dnsrecords.py b/domain/domain_dnsrecords.py
index a8a763fd..74c38985 100755
--- a/domain/domain_dnsrecords.py
+++ b/domain/domain_dnsrecords.py
@@ -33,7 +33,19 @@
     dict_dns_record['Name Server Records'] = fetch_dns_records(domain, "NS")
     dict_dns_record['CNAME Records'] = fetch_dns_records(domain, "CNAME")
     dict_dns_record['AAAA Records'] = fetch_dns_records(domain, "AAAA")
-    return dict_dns_record
+    return sanitize_object(dict_dns_record)
+
+
+def sanitize_object(obj):
+    # Ensure that all returned data is JSON serializable
+    tmpList = []
+    for key in obj:
+        if isinstance(obj[key], list):
+            for o in obj[key]:
+                tmpList.append('%s' % o)
+            obj[key] = tmpList[:]
+            del tmpList[:]
+    return obj


 def banner():
diff --git a/domain/domain_subdomains.py b/domain/domain_subdomains.py
index 18cb4429..9651722a 100755
--- a/domain/domain_subdomains.py
+++ b/domain/domain_subdomains.py
@@ -99,13 +99,17 @@
     next = ''
     while True:
         url = 'https://www.google.com/transparencyreport/jsonp/ct/search?domain=%s&incl_exp=true&incl_sub=true&token=%s&c=' % (domain,next)
-        obj = json.loads('('.join(requests.get(url).text.split('(')[1:])[:-3])
-        for x in obj['results']:
-            if x['subject'].endswith(domain):
-                subdomain_list = check_and_append_subdomains(x['subject'], subdomain_list)
-        if 'nextPageToken' not in obj:
-            break
-        next = obj['nextPageToken']
+        res = requests.get(url)
+        if res.status_code == 200:
+            obj = json.loads('('.join(res.text.split('(')[1:])[:-3])
+            for x in obj['results']:
+                if x['subject'].endswith(domain):
+                    subdomain_list = check_and_append_subdomains(x['subject'], subdomain_list)
+            if 'nextPageToken' not in obj:
+                break
+            next = obj['nextPageToken']
+        else:
+            return None
     return subdomain_list
diff --git a/domain/domain_whois.py b/domain/domain_whois.py
index a3a8ef9f..4902f85c 100755
--- a/domain/domain_whois.py
+++ b/domain/domain_whois.py
@@ -23,12 +23,24 @@
 def banner():
     print colored(style.BOLD + '---> Finding Whois Information.' + style.END, 'blue')


+def pretty(d, indent=0):
+    for key, value in d.items():
+        print('\t' * indent + str(key)+":")
+        if isinstance(value, dict):
+            pretty(value, indent+1)
+        elif isinstance(value,list):
+            for v in value:
+                print('\t' * (indent+1) + str(v))
+        else:
+            print('\t' * (indent+1) + str(value))
+
+
 def main(domain):
     return whoisnew(domain)


 def output(data, domain=""):
-    print data
+    pretty(data)
     print "\n-----------------------------\n"
diff --git a/emails/email_basic_checks.py b/emails/email_basic_checks.py
index 64b2d740..1f4a7268 100755
--- a/emails/email_basic_checks.py
+++ b/emails/email_basic_checks.py
@@ -8,7 +8,7 @@
 import re
 from termcolor import colored

-ENABLED = True
+ENABLED = False


 class style:
diff --git a/emails/email_clearbit.py b/emails/email_clearbit.py
index 5e726d7a..86f4e063 100755
--- a/emails/email_clearbit.py
+++ b/emails/email_clearbit.py
@@ -8,7 +8,7 @@ from termcolor import colored


 # Control whether the module is enabled or not
-ENABLED = False
+ENABLED = True


 class style:
diff --git a/emails/email_haveibeenpwned.py b/emails/email_haveibeenpwned.py
index bd8288a8..0796faa2 100755
--- a/emails/email_haveibeenpwned.py
+++ b/emails/email_haveibeenpwned.py
@@ -40,7 +40,7 @@ def output(data, email=""):
         for x in data:
             print "Title: %s\nBreachDate: %s\nPwnCount: %s\nDescription: %s\nDataClasses: %s\n" % (
                 x.get('Title', ''), x.get('BreachDate', ''), x.get('PwnCount', ''), x.get('Description', ''),
-                ", ".join(x.get('DataClasses', [])))
+                ", ".join(map(lambda y: y.encode('utf-8',"replace") ,x.get('DataClasses',[]))))
     else:
         print colored("[-] No breach status found.", 'red')
diff --git a/ip/ip_shodan.py b/ip/ip_shodan.py
index eb939fb9..f2181938 100755
--- a/ip/ip_shodan.py
+++ b/ip/ip_shodan.py
@@ -51,7 +51,8 @@ def output(data, ip=""):
             print '\tTitle: %s' % x['http']['title']
             print '\tRobots: %s' % x['http']['robots']
             print '\tServer: %s' % x['http']['server']
-            print '\tComponents: %s' % x['http']['components']
+            if "components" in x['http']:
+                print '\tComponents: %s' % x['http']['components']
             print '\tSitemap: %s' % x['http']['sitemap']
         if 'ssh' in x.keys():
             print colored(style.BOLD + '[+] HTTP port present:\t' + style.END, 'green')
diff --git a/ip/ip_virustotal.py b/ip/ip_virustotal.py
index 84978e88..bfbe382e 100644
--- a/ip/ip_virustotal.py
+++ b/ip/ip_virustotal.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python

 import base
+import time
 import config as cfg
 import sys
 import requests
@@ -23,12 +24,18 @@ def banner():

 def main(ip):
     # Use the ip variable to do some stuff and return the data
     if cfg.virustotal_public_api != "":
-        print ip
         api = cfg.virustotal_public_api
         params = "{'ip': '%s', 'apikey': '%s'}" % (ip, api)
         url = "http://www.virustotal.com/vtapi/v2/ip-address/report?ip=%s&apikey=%s" % (ip, api)
         req = requests.get(url, params)
-        return req
+        while req.text == "":
+            req = requests.get(url, params)
+            print("Request failed, pausing for 10 seconds")
+            time.sleep(10)
+        data = json.loads(req.text)
+        data["response code"] = req.status_code
+        data['raise for status'] = req.raise_for_status()
+        return data
     else:
         return [False, "INVALID_API"]
@@ -38,10 +45,17 @@ def output(data, ip=""):
     if type(data) == list and data[1] == "INVALID_API":
         print colored(
             style.BOLD + '\n[-] VirusTotal API Key not configured. Skipping VirusTotal Search.\nPlease refer to http://datasploit.readthedocs.io/en/latest/apiGeneration/.\n' + style.END, 'red')
-    else:
-        for i in data:
-            print i
-        print ""
+        return None
+
+    if data["response code"] != 200:
+        print "Response Code: " + str(data["response code"])
+        data['raise for status']
+        print json.dumps(data, indent=4, sort_keys=True)
+        print ""
+        return None
+
+    print json.dumps(data, indent=4, sort_keys=True)
+    print ""


 if __name__ == "__main__":
diff --git a/osint_runner.py b/osint_runner.py
index 1b1458d5..e7c17d96 100755
--- a/osint_runner.py
+++ b/osint_runner.py
@@ -22,6 +22,7 @@ def run(component, module_dir, m_input, output = None):
         active_modules[os.path.basename(os.path.splitext(i)[0])] = x

     json_output = {}
+    txt_output = ""

     for name, x in active_modules.iteritems():
         if "banner" in dir(x):
@@ -30,7 +31,7 @@
         if data:
             x.output(data, m_input)
             if output and str(output).upper() == "JSON":
-                json_output[name] = data
+                json_output[name] = data

     if output and str(output).upper() == "JSON":
         timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
@@ -42,4 +43,5 @@
         fh.write(json.dumps(json_output, default=json_util.default, indent = 4))
         fh.close()
         print "JSON report saved to %s/%s" % (filepath, filename)
+
diff --git a/username/username_gituserdetails.py b/username/username_gituserdetails.py
index 1d44553a..3e2f809d 100755
--- a/username/username_gituserdetails.py
+++ b/username/username_gituserdetails.py
@@ -26,7 +26,7 @@ def main(username):

 def output(data, username=""):
     if "message" in data and data["message"] == "Not Found":
-        print 'Git account do not exist on this username.'
+        print 'Git account does not exist for this username.'
     else:
         print "Login: %s" % data['login']
         print "avatar_url: %s" % data['avatar_url']
diff --git a/username/username_keybase.py b/username/username_keybase.py
index 0bdeb47e..3981bdb3 100644
--- a/username/username_keybase.py
+++ b/username/username_keybase.py
@@ -24,9 +24,10 @@ def main(username):
     url = "https://keybase.io/_/api/1.0/user/lookup.json?usernames=%s" %username
     req = requests.get(url)
     data = json.loads(req.text)
-    if data['them'][0] is not None:
-        dict_them = data['them'][0]
-        return dict_them
+    if 'them' in data:
+        if data['them'][0] is not None:
+            dict_them = data['them'][0]
+            return dict_them
     else:
         dict_them = []
         return dict_them
diff --git a/username/username_twitterdetails.py b/username/username_twitterdetails.py
index f3eeee44..a992d534 100755
--- a/username/username_twitterdetails.py
+++ b/username/username_twitterdetails.py
@@ -30,11 +30,15 @@ def twitterdetails(username):
     # preparing auth
     api = tweepy.API(auth)

-    f = open("temptweets.txt", "w+")
-    # writing tweets to temp file- last 1000
-    for tweet in tweepy.Cursor(api.user_timeline, id=username).items(1000):
-        f.write(tweet.text.encode("utf-8"))
-        f.write("\n")
+    try:
+        f = open("temptweets.txt", "w+")
+        # writing tweets to temp file- last 1000
+        for tweet in tweepy.Cursor(api.user_timeline, id=username).items(1000):
+            f.write(tweet.text.encode("utf-8"))
+            f.write("\n")
+    except tweepy.TweepError as e:
+        print str(e)
+        return None, None

     # extracting hashtags
     f = open('temptweets.txt', 'r')
@@ -82,17 +86,20 @@ def output(data, username=""):
     if data:
         hashlist = data[0]
         userlist = data[1]
-        count = Counter(hashlist).most_common()
-        print "Top Hashtag Occurrence for user " + username + " based on last 1000 tweets"
-        for hash, cnt in count:
-            print "#" + hash + " : " + str(cnt)
-        print "\n"
-
-        # counting user occurrence
-        countu = Counter(userlist).most_common()
-        print "Top User Occurrence for user " + username + " based on last 1000 tweets"
-        for usr, cnt in countu:
-            print "@" + usr + " : " + str(cnt)
+
+        if hashlist:
+            count = Counter(hashlist).most_common()
+            print "Top Hashtag Occurrence for user " + username + " based on last 1000 tweets"
+            for hash, cnt in count:
+                print "#" + hash + " : " + str(cnt)
+            print "\n"
+
+        if userlist:
+            # counting user occurrence
+            countu = Counter(userlist).most_common()
+            print "Top User Occurrence for user " + username + " based on last 1000 tweets"
+            for usr, cnt in countu:
+                print "@" + usr + " : " + str(cnt)
     else:
         print "No Associated Twitter account found."