
Commit: refactoring

thewhiteh4t committed Oct 2, 2023
1 parent 13dd6ab commit ab0c7bf
Showing 5 changed files with 107 additions and 126 deletions.
24 changes: 12 additions & 12 deletions modules/crawler.py
@@ -132,25 +132,25 @@ async def robots(robo_url, base_url, data, output):
                        entry.find('Allow') == 0,
                        entry.find('Sitemap') == 0]):

-                    url = entry.split(': ')
-                    try:
-                        url = url[1]
-                        url = url.strip()
-                        tmp_url = url_filter(base_url, url)
-                        if tmp_url is not None:
-                            r_total.append(url_filter(base_url, url))
-                        if url.endswith('xml'):
-                            sm_total.append(url)
-                    except Exception as exc:
-                        log_writer(f'[crawler.robots] Exception = {exc}')
+                    url = entry.split(': ', 1)[1].strip()
+                    tmp_url = url_filter(base_url, url)
+
+                    if tmp_url is not None:
+                        r_total.append(url_filter(base_url, url))
+
+                    if url.endswith('xml'):
+                        sm_total.append(url)

            r_total = set(r_total)
            print(f'{G}{"[".rjust(8, ".")} {len(r_total)} ]')
            exporter(data, output, r_total, 'robots')

        elif r_sc == 404:
            print(f'{R}{"[".rjust(9, ".")} Not Found ]{W}')

        else:
            print(f'{R}{"[".rjust(9, ".")} {r_sc} ]{W}')

    except Exception as exc:
        print(f'\n{R}[-] Exception : {C}{exc}{W}')
        log_writer(f'[crawler.robots] Exception = {exc}')
@@ -163,7 +163,7 @@ async def sitemap(target_url, data, output):
        sm_rqst = requests.get(target_url, headers=user_agent, verify=False, timeout=10)
        sm_sc = sm_rqst.status_code
        if sm_sc == 200:
-            print(G + '['.rjust(8, '.') + ' Found ]' + W)
+            print(f'{G}{"[".rjust(8, ".")} Found ]{W}')
            print(f'{G}[+] {C}Extracting sitemap Links{W}', end='', flush=True)
            sm_page = sm_rqst.content
            sm_soup = bs4.BeautifulSoup(sm_page, 'xml')
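For reference, a standalone sketch of the directive-parsing pattern the robots() change moves to: split each matching robots.txt line once on ': ', keep the value, and strip it. The function name and sample input are illustrative, url_filter() is omitted, and the IndexError guard is added here for standalone robustness only; it is not part of the module.

def parse_robots_entries(text):
    urls, sitemaps = [], []
    for entry in text.splitlines():
        if entry.startswith(('Disallow', 'Allow', 'Sitemap')):
            try:
                url = entry.split(': ', 1)[1].strip()
            except IndexError:
                continue  # malformed line such as 'Disallow:' with no value
            urls.append(url)
            if url.endswith('xml'):
                sitemaps.append(url)
    return set(urls), set(sitemaps)


print(parse_robots_entries('User-agent: *\nDisallow: /admin\nSitemap: https://example.com/sitemap.xml'))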
58 changes: 29 additions & 29 deletions modules/export.py
@@ -1,5 +1,7 @@
#!/usr/bin/env python3

+import sys
+
R = '\033[31m' # red
G = '\033[32m' # green
C = '\033[36m' # cyan
@@ -9,45 +11,43 @@

def export(output, data):
-    if output['export']:
-        fname = output['file']
-        with open(fname, 'w') as outfile:
-            if output['format'] != 'txt':
-                print(f'{R}[-] {C}Invalid Output Format, Valid Formats : {W}txt')
-                exit()
-            elif output['format'] == 'txt':
-                fname = output['file']
-                with open(fname, 'w') as outfile:
-                    txt_export(data, outfile)
+    if output['format'] != 'txt':
+        print(f'{R}[-] {C}Invalid Output Format, Valid Formats : {W}txt')
+        sys.exit()
+
+    fname = output['file']
+    with open(fname, 'w') as outfile:
+        txt_export(data, outfile)


-def txt_unpack(outfile, key, val):
+def txt_unpack(outfile, val):
+    def write_item(item):
+        if isinstance(item, list):
+            outfile.write(f'{item[0]}\t{item[1]}\t\t{item[2]}\n')
+        else:
+            outfile.write(f'{item}\n')
+
    if isinstance(val, list):
        for item in val:
-            if isinstance(item, list):
-                outfile.write('{}\t{}\t\t{}\n'.format(*item))
-            else:
-                outfile.write(str(item) + '\n')
+            write_item(item)

    elif isinstance(val, dict):
-        for key, val in val.items():
-            if key != 'exported':
-                if isinstance(val, list):
-                    txt_unpack(outfile, key, val)
-                else:
-                    outfile.write(f'{key}: {val}\n')
+        for sub_key, sub_val in val.items():
+            if sub_key == 'exported':
+                continue
+            if isinstance(sub_val, list):
+                txt_unpack(outfile, sub_val)
+            else:
+                outfile.write(f'{sub_key}: {sub_val}\n')


def txt_export(data, outfile):
    for key, val in data.items():
        if key.startswith('module'):
            if not val['exported']:
-                txt_unpack(outfile, key, val)
+                txt_unpack(outfile, val)
                val['exported'] = True
        elif key.startswith('Type'):
-            outfile.write('\n' + data[key] + '\n')
-            outfile.write('=' * len(data[key]) + '\n\n')
+            outfile.write(f'\n{data[key]}\n')
+            outfile.write(f'{"=" * len(data[key])}\n\n')
        else:
-            outfile.write(str(key))
-            outfile.write(' : ')
-            outfile.write(str(val) + '\n')
+            outfile.write(f'{key}: {val}\n')
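A minimal, self-contained sketch of the recursive writer pattern txt_unpack() now follows: lists are written one item per line (nested lists as tab-separated columns), dicts recurse, and the bookkeeping key 'exported' is skipped. The function name and sample data below are illustrative, not part of the module.

import io


def dump(outfile, val):
    if isinstance(val, list):
        for item in val:
            if isinstance(item, list):
                outfile.write(f'{item[0]}\t{item[1]}\t\t{item[2]}\n')
            else:
                outfile.write(f'{item}\n')
    elif isinstance(val, dict):
        for key, sub_val in val.items():
            if key == 'exported':
                continue  # bookkeeping flag, not exported data
            if isinstance(sub_val, list):
                dump(outfile, sub_val)
            else:
                outfile.write(f'{key}: {sub_val}\n')


buf = io.StringIO()
dump(buf, {'exported': False, 'A': ['x', ['1', '2', '3']], 'B': 'y'})
print(buf.getvalue())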
95 changes: 42 additions & 53 deletions modules/sslinfo.py
@@ -15,74 +15,63 @@

def cert(hostname, sslp, output, data):
    result = {}
-    pair = {}
    print(f'\n{Y}[!] SSL Certificate Information : {W}\n')

    port_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    port_test.settimeout(5)
    try:
        port_test.connect((hostname, sslp))
        port_test.close()
-    except Exception:
-        port_test.close()
-        print(f'{R}[-] {C}SSL is not Present on Target URL...Skipping...{W}')
-        result.update({'Error': 'SSL is not Present on Target URL'})
-        log_writer('[sslinfo] SSL is not Present on Target URL...Skipping...')

-    ctx = ssl.create_default_context()
-    sock = socket.socket()
-    sock.settimeout(5)
-    ssl_conn = ctx.wrap_socket(sock, server_hostname=hostname)
+        ctx = ssl.create_default_context()
+        sock = socket.socket()
+        sock.settimeout(5)
+        ssl_conn = ctx.wrap_socket(sock, server_hostname=hostname)

-    try:
-        ssl_conn.connect((hostname, sslp))
-        info = ssl_conn.getpeercert()
-    except Exception:
-        info = ssl.get_server_certificate((hostname, sslp))
-        with open(f'{hostname}.pem', 'w') as outfile:
-            outfile.write(info)
-        cert_dict = ssl._ssl._test_decode_cert(f'{hostname}.pem')
-        info = cert_dict
-        os.remove(f'{hostname}.pem')
+        try:
+            ssl_conn.connect((hostname, sslp))
+            info = ssl_conn.getpeercert()
+        except Exception:
+            info = ssl.get_server_certificate((hostname, sslp))
+            with open(f'{hostname}.pem', 'w') as outfile:
+                outfile.write(info)
+            cert_dict = ssl._ssl._test_decode_cert(f'{hostname}.pem')
+            info = cert_dict
+            os.remove(f'{hostname}.pem')

-    def unpack(val, pair):
-        convert = False
-        for item in val:
-            if isinstance(item, tuple):
-                for subitem in item:
-                    if isinstance(subitem, tuple):
-                        for elem in subitem:
-                            if isinstance(elem, tuple):
-                                unpack(elem)
-                            else:
-                                convert = True
-                        if convert is True:
-                            pair.update(dict([subitem]))
-                    else:
-                        pass
-            else:
-                print(f'{G}[+] {C}{key}: {W}{item}')
-                if output != 'None':
-                    result.update({key: val})
+        def unpack(nested_tuple, pair):
+            for item in nested_tuple:
+                if isinstance(item, tuple):
+                    if len(item) == 2:
+                        pair[item[0]] = item[1]
+                    else:
+                        unpack(item, pair)
+                else:
+                    pair[nested_tuple.index(item)] = item

-    for key, val in info.items():
-        if isinstance(val, tuple):
-            unpack(val, pair)
-            for key, val in pair.items():
-                print(f'{G}[+] {C}{key}: {W}{val}')
-                if output != 'None':
-                    result.update({key: val})
-            pair.clear()
-        else:
-            print(f'{G}[+] {C}{key}: {W}{val}')
-            if output != 'None':
-                result.update({key: val})
+        pair = {}
+        for key, val in info.items():
+            if isinstance(val, tuple):
+                print(f'{G}[+] {C}{key}{W}')
+                unpack(val, pair)
+                for sub_key, sub_val in pair.items():
+                    print(f'\t{G}└╴{C}{sub_key}: {W}{sub_val}')
+                    result.update({f'{key}-{sub_key}': sub_val})
+                pair.clear()
+            else:
+                print(f'{G}[+] {C}{key} : {W}{val}')
+                result.update({key: val})

+    except Exception:
+        port_test.close()
+        print(f'{R}[-] {C}SSL is not Present on Target URL...Skipping...{W}')
+        if output != 'None':
+            result.update({'Error': 'SSL is not Present on Target URL'})
+        log_writer('[sslinfo] SSL is not Present on Target URL...Skipping...')

    result.update({'exported': False})
-    if output != 'None':
+
+    if output:
        fname = f'{output["directory"]}/ssl.{output["format"]}'
        output['file'] = fname
        data['module-SSL Certificate Information'] = result
        export(output, data)
-        log_writer('[sslinfo] Completed')
+    log_writer('[sslinfo] Completed')
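The reworked unpack() flattens the nested tuples that ssl.getpeercert() returns for fields such as 'subject' into a flat dict of (name, value) pairs. A standalone sketch of that idea, with made-up certificate data and an illustrative function name:

def flatten(nested, pair):
    for item in nested:
        if isinstance(item, tuple):
            if len(item) == 2:
                pair[item[0]] = item[1]   # a (name, value) pair
            else:
                flatten(item, pair)       # keep descending
        else:
            pair[nested.index(item)] = item


subject = ((('countryName', 'US'),),
           (('organizationName', 'Example Org'),),
           (('commonName', 'example.com'),))
pair = {}
flatten(subject, pair)
print(pair)  # {'countryName': 'US', 'organizationName': 'Example Org', 'commonName': 'example.com'}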
14 changes: 6 additions & 8 deletions modules/subdom.py
@@ -60,15 +60,13 @@ def subdomains(hostname, tout, output, data, conf_path):
    found = set(found)
    total = len(found)

-    if len(found) != 0:
+    if found:
        print(f'\n{G}[+] {C}Results : {W}\n')
-        i = 0
-        for url in found:
-            print(url)
-            i += 1
-            if i == 20:
-                print(f'\n{G}[+]{C} Results truncated...{W}')
-                break
+        for url in enumerate(list(found)[:20]):
+            print(url[1])
+
+        if len(found) > 20:
+            print(f'\n{G}[+]{C} Results truncated...{W}')

    print(f'\n{G}[+] {C}Total Unique Sub Domains Found : {W}{total}')

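Since the enumerate() index in the new loop is never used, the same truncation can be written with a plain slice; a tiny sketch with illustrative data, colour codes omitted:

found = {f'sub{i}.example.com' for i in range(25)}

for url in list(found)[:20]:   # print at most 20 findings
    print(url)

if len(found) > 20:
    print('[+] Results truncated...')

print(f'[+] Total Unique Sub Domains Found : {len(found)}')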
42 changes: 18 additions & 24 deletions modules/wayback.py
@@ -34,38 +34,32 @@ def timetravel(target, data, output):
            check_data = check_rqst.text
            json_chk_data = json.loads(check_data)
            avail_data = json_chk_data['archived_snapshots']
-            if len(avail_data) != 0:
-                is_avail = True
-                print(G + '['.rjust(5, '.') + ' Available ]')
+            if avail_data:
+                print(f'{G}{"[".rjust(5, ".")} Available ]{W}')
            else:
-                print(R + '['.rjust(5, '.') + ' N/A ]')
+                print(f'{R}{"[".rjust(5, ".")} N/A ]{W}')
        else:
            print(f'\n{R}[-] Status : {C}{check_sc}{W}')
            log_writer(f'[wayback] Status = {check_sc}, expected 200')
    except Exception as exc:
        print(f'\n{R}[-] Exception : {C}{exc}{W}')
        log_writer(f'[wayback] Exception = {exc}')

-    if is_avail is True:
-        print(f'{Y}[!] {C}Fetching URLs{W}', end='', flush=True)
-        wm_url = 'http://web.archive.org/cdx/search/cdx'
+    if avail_data:
+        print(f'{Y}[!] {C}Fetching URLs{W}', end='', flush=True)
+        wm_url = 'http://web.archive.org/cdx/search/cdx'

-        payload = {
-            'url': domain_query,
-            'fl': 'original',
-            'fastLatest': 'true',
-            'from': str(last_yr),
-            'to': str(curr_yr)
-        }
+        payload = {
+            'url': domain_query,
+            'fl': 'original',
+            'fastLatest': 'true',
+            'from': str(last_yr),
+            'to': str(curr_yr)
+        }

        try:
            rqst = requests.get(wm_url, params=payload, timeout=10)
            r_sc = rqst.status_code
            if r_sc == 200:
                r_data = rqst.text
-                if len(r_data) != 0:
-                    r_data = r_data.split('\n')
-                    r_data = set(r_data)
+                if data:
+                    r_data = set(r_data.split('\n'))
                    print(f'{G}{"[".rjust(5, ".")} {len(r_data)} ]{W}')
                    wayback_total.extend(r_data)

@@ -80,7 +74,7 @@ def timetravel(target, data, output):
                print(f'{R}{"[".rjust(5, ".")} Not Found ]{W}')
            else:
                print(f'{R}{"[".rjust(5, ".")} {r_sc} ]{W}')
-        except Exception as exc:
-            print(f'\n{R}[-] Exception : {C}{exc}{W}')
-            log_writer(f'[wayback] Exception = {exc}')
+        except Exception as exc:
+            print(f'\n{R}[-] Exception : {C}{exc}{W}')
+            log_writer(f'[wayback] Exception = {exc}')
    log_writer('[wayback] Completed')
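For reference, a self-contained sketch of the CDX query assembled above; the endpoint and parameter names come from this diff, while the target domain and year range are placeholders:

import requests

payload = {
    'url': 'example.com/*',   # placeholder for domain_query
    'fl': 'original',
    'fastLatest': 'true',
    'from': '2022',           # placeholder for str(last_yr)
    'to': '2023',             # placeholder for str(curr_yr)
}

resp = requests.get('http://web.archive.org/cdx/search/cdx', params=payload, timeout=10)
if resp.status_code == 200 and resp.text:
    urls = set(resp.text.split('\n'))
    print(f'[+] {len(urls)} unique archived URLs')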
