@@ -189,6 +189,10 @@ function tools_installed() {
 		printf "${bred} [*] misconfig-mapper [NO]${reset}\n"
 		allinstalled=false
 	}
+	[ -f "${tools}/Spoofy/spoofy.py" ] || {
+		printf "${bred} [*] spoofy [NO]${reset}\n"
+		allinstalled=false
+	}
 	[ -f "${tools}/SwaggerSpy/swaggerspy.py" ] || {
 		printf "${bred} [*] swaggerspy [NO]${reset}\n"
 		allinstalled=false
@@ -413,6 +417,14 @@ function tools_installed() {
 		printf "${bred} [*] sns [NO]${reset}\n"
 		allinstalled=false
 	}
+	command -v sourcemapper &>/dev/null || {
+		printf "${bred} [*] sourcemapper [NO]${reset}\n"
+		allinstalled=false
+	}
+	command -v jsluice &>/dev/null || {
+		printf "${bred} [*] jsluice [NO]${reset}\n"
+		allinstalled=false
+	}
 	if [[ ${allinstalled} == true ]]; then
 		printf "${bgreen} Good! All installed! ${reset}\n\n"
 	else
@@ -652,21 +664,21 @@ function domain_info() {

 function third_party_misconfigs() {

-	mkdir -p 3rdparties
+	mkdir -p osint
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $THIRD_PARTIES == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
 		start_func ${FUNCNAME[0]} "Searching for third parties misconfigurations"
 		company_name=$(echo $domain | unfurl format %r)

 		pushd "${tools}/misconfig-mapper" >/dev/null || {
 			echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
 		}
-		./misconfig-mapper -target $company_name -service "*" | grep "\[-\]" >${dir}/3rdparties/visma_misconfigurations.txt
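+		# grep -v drops the "[-]" lines so only flagged misconfigurations are written to the output file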
+		./misconfig-mapper -target $company_name -service "*" | grep -v "\[-\]" >${dir}/osint/3rdparts_misconfigurations.txt

 		popd >/dev/null || {
 			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
 		}

-		end_func "Results are saved in $domain/3rdparties" ${FUNCNAME[0]}
+		end_func "Results are saved in $domain/osint/3rdparts_misconfigurations.txt" ${FUNCNAME[0]}

 	else
 		if [[ $THIRD_PARTIES == false ]] || [[ $OSINT == false ]]; then
@@ -684,6 +696,39 @@ function third_party_misconfigs() {

 }

+function spoof() {
+
+	mkdir -p osint
+	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $SPOOF == true ]] && [[ $OSINT == true ]] && ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+		start_func ${FUNCNAME[0]} "Searching for spoofable domains"
+
+		pushd "${tools}/Spoofy" >/dev/null || {
+			echo "Failed to cd directory in ${FUNCNAME[0]} @ line ${LINENO}"
+		}
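+		# Spoofy inspects the domain's SPF and DMARC records and reports whether it can be spoofed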
+		./spoofy.py -d $domain >${dir}/osint/spoof.txt
+
+		popd >/dev/null || {
+			echo "Failed to popd in ${FUNCNAME[0]} @ line ${LINENO}"
+		}
+
+		end_func "Results are saved in $domain/osint/spoof.txt" ${FUNCNAME[0]}
+
+	else
+		if [[ $SPOOF == false ]] || [[ $OSINT == false ]]; then
+			printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+		elif [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
+			return
+		else
+			if [[ $SPOOF == false ]] || [[ $OSINT == false ]]; then
+				printf "\n${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} skipped in this mode or defined in reconftw.cfg ${reset}\n"
+			else
+				printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] ${FUNCNAME[0]} is already processed, to force executing ${FUNCNAME[0]} delete\n    $called_fn_dir/.${FUNCNAME[0]} ${reset}\n\n"
+			fi
+		fi
+	fi
+
+}
+
 function ip_info() {

 	mkdir -p osint
@@ -1365,7 +1410,13 @@ function geo_info() {
 		start_func ${FUNCNAME[0]} "Running: ipinfo and geoinfo"
 		ips_file="${dir}/hosts/ips.txt"
 		if [ ! -f $ips_file ]; then
-			echo "File ${dir}/hosts/ips.txt does not exist."
+			if ! [[ $domain =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9] ]]; then
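+				# hosts/ips.txt is missing: rebuild it from the DNS data gathered earlier (host plus first A record), dropping private/reserved ranges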
+				[ -s "subdomains/subdomains_dnsregs.json" ] && cat subdomains/subdomains_dnsregs.json | jq -r 'try . | "\(.host) \(.a[0])"' | anew -q .tmp/subs_ips.txt
+				[ -s ".tmp/subs_ips.txt" ] && awk '{ print $2 " " $1}' .tmp/subs_ips.txt | sort -k2 -n | anew -q hosts/subs_ips_vhosts.txt
+				[ -s "hosts/subs_ips_vhosts.txt" ] && cat hosts/subs_ips_vhosts.txt | cut -d ' ' -f1 | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
+			else
+				echo $domain | grep -aEiv "^(127|10|169\.154|172\.1[6789]|172\.2[0-9]|172\.3[01]|192\.168)\." | grep -oE "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | anew -q hosts/ips.txt
+			fi
 		else
 			for ip in $(cat "$ips_file"); do
 				json_output=$(curl -s https://ipapi.co/$ip/json)
@@ -1932,6 +1983,7 @@ function urlchecks() {
 		[ -s ".tmp/katana.txt" ] && sed -i '/^.\{2048\}./d' .tmp/katana.txt
 		[ -s ".tmp/katana.txt" ] && cat .tmp/katana.txt | anew -q .tmp/url_extract_tmp.txt
 		[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js)" | anew -q .tmp/url_extract_js.txt
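+		# also collect sourcemap (.js.map) URLs; the sourcemap extraction step in jschecks consumes this list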
+		[ -s ".tmp/url_extract_tmp.txt" ] && cat .tmp/url_extract_tmp.txt | grep "${domain}" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | grep -aEi "\.(js\.map)" | anew -q .tmp/url_extract_jsmap.txt
 		if [[ $DEEP == true ]]; then
 			[ -s ".tmp/url_extract_js.txt" ] && interlace -tL .tmp/url_extract_js.txt -threads 10 -c "python3 ${tools}/JSA/jsa.py -f target | anew -q .tmp/url_extract_tmp.txt" &>/dev/null
 		fi
@@ -2017,7 +2069,8 @@ function jschecks() {
 	if { [[ ! -f "$called_fn_dir/.${FUNCNAME[0]}" ]] || [[ $DIFF == true ]]; } && [[ $JSCHECKS == true ]]; then
 		start_func ${FUNCNAME[0]} "Javascript Scan"
 		if [[ -s ".tmp/url_extract_js.txt" ]]; then
-			printf "${yellow} Running : Fetching Urls 1/5${reset}\n"
+
+			printf "${yellow} Running : Fetching Urls 1/6${reset}\n"
 			if [[ $AXIOM != true ]]; then
 				cat .tmp/url_extract_js.txt | subjs -ua "Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0" -c 40 | grep "$domain" | grep -E '^((http|https):\/\/)?([a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?\.)+[a-zA-Z]{1,}(\/.*)?$' | anew -q .tmp/subjslinks.txt
 			else
@@ -2026,31 +2079,36 @@ function jschecks() {
 			[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | egrep -iv "\.(eot|jpg|jpeg|gif|css|tif|tiff|png|ttf|otf|woff|woff2|ico|pdf|svg|txt|js)" | anew -q js/nojs_links.txt
 			[ -s ".tmp/subjslinks.txt" ] && cat .tmp/subjslinks.txt | grep -iE "\.js($|\?)" | anew -q .tmp/url_extract_js.txt
 			cat .tmp/url_extract_js.txt | python3 ${tools}/urless/urless/urless.py | anew -q js/url_extract_js.txt 2>>"$LOGFILE" >/dev/null
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Resolving JS Urls 2/5${reset}\n"
+
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Resolving JS Urls 2/6${reset}\n"
 			if [[ $AXIOM != true ]]; then
 				[ -s "js/url_extract_js.txt" ] && cat js/url_extract_js.txt | httpx -follow-redirects -random-agent -silent -timeout $HTTPX_TIMEOUT -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -status-code -content-type -retries 2 -no-color | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
 			else
 				[ -s "js/url_extract_js.txt" ] && axiom-scan js/url_extract_js.txt -m httpx -follow-host-redirects -H \"${HEADER}\" -status-code -threads $HTTPX_THREADS -rl $HTTPX_RATELIMIT -timeout $HTTPX_TIMEOUT -silent -content-type -retries 2 -no-color -o .tmp/js_livelinks.txt $AXIOM_EXTRA_ARGS 2>>"$LOGFILE" >/dev/null
 				[ -s ".tmp/js_livelinks.txt" ] && cat .tmp/js_livelinks.txt | anew .tmp/web_full_info.txt | grep "[200]" | grep "javascript" | cut -d ' ' -f1 | anew -q js/js_livelinks.txt
 			fi
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering endpoints 3/5${reset}\n"
+
+			printf "${yellow} Running : Extracting JS from sourcemaps 3/6${reset}\n"
+			mkdir -p .tmp/sourcemapper
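+			# sourcemapper unpacks the sourcemaps referenced by the live JS (and the .js.map URLs collected in urlchecks) into .tmp/sourcemapper/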
+			[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "sourcemapper -jsurl '_target_' -output _output_/_cleantarget_" -o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
+			[ -s ".tmp/url_extract_jsmap.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "sourcemapper -url '_target_' -output _output_/_cleantarget_" -o .tmp/sourcemapper 2>>"$LOGFILE" >/dev/null
+
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering endpoints 4/6${reset}\n"
 			[ -s "js/js_livelinks.txt" ] && xnLinkFinder -i js/js_livelinks.txt -sf subdomains/subdomains.txt -d $XNLINKFINDER_DEPTH -o .tmp/js_endpoints.txt 2>>"$LOGFILE" >/dev/null
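+			# jsluice extracts URLs from the recovered .js/.ts sources; jq keeps only the url field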
+			find .tmp/sourcemapper/ \( -name "*.js" -o -name "*.ts" \) -type f | jsluice urls | jq -r .url | anew -q .tmp/js_endpoints.txt
 			[ -s "parameters.txt" ] && rm -f parameters.txt 2>>"$LOGFILE" >/dev/null
 			if [[ -s ".tmp/js_endpoints.txt" ]]; then
 				sed -i '/^\//!d' .tmp/js_endpoints.txt
 				cat .tmp/js_endpoints.txt | anew -q js/js_endpoints.txt
 			fi
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering secrets 4/5${reset}\n"
-			if [[ $AXIOM != true ]]; then
-				[ -s "js/js_livelinks.txt" ] && cat js/js_livelinks.txt | mantra -ua ${HEADER} -s | anew -q js/js_secrets.txt
-				[ -s "js/js_secrets.txt" ] && trufflehog filesystem js/js_secrets.txt --only-verified -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
-			else
-				[ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua \"${HEADER}\" -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null
-				[ -s "js/js_secrets.txt" ] && trufflehog filesystem js/js_secrets.txt --only-verified -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
-			fi
+
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Gathering secrets 5/6${reset}\n"
+			[ -s "js/js_livelinks.txt" ] && axiom-scan js/js_livelinks.txt -m mantra -ua \"${HEADER}\" -s -o js/js_secrets.txt $AXIOM_EXTRA_ARGS &>/dev/null
+			[ -s "js/js_secrets.txt" ] && trufflehog filesystem js/js_secrets.txt -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
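+			# also run trufflehog over the sources recovered from sourcemaps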
+			[ -s "js/js_secrets.txt" ] && trufflehog filesystem .tmp/sourcemapper/ -j 2>/dev/null | jq -c | anew -q js/js_secrets_trufflehog.txt
 			[ -s "js/js_secrets.txt" ] && sed -r "s/\x1B\[([0-9]{1,3}(;[0-9]{1,2};?)?)?[mGK]//g" -i js/js_secrets.txt
-			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Building wordlist 5/5${reset}\n"
+
+			printf "${yellow}[$(date +'%Y-%m-%d %H:%M:%S')] Running : Building wordlist 6/6${reset}\n"
 			[ -s "js/js_livelinks.txt" ] && interlace -tL js/js_livelinks.txt -threads ${INTERLACE_THREADS} -c "python3 ${tools}/getjswords.py '_target_' | anew -q webs/dict_words.txt" 2>>"$LOGFILE" >/dev/null
 			end_func "Results are saved in $domain/js folder" ${FUNCNAME[0]}
 		else