Fix some issues #7

Open
wants to merge 5 commits into master
README.md (9 changes: 4 additions & 5 deletions)
@@ -1,16 +1,15 @@
# sumrecon

script I built upon courtesy of @hmaverickadams

Has been tested only on kali. To be run in root directory
Script that @Gr1mmie built on top of work courtesy of @hmaverickadams,
and that was edited by me.<br>
Tested on Ubuntu. Can be run from any directory.

## DEPENDENCIES
* assetfinder - https://github.com/tomnomnom/assetfinder
* amass - https://github.com/OWASP/Amass
* certspotter - `curl -s https://certspotter.com/api/v0/certs\?domain\=$url | jq '.[].dns_names[]' | sed 's/\"//g' | sed 's/\*\.//g' | sort -u` (set as an alias; see the sketch after this list)
* sublist3r - https://github.com/aboul3la/Sublist3r
* httprobe - https://github.com/tomnomnom/httprobe
* waybackurls - https://github.com/tomnomnom/waybackurls
* whatweb - https://github.com/urbanadventurer/WhatWeb
* nmap - https://nmap.org/download.html
* eyewitness - https://github.com/FortyNorthSecurity/EyeWitness
* gowitness - https://github.com/sensepost/gowitness
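
The certspotter dependency above is just the curl one-liner set as an alias. Below is a minimal sketch of that setup, assuming bash and an installed `jq`; it is written as a function (my adaptation) that takes the domain as an argument rather than reading `$url`, so it also works inside non-interactive scripts:

```bash
# Sketch of the certspotter one-liner from the dependency list, wrapped in a
# function; takes the target domain as $1 instead of relying on a $url variable.
certspotter() {
    curl -s "https://certspotter.com/api/v0/certs?domain=$1" \
        | jq '.[].dns_names[]' \
        | sed 's/"//g' \
        | sed 's/\*\.//g' \
        | sort -u
}

# Example: certspotter example.com
```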
sumrecon.sh (158 changes: 78 additions & 80 deletions)
@@ -1,11 +1,7 @@
#!/bin/bash
pwd=$PWD

url=$1

# echo $url > var; sed 's/https\?:\/\///g' var >> var1
# sed '1d' var1 | cut -d '/' -f 1 | tee var
# url=$(cat var)

url=$1
if [ ! -x "$(command -v assetfinder)" ]; then
echo "[-] assetfinder required to run script"
exit 1
@@ -20,9 +16,8 @@
echo "[-] sublist3r required to run script"
exit 1
fi

if [ ! -x "$(find / -type f -name 'EyeWitness')" ];then
echo "[-] Eyewitness required to run script"
if [ ! -x "$(command -v gowitness)" ]; then
echo "[-] gowitness required to run script"
exit 1
fi

@@ -41,128 +36,131 @@
exit 1
fi
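# Build the output tree under $PWD/<target domain>/recon, one subdirectory per stage.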

if [ ! -d "$url" ];then
mkdir $url
if [ ! -d "$pwd/$url" ];then
mkdir $pwd/$url
fi
if [ ! -d "$url/recon" ];then
mkdir $url/recon
if [ ! -d "$pwd/$url/recon" ];then
mkdir $pwd/$url/recon
fi
if [ ! -d "$url/recon/3rd-lvls" ];then
mkdir $url/recon/3rd-lvls
if [ ! -d "$pwd/$url/recon/3rd-lvls" ];then
mkdir $pwd/$url/recon/3rd-lvls
fi
if [ ! -d "$url/recon/scans" ];then
mkdir $url/recon/scans
if [ ! -d "$pwd/$url/recon/scans" ];then
mkdir $pwd/$url/recon/scans
fi
if [ ! -d "$url/recon/httprobe" ];then
mkdir $url/recon/httprobe
if [ ! -d "$pwd/$url/recon/httprobe" ];then
mkdir $pwd/$url/recon/httprobe
fi
if [ ! -d "$url/recon/potential_takeovers" ];then
mkdir $url/recon/potential_takeovers
if [ ! -d "$pwd/$url/recon/potential_takeovers" ];then
mkdir $pwd/$url/recon/potential_takeovers
fi
if [ ! -d "$url/recon/wayback" ];then
mkdir $url/recon/wayback
if [ ! -d "$pwd/$url/recon/wayback" ];then
mkdir $pwd/$url/recon/wayback
fi
if [ ! -d "$url/recon/wayback/params" ];then
mkdir $url/recon/wayback/params
if [ ! -d "$pwd/$url/recon/wayback/params" ];then
mkdir $pwd/$url/recon/wayback/params
fi
if [ ! -d "$url/recon/wayback/extensions" ];then
mkdir $url/recon/wayback/extensions
if [ ! -d "$pwd/$url/recon/wayback/extensions" ];then
mkdir $pwd/$url/recon/wayback/extensions
fi
if [ ! -d "$url/recon/whatweb" ];then
mkdir $url/recon/whatweb
if [ ! -d "$pwd/$url/recon/whatweb" ];then
mkdir $pwd/$url/recon/whatweb
fi
if [ ! -f "$url/recon/httprobe/alive.txt" ];then
touch $url/recon/httprobe/alive.txt
if [ ! -d "$pwd/$url/recon/gowitness" ];then
mkdir $pwd/$url/recon/gowitness
fi
if [ ! -f "$url/recon/final.txt" ];then
touch $url/recon/final.txt

if [ ! -f "$pwd/$url/recon/httprobe/alive.txt" ];then
touch $pwd/$url/recon/httprobe/alive.txt
fi
if [ ! -f "$pwd/$url/recon/final.txt" ];then
touch $pwd/$url/recon/final.txt
fi
if [ ! -f "$url/recon/3rd-lvl" ];then
touch $url/recon/3rd-lvl-domains.txt
if [ ! -f "$pwd/$url/recon/3rd-lvl" ];then
touch $pwd/$url/recon/3rd-lvl-domains.txt
fi

echo "[+] Harvesting subdomains with assetfinder..."
assetfinder $url | grep '.$url' | sort -u | tee -a $url/recon/final1.txt
assetfinder $url | grep '.'$url | sort -u | tee -a $pwd/$url/recon/final1.txt

echo "[+] Double checking for subdomains with amass and certspotter..."
amass enum -d $url | tee -a $url/recon/final1.txt
#curl -s https://certspotter.com/api/v0/certs\?domain\=$url | jq '.[].dns_names[]' | sed 's/\"//g' | sed 's/\*\.//g' | sort -u
certspotter | tee -a $url/recon/final1.txt
sort -u $url/recon/final1.txt >> $url/recon/final.txt
rm $url/recon/final1.txt
echo "[+] Double checking for subdomains with amass..."
amass enum -passive -d $url | tee -a $pwd/$url/recon/final1.txt
sort -u $pwd/$url/recon/final1.txt >> $pwd/$url/recon/final.txt
rm $pwd/$url/recon/final1.txt

echo "[+] Compiling 3rd lvl domains..."
cat ~/$url/recon/final.txt | grep -Po '(\w+\.\w+\.\w+)$' | sort -u >> ~/$url/recon/3rd-lvl-domains.txt
cat $pwd/$url/recon/final.txt | grep -Po '(\w+\.\w+\.\w+)$' | sort -u >> $pwd/$url/recon/3rd-lvl-domains.txt
# TODO: write an in-line loop to recursively run back through final.txt
for line in $(cat $url/recon/3rd-lvl-domains.txt);do echo $line | sort -u | tee -a $url/recon/final.txt;done
for line in $(cat $pwd/$url/recon/3rd-lvl-domains.txt);do echo $line | sort -u | tee -a $pwd/$url/recon/final.txt;done

echo "[+] Harvesting full 3rd lvl domains with sublist3r..."
for domain in $(cat $url/recon/3rd-lvl-domains.txt);do sublist3r -d $domain -o $url/recon/3rd-lvls/$domain.txt;done

for domain in $(cat $pwd/$url/recon/3rd-lvl-domains.txt);do sublist3r -d $domain -o $pwd/$url/recon/3rd-lvls/$domain.txt;done
cat $pwd/$url/recon/final.txt | sort -u >> $pwd/$url/recon/final2.txt
rm $pwd/$url/recon/final.txt
mv $pwd/$url/recon/final2.txt $pwd/$url/recon/final.txt
echo "[+] Probing for alive domains..."
cat $url/recon/final.txt | sort -u | httprobe -s -p https:443 | sed 's/https\?:\/\///' | tr -d ':443' | sort -u >> $url/recon/httprobe/alive.txt
sort -u $url/
cat $pwd/$url/recon/final.txt | sort -u | httprobe -s -p https:443 | sed 's/https\?:\/\///' | sed 's/:443$//' | sort -u >> $pwd/$url/recon/httprobe/a.txt
sort -u $pwd/$url/recon/httprobe/a.txt > $pwd/$url/recon/httprobe/alive.txt
echo "[+] Checking for possible subdomain takeover..."
if [ ! -f "$url/recon/potential_takeovers/domains.txt" ];then
touch $url/recon/potential_takeovers/domains.txt
if [ ! -f "$pwd/$url/recon/potential_takeovers/domains.txt" ];then
touch $pwd/$url/recon/potential_takeovers/domains.txt
fi
if [ ! -f "$url/recon/potential_takeovers/potential_takeovers1.txt" ];then
touch $url/recon/potential_takeovers/potential_takeovers1.txt
if [ ! -f "$pwd/$url/recon/potential_takeovers/potential_takeovers1.txt" ];then
touch $pwd/$url/recon/potential_takeovers/potential_takeovers1.txt
fi
for line in $(cat ~/$url/recon/final.txt);do echo $line |sort -u >> ~/$url/recon/potential_takeovers/domains.txt;done
subjack -w $url/recon/httprobe/alive.txt -t 100 -timeout 30 -ssl -c ~/go/src/github.com/haccer/subjack/fingerprints.json -v 3 >> $url/recon/potential_takeovers/potential_takeovers/potential_takeovers1.txt
sort -u $url/recon/potential_takeovers/potential_takeovers1.txt >> $url/recon/potential_takeovers/potential_takeovers.txt
rm $url/recon/potential_takeovers/potential_takeovers1.txt
for line in $(cat $pwd/$url/recon/final.txt);do echo $line | sort -u >> $pwd/$url/recon/potential_takeovers/domains.txt;done
subjack -w $pwd/$url/recon/httprobe/alive.txt -t 100 -timeout 30 -ssl -c ~/go/src/github.com/haccer/subjack/fingerprints.json -v 3 >> $pwd/$url/recon/potential_takeovers/potential_takeovers1.txt
sort -u $pwd/$url/recon/potential_takeovers/potential_takeovers1.txt >> $pwd/$url/recon/potential_takeovers/potential_takeovers.txt
rm $pwd/$url/recon/potential_takeovers/potential_takeovers1.txt

echo "[+] Running whatweb on compiled domains..."
for domain in $(cat ~/$url/recon/httprobe/alive.txt);do
if [ ! -d "$url/recon/whatweb/$domain" ];then
mkdir $url/recon/whatweb/$domain
for domain in $(cat $pwd/$url/recon/httprobe/alive.txt);do
if [ ! -d "$pwd/$url/recon/whatweb/$domain" ];then
mkdir $pwd/$url/recon/whatweb/$domain
fi
if [ ! -d "$url/recon/whatweb/$domain/output.txt" ];then
touch $url/recon/whatweb/$domain/output.txt
if [ ! -d "$pwd/$url/recon/whatweb/$domain/output.txt" ];then
touch $pwd/$url/recon/whatweb/$domain/output.txt
fi
if [ ! -d "$url/recon/whaweb/$domain/plugins.txt" ];then
touch $url/recon/whatweb/$domain/plugins.txt
if [ ! -d "$pwd/$url/recon/whaweb/$domain/plugins.txt" ];then
touch $pwd/$url/recon/whatweb/$domain/plugins.txt
fi
echo "[*] Pulling plugins data on $domain $(date +'%Y-%m-%d %T') "
whatweb --info-plugins -t 50 -v $domain >> $url/recon/whatweb/$domain/plugins.txt; sleep 3
whatweb --info-plugins -t 50 -v $domain >> $pwd/$url/recon/whatweb/$domain/plugins.txt; sleep 3
echo "[*] Running whatweb on $domain $(date +'%Y-%m-%d %T')"
whatweb -t 50 -v $domain >> $url/recon/whatweb/$domain/output.txt; sleep 3
whatweb -t 50 -v $domain >> $pwd/$url/recon/whatweb/$domain/output.txt; sleep 3
done

echo "[+] Scraping wayback data..."
cat $url/recon/final.txt | waybackurls | tee -a $url/recon/wayback/wayback_output1.txt
sort -u $url/recon/wayback/wayback_output1.txt >> $url/recon/wayback/wayback_output.txt
rm $url/recon/wayback/wayback_output1.txt
cat $pwd/$url/recon/final.txt | waybackurls | tee -a $pwd/$url/recon/wayback/wayback_output1.txt
sort -u $pwd/$url/recon/wayback/wayback_output1.txt >> $pwd/$url/recon/wayback/wayback_output.txt
rm $pwd/$url/recon/wayback/wayback_output1.txt

echo "[+] Pulling and compiling all possible params found in wayback data..."
cat $url/recon/wayback/wayback_output.txt | grep '?*=' | cut -d '=' -f 1 | sort -u >> $url/recon/wayback/params/wayback_params.txt
for line in $(cat $url/recon/wayback/params/wayback_params.txt);do echo $line'=';done
cat $pwd/$url/recon/wayback/wayback_output.txt | grep '?*=' | cut -d '=' -f 1 | sort -u >> $pwd/$url/recon/wayback/params/wayback_params.txt
for line in $(cat $pwd/$url/recon/wayback/params/wayback_params.txt);do echo $line'=';done

echo "[+] Pulling and compiling js/php/aspx/jsp/json files from wayback output..."
for line in $(cat $url/recon/wayback/wayback_output.txt);do
for line in $(cat $pwd/$url/recon/wayback/wayback_output.txt);do
ext="${line##*.}"
if [[ "$ext" == "js" ]]; then
echo $line | sort -u | tee -a $url/recon/wayback/extensions/js.txt
echo $line | sort -u | tee -a $pwd/$url/recon/wayback/extensions/js.txt
fi
if [[ "$ext" == "html" ]];then
echo $line | sort -u | tee -a $url/recon/wayback/extensions/jsp.txt
echo $line | sort -u | tee -a $pwd/$url/recon/wayback/extensions/jsp.txt
fi
if [[ "$ext" == "json" ]];then
echo $line | sort -u | tee -a $url/recon/wayback/extensions/json.txt
echo $line | sort -u | tee -a $pwd/$url/recon/wayback/extensions/json.txt
fi
if [[ "$ext" == "php" ]];then
echo $line | sort -u | tee -a $url/recon/wayback/extensions/php.txt
echo $line | sort -u | tee -a $pwd/$url/recon/wayback/extensions/php.txt
fi
if [[ "$ext" == "aspx" ]];then
echo $line | sort -u | tee -a $url/recon/wayback/extensions/aspx.txt
echo $line | sort -u | tee -a $pwd/$url/recon/wayback/extensions/aspx.txt
fi
done

echo "[+] Scanning for open ports..."
nmap -iL $url/recon/httprobe/alive.txt -T4 -oA $url/recon/scans/scanned.txt
nmap -iL $pwd/$url/recon/httprobe/alive.txt -T4 -oA $pwd/$url/recon/scans/scanned.txt

echo "[+] Running eyewitness against all compiled domains..."
eyewitness=$(find / -type f -name 'EyeWitness.py')
python3 $eyewitness --web -f $url/recon/httprobe/alive.txt -d $url/recon/eyewitness --resolve --no-prompt
echo "[+] Running gowitness against all compiled domains..."
gowitness file -f $pwd/$url/recon/httprobe/alive.txt -D $pwd/$url/recon/gowitness/gowitness.sqlite3 -P $pwd/$url/recon/gowitness/Screenshots
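
For reference, a minimal usage sketch of the updated script: the target domain is its only argument, and because of `pwd=$PWD` all output is written beneath the directory it is run from (`example.com` below is a placeholder):

```bash
# Hypothetical run; example.com stands in for the real target domain.
chmod +x sumrecon.sh
./sumrecon.sh example.com

# Results land under ./example.com/recon/ relative to the current directory,
# e.g. ./example.com/recon/httprobe/alive.txt and ./example.com/recon/gowitness/.
```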