Enumerate Applications
Check List
Cheat Sheet
Different URL
Subdomain Fuzzing
# --- Subdomain brute force with common DNS tools (expects $WEBSITE set) ---
# dnsenum: full DNS enumeration, -f brute-forces from the wordlist
dnsenum $WEBSITE -f /usr/share/seclists/Discovery/DNS/subdomains-top1million-20000.txt
# dnsrecon: -t brt selects brute-force mode using dictionary -D
dnsrecon -d $WEBSITE \
-D /usr/share/seclists/Discovery/DNS/subdomains-top1million-20000.txt \
-t brt
# gobuster DNS mode: brute-force subdomains from the wordlist
gobuster dns --domain $WEBSITE \
-w /usr/share/seclists/Discovery/DNS/subdomains-top1million-20000.txt
# urlfinder: passive URL/subdomain discovery for the target domain
urlfinder -d $WEBSITE
# Passive subdomain discovery by scraping rapiddns.io, extracting anything
# that looks like a hostname and de-duplicating.
# Fixes: the character class contained an en dash (0–9) instead of an ASCII
# hyphen, so the digit range never matched; the URL is now quoted so the
# literal '?' cannot be glob-expanded by the shell.
curl -s "https://rapiddns.io/subdomain/${WEBSITE}?full=1" | \
grep -Eo '[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}' | sort -u
# Favicon-hash pivot: download the favicon, compute the Shodan-style mmh3
# hash of its base64 encoding (base64's newline-wrapped output is the exact
# form Shodan hashes), then search Shodan for other hosts serving the same
# favicon and list their hostnames one per line.
curl -s https://$WEBSITE/favicon.ico | \
base64 | python3 -c 'import mmh3, sys;print(mmh3.hash(sys.stdin.buffer.read()))' | \
xargs -I{} shodan search http.favicon.hash:{} --fields hostnames | tr ";" "\n"
# amass passive enumeration: OSINT sources only, no direct target contact
amass enum -passive -d $WEBSITE
# amass active enumeration plus wordlist brute forcing
amass enum -active \
-brute \
-d $WEBSITE \
-w /usr/share/seclists/Discovery/DNS/subdomains-top1million-20000.txt
# Intel: discover organization names/domains inside a CIDR range
amass intel -ip -cidr $TARGET
# Intel: discover domains announced by an ASN (active probing)
amass intel -active -asn $ASN
# Pull the TLS certificate (SNI = $WEBSITE) from $IP:443 and dump it as
# text — the Subject Alternative Names often reveal additional (sub)domains.
# The leading "echo |" closes s_client's stdin so the command returns.
echo | \
openssl s_client -showcerts -servername $WEBSITE -connect $IP:443 2>/dev/null | \
openssl x509 -inform pem -noout -text
# assetfinder → probe each discovered host with httpx for status code/title
assetfinder $WEBSITE | httpx --status-code --title
# subfinder recursive enumeration, then list the distinct favicon hashes
# seen across the discovered hosts (JSON output filtered with jq).
# Fix: original ended in "sort-u" (missing space) — a nonexistent command;
# it must be "sort -u".
subfinder -d $WEBSITE -all -recursive | httpx -favicon -j | \
jq -r .favicon | grep -v null | sort -u
# Enumerate subdomains to a file
subfinder -d $WEBSITE -all -recursive -o /tmp/subdomains.txt
# alterx: generate permutations/mutations of the discovered subdomains
cat /tmp/subdomains.txt | alterx -o /tmp/gen-subdomains.txt
# Probe generated candidates on common HTTP(S) ports; the browser-like
# User-Agent avoids trivial bot filtering.
cat /tmp/gen-subdomains.txt | \
httpx -ports 80,443,8080,8000,8888,8082,8083 \
-H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/117.0" \
> /tmp/alive-subdomains.txt
# Resolve the generated candidates with puredns (massdns wrapper).
# Fix: puredns's -r flag expects a RESOLVERS file, but the original passed
# the intended OUTPUT file to -r. Results are written with -w instead;
# /tmp/resolvers.txt is the resolver list created elsewhere in this sheet.
puredns resolve /tmp/gen-subdomains.txt -r /tmp/resolvers.txt -w /tmp/resolve-subdomains.txt
# Crawl the live hosts with katana: depth 5, passive sources enabled
# (wayback/commoncrawl/alienvault), known-files and JS crawling on,
# excluding static asset extensions from the output.
# Fix: removed the duplicate "svg" entry from the -ef extension list.
katana -u /tmp/alive-subdomains.txt \
-d 5 -ps \
-pss waybackarchive,commoncrawl,alienvault \
-kf -jc -fx \
-ef woff,css,png,svg,jpg,woff2,jpeg,gif \
-o /tmp/all-urls.txt
# Use a single trusted resolver to keep resolution results consistent
echo "1.1.1.1" > /tmp/resolvers.txt
# Validate subfinder output by resolving it through shuffledns
subfinder -d $WEBSITE -all -recursive | \
shuffledns -d $WEBSITE -r /tmp/resolvers.txt -mode resolve
Directory Fuzzing
# dirb: classic recursive directory brute force
dirb $WEBSITE /usr/share/seclists/Discovery/Web-Content/raft-large-directories.txt
# dirsearch: basic run with an explicit wordlist
dirsearch -u $WEBSITE \
-w /usr/share/seclists/Discovery/Web-Content/raft-large-directories.txt
# dirsearch: extension-heavy run — random UA, deep recursion, hide 404s,
# follow redirects, small delay to go easier on the target
dirsearch -u $WEBSITE \
-e php,cgi,htm,html,shtm,sql.gz,sql.zip,shtml,lock,js,jar,txt,bak,inc,smp,csv,cache,zip,old,conf,config,backup,log,pl,asp,aspx,jsp,sql,db,sqlite,mdb,wasl,tar.gz,tar.bz2,7z,rar,json,xml,yml,yaml,ini,java,py,rb,php3,php4,php5 \
--random-agent \
--deep-recursive \
--exclude-status=404 \
--follow-redirects \
--delay=0.1
# wfuzz: color output (-c), wordlist payload, show only HTTP 200 (--sc 200)
wfuzz -c -z file,/usr/share/seclists/Discovery/Web-Content/raft-large-directories.txt \
--sc 200 "$WEBSITE/FUZZ"
# gobuster directory mode with the same wordlist
gobuster dir -u $WEBSITE \
-w /usr/share/seclists/Discovery/Web-Content/raft-large-directories.txt
# feroxbuster recursive content discovery, reporting only HTTP 200 hits.
# Fix: the original used "-C 200", but -C/--filter-status EXCLUDES the
# listed codes — it would hide every successful response. The allow-list
# flag is -s/--status-codes.
feroxbuster --url $WEBSITE -s 200
# ffuf: substitute FUZZ in the URL path with each wordlist entry
ffuf -u $WEBSITE/FUZZ \
-w /usr/share/seclists/Discovery/Web-Content/raft-large-directories.txt
Non-Standard Ports
Port Scans
# TCP SYN scan + service versions; --mtu 5000 (multiple of 8) alters packet
# fragmentation to evade simple filters
nmap -sS -sV --mtu 5000 $WEBSITE
# Same, UDP
nmap -sU -sV --mtu 5000 $WEBSITE
# netcat sweep of all TCP ports: -z no payload, -v verbose, 1s timeout
nc -zv -w 1 $WEBSITE 1-65535
# netcat sweep of all UDP ports
nc -zvu -w 1 $WEBSITE 1-65535
# Expand the site's A records into their /24 networks, comma-joined, then
# masscan every TCP and UDP port across those ranges.
TARGETS=$(dig +short A "$WEBSITE" | sed '/^\s*$/d' | awk -F. '{print $1"."$2"."$3".0/24"}' | sort -u | paste -s -d, -);
# Fix: the User-Agent said "Windows NT10.0" (missing space) — a malformed,
# easily fingerprinted UA string; corrected to "Windows NT 10.0".
sudo masscan --range $TARGETS -p1-65535,U:1-65535 --rate=10000 --http-user-agent "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:67.0) Gecko/20100101 Firefox/67.0" -oG /tmp/massscan.txt
# Flatten masscan's grepable output into unique "ip:port" lines:
# extract "Host: IP ... Ports: list", then split the comma-separated
# port list and prefix each port with its IP.
grep 'Host:' /tmp/massscan.txt \
| sed -E 's/.*Host: ([0-9.]+) .*Ports: (.*)/\1 \2/' \
| while read ip ports; do
echo "$ports" | tr ',' '\n' | awk -v ip="$ip" -F'/' '{print ip ":" $1}'
done \
| sort -u > /tmp/masscan-ipports.txt
# Split into a unique-IP list and a comma-separated sorted port list
awk -F: '{print $1}' /tmp/masscan-ipports.txt | sort -u > /tmp/masscan-ips.txt
awk -F: '{print $2}' /tmp/masscan-ipports.txt | sort -n -u | paste -s -d, - > /tmp/masscan-ports.csv
# Re-verify the masscan findings with naabu, chaining hits into nmap -sV
sudo naabu -list /tmp/masscan-ips.txt -p $(cat /tmp/masscan-ports.csv) -rate 1000 -nmap-cli 'nmap -sV --mtu 5000' -o /tmp/naabu-raw.txt
# Keep only the leading "ip:port" token from each naabu result line
cat /tmp/naabu-raw.txt | sed -n 's/^\([0-9.]*:[0-9]*\).*$/\1/p' | sort -u > /tmp/naabu-ports.txt
# Probe every ip:port with httpx and collect metadata.
# Fix: removed the bare "-ports" flag — it requires an argument, so left
# dangling it would consume the next flag (-sc); the input lines already
# include their port, so no override is needed.
cat /tmp/naabu-ports.txt \
| httpx --follow-redirects \
-sc -td -auto-referer -title -favicon -server -location -ip \
-o /tmp/httpx-results.txt
CIDR Discovery
# Map an IP to its ASN/owner via Team Cymru's whois service
whois -h whois.cymru.com $TARGET
# List the BGP prefixes (CIDR + ASN) covering the target IP
curl -s https://api.bgpview.io/ip/$TARGET | \
jq -r ".data.prefixes[] | {prefix: .prefix, ASN: .asn.asn}"
# Nmap NSE script: enumerate prefixes announced by an ASN
nmap --script targets-asn --script-args targets-asn.asn=$ASN
Virtual Hosts
# Name-server discovery and record lookups; the {a|txt|ns|mx} braces are a
# cheat-sheet placeholder for the record type to query.
nslookup -type=ns $WEBSITE
# Fixes: "AXRF" → AXFR (zone-transfer record type) and the "$WEBSTITE"
# typo → "$WEBSITE".
dig $WEBSITE NS +noall +answer; \
dig {a|txt|ns|mx} $WEBSITE; \
dig AXFR @ns1.$WEBSITE $WEBSITE; \
dig @$NS $WEBSITE
Host
# host: NS records, per-type lookups ({a|txt|ns|mx} is a placeholder),
# -a all records, -C SOA comparison across authoritative servers,
# -R sets the UDP retry count
host -t ns $WEBSITE; \
host -t {a|txt|ns|mx} $WEBSITE; \
host -a $WEBSITE; \
host -C $WEBSITE; \
host -R 3 $WEBSITE
# Virtual-host brute force: fuzz the Host header with wordlist entries,
# appending the base domain to each candidate.
# Fix: the original "-u" was written with a Unicode hyphen (U+2010), which
# gobuster would treat as a positional argument instead of a flag.
gobuster vhost -u $WEBSITE \
-w /usr/share/seclists/Discovery/DNS/subdomains-top1million-5000.txt \
--append-domain
# Reverse (PTR) lookup on the target IP
dig -x $IP
# Enumerate, resolve (dnsx), permute (dnsgen), then mass-resolve
subfinder -silent -d $WEBSITE | dnsx -silent > /tmp/sub-domains.txt
dnsgen /tmp/sub-domains.txt > /tmp/gen-sub-domains.txt
echo "1.1.1.1" > /tmp/resolver.txt
shuffledns -d $WEBSITE \
-l /tmp/gen-sub-domains.txt \
-mode resolve \
-r /tmp/resolver.txt
# Quick liveness probe of the base site
httpx -silent -u $WEBSITE
Host: [FUZZ]
# Virtual-host fuzzing: send each wordlist entry as the full Host header
ffuf -w /usr/share/seclists/Discovery/DNS/namelist.txt \
-u $TARGET \
-H "Host: FUZZ"
Host: [FUZZ].$WEBSITE
# Virtual-host fuzzing: wordlist entry as a subdomain of the base domain
ffuf -w /usr/share/seclists/Discovery/DNS/namelist.txt \
-u $TARGET \
-H "Host: FUZZ.$WEBSITE"
Host: [FOUND-SUBDOMAINS]
# Re-test previously discovered/generated subdomains as Host header values
ffuf -w /tmp/gen-sub-domains.txt -u $TARGET -H "Host: FUZZ"
Web Based DNS Search
ViewDNS
YouGetSignal
Website Informer
Reverse Whois
Whoxy
Security Insights
Reverse IP Service
Query DNS records of domains and subdomains to get IP
# Resolve each discovered subdomain to its A-record IPs (-resp-only prints
# only the resolved addresses)
for domain in $(subfinder -d $WEBSITE -silent); do echo $domain | \
dnsx -a -silent -resp-only; done
Whois the IP addresses and extract the properties
# Whois the resolved IPs to identify the owning organization/netblock
whois $TARGET
Reverse Lookup on the properties
RapidDNS
# Reverse-IP lookup via rapiddns "same IP" page: pup extracts the second
# table column (domains), sed drops blank lines, nl numbers the results
domain="$WEBSITE"
curl -s "https://rapiddns.io/sameip/${domain}#result" \
| pup 'table tr td:nth-child(2) text{}' \
| sed '/^[[:space:]]*$/d' \
| nl -ba
Query Ripe
# RIPE inverse queries: find objects maintained by / associated with a name.
# Server-side flags (-i) must follow "--" so the local whois client does
# not try to parse them itself.
# Fix: the mnt-by query was missing the "--" separator the other two use.
whois -h whois.ripe.net -- -i mnt-by $COMPANY
whois -h whois.ripe.net -- -i person $NAME
whois -h whois.ripe.net -- -i admin-c $NAME
Query Arin
# ARIN queries: "n !" searches network records, "o !" organization records.
# Fix: the queries were single-quoted, so $NAME was sent literally to the
# server instead of being expanded — double quotes allow expansion.
whois -h whois.arin.net -- "n ! $NAME"
whois -h whois.arin.net -- "o ! $NAME"
# amass intel: pivot by organization name, CIDR range, and ASN
amass intel -org $ORG
amass intel -ip -cidr $TARGET
amass intel -active -asn $ASN
Digital Certificate
# Certificate-transparency search on crt.sh: unique common names …
curl -s "https://crt.sh/?q=$WEBSITE&output=json" | \
jq -r ".[].common_name" | sort -u
# … and unique SAN entries (name_value)
curl -s "https://crt.sh/?q=$WEBSITE&output=json" | \
jq -r ".[].name_value" | sort -u
# Censys certificate search (API_SECRET = base64 of "api_id:api_secret");
# jq pulls the CN from the subject DN plus every name in the cert,
# handling both flat and nested "names" arrays
curl -X 'POST' 'https://search.censys.io/api/v2/certificates/search' -H 'Authorization: Basic API_SECRET' -H "content-type: application/json" --data '{"q":"parsed.subject.organization: Google"}' | \
jq -r '.result.hits[] | (.parsed.subject_dn | capture("CN=(?<cn>[^,]+)") | .cn), (.names | if type=="array" and (.[0] | type) == "array" then .[][] else .[] end)'
# Subdomains mentioned in public GitHub code (requires a GitHub API token)
github-subdomains -d $WEBSITE -t $TOKEN
# Historical URLs for the domain from the Wayback Machine
waybackurls $WEBSITE
Last updated