fixed domain extraction, temp disabled wayback in subdomain enum, faster crt.sh lookup, fixed typo
thewhiteh4t committed May 16, 2024
1 parent a4092d0 commit ee602b1
Showing 6 changed files with 64 additions and 62 deletions.
54 changes: 37 additions & 17 deletions finalrecon.py
@@ -106,6 +106,7 @@
 import ipaddress
 import tldextract
 from json import loads, dumps
+from urllib import parse
 
 type_ip = False
 data = {}
@@ -173,25 +174,44 @@ def save_key(key_string):
     target = target[:-1]
 
 print(f'{G}[+] {C}Target : {W}{target}')
-ext = tldextract.extract(target)
-domain = ext.registered_domain
-if not domain:
-    domain = ext.domain
-domain_suffix = ext.suffix
-
-if ext.subdomain:
-    hostname = f'{ext.subdomain}.{ext.domain}.{ext.suffix}'
+
+split_url = parse.urlsplit(target)
+extractor = tldextract.TLDExtract()
+parsed_url = extractor.extract_urllib(split_url)
+protocol = split_url.scheme
+
+if split_url.port:
+    if not parsed_url.subdomain:
+        netloc = parsed_url.domain # localhost:8000
+        domain = netloc.split(':')[0]
+        domain_suffix = ''
+        hostname = domain
+    else:
+        netloc = f'{parsed_url.subdomain}.{parsed_url.domain}' # abc.com:8000
+        domain = parsed_url.subdomain
+        domain_suffix = parsed_url.domain.split(':')[0]
+        hostname = f'{domain}.{domain_suffix}'
 else:
-    hostname = domain
+    if len(parsed_url.registered_domain) == 0:
+        netloc = parsed_url.domain # 8.8.8.8
+        domain = ''
+        domain_suffix = ''
+    else:
+        netloc = parsed_url.registered_domain # abc.com
+        domain = parsed_url.domain
+        domain_suffix = parsed_url.suffix
+    hostname = netloc
 
 try:
     ipaddress.ip_address(hostname)
     type_ip = True
     ip = hostname
+    private_ip = ipaddress.ip_address(ip).is_private
 except Exception:
     try:
         ip = socket.gethostbyname(hostname)
         print(f'\n{G}[+] {C}IP Address : {W}{str(ip)}')
+        private_ip = ipaddress.ip_address(ip).is_private
     except Exception as e:
         print(f'\n{R}[-] {C}Unable to Get IP : {W}{str(e)}')
         sys.exit(1)
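A quick way to sanity-check the new extraction path in isolation, as a minimal sketch that is not part of the commit; it assumes tldextract 3.x, where TLDExtract.extract_urllib accepts the SplitResult produced by urllib.parse.urlsplit:

# standalone sketch of the extraction flow above; the field names
# (subdomain/domain/suffix/registered_domain) are tldextract's ExtractResult,
# port handling comes from urllib.parse.urlsplit
from urllib import parse
import tldextract

extractor = tldextract.TLDExtract()

for url in ('https://mail.example.com/x', 'http://8.8.8.8', 'http://localhost:8000'):
    split_url = parse.urlsplit(url)
    parsed = extractor.extract_urllib(split_url)
    print(url, '->', split_url.scheme, split_url.port,
          parsed.subdomain, parsed.domain, parsed.suffix, parsed.registered_domain)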
@@ -228,11 +248,11 @@ def save_key(key_string):
     headers(target, out_settings, data)
     cert(hostname, sslp, out_settings, data)
     whois_lookup(domain, domain_suffix, path_to_script, out_settings, data)
-    dnsrec(domain, out_settings, data)
-    if not type_ip:
-        subdomains(domain, tout, out_settings, data, conf_path)
+    dnsrec(hostname, out_settings, data)
+    if not type_ip and not private_ip:
+        subdomains(hostname, tout, out_settings, data, conf_path)
     scan(ip, out_settings, data, pscan_threads)
-    crawler(target, out_settings, data)
+    crawler(target, protocol, netloc, out_settings, data)
     hammer(target, threads, tout, wdlist, redir, sslv, dserv, out_settings, data, filext)
     timetravel(target, data, out_settings)

@@ -254,17 +274,17 @@ def save_key(key_string):
     if crawl:
         from modules.crawler import crawler
         log_writer('Starting crawler...')
-        crawler(target, out_settings, data)
+        crawler(target, protocol, netloc, out_settings, data)
 
     if dns:
         from modules.dns import dnsrec
         log_writer('Starting DNS enum...')
-        dnsrec(domain, out_settings, data)
+        dnsrec(hostname, out_settings, data)
 
-    if subd and not type_ip:
+    if subd and not type_ip and not private_ip:
         from modules.subdom import subdomains
         log_writer('Starting subdomain enum...')
-        subdomains(domain, tout, out_settings, data, conf_path)
+        subdomains(hostname, tout, out_settings, data, conf_path)
 
     elif subd and type_ip:
         print(f'{R}[-] {C}Sub-Domain Enumeration is Not Supported for IP Addresses{W}\n')
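The new private_ip guard keys off the standard library's ipaddress module; an illustrative check, not from the commit:

# is_private is True for loopback and RFC 1918 addresses, which public
# subdomain sources cannot see, so enumeration is skipped for them
import ipaddress

print(ipaddress.ip_address('192.168.1.10').is_private)  # True  -> enum skipped
print(ipaddress.ip_address('1.1.1.1').is_private)       # False -> enum runs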
25 changes: 5 additions & 20 deletions modules/crawler.py
@@ -31,14 +31,14 @@
 sm_crawl_total = []
 
 
-def crawler(target, output, data):
+def crawler(target, protocol, netloc, output, data):
     global r_url, sm_url
     print(f'\n{Y}[!] Starting Crawler...{W}\n')
 
     try:
         rqst = requests.get(target, headers=user_agent, verify=False, timeout=10)
     except Exception as exc:
-        print(f'{R} [-] Exception : {C}{exc}{W}')
+        print(f'{R}[-] Exception : {C}{exc}{W}')
         log_writer(f'[crawler] Exception = {exc}')
         return

Expand All @@ -47,24 +47,9 @@ def crawler(target, output, data):
     page = rqst.content
     soup = bs4.BeautifulSoup(page, 'lxml')
 
-    protocol = target.split('://')
-    protocol = protocol[0]
-    temp_tgt = target.split('://')[1]
-    pattern = r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{2,5}'
-    custom = bool(re.match(pattern, temp_tgt))
-    if custom:
-        r_url = f'{protocol}://{temp_tgt}/robots.txt'
-        sm_url = f'{protocol}://{temp_tgt}/sitemap.xml'
-        base_url = f'{protocol}://{temp_tgt}'
-    else:
-        ext = tldextract.extract(target)
-        if ext.subdomain:
-            hostname = f'{ext.subdomain}.{ext.domain}.{ext.suffix}'
-        else:
-            hostname = ext.registered_domain
-        base_url = f'{protocol}://{hostname}'
-        r_url = f'{base_url}/robots.txt'
-        sm_url = f'{base_url}/sitemap.xml'
+    r_url = f'{protocol}://{netloc}/robots.txt'
+    sm_url = f'{protocol}://{netloc}/sitemap.xml'
+    base_url = f'{protocol}://{netloc}'
 
     loop = asyncio.new_event_loop()
     asyncio.set_event_loop(loop)
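Since the crawler no longer re-parses the target, the caller now supplies protocol and netloc. A hypothetical call site, where out_settings and data stand in for the real output config and results dict:

# hypothetical invocation mirroring finalrecon.py's new call
crawler('https://example.com:8443/app', 'https', 'example.com:8443', out_settings, data)
# r_url    -> 'https://example.com:8443/robots.txt'
# sm_url   -> 'https://example.com:8443/sitemap.xml'
# base_url -> 'https://example.com:8443'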
4 changes: 2 additions & 2 deletions modules/subdom.py
@@ -36,9 +36,9 @@ async def query(hostname, tout, conf_path):
         virust(hostname, conf_path, session),
         shodan(hostname, conf_path, session),
         certspot(hostname, session),
-        machine(hostname, session),
+        #machine(hostname, session),
         hackertgt(hostname, session),
-        crtsh(hostname)
+        crtsh(hostname, session)
     )
     await session.close()

2 changes: 1 addition & 1 deletion modules/subdomain_modules/certspot_subs.py
@@ -26,7 +26,7 @@ async def certspot(hostname, session):
             if status == 200:
                 json_data = await resp.text()
                 json_read = loads(json_data)
-                print(f'{G}[+] {Y}Certsport {W}found {C}{len(json_read)} {W}subdomains!')
+                print(f'{G}[+] {Y}Certspotter {W}found {C}{len(json_read)} {W}subdomains!')
                 for i in range(0, len(json_read)):
                     domains = json_read[i]['dns_names']
                     parent.found.extend(domains)
39 changes: 18 additions & 21 deletions modules/subdomain_modules/crtsh_subs.py
@@ -6,32 +6,29 @@
 W = '\033[0m'  # white
 Y = '\033[33m'  # yellow
 
-import psycopg2
+from json import loads
 import modules.subdom as parent
 from modules.write_log import log_writer
 
 
-async def crtsh(hostname):
+async def crtsh(hostname, session):
     print(f'{Y}[!] {C}Requesting {G}crt.sh{W}')
+    url = f'https://crt.sh/?dNSName=%25.{hostname}&output=json'
 
     try:
-        conn = psycopg2.connect(
-            host="crt.sh",
-            database="certwatch",
-            user="guest",
-            port="5432"
-        )
-        conn.autocommit = True
-        cur = conn.cursor()
-        query = f"SELECT ci.NAME_VALUE NAME_VALUE FROM certificate_identity ci WHERE ci.NAME_TYPE = 'dNSName' AND reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower('%.{hostname}'))"
-        cur.execute(query)
-        result = cur.fetchall()
-        cur.close()
-        conn.close()
-        tmp_list = []
-        for url in result:
-            tmp_list.append(url[0])
-        print(f'{G}[+] {Y}CRT.sh {W}found {C}{len(tmp_list)} {W}subdomains!')
-        parent.found.extend(tmp_list)
+        async with session.get(url) as resp:
+            status = resp.status
+            if status == 200:
+                data = await resp.text()
+                data_json = loads(data)
+                tmp_list = []
+                for entry in data_json:
+                    subdomain = entry['name_value']
+                    tmp_list.append(subdomain)
+                print(f'{G}[+] {Y}crt.sh {W}found {C}{len(tmp_list)} {W}subdomains!')
+                parent.found.extend(tmp_list)
+            else:
+                print(f'{R}[-] {C}crt.sh Status : {W}{status}')
+                log_writer(f'[crtsh_subs] Status = {status}, expected 200')
     except Exception as exc:
         print(f'{R}[-] {C}crtsh Exception : {W}{exc}')
         log_writer(f'[crtsh_subs] Exception = {exc}')
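The psycopg2 dependency is gone; the lookup now hits crt.sh's JSON endpoint over the shared aiohttp session created in modules/subdom.py. A self-contained sketch of the same request, with the session created locally for demonstration only:

# standalone sketch, not the module code: fetch wildcard matches for a
# hostname from crt.sh's JSON output and collect the name_value entries
import asyncio
import aiohttp
from json import loads

async def crtsh_demo(hostname):
    async with aiohttp.ClientSession() as session:
        async with session.get(f'https://crt.sh/?dNSName=%25.{hostname}&output=json') as resp:
            if resp.status != 200:
                return []
            return [entry['name_value'] for entry in loads(await resp.text())]

print(asyncio.run(crtsh_demo('example.com')))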
2 changes: 1 addition & 1 deletion modules/whois.py
@@ -45,7 +45,7 @@ def whois_lookup(domain, tld, script_path, output, data):

     try:
         whois_sv = db_json[tld]
-        whois_info = asyncio.run(get_whois(domain, whois_sv))
+        whois_info = asyncio.run(get_whois(f'{domain}.{tld}', whois_sv))
         print(whois_info['whois'])
         result.update(whois_info)
     except KeyError:
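With the new extraction, domain carries only the registrable label and tld the suffix, so the query target has to be rejoined before the whois request; for example, with hypothetical values:

# e.g. domain='example', tld='com' after the finalrecon.py changes, so
# get_whois must receive 'example.com' rather than the bare label 'example'
domain, tld = 'example', 'com'
print(f'{domain}.{tld}')  # example.com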
