diff --git a/finalrecon.py b/finalrecon.py index e1b5c6d..29f7c8c 100644 --- a/finalrecon.py +++ b/finalrecon.py @@ -8,6 +8,8 @@ C = '\033[36m' # cyan W = '\033[0m' # white +from modules.write_log import log_writer +log_writer('Importing config...') import settings as config home = config.home @@ -17,14 +19,18 @@ src_conf_path = config.src_conf_path meta_file_path = config.meta_file_path +log_writer( + f'PATHS = HOME:{home}, SCRIPT_LOC:{path_to_script},\ + METADATA:{meta_file_path}, KEYS:{config.keys_file_path},\ + CONFIG:{config.conf_file_path}, LOG:{config.log_file_path}' +) + import argparse -version = '1.1.5' -gh_version = '' -twitter_url = '' -discord_url = '' +VERSION = '1.1.5' +log_writer(f'FinalRecon v{VERSION}') -parser = argparse.ArgumentParser(description=f'FinalRecon - The Last Web Recon Tool You Will Need | v{version}') +parser = argparse.ArgumentParser(description=f'FinalRecon - The Last Web Recon Tool You Will Need | v{VERSION}') parser.add_argument('url', help='Target URL') parser.add_argument('--headers', help='Header Information', action='store_true') parser.add_argument('--sslinfo', help='SSL Certificate Information', action='store_true') @@ -64,6 +70,8 @@ try: args = parser.parse_args() except SystemExit: + log_writer('[finalrecon] Help menu accessed') + log_writer(f'{"-" * 30}') sys.exit() target = args.url @@ -119,7 +127,7 @@ def banner(): print(f'{G}[>]{C} Created By :{W} thewhiteh4t') print(f'{G} |--->{C} Twitter :{W} {twitter_url}') print(f'{G} |--->{C} Community :{W} {comms_url}') - print(f'{G}[>]{C} Version :{W} {version}\n') + print(f'{G}[>]{C} Version :{W} {VERSION}\n') def full_recon(): @@ -129,7 +137,7 @@ def full_recon(): from modules.dns import dnsrec from modules.whois import whois_lookup from modules.dirrec import hammer - from modules.portscan import ps + from modules.portscan import scan from modules.subdom import subdomains from modules.wayback import timetravel headers(target, output, data) @@ -140,7 +148,7 @@ def full_recon(): subdomains(domain, tout, output, data, conf_path) else: pass - ps(ip, output, data, pscan_threads) + scan(ip, output, data, pscan_threads) crawler(target, output, data) hammer(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext) timetravel(target, data, output) @@ -151,6 +159,7 @@ def full_recon(): if target.startswith(('http', 'https')) is False: print(f'{R}[-] {C}Protocol Missing, Include {W}http:// {C}or{W} https:// \n') + log_writer(f'Protocol missing in {target}, exiting') sys.exit(1) else: pass @@ -186,65 +195,83 @@ def full_recon(): respath = f'{fpath}fr_{hostname}_{dt_now}' if not os.path.exists(respath): os.makedirs(respath) - output = { + out_settings = { 'format': output, 'directory': respath, 'file': fname } + log_writer(f'OUTPUT = FORMAT: {output}, DIR: {respath}, FILENAME: {fname}') if full is True: + log_writer('Starting full recon...') full_recon() if headinfo is True: from modules.headers import headers - headers(target, output, data) + log_writer('Starting header enum...') + headers(target, out_settings, data) if sslinfo is True: from modules.sslinfo import cert - cert(hostname, sslp, output, data) + log_writer('Starting SSL enum...') + cert(hostname, sslp, out_settings, data) if whois is True: from modules.whois import whois_lookup - whois_lookup(ip, output, data) + log_writer('Starting whois enum...') + whois_lookup(ip, out_settings, data) if crawl is True: from modules.crawler import crawler - crawler(target, output, data) + log_writer('Starting crawler...') + crawler(target, 
out_settings, data) if dns is True: from modules.dns import dnsrec - dnsrec(domain, output, data) + log_writer('Starting DNS enum...') + dnsrec(domain, out_settings, data) if subd is True and type_ip is False: from modules.subdom import subdomains - subdomains(domain, tout, output, data, conf_path) + log_writer('Starting subdomain enum...') + subdomains(domain, tout, out_settings, data, conf_path) elif subd is True and type_ip is True: print(f'{R}[-] {C}Sub-Domain Enumeration is Not Supported for IP Addresses{W}\n') + log_writer('Sub-Domain Enumeration is Not Supported for IP Addresses, exiting') sys.exit(1) else: pass if wback is True: from modules.wayback import timetravel - timetravel(hostname, data, output) + log_writer('Starting wayback enum...') + timetravel(hostname, data, out_settings) if pscan is True: - from modules.portscan import ps - ps(ip, output, data, threads) + from modules.portscan import scan + log_writer('Starting port scan...') + scan(ip, out_settings, data, threads) if dirrec is True: from modules.dirrec import hammer - hammer(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext) + log_writer('Starting dir enum...') + hammer(target, threads, tout, wdlist, redir, sslv, dserv, out_settings, data, filext) if any([full, headinfo, sslinfo, whois, crawl, dns, subd, wback, pscan, dirrec]) is not True: print(f'\n{R}[-] Error : {C}At least One Argument is Required with URL{W}') + log_writer('At least One Argument is Required with URL, exiting') output = 'None' sys.exit(1) end_time = datetime.datetime.now() - start_time print(f'\n{G}[+] {C}Completed in {W}{str(end_time)}\n') + log_writer(f'Completed in {end_time}') print(f'{G}[+] {C}Exported : {W}{respath}') + log_writer(f'Exported to {respath}') + log_writer(f'{"-" * 30}') sys.exit() except KeyboardInterrupt: print(f'{R}[-] {C}Keyboard Interrupt.{W}\n') + log_writer('Keyboard interrupt, exiting') + log_writer(f'{"-" * 30}') sys.exit(130) diff --git a/modules/crawler.py b/modules/crawler.py index 7e1731e..40a7231 100644 --- a/modules/crawler.py +++ b/modules/crawler.py @@ -3,13 +3,12 @@ import re import bs4 import lxml -import json import asyncio import requests import threading import tldextract -from datetime import date from modules.export import export +from modules.write_log import log_writer requests.packages.urllib3.disable_warnings() R = '\033[31m' # red @@ -20,7 +19,6 @@ user_agent = {'User-Agent': 'FinalRecon'} -soup = '' total = [] r_total = [] sm_total = [] @@ -34,17 +32,18 @@ def crawler(target, output, data): - global soup, r_url, sm_url + global r_url, sm_url print(f'\n{Y}[!] 
Starting Crawler...{W}\n') try: rqst = requests.get(target, headers=user_agent, verify=False, timeout=10) - except Exception as e: - print(f'{R} [-] Exception : {C}{e}{W}') + except Exception as exc: + print(f'{R} [-] Exception : {C}{exc}{W}') + log_writer(f'[crawler] Exception = {exc}') return - sc = rqst.status_code - if sc == 200: + status = rqst.status_code + if status == 200: page = rqst.content soup = bs4.BeautifulSoup(page, 'lxml') @@ -69,18 +68,20 @@ def crawler(target, output, data): tasks = asyncio.gather( robots(r_url, base_url, data, output), sitemap(sm_url, data, output), - css(target, data, output), - js(target, data, output), - internal_links(target, data, output), - external_links(target, data, output), - images(target, data, output), + css(target, data, soup, output), + js_scan(target, data, soup, output), + internal_links(target, data, soup, output), + external_links(target, data, soup, output), + images(target, data, soup, output), sm_crawl(data, output), js_crawl(data, output)) loop.run_until_complete(tasks) loop.close() - stats(output, data) + stats(output, data, soup) + log_writer('[crawler] Completed') else: - print(f'{R}[-] {C}Status : {W}{sc}') + print(f'{R}[-] {C}Status : {W}{status}') + log_writer(f'[crawler] Status code = {status}, expected 200') def url_filter(target, link): @@ -120,6 +121,7 @@ def url_filter(target, link): pass return link + async def robots(robo_url, base_url, data, output): global r_total print(f'{G}[+] {C}Looking for robots.txt{W}', end='', flush=True) @@ -128,7 +130,7 @@ async def robots(robo_url, base_url, data, output): r_rqst = requests.get(robo_url, headers=user_agent, verify=False, timeout=10) r_sc = r_rqst.status_code if r_sc == 200: - print(G + '['.rjust(9, '.') + ' Found ]' + W) + print(f'{G}{"[".rjust(9, ".")} Found ]{W}') print(f'{G}[+] {C}Extracting robots Links{W}', end='', flush=True) r_page = r_rqst.text r_scrape = r_page.split('\n') @@ -147,25 +149,26 @@ async def robots(robo_url, base_url, data, output): r_total.append(url_filter(base_url, url)) if url.endswith('xml') is True: sm_total.append(url) - except Exception: - pass + except Exception as exc: + log_writer(f'[crawler.robots] Exception = {exc}') r_total = set(r_total) - print(G + '['.rjust(8, '.') + ' {} ]'.format(str(len(r_total)))) + print(f'{G}{"[".rjust(8, ".")} {len(r_total)} ]') exporter(data, output, r_total, 'robots') elif r_sc == 404: - print(R + '['.rjust(9, '.') + ' Not Found ]' + W) + print(f'{R}{"[".rjust(9, ".")} Not Found ]{W}') else: - print(R + '['.rjust(9, '.') + ' {} ]'.format(r_sc) + W) - except Exception as e: - print(f'\n{R}[-] Exception : {C}{e}{W}') + print(f'{R}{"[".rjust(9, ".")} {r_sc} ]{W}') + except Exception as exc: + print(f'\n{R}[-] Exception : {C}{exc}{W}') + log_writer(f'[crawler.robots] Exception = {exc}') -async def sitemap(sm_url, data, output): +async def sitemap(target_url, data, output): global sm_total print(f'{G}[+] {C}Looking for sitemap.xml{W}', end='', flush=True) try: - sm_rqst = requests.get(sm_url, headers=user_agent, verify=False, timeout=10) + sm_rqst = requests.get(target_url, headers=user_agent, verify=False, timeout=10) sm_sc = sm_rqst.status_code if sm_sc == 200: print(G + '['.rjust(8, '.') + ' Found ]' + W) @@ -179,33 +182,34 @@ async def sitemap(sm_url, data, output): sm_total.append(url) sm_total = set(sm_total) - print(G + '['.rjust(7, '.') + ' {} ]'.format(str(len(sm_total)))) + print(f'{G}{"[".rjust(7, ".")} {len(sm_total)} ]{W}') exporter(data, output, sm_total, 'sitemap') elif sm_sc == 404: - print(R + 
'['.rjust(8, '.') + ' Not Found ]' + W) + print(f'{R}{"[".rjust(8, ".")} Not Found ]{W}') else: print(f'{R}{"[".rjust(8, ".")} Status Code : {sm_sc} ]{W}') - except Exception as e: - print(f'\n{R}[-] Exception : {C}{e}{W}') + except Exception as exc: + print(f'\n{R}[-] Exception : {C}{exc}{W}') + log_writer(f'[crawler.sitemap] Exception = {exc}') -async def css(target, data, output): +async def css(target, data, soup, output): global css_total print(f'{G}[+] {C}Extracting CSS Links{W}', end='', flush=True) - css = soup.find_all('link', href=True) + css_links = soup.find_all('link', href=True) - for link in css: + for link in css_links: url = link.get('href') if url is not None and '.css' in url: css_total.append(url_filter(target, url)) css_total = set(css_total) - print(G + '['.rjust(11, '.') + ' {} ]'.format(str(len(css_total))) + W) + print(f'{G}{"[".rjust(11, ".")} {len(css_total)} ]{W}') exporter(data, output, css_total, 'css') -async def js(target, data, output): - global total, js_total +async def js_scan(target, data, soup, output): + global js_total print(f'{G}[+] {C}Extracting Javascript Links{W}', end='', flush=True) scr_tags = soup.find_all('script', src=True) @@ -217,12 +221,12 @@ async def js(target, data, output): js_total.append(tmp_url) js_total = set(js_total) - print(G + '['.rjust(4, '.') + ' {} ]'.format(str(len(js_total)))) + print(f'{G}{"[".rjust(4, ".")} {len(js_total)} ]{W}') exporter(data, output, js_total, 'javascripts') -async def internal_links(target, data, output): - global total, int_total +async def internal_links(target, data, soup, output): + global int_total print(f'{G}[+] {C}Extracting Internal Links{W}', end='', flush=True) ext = tldextract.extract(target) @@ -236,12 +240,12 @@ async def internal_links(target, data, output): int_total.append(url) int_total = set(int_total) - print(G + '['.rjust(6, '.') + ' {} ]'.format(str(len(int_total)))) + print(f'{G}{"[".rjust(6, ".")} {len(int_total)} ]{W}') exporter(data, output, int_total, 'internal_urls') -async def external_links(target, data, output): - global total, ext_total +async def external_links(target, data, soup, output): + global ext_total print(f'{G}[+] {C}Extracting External Links{W}', end='', flush=True) ext = tldextract.extract(target) @@ -255,12 +259,12 @@ async def external_links(target, data, output): ext_total.append(url) ext_total = set(ext_total) - print(G + '['.rjust(6, '.') + ' {} ]'.format(str(len(ext_total)))) + print(f'{G}{"[".rjust(6, ".")} {len(ext_total)} ]{W}') exporter(data, output, ext_total, 'external_urls') -async def images(target, data, output): - global total, img_total +async def images(target, data, soup, output): + global img_total print(f'{G}[+] {C}Extracting Images{W}', end='', flush=True) image_tags = soup.find_all('img') @@ -270,7 +274,7 @@ async def images(target, data, output): img_total.append(url_filter(target, url)) img_total = set(img_total) - print(G + '['.rjust(14, '.') + ' {} ]'.format(str(len(img_total)))) + print(f'{G}{"[".rjust(14, ".")} {len(img_total)} ]{W}') exporter(data, output, img_total, 'images') @@ -298,23 +302,23 @@ def fetch(site_url): else: # print(R + '['.rjust(8, '.') + ' {} ]'.format(sm_sc) + W) pass - except Exception: - # print(f'\n{R}[-] Exception : {C}{e}{W}') - pass + except Exception as exc: + # print(f'\n{R}[-] Exception : {C}{exc}{W}') + log_writer(f'[crawler.sm_crawl] Exception = {exc}') for site_url in sm_total: if site_url != sm_url: if site_url.endswith('xml') is True: - t = threading.Thread(target=fetch, args=[site_url]) - 
t.daemon = True - threads.append(t) - t.start() + task = threading.Thread(target=fetch, args=[site_url]) + task.daemon = True + threads.append(task) + task.start() for thread in threads: thread.join() sm_crawl_total = set(sm_crawl_total) - print(G + '['.rjust(14, '.') + ' {} ]'.format(str(len(sm_crawl_total)))) + print(f'{G}{"[".rjust(14, ".")} {len(sm_crawl_total)} ]{W}') exporter(data, output, sm_crawl_total, 'urls_inside_sitemap') @@ -337,32 +341,33 @@ def fetch(js_url): for item in found: if len(item) > 8: js_crawl_total.append(item) - except Exception as e: - print(f'\n{R}[-] Exception : {C}{e}{W}') + except Exception as exc: + # print(f'\n{R}[-] Exception : {C}{exc}{W}') + log_writer(f'[crawler.js_crawl] Exception = {exc}') for js_url in js_total: - t = threading.Thread(target=fetch, args=[js_url]) - t.daemon = True - threads.append(t) - t.start() + task = threading.Thread(target=fetch, args=[js_url]) + task.daemon = True + threads.append(task) + task.start() for thread in threads: thread.join() js_crawl_total = set(js_crawl_total) - print(G + '['.rjust(11, '.') + ' {} ]'.format(str(len(js_crawl_total)))) + print(f'{G}{"[".rjust(11, ".")} {len(js_crawl_total)} ]{W}') exporter(data, output, js_crawl_total, 'urls_inside_js') def exporter(data, output, list_name, file_name): - data[f'module-crawler-{file_name}'] = ({'links': list(list_name)}) + data[f'module-crawler-{file_name}'] = {'links': list(list_name)} data[f'module-crawler-{file_name}'].update({'exported': False}) fname = f'{output["directory"]}/{file_name}.{output["format"]}' output['file'] = fname export(output, data) -def stats(output, data): +def stats(output, data, soup): global total total.extend(r_total) diff --git a/modules/dirrec.py b/modules/dirrec.py index f6a25ad..029180c 100644 --- a/modules/dirrec.py +++ b/modules/dirrec.py @@ -1,11 +1,11 @@ #!/usr/bin/env python3 - import socket import aiohttp import asyncio from datetime import date from modules.export import export +from modules.write_log import log_writer R = '\033[31m' # red G = '\033[32m' # green @@ -28,8 +28,9 @@ async def fetch(url, session, redir): async with session.get(url, headers=header, allow_redirects=redir) as response: responses.append((url, response.status)) return response.status - except Exception as e: - print(f'{R}[-] {C}Exception : {W}' + str(e).strip('\n')) + except Exception as exc: + print(f'{R}[-] {C}Exception : {W}' + str(exc).strip('\n')) + log_writer(f'[dirrec] Exception : {exc}') async def insert(queue, filext, target, wdlist, redir): @@ -106,7 +107,6 @@ async def filter_out(target, url, status): def dir_output(output, data): - global responses, found result = {} for entry in responses: @@ -155,3 +155,4 @@ def hammer(target, threads, tout, wdlist, redir, sslv, dserv, output, data, file loop.run_until_complete(run(target, threads, tout, wdlist, redir, sslv, dserv, filext, total_num_words)) dir_output(output, data) loop.close() + log_writer('[dirrec] Completed') diff --git a/modules/dns.py b/modules/dns.py index 6a3211a..6b01d04 100644 --- a/modules/dns.py +++ b/modules/dns.py @@ -2,6 +2,7 @@ import dnslib from modules.export import export +from modules.write_log import log_writer R = '\033[31m' # red G = '\033[32m' # green @@ -15,9 +16,9 @@ def dnsrec(domain, output, data): print(f'\n{Y}[!] 
Starting DNS Enumeration...{W}\n') types = ['A', 'AAAA', 'ANY', 'CAA', 'CNAME', 'MX', 'NS', 'TXT'] full_ans = [] - for Type in types: - q = dnslib.DNSRecord.question(domain, Type) - pkt = q.send('8.8.8.8', 53, tcp='UDP') + for rec_type in types: + query = dnslib.DNSRecord.question(domain, rec_type) + pkt = query.send('8.8.8.8', 53, tcp='UDP') ans = dnslib.DNSRecord.parse(pkt) ans = str(ans) ans = ans.split('\n') @@ -42,8 +43,8 @@ def dnsrec(domain, output, data): result.setdefault('dns', ['DNS Records Not Found']) dmarc_target = f'_dmarc.{domain}' - q = dnslib.DNSRecord.question(dmarc_target, 'TXT') - pkt = q.send('8.8.8.8', 53, tcp='UDP') + query = dnslib.DNSRecord.question(dmarc_target, 'TXT') + pkt = query.send('8.8.8.8', 53, tcp='UDP') dmarc_ans = dnslib.DNSRecord.parse(pkt) dmarc_ans = str(dmarc_ans) dmarc_ans = dmarc_ans.split('\n') @@ -70,3 +71,4 @@ def dnsrec(domain, output, data): fname = f'{output["directory"]}/dns_records.{output["format"]}' output['file'] = fname export(output, data) + log_writer('[dns] Completed') diff --git a/modules/headers.py b/modules/headers.py index 79bed28..6026af8 100644 --- a/modules/headers.py +++ b/modules/headers.py @@ -2,6 +2,7 @@ import requests from modules.export import export +from modules.write_log import log_writer requests.packages.urllib3.disable_warnings() R = '\033[31m' # red @@ -20,10 +21,11 @@ def headers(target, output, data): print(f'{C}{key} : {W}{val}') if output != 'None': result.update({key: val}) - except Exception as e: - print(f'\n{R}[-] {C}Exception : {W}{e}\n') + except Exception as exc: + print(f'\n{R}[-] {C}Exception : {W}{exc}\n') if output != 'None': - result.update({'Exception': str(e)}) + result.update({'Exception': str(exc)}) + log_writer(f'[headers] Exception = {exc}') result.update({'exported': False}) if output != 'None': @@ -31,3 +33,4 @@ def headers(target, output, data): output['file'] = fname data['module-headers'] = result export(output, data) + log_writer('[headers] Completed') diff --git a/modules/portscan.py b/modules/portscan.py index 66872dd..3cd04e4 100644 --- a/modules/portscan.py +++ b/modules/portscan.py @@ -2,6 +2,7 @@ import asyncio from modules.export import export +from modules.write_log import log_writer R = '\033[31m' # red G = '\033[32m' # green @@ -18,24 +19,22 @@ async def insert(queue): await queue.put(port) -async def consumer(queue, ip, result): +async def consumer(queue, ip_addr, result): global counter while True: port = await queue.get() - await sock_conn(ip, port, result) + await sock_conn(ip_addr, port, result) queue.task_done() counter += 1 print(f'{Y}[!] {C}Scanning : {W}{counter}/{len(port_list)}', end='\r') -async def run(ip, result, threads): +async def run(ip_addr, result, threads): queue = asyncio.Queue(maxsize=threads) - - distrib = asyncio.create_task(insert(queue)) workers = [ asyncio.create_task( - consumer(queue, ip, result) + consumer(queue, ip_addr, result) ) for _ in range(threads)] await asyncio.gather(distrib) @@ -44,7 +43,7 @@ async def run(ip, result, threads): worker.cancel() -def ps(ip, output, data, threads): +def scan(ip_addr, output, data, threads): result = {} result['ports'] = [] print(f'\n{Y}[!] 
Starting Port Scan...{W}\n') @@ -52,16 +51,17 @@ def ps(ip, output, data, threads): loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) - loop.run_until_complete(run(ip, result, threads)) + loop.run_until_complete(run(ip_addr, result, threads)) loop.close() if output != 'None': ps_output(output, data, result) + log_writer('[portscan] Completed') -async def sock_conn(ip, port, result): +async def sock_conn(ip_addr, port, result): try: - connector = asyncio.open_connection(ip, port) + connector = asyncio.open_connection(ip_addr, port) await asyncio.wait_for(connector, 1) print(f'\x1b[K{G}[+] {C}{port}{W}') result['ports'].append(str(port)) diff --git a/modules/sslinfo.py b/modules/sslinfo.py index 8f69ede..1fe6b3d 100644 --- a/modules/sslinfo.py +++ b/modules/sslinfo.py @@ -4,6 +4,7 @@ import ssl import socket from modules.export import export +from modules.write_log import log_writer R = '\033[31m' # red G = '\033[32m' # green @@ -17,32 +18,31 @@ def cert(hostname, sslp, output, data): pair = {} print(f'\n{Y}[!] SSL Certificate Information : {W}\n') - pt = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - pt.settimeout(5) + port_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + port_test.settimeout(5) try: - pt.connect((hostname, sslp)) - pt.close() + port_test.connect((hostname, sslp)) + port_test.close() ctx = ssl.create_default_context() sock = socket.socket() sock.settimeout(5) - s = ctx.wrap_socket(sock, server_hostname=hostname) + ssl_conn = ctx.wrap_socket(sock, server_hostname=hostname) try: - s.connect((hostname, sslp)) - info = s.getpeercert() + ssl_conn.connect((hostname, sslp)) + info = ssl_conn.getpeercert() except Exception: info = ssl.get_server_certificate((hostname, sslp)) - f = open(f'{hostname}.pem', 'w') - f.write(info) - f.close() + with open(f'{hostname}.pem', 'w') as outfile: + outfile.write(info) cert_dict = ssl._ssl._test_decode_cert(f'{hostname}.pem') info = cert_dict os.remove(f'{hostname}.pem') - def unpack(v, pair): + def unpack(val, pair): convert = False - for item in v: + for item in val: if isinstance(item, tuple): for subitem in item: if isinstance(subitem, tuple): @@ -56,31 +56,33 @@ def unpack(v, pair): else: pass else: - print(f'{G}[+] {C}{k}: {W}{item}') + print(f'{G}[+] {C}{key}: {W}{item}') if output != 'None': - result.update({k: v}) + result.update({key: val}) - for k, v in info.items(): - if isinstance(v, tuple): - unpack(v, pair) - for k, v in pair.items(): - print(f'{G}[+] {C}{k}: {W}{v}') + for key, val in info.items(): + if isinstance(val, tuple): + unpack(val, pair) + for key, val in pair.items(): + print(f'{G}[+] {C}{key}: {W}{val}') if output != 'None': - result.update({k: v}) + result.update({key: val}) pair.clear() else: - print(f'{G}[+] {C}{k}: {W}{v}') + print(f'{G}[+] {C}{key}: {W}{val}') if output != 'None': - result.update({k: v}) + result.update({key: val}) except Exception: - pt.close() + port_test.close() print(f'{R}[-] {C}SSL is not Present on Target URL...Skipping...{W}') if output != 'None': result.update({'Error': 'SSL is not Present on Target URL'}) + log_writer('[sslinfo] SSL is not Present on Target URL...Skipping...') result.update({'exported': False}) if output != 'None': fname = f'{output["directory"]}/ssl.{output["format"]}' output['file'] = fname data['module-SSL Certificate Information'] = result export(output, data) + log_writer('[sslinfo] Completed') \ No newline at end of file diff --git a/modules/subdom.py b/modules/subdom.py index c760147..1d88482 100644 --- a/modules/subdom.py +++ 
b/modules/subdom.py @@ -2,9 +2,10 @@ import aiohttp import asyncio +from re import match from modules.export import export +from modules.write_log import log_writer from modules.subdomain_modules.bevigil_subs import bevigil -from modules.subdomain_modules.thcrowd_subs import thcrowd from modules.subdomain_modules.anubis_subs import anubisdb from modules.subdomain_modules.thminer_subs import thminer from modules.subdomain_modules.fb_subs import fb_cert @@ -30,7 +31,6 @@ async def query(hostname, tout, conf_path): async with aiohttp.ClientSession(timeout=timeout) as session: await asyncio.gather( bevigil(hostname, conf_path, session), - thcrowd(hostname, session), anubisdb(hostname, session), thminer(hostname, session), fb_cert(hostname, conf_path, session), @@ -58,15 +58,19 @@ def subdomains(hostname, tout, output, data, conf_path): found = [item for item in found if item.endswith(hostname)] valid = r"^[A-Za-z0-9._~()'!*:@,;+?-]*$" - from re import match found = [item for item in found if match(valid, item)] found = set(found) total = len(found) if len(found) != 0: print(f'\n{G}[+] {C}Results : {W}\n') + i = 0 for url in found: print(url) + i += 1 + if i == 20: + print(f'\n{G}[+]{C} Results truncated...{W}') + break print(f'\n{G}[+] {C}Total Unique Sub Domains Found : {W}{total}') @@ -77,3 +81,4 @@ def subdomains(hostname, tout, output, data, conf_path): fname = f'{output["directory"]}/subdomains.{output["format"]}' output['file'] = fname export(output, data) + log_writer('[subdom] Completed') diff --git a/modules/subdomain_modules/anubis_subs.py b/modules/subdomain_modules/anubis_subs.py index 9ba45e3..1ac6a5e 100644 --- a/modules/subdomain_modules/anubis_subs.py +++ b/modules/subdomain_modules/anubis_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async def anubisdb(hostname, session): @@ -15,15 +16,17 @@ async def anubisdb(hostname, session): url = f'https://jldc.me/anubis/subdomains/{hostname}' try: async with session.get(url) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: output = await resp.text() json_out = loads(output) parent.found.extend(json_out) print(f'{G}[+] {Y}AnubisDB {W}found {C}{len(json_out)} {W}subdomains!') - elif sc == 300: + elif status == 300: pass else: - print(f'{R}[-] {C}AnubisDB Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}AnubisDB Exception : {W}{e}') + print(f'{R}[-] {C}AnubisDB Status : {W}{status}') + log_writer(f'[anubis_subs] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}AnubisDB Exception : {W}{exc}') + log_writer('[anubis_subs] Completed') diff --git a/modules/subdomain_modules/bevigil_subs.py b/modules/subdomain_modules/bevigil_subs.py index 5f253ec..b5ad16e 100644 --- a/modules/subdomain_modules/bevigil_subs.py +++ b/modules/subdomain_modules/bevigil_subs.py @@ -6,33 +6,48 @@ W = '\033[0m' # white Y = '\033[33m' # yellow -from json import loads +from json import loads, dumps import modules.subdom as parent +from modules.write_log import log_writer + async def bevigil(hostname, conf_path, session): with open(f'{conf_path}/keys.json', 'r') as keyfile: json_read = keyfile.read() - json_load = loads(json_read) - bevigil_key = json_load['bevigil'] + json_load = loads(json_read) + try: + bevigil_key = json_load['bevigil'] + except KeyError: + log_writer('[bevigil_subs] key missing in keys.json') + with open(f'{conf_path}/keys.json', 'w') as outfile: + json_load['bevigil'] = None + bevigil_key = 
None + outfile.write( + dumps(json_load, sort_keys=True, indent=4) + ) - if bevigil_key is not None: + if bevigil_key is not None: print(f'{Y}[!] {C}Requesting {G}BeVigil{W}') url = f"https://osint.bevigil.com/api/{hostname}/subdomains/" header = {"X-Access-Token": bevigil_key} - + try: async with session.get(url, headers=header) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: json_data: list = await resp.json() subdomains = json_data.get("subdomains") print(f'{G}[+] {Y}BeVigil {W}found {C}{len(subdomains)} {W}subdomains!') parent.found.extend(subdomains) else: - print(f'{R}[-] {C}BeVigil Status : {W}{sc}') + print(f'{R}[-] {C}BeVigil Status : {W}{status}') + log_writer(f'[bevigil_subs] Status = {status}, expected 200') - except Exception as e: - print(f'{R}[-] {C}BeVigil Exception : {W}{e}') + except Exception as exc: + print(f'{R}[-] {C}BeVigil Exception : {W}{exc}') + log_writer(f'[bevigil_subs] Exception = {exc}') else: - print(f'{Y}[!] Skipping BeVigil : {W}API key not found!') \ No newline at end of file + print(f'{Y}[!] Skipping BeVigil : {W}API key not found!') + log_writer('[bevigil_subs] API key not found') + log_writer('[bevigil_subs] Completed') diff --git a/modules/subdomain_modules/certspot_subs.py b/modules/subdomain_modules/certspot_subs.py index 1201711..076b6f7 100644 --- a/modules/subdomain_modules/certspot_subs.py +++ b/modules/subdomain_modules/certspot_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async def certspot(hostname, session): @@ -21,8 +22,8 @@ async def certspot(hostname, session): try: async with session.get(url, params=cs_params) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: json_data = await resp.text() json_read = loads(json_data) print(f'{G}[+] {Y}Certsport {W}found {C}{len(json_read)} {W}subdomains!') @@ -30,6 +31,9 @@ async def certspot(hostname, session): domains = json_read[i]['dns_names'] parent.found.extend(domains) else: - print(f'{R}[-] {C}CertSpotter Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}CertSpotter Exception : {W}{e}') + print(f'{R}[-] {C}CertSpotter Status : {W}{status}') + log_writer(f'[certspot_subs] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}CertSpotter Exception : {W}{exc}') + log_writer(f'[certspot_subs] Exception = {exc}') + log_writer('[certspot_subs] Completed') diff --git a/modules/subdomain_modules/crtsh_subs.py b/modules/subdomain_modules/crtsh_subs.py index 2774305..5c92e49 100644 --- a/modules/subdomain_modules/crtsh_subs.py +++ b/modules/subdomain_modules/crtsh_subs.py @@ -8,6 +8,7 @@ import psycopg2 import modules.subdom as parent +from modules.write_log import log_writer async def crtsh(hostname): @@ -31,5 +32,7 @@ async def crtsh(hostname): tmp_list.append(url[0]) print(f'{G}[+] {Y}CRT.sh {W}found {C}{len(tmp_list)} {W}subdomains!') parent.found.extend(tmp_list) - except Exception as e: - print(f'{R}[-] {C}crtsh Exception : {W}{e}') + except Exception as exc: + print(f'{R}[-] {C}crtsh Exception : {W}{exc}') + log_writer(f'[crtsh_subs] Exception = {exc}') + log_writer('[crtsh_subs] Completed') diff --git a/modules/subdomain_modules/fb_subs.py b/modules/subdomain_modules/fb_subs.py index 39bc786..8f799e5 100644 --- a/modules/subdomain_modules/fb_subs.py +++ b/modules/subdomain_modules/fb_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async 
def fb_cert(hostname, conf_path, session): @@ -27,8 +28,8 @@ async def fb_cert(hostname, conf_path, session): } try: async with session.get(url, params=fb_params) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: json_data = await resp.text() json_read = loads(json_data) domains = json_read['data'] @@ -36,8 +37,12 @@ async def fb_cert(hostname, conf_path, session): for i in range(0, len(domains)): parent.found.extend(json_read['data'][i]['domains']) else: - print(f'{R}[-] {C}Facebook Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}Facebook Exception : {W}{e}') + print(f'{R}[-] {C}Facebook Status : {W}{status}') + log_writer(f'[fb_subs] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}Facebook Exception : {W}{exc}') + log_writer(f'[fb_subs] Exception = {exc}') else: print(f'{Y}[!] Skipping Facebook : {W}API key not found!') + log_writer('[fb_subs] API key not found') + log_writer('[fb_subs] Completed') diff --git a/modules/subdomain_modules/htarget_subs.py b/modules/subdomain_modules/htarget_subs.py index 9cd0201..9133623 100644 --- a/modules/subdomain_modules/htarget_subs.py +++ b/modules/subdomain_modules/htarget_subs.py @@ -7,6 +7,7 @@ Y = '\033[33m' # yellow import modules.subdom as parent +from modules.write_log import log_writer async def hackertgt(hostname, session): @@ -14,8 +15,8 @@ async def hackertgt(hostname, session): url = f'https://api.hackertarget.com/hostsearch/?q={hostname}' try: async with session.get(url) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: data = await resp.text() data_list = data.split('\n') tmp_list = [] @@ -25,6 +26,9 @@ async def hackertgt(hostname, session): print(f'{G}[+] {Y}HackerTarget {W}found {C}{len(tmp_list)} {W}subdomains!') parent.found.extend(tmp_list) else: - print(f'{R}[-] {C}HackerTarget Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}HackerTarget Exception : {W}{e}') + print(f'{R}[-] {C}HackerTarget Status : {W}{status}') + log_writer(f'[htarget_subs] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}HackerTarget Exception : {W}{exc}') + log_writer(f'[htarget_subs] Exception = {exc}') + log_writer('[htarget_subs] Completed') diff --git a/modules/subdomain_modules/shodan_subs.py b/modules/subdomain_modules/shodan_subs.py index 2ff5da2..a1da409 100644 --- a/modules/subdomain_modules/shodan_subs.py +++ b/modules/subdomain_modules/shodan_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async def shodan(hostname, conf_path, session): @@ -23,8 +24,8 @@ async def shodan(hostname, conf_path, session): try: async with session.get(url) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: json_data = await resp.text() json_read = loads(json_data) domains = json_read['subdomains'] @@ -34,8 +35,12 @@ async def shodan(hostname, conf_path, session): print(f'{G}[+] {Y}Shodan {W}found {C}{len(tmp_list)} {W}subdomains!') parent.found.extend(tmp_list) else: - print(f'{R}[-] {C}Shodan Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}Shodan Exception : {W}{e}') + print(f'{R}[-] {C}Shodan Status : {W}{status}') + log_writer(f'[shodan_subs] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}Shodan Exception : {W}{exc}') + log_writer(f'[shodan_subs] Exception = {exc}') else: print(f'{Y}[!] 
Skipping Shodan : {W}API key not found!') + log_writer('[shodan_subs] API key not found') + log_writer('[shodan_subs] Completed') diff --git a/modules/subdomain_modules/sonar_subs.py b/modules/subdomain_modules/sonar_subs.py index ffce1e9..6bc3cab 100644 --- a/modules/subdomain_modules/sonar_subs.py +++ b/modules/subdomain_modules/sonar_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async def sonar(hostname, session): @@ -15,13 +16,16 @@ async def sonar(hostname, session): url = f'https://sonar.omnisint.io/subdomains/{hostname}' try: async with session.get(url) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: json_data = await resp.text() json_read = loads(json_data) print(f'{G}[+] {Y}Sonar {W}found {C}{len(json_read)} {W}subdomains!') parent.found.extend(json_read) else: - print(f'{R}[-] {C}Sonar Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}Sonar Exception : {W}{e}') + print(f'{R}[-] {C}Sonar Status : {W}{status}') + log_writer(f'[sonar_subs] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}Sonar Exception : {W}{exc}') + log_writer(f'[sonar_subs] Exception = {exc}') + log_writer('[sonar_subs] Completed') diff --git a/modules/subdomain_modules/thcrowd_subs.py b/modules/subdomain_modules/thcrowd_subs.py index da8d4d6..1ce44d6 100644 --- a/modules/subdomain_modules/thcrowd_subs.py +++ b/modules/subdomain_modules/thcrowd_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async def thcrowd(hostname, session): @@ -18,8 +19,8 @@ async def thcrowd(hostname, session): } try: async with session.get(url, params=thc_params) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: output = await resp.text() json_out = loads(output) if json_out['response_code'] == '0': @@ -29,6 +30,9 @@ async def thcrowd(hostname, session): print(f'{G}[+] {Y}ThreatCrowd {W}found {C}{len(subd)} {W}subdomains!') parent.found.extend(subd) else: - print(f'{R}[-] {C}ThreatCrowd Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}ThreatCrowd Exception : {W}{e}') + print(f'{R}[-] {C}ThreatCrowd Status : {W}{status}') + log_writer(f'[thcrowd] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}ThreatCrowd Exception : {W}{exc}') + log_writer(f'[thcrowd] Exception = {exc}') + log_writer('[thcrowd] Completed') diff --git a/modules/subdomain_modules/thminer_subs.py b/modules/subdomain_modules/thminer_subs.py index 119226e..afd570d 100644 --- a/modules/subdomain_modules/thminer_subs.py +++ b/modules/subdomain_modules/thminer_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async def thminer(hostname, session): @@ -19,14 +20,17 @@ async def thminer(hostname, session): } try: async with session.get(url, params=thm_params) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: output = await resp.text() json_out = loads(output) subd = json_out['results'] print(f'{G}[+] {Y}ThreatMiner {W}found {C}{len(subd)} {W}subdomains!') parent.found.extend(subd) else: - print(f'{R}[-] {C}ThreatMiner Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}ThreatMiner Exception : {W}{e}') + print(f'{R}[-] {C}ThreatMiner Status : {W}{status}') + log_writer(f'[thminer_subs] Status = {status}, expected 200') + except Exception as exc: + 
print(f'{R}[-] {C}ThreatMiner Exception : {W}{exc}') + log_writer(f'[thminer_subs] Exception = {exc}') + log_writer('[thminer_subs] Completed') diff --git a/modules/subdomain_modules/virustotal_subs.py b/modules/subdomain_modules/virustotal_subs.py index 6c65e30..c5a6ae6 100644 --- a/modules/subdomain_modules/virustotal_subs.py +++ b/modules/subdomain_modules/virustotal_subs.py @@ -8,6 +8,7 @@ from json import loads import modules.subdom as parent +from modules.write_log import log_writer async def virust(hostname, conf_path, session): @@ -25,8 +26,8 @@ async def virust(hostname, conf_path, session): } try: async with session.get(url, headers=vt_headers) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: json_data = await resp.text() json_read = loads(json_data) domains = json_read['data'] @@ -36,8 +37,12 @@ async def virust(hostname, conf_path, session): print(f'{G}[+] {Y}VirusTotal {W}found {C}{len(tmp_list)} {W}subdomains!') parent.found.extend(tmp_list) else: - print(f'{R}[-] {C}VirusTotal Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}VirusTotal Exception : {W}{e}') + print(f'{R}[-] {C}VirusTotal Status : {W}{status}') + log_writer(f'[virustotal_subs] Status = {status}') + except Exception as exc: + print(f'{R}[-] {C}VirusTotal Exception : {W}{exc}') + log_writer(f'[virustotal_subs] Exception = {exc}') else: print(f'{Y}[!] Skipping VirusTotal : {W}API key not found!') + log_writer('[virustotal_subs] API key not found') + log_writer('[virustotal_subs] Completed') diff --git a/modules/subdomain_modules/wayback_subs.py b/modules/subdomain_modules/wayback_subs.py index 7e95c0c..219b9b4 100644 --- a/modules/subdomain_modules/wayback_subs.py +++ b/modules/subdomain_modules/wayback_subs.py @@ -7,6 +7,7 @@ Y = '\033[33m' # yellow import modules.subdom as parent +from modules.write_log import log_writer async def machine(hostname, session): @@ -14,8 +15,8 @@ async def machine(hostname, session): url = f'http://web.archive.org/cdx/search/cdx?url=*.{hostname}/*&output=txt&fl=original&collapse=urlkey' try: async with session.get(url) as resp: - sc = resp.status - if sc == 200: + status = resp.status + if status == 200: raw_data = await resp.text() lines = raw_data.split('\n') tmp_list = [] @@ -26,6 +27,9 @@ async def machine(hostname, session): print(f'{G}[+] {Y}Wayback {W}found {C}{len(tmp_list)} {W}subdomains!') parent.found.extend(tmp_list) else: - print(f'{R}[-] {C}Wayback Status : {W}{sc}') - except Exception as e: - print(f'{R}[-] {C}Wayback Exception : {W}{e}') + print(f'{R}[-] {C}Wayback Status : {W}{status}') + log_writer(f'[wayback_subs] Status = {status}, expected 200') + except Exception as exc: + print(f'{R}[-] {C}Wayback Exception : {W}{exc}') + log_writer(f'[wayback_subs] Exception = {exc}') + log_writer('[wayback_subs] Completed') diff --git a/modules/wayback.py b/modules/wayback.py index e75784f..27e8a7e 100644 --- a/modules/wayback.py +++ b/modules/wayback.py @@ -10,6 +10,7 @@ import requests from datetime import date from modules.export import export +from modules.write_log import log_writer def timetravel(target, data, output): @@ -40,8 +41,10 @@ def timetravel(target, data, output): print(R + '['.rjust(5, '.') + ' N/A ]') else: print(f'\n{R}[-] Status : {C}{check_sc}{W}') - except Exception as e: - print(f'\n{R}[-] Exception : {C}{e}{W}') + log_writer(f'[wayback] Status = {check_sc}, expected 200') + except Exception as exc: + print(f'\n{R}[-] Exception : {C}{exc}{W}') + log_writer(f'[wayback] Exception = {exc}') if 
is_avail is True: print(f'{Y}[!] {C}Fetching URLs{W}', end='', flush=True) @@ -56,14 +59,14 @@ def timetravel(target, data, output): } try: - r = requests.get(wm_url, params=payload) - r_sc = r.status_code + rqst = requests.get(wm_url, params=payload, timeout=10) + r_sc = rqst.status_code if r_sc == 200: - r_data = r.text + r_data = rqst.text if len(r_data) != 0: r_data = r_data.split('\n') r_data = set(r_data) - print(G + '['.rjust(5, '.') + ' {} ]'.format(str(len(r_data)))) + print(f'{G}{"[".rjust(5, ".")} {len(r_data)} ]{W}') wayback_total.extend(r_data) if output != 'None': @@ -74,8 +77,10 @@ def timetravel(target, data, output): output['file'] = fname export(output, data) else: - print(R + '['.rjust(5, '.') + ' Not Found ]' + W) + print(f'{R}{"[".rjust(5, ".")} Not Found ]{W}') else: - print(R + '['.rjust(5, '.') + ' {} ]'.format(r_sc) + W) - except Exception as e: - print(f'\n{R}[-] Exception : {C}{e}{W}') + print(f'{R}{"[".rjust(5, ".")} {r_sc} ]{W}') + except Exception as exc: + print(f'\n{R}[-] Exception : {C}{exc}{W}') + log_writer(f'[wayback] Exception = {exc}') + log_writer('[wayback] Completed') diff --git a/modules/whois.py b/modules/whois.py index db00542..270d963 100644 --- a/modules/whois.py +++ b/modules/whois.py @@ -2,6 +2,7 @@ import ipwhois from modules.export import export +from modules.write_log import log_writer R = '\033[31m' # red G = '\033[32m' # green @@ -42,11 +43,11 @@ def whois_lookup(ip_addr, output, data): result.update({str(key): str(temp_val)}) else: pass - except Exception as e: - print(f'{R}[-] Error : {C}{e}{W}') + except Exception as exc: + print(f'{R}[-] Error : {C}{exc}{W}') if output != 'None': - result.update({'Error': str(e)}) - + result.update({'Error': str(exc)}) + log_writer(f'[whois] Exception = {exc}') result.update({'exported': False}) if output != 'None': @@ -54,3 +55,4 @@ def whois_lookup(ip_addr, output, data): output['file'] = fname data['module-whois'] = result export(output, data) + log_writer('[whois] Completed') diff --git a/modules/write_log.py b/modules/write_log.py new file mode 100644 index 0000000..d8d7191 --- /dev/null +++ b/modules/write_log.py @@ -0,0 +1,13 @@ +import logging +import settings + + +def log_writer(message): + logging.basicConfig( + filename=settings.log_file_path, + encoding='utf-8', + level=logging.INFO, + format='[%(asctime)s] : %(message)s', + datefmt='%m/%d/%Y %I:%M:%S %p' + ) + logging.info(message) diff --git a/settings.py b/settings.py index 5bd8b6d..75f4732 100644 --- a/settings.py +++ b/settings.py @@ -11,6 +11,7 @@ meta_file_path = f'{path_to_script}/metadata.json' keys_file_path = f'{conf_path}/keys.json' conf_file_path = f'{conf_path}/config.json' +log_file_path = f'{home}/.local/share/finalrecon/run.log' if path.exists(conf_path): pass
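
Reviewer note on modules/write_log.py: logging.basicConfig() only configures the root logger on its first call; once a handler is installed, every later call is a silent no-op (unless force=True is passed), so invoking it inside log_writer() for every message works, but only by accident. It will also raise FileNotFoundError if ~/.local/share/finalrecon/ does not exist before the first write, and the encoding keyword requires Python 3.9+. Below is a minimal sketch that configures logging once at import time instead -- it assumes settings.log_file_path as defined in this diff and that nothing else creates the log directory; a suggestion, not what the PR ships:

    # modules/write_log.py -- once-at-import configuration (reviewer sketch)
    import logging
    import os

    import settings

    # basicConfig() does not create missing parent directories, so make
    # sure ~/.local/share/finalrecon exists before the first write.
    os.makedirs(os.path.dirname(settings.log_file_path), exist_ok=True)

    # Configure the root logger exactly once, at import time. Calling
    # basicConfig() inside every log_writer() call only appears to work
    # because all calls after the first are ignored.
    logging.basicConfig(
        filename=settings.log_file_path,
        encoding='utf-8',  # the 'encoding' argument exists since Python 3.9
        level=logging.INFO,
        format='[%(asctime)s] : %(message)s',
        datefmt='%m/%d/%Y %I:%M:%S %p'
    )


    def log_writer(message):
        logging.info(message)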
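
Reviewer note on modules/portscan.py: the ps() -> scan() rename keeps the same producer/consumer layout, where insert() feeds ports into a bounded asyncio.Queue and N consumer tasks race socket connections against a 1-second timeout. For context, here is a self-contained sketch of that pattern with simplified names and a sample port list rather than FinalRecon's actual port_list; it illustrates the technique, not the PR's exact code:

    # Queue-based TCP connect scan -- simplified sketch of the pattern
    # used by modules/portscan.py (names and port list are illustrative).
    import asyncio

    PORTS = [21, 22, 80, 443, 3306, 8080]  # sample ports only


    async def producer(queue):
        for port in PORTS:
            await queue.put(port)


    async def worker(queue, host, open_ports):
        while True:
            port = await queue.get()
            try:
                # A completed TCP handshake within the timeout means open.
                _, writer = await asyncio.wait_for(
                    asyncio.open_connection(host, port), 1)
                open_ports.append(port)
                writer.close()
            except (asyncio.TimeoutError, OSError):
                pass
            queue.task_done()


    async def scan(host, threads=3):
        queue = asyncio.Queue(maxsize=threads)
        open_ports = []
        workers = [asyncio.create_task(worker(queue, host, open_ports))
                   for _ in range(threads)]
        await producer(queue)
        await queue.join()  # block until every queued port is processed
        for task in workers:
            task.cancel()
        return open_ports


    if __name__ == '__main__':
        print(asyncio.run(scan('127.0.0.1')))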