
Commit

v1.1.5
thewhiteh4t committed Jul 31, 2022
1 parent 60c6c7f commit 8567cec
Showing 21 changed files with 511 additions and 263 deletions.
16 changes: 13 additions & 3 deletions CHANGELOG.md
@@ -1,5 +1,17 @@
# Changelog

## v1.1.5

* fixed some url issues in crawler
* threads added in port scanner
* fixed status code issue in directory enumeration module
* more sources added for subdomain enumeration
* wayback
* sonar
* hackertarget

---

## v1.1.4

* CHANGELOG.md added
@@ -14,6 +26,4 @@
* results are printed as they are found
* port scanner
* module optimized
* dedicated wayback module added

---
* dedicated wayback module added
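
The changelog above lists wayback, sonar, and hackertarget as additional subdomain sources, but the subdomain module itself is not among the files expanded in this view. Below is a minimal sketch of how two of those sources can be queried; the endpoints are the public hackertarget hostsearch and Wayback Machine CDX APIs and may differ from the exact URLs FinalRecon calls.

```python
# Illustrative only: the subdomain enumeration module is not shown in this
# diff. Endpoints below are public APIs, not necessarily the ones FinalRecon uses.
import requests


def hackertarget_subs(domain: str, timeout: int = 30) -> set:
    url = f'https://api.hackertarget.com/hostsearch/?q={domain}'
    resp = requests.get(url, timeout=timeout)
    # response lines look like "sub.example.com,1.2.3.4"
    return {line.split(',')[0] for line in resp.text.splitlines() if ',' in line}


def wayback_subs(domain: str, timeout: int = 30) -> set:
    url = ('http://web.archive.org/cdx/search/cdx'
           f'?url=*.{domain}/*&collapse=urlkey&fl=original')
    resp = requests.get(url, timeout=timeout)
    subs = set()
    for archived in resp.text.splitlines():
        # keep only the hostname portion of each archived URL
        host = archived.split('://')[-1].split('/')[0].split(':')[0]
        if host.endswith(domain):
            subs.add(host)
    return subs


print(sorted(hackertarget_subs('example.com') | wayback_subs('example.com')))
```
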
21 changes: 21 additions & 0 deletions conf/config.json
@@ -0,0 +1,21 @@
{
"common": {
"timeout": 30
},
"ssl_cert": {
"ssl_port": 443
},
"port_scan": {
"threads": 50
},
"dir_enum": {
"threads": 50,
"redirect": false,
"verify_ssl": false,
"dns_server": "8.8.8.8, 8.8.4.4, 1.1.1.1, 1.0.0.1",
"extension": ""
},
"export": {
"format": "txt"
}
}
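
finalrecon.py now does `import settings as config` and reads values such as `config.dir_enum_th` and `config.port_scan_th`, but settings.py itself is not expanded in this view. A plausible sketch is shown below, assuming it copies conf/ into ~/.config/finalrecon on first run and maps the JSON above onto module-level names; the wordlist default and exact attribute names are assumptions based on how finalrecon.py uses them.

```python
# Hypothetical settings.py (not shown in this diff excerpt): copies the
# bundled conf/ directory on first run and exposes config.json values
# under the names referenced by finalrecon.py.
import json
import os
import shutil

home = os.getenv('HOME')
usr_data = home + '/.local/share/finalrecon/dumps/'
conf_path = home + '/.config/finalrecon'
path_to_script = os.path.dirname(os.path.realpath(__file__))
src_conf_path = path_to_script + '/conf/'
meta_file_path = path_to_script + '/metadata.json'

if not os.path.exists(conf_path):
    # requires Python 3.8+ for dirs_exist_ok
    shutil.copytree(src_conf_path, conf_path, dirs_exist_ok=True)

with open(conf_path + '/config.json', 'r') as f:
    _conf = json.load(f)

timeout = _conf['common']['timeout']
ssl_port = _conf['ssl_cert']['ssl_port']
port_scan_th = _conf['port_scan']['threads']
dir_enum_th = _conf['dir_enum']['threads']
dir_enum_redirect = _conf['dir_enum']['redirect']
dir_enum_sslv = _conf['dir_enum']['verify_ssl']
dir_enum_dns = _conf['dir_enum']['dns_server']
dir_enum_ext = _conf['dir_enum']['extension']
dir_enum_wlist = path_to_script + '/wordlists/dirb_common.txt'  # assumed default
export_fmt = _conf['export']['format']
```

Whatever the real implementation looks like, the effect of the change is that per-module defaults now live in conf/config.json rather than as hard-coded literals in finalrecon.py.
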
54 changes: 27 additions & 27 deletions finalrecon.py
@@ -8,23 +8,18 @@
C = '\033[36m' # cyan
W = '\033[0m' # white

home = os.getenv('HOME')
usr_data = home + '/.local/share/finalrecon/dumps/'
conf_path = home + '/.config/finalrecon'
path_to_script = os.path.dirname(os.path.realpath(__file__))
src_conf_path = path_to_script + '/conf/'
meta_file_path = path_to_script + '/metadata.json'
fail = False

if os.path.exists(conf_path):
pass
else:
import shutil
shutil.copytree(src_conf_path, conf_path, dirs_exist_ok=True)
import settings as config

home = config.home
usr_data = config.usr_data
conf_path = config.conf_path
path_to_script = config.path_to_script
src_conf_path = config.src_conf_path
meta_file_path = config.meta_file_path

import argparse

version = '1.1.4'
version = '1.1.5'
gh_version = ''
twitter_url = ''
discord_url = ''
@@ -43,25 +38,28 @@
parser.add_argument('--full', help='Full Recon', action='store_true')

ext_help = parser.add_argument_group('Extra Options')
ext_help.add_argument('-t', type=int, help='Number of Threads [ Default : 30 ]')
ext_help.add_argument('-dt', type=int, help='Number of threads for directory enum [ Default : 30 ]')
ext_help.add_argument('-pt', type=int, help='Number of threads for port scan [ Default : 50 ]')
ext_help.add_argument('-T', type=float, help='Request Timeout [ Default : 30.0 ]')
ext_help.add_argument('-w', help='Path to Wordlist [ Default : wordlists/dirb_common.txt ]')
ext_help.add_argument('-r', action='store_true', help='Allow Redirect [ Default : False ]')
ext_help.add_argument('-s', action='store_false', help='Toggle SSL Verification [ Default : True ]')
ext_help.add_argument('-sp', type=int, help='Specify SSL Port [ Default : 443 ]')
ext_help.add_argument('-d', help='Custom DNS Servers [ Default : 1.1.1.1 ]')
ext_help.add_argument('-e', help='File Extensions [ Example : txt, xml, php ]')
ext_help.add_argument('-o', help='Export Output [ Default : txt ]')
ext_help.add_argument('-o', help='Export Format [ Default : txt ]')
ext_help.set_defaults(
t=30,
T=30.0,
w=path_to_script + '/wordlists/dirb_common.txt',
r=False,
s=True,
sp=443,
d='1.1.1.1',
e='',
o='txt')
dt=config.dir_enum_th,
pt=config.port_scan_th,
T=config.timeout,
w=config.dir_enum_wlist,
r=config.dir_enum_redirect,
s=config.dir_enum_sslv,
sp=config.ssl_port,
d=config.dir_enum_dns,
e=config.dir_enum_ext,
o=config.export_fmt
)

try:
args = parser.parse_args()
@@ -78,7 +76,8 @@
wback = args.wayback
pscan = args.ps
full = args.full
threads = args.t
threads = args.dt
pscan_threads = args.pt
tout = args.T
wdlist = args.w
redir = args.r
@@ -141,11 +140,12 @@ def full_recon():
subdomains(domain, tout, output, data, conf_path)
else:
pass
ps(ip, output, data, threads)
ps(ip, output, data, pscan_threads)
crawler(target, output, data)
hammer(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext)
timetravel(target, data, output)


try:
banner()

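
The net effect of the argument changes above is that the single `-t` thread option is replaced by `-dt` for directory enumeration and `-pt` for the port scanner, with defaults sourced from the config module instead of literals. A small stand-alone illustration of the new flags follows; the parser here is a stripped-down stand-in, not the full FinalRecon parser.

```python
# Minimal stand-in parser demonstrating the new -dt / -pt split.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--url')
parser.add_argument('--full', action='store_true')
parser.add_argument('-dt', type=int, default=50)   # dir_enum.threads from config.json
parser.add_argument('-pt', type=int, default=50)   # port_scan.threads from config.json

args = parser.parse_args(['--url', 'https://example.com', '--full', '-dt', '100', '-pt', '200'])
print(args.dt, args.pt)  # 100 200
```
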
2 changes: 1 addition & 1 deletion metadata.json
@@ -1,7 +1,7 @@
{
"name": "FinalRecon",
"author": "thewhiteh4t",
"version": "1.1.4",
"version": "1.1.5",
"twitter": "https://twitter.com/thewhiteh4t",
"comms": "https://twc1rcle.com/"
}
30 changes: 14 additions & 16 deletions modules/crawler.py
@@ -20,10 +20,7 @@

user_agent = {'User-Agent': 'FinalRecon'}

# url = ''
soup = ''
r_url = ''
sm_url = ''
total = []
r_total = []
sm_total = []
@@ -62,14 +59,15 @@ def crawler(target, output, data):
else:
ext = tldextract.extract(target)
hostname = '.'.join(part for part in ext if part)
r_url = f'{protocol}://{hostname}/robots.txt'
sm_url = f'{protocol}://{hostname}/sitemap.xml'
base_url = f'{protocol}://{hostname}'
r_url = f'{base_url}/robots.txt'
sm_url = f'{base_url}/sitemap.xml'

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
tasks = asyncio.gather(
robots(target, data, output),
sitemap(data, output),
robots(r_url, base_url, data, output),
sitemap(sm_url, data, output),
css(target, data, output),
js(target, data, output),
internal_links(target, data, output),
@@ -119,14 +117,14 @@ def url_filter(target, link):
return ret_url
else:
pass
return link


async def robots(target, data, output):
async def robots(robo_url, base_url, data, output):
global r_total
print(f'{G}[+] {C}Looking for robots.txt{W}', end='', flush=True)

try:
r_rqst = requests.get(r_url, headers=user_agent, verify=False, timeout=10)
r_rqst = requests.get(robo_url, headers=user_agent, verify=False, timeout=10)
r_sc = r_rqst.status_code
if r_sc == 200:
print(G + '['.rjust(9, '.') + ' Found ]' + W)
@@ -143,9 +141,9 @@ async def robots(target, data, output):
try:
url = url[1]
url = url.strip()
tmp_url = url_filter(target, url)
tmp_url = url_filter(base_url, url)
if tmp_url is not None:
r_total.append(url_filter(target, url))
r_total.append(url_filter(base_url, url))
if url.endswith('xml') is True:
sm_total.append(url)
except Exception:
@@ -162,8 +160,8 @@ async def robots(target, data, output):
print(f'\n{R}[-] Exception : {C}{e}{W}')


async def sitemap(data, output):
global sm_url, total, sm_total
async def sitemap(sm_url, data, output):
global sm_total
print(f'{G}[+] {C}Looking for sitemap.xml{W}', end='', flush=True)
try:
sm_rqst = requests.get(sm_url, headers=user_agent, verify=False, timeout=10)
@@ -185,13 +183,13 @@ async def sitemap(data, output):
elif sm_sc == 404:
print(R + '['.rjust(8, '.') + ' Not Found ]' + W)
else:
print(R + '['.rjust(8, '.') + ' {} ]'.format(sm_sc) + W)
print(f'{R}{"[".rjust(8, ".")} Status Code : {sm_sc} ]{W}')
except Exception as e:
print(f'\n{R}[-] Exception : {C}{e}{W}')


async def css(target, data, output):
global soup, total, css_total
global css_total
print(f'{G}[+] {C}Extracting CSS Links{W}', end='', flush=True)
css = soup.find_all('link', href=True)

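
In the crawler changes above, robots() and sitemap() now receive their URLs explicitly, and robots.txt entries are resolved against base_url rather than the raw target. url_filter itself is only partially visible here, so the helper below is a hypothetical stand-in that mirrors the apparent behaviour: relative Disallow paths are joined to the base URL, absolute URLs pass through, empty entries are dropped.

```python
# Hypothetical helper, not the actual FinalRecon url_filter implementation.
def resolve_entry(base_url: str, entry: str):
    entry = entry.strip()
    if not entry or entry == '/':
        return None
    if entry.startswith('http://') or entry.startswith('https://'):
        return entry
    return f'{base_url}/{entry.lstrip("/")}'


base = 'https://example.com'
for raw in ['/admin/', 'https://example.com/sitemap.xml', '*.php']:
    print(resolve_entry(base, raw))
```
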
5 changes: 2 additions & 3 deletions modules/dirrec.py
@@ -63,8 +63,7 @@ async def consumer(queue, target, session, redir, total_num_words):
url = values[0]
redir = values[1]
status = await fetch(url, session, redir)
if status == 200:
await filter_out(target, url, status)
await filter_out(target, url, status)
queue.task_done()
count += 1
print(f'{Y}[!] {C}Requests : {W}{count}/{total_num_words}', end='\r')
@@ -73,7 +72,7 @@ async def run(target, threads, tout, wdlist, redir, sslv, dserv, filext, total_num_words):
async def run(target, threads, tout, wdlist, redir, sslv, dserv, filext, total_num_words):
queue = asyncio.Queue(maxsize=threads)

resolver = aiohttp.AsyncResolver(nameservers=[dserv])
resolver = aiohttp.AsyncResolver(nameservers=dserv.split(', '))
conn = aiohttp.TCPConnector(limit=threads, resolver=resolver, family=socket.AF_INET, verify_ssl=sslv)
timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
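
The directory enumeration module now hands every status code to filter_out instead of only 200s, and it accepts multiple DNS servers: the comma-separated dns_server string from conf/config.json is split on ', ' and passed to aiohttp's AsyncResolver. A minimal sketch of that resolver wiring is shown below; aiodns must be installed for AsyncResolver, and the target URL, thread limit, and timeouts are placeholders.

```python
# Sketch of the multi-nameserver resolver setup used by the dir enum module.
import asyncio
import socket

import aiohttp


async def demo(dserv: str = '8.8.8.8, 8.8.4.4, 1.1.1.1, 1.0.0.1') -> None:
    # split the config string into a list of nameservers, as run() now does
    resolver = aiohttp.AsyncResolver(nameservers=dserv.split(', '))
    conn = aiohttp.TCPConnector(limit=50, resolver=resolver,
                                family=socket.AF_INET, verify_ssl=False)
    timeout = aiohttp.ClientTimeout(total=None, sock_connect=30, sock_read=30)
    async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
        async with session.get('https://example.com/robots.txt') as resp:
            print(resp.status)


asyncio.run(demo())
```

Keeping the servers as a single ', '-separated string keeps config.json flat, at the cost of a fixed delimiter between addresses.
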
