
Commit

Merge pull request #1 from RedMapleTech/feature/sc-25981/make-improvements-to-cloud-enum-tool

Feature/sc 25981/make improvements to cloud enum tool
drunkenplatypus authored Jul 5, 2024
2 parents 0e54b48 + 20a5260 commit 783e2cf
Showing 8 changed files with 1,105 additions and 1,131 deletions.
12 changes: 10 additions & 2 deletions README.md
@@ -1,20 +1,24 @@
# cloud_enum

Multi-cloud OSINT tool. Enumerate public resources in AWS, Azure, and Google Cloud.

Currently enumerates the following:

**Amazon Web Services**:

- Open / Protected S3 Buckets
- awsapps (WorkMail, WorkDocs, Connect, etc.)

**Microsoft Azure**:

- Storage Accounts
- Open Blob Storage Containers
- Hosted Databases
- Virtual Machines
- Web Apps

**Google Cloud Platform**:

- Open / Protected GCP Buckets
- Open / Protected Firebase Realtime Databases
- Google App Engine sites
@@ -25,17 +29,18 @@ See it in action in [Codingo](https://github.com/codingo)'s video demo [here](ht

<img src="https://initstring.keybase.pub/host/images/cloud_enum.png" align="center"/>


# Usage

## Setup

Several non-standard libraries are required to support threaded HTTP requests and DNS lookups. You'll need to install the requirements as follows:

```sh
pip3 install -r ./requirements.txt
```

## Running

The only required argument is at least one keyword. You can use the built-in fuzzing strings, but you will get better results if you supply your own with `-m` and/or `-b`.

You can provide multiple keywords by specifying the `-k` argument multiple times.
@@ -57,6 +62,7 @@ HTTP scraping and DNS lookups use 5 threads each by default. You can try increas
**IMPORTANT**: Some resources (Azure Containers, GCP Functions) are discovered per-region. To save scanning time, a "REGIONS" variable defined in `cloudenum/azure_regions.py` and `cloudenum/gcp_regions.py` is set by default to use only one region. You may want to look at these files and edit them to be relevant to your own work.
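
Each of those files defines a "REGIONS" list, and trimming or extending that list is how you control the per-region scans. A minimal, hypothetical sketch of an edited list (the real files ship a longer list and the exact entries may differ):

```python
# Hypothetical excerpt of cloudenum/azure_regions.py; gcp_regions.py follows
# the same pattern. Keep only the regions relevant to your own work.
REGIONS = [
    'eastus',         # default behaviour: a single region keeps scans fast
    # 'westeurope',   # uncomment additional regions as needed
    # 'uksouth',
]
```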

**Complete Usage Details**

```
usage: cloud_enum.py [-h] -k KEYWORD [-m MUTATIONS] [-b BRUTE]
@@ -77,7 +83,7 @@ optional arguments:
-ns NAMESERVER, --nameserver NAMESERVER
DNS server to use in brute-force.
-l LOGFILE, --logfile LOGFILE
Will APPEND found items to specified file.
REMOVED Will APPEND found items to specified file.
-f FORMAT, --format FORMAT
Format for log file (text,json,csv - defaults to text)
--disable-aws Disable Amazon checks.
@@ -87,5 +93,7 @@ optional arguments:
```

# Thanks

So far, I have borrowed from:

- Some of the permutations from [GCPBucketBrute](https://github.com/RhinoSecurityLabs/GCPBucketBrute/blob/master/permutations.txt)
116 changes: 43 additions & 73 deletions cloud_enum.py
@@ -13,18 +13,10 @@
import sys
import argparse
import re
from enum_tools import aws_checks
from enum_tools import azure_checks
from enum_tools import gcp_checks
from enum_tools import utils

BANNER = '''
##########################
cloud_enum
github.com/initstring
##########################
'''
from enum_tools import aws_checks as aws
from enum_tools import azure_checks as azure
from enum_tools import gcp_checks as gcp
from logger import logger


def parse_arguments():
@@ -47,32 +39,25 @@ def parse_arguments():
kw_group.add_argument('-kf', '--keyfile', type=str, action='store',
help='Input file with a single keyword per line.')

parser.add_argument('-l', '--log-level', type=str,
action='store', default='info', help='Log level')

# Use included mutations file by default, or let the user provide one
parser.add_argument('-m', '--mutations', type=str, action='store',
default=script_path + '/enum_tools/fuzz.txt',
help='Mutations. Default: enum_tools/fuzz.txt')
parser.add_argument('-m', '--mutations', type=str, action='store', default=script_path +
'/enum_tools/fuzz.txt', help='Mutations. Default: enum_tools/fuzz.txt')

# Use include container brute-force or let the user provide one
parser.add_argument('-b', '--brute', type=str, action='store',
default=script_path + '/enum_tools/fuzz.txt',
help='List to brute-force Azure container names.'
' Default: enum_tools/fuzz.txt')
parser.add_argument('-b', '--brute', type=str, action='store', default=script_path + '/enum_tools/fuzz.txt',
help='List to brute-force Azure container names. Default: enum_tools/fuzz.txt')

parser.add_argument('-t', '--threads', type=int, action='store',
default=5, help='Threads for HTTP brute-force.'
' Default = 5')
default=5, help='Threads for HTTP brute-force. Default = 5')

parser.add_argument('-ns', '--nameserver', type=str, action='store',
default='8.8.8.8',
help='DNS server to use in brute-force.')
parser.add_argument('-nsf', '--nameserverfile', type=str,
default='8.8.8.8', help='DNS server to use in brute-force.')

parser.add_argument('-nsf', '--nameserverfile', type=str,
help='Path to the file containing nameserver IPs')
parser.add_argument('-l', '--logfile', type=str, action='store',
help='Appends found items to specified file.')
parser.add_argument('-f', '--format', type=str, action='store',
default='text',
help='Format for log file (text,json,csv)'
' - default: text')

parser.add_argument('--disable-aws', action='store_true',
help='Disable Amazon checks.')
@@ -86,65 +71,44 @@ def parse_arguments():
parser.add_argument('-qs', '--quickscan', action='store_true',
help='Disable all mutations and second-level scans')

parser.add_argument('-r', '--region', type=str,
action='store', help='Region to use for checks')

args = parser.parse_args()

# Ensure mutations file is readable
if not os.access(args.mutations, os.R_OK):
print(f"[!] Cannot access mutations file: {args.mutations}")
log.new().error(f"Cannot access mutations file: {args.mutations}")
sys.exit()

# Ensure brute file is readable
if not os.access(args.brute, os.R_OK):
print("[!] Cannot access brute-force file, exiting")
log.new().error("Cannot access brute-force file, exiting")
sys.exit()

# Ensure keywords file is readable
if args.keyfile:
if not os.access(args.keyfile, os.R_OK):
print("[!] Cannot access keyword file, exiting")
log.new().error("Cannot access keyword file, exiting")
sys.exit()

# Parse keywords from input file
with open(args.keyfile, encoding='utf-8') as infile:
args.keyword = [keyword.strip() for keyword in infile]

# Ensure log file is writeable
if args.logfile:
if os.path.isdir(args.logfile):
print("[!] Can't specify a directory as the logfile, exiting.")
sys.exit()
if os.path.isfile(args.logfile):
target = args.logfile
else:
target = os.path.dirname(args.logfile)
if target == '':
target = '.'

if not os.access(target, os.W_OK):
print("[!] Cannot write to log file, exiting")
sys.exit()

# Set up logging format
if args.format not in ('text', 'json', 'csv'):
print("[!] Sorry! Allowed log formats: 'text', 'json', or 'csv'")
sys.exit()
# Set the global in the utils file, where logging needs to happen
utils.init_logfile(args.logfile, args.format)

return args


def print_status(args):
"""
Print a short pre-run status message
"""
print(f"Keywords: {', '.join(args.keyword)}")
log.new().debug(f"Keywords: {', '.join(args.keyword)}")
if args.quickscan:
print("Mutations: NONE! (Using quickscan)")
log.new().debug("Mutations: NONE! (Using quickscan)")
else:
print(f"Mutations: {args.mutations}")
print(f"Brute-list: {args.brute}")
print("")
log.new().debug(f"Mutations: {args.mutations}")
log.new().debug(f"Brute-list: {args.brute}")


def check_windows():
@@ -157,8 +121,8 @@ def check_windows():
import colorama
colorama.init()
except ModuleNotFoundError:
print("[!] Yo, Windows user - if you want pretty colors, you can"
" install the colorama python package.")
log.new().debug("Yo, Windows user - if you want pretty colors, you can"
" install the colorama python package.")


def read_mutations(mutations_file):
@@ -168,7 +132,7 @@ def read_mutations(mutations_file):
with open(mutations_file, encoding="utf8", errors="ignore") as infile:
mutations = infile.read().splitlines()

print(f"[+] Mutations list imported: {len(mutations)} items")
log.new().debug(f"Mutations list imported: {len(mutations)} items")
return mutations


@@ -218,10 +182,11 @@ def build_names(base_list, mutations):
append_name(f"{mutation}.{base}", names)
append_name(f"{mutation}-{base}", names)

print(f"[+] Mutated results: {len(names)} items")
log.new().debug(f"Mutated results: {len(names)} items")

return names


def read_nameservers(file_path):
try:
with open(file_path, 'r') as file:
@@ -230,18 +195,22 @@ def read_nameservers(file_path):
raise ValueError("Nameserver file is empty")
return nameservers
except FileNotFoundError:
print(f"Error: File '{file_path}' not found.")
log.new().error(f"Error: File '{file_path}' not found.")
exit(1)
except ValueError as e:
print(e)
log.new().error(e)
exit(1)


def main():
"""
Main program function.
"""
args = parse_arguments()
print(BANNER)

# Set up logging
global log
log = logger.Logger(args.log_level.upper())

# Generate a basic status on targets and parameters
print_status(args)
@@ -254,22 +223,23 @@ def main():
mutations = []
else:
mutations = read_mutations(args.mutations)

names = build_names(args.keyword, mutations)

# All the work is done in the individual modules
try:
if not args.disable_aws:
aws_checks.run_all(names, args)
aws.AWSChecks(log, args, names).run_all()
if not args.disable_azure:
azure_checks.run_all(names, args)
azure.AzureChecks(log, args, names).run_all()
if not args.disable_gcp:
gcp_checks.run_all(names, args)
gcp.GCPChecks(log, args, names).run_all()
except KeyboardInterrupt:
print("Thanks for playing!")
log.new().trace("Thanks for playing!")
sys.exit()

# Best of luck to you!
print("\n[+] All done, happy hacking!\n")
log.new().trace("All done, happy hacking!")
sys.exit()
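
The refactor replaces `print()` calls with a `logger` module and per-provider check classes (`AWSChecks`, `AzureChecks`, `GCPChecks`). The fork's `logger.py` is not shown in this excerpt; below is a rough sketch of the interface the new calls imply (`logger.Logger(level)` plus a `.new()` handle exposing `debug`, `error`, and `trace`). It is an assumption for illustration only, not the actual implementation:

```python
# Hypothetical shim matching the calls introduced in cloud_enum.py:
#   log = logger.Logger(args.log_level.upper())
#   log.new().debug(...), log.new().error(...), log.new().trace(...)
# The fork's real logger.py is not shown in this diff and may differ.
import logging

TRACE = 25  # assumed custom level so .trace() maps onto stdlib logging
logging.addLevelName(TRACE, "TRACE")


class _BoundLogger:
    """Thin handle returned by Logger.new(), exposing debug/error/trace."""

    def __init__(self, inner: logging.Logger):
        self._inner = inner

    def debug(self, msg):
        self._inner.debug(msg)

    def error(self, msg):
        self._inner.error(msg)

    def trace(self, msg):
        self._inner.log(TRACE, msg)


class Logger:
    """Configured once in main() from the new -l/--log-level argument."""

    def __init__(self, level: str = "INFO"):
        logging.basicConfig(level=level, format="%(levelname)s %(message)s")
        self._logger = logging.getLogger("cloud_enum")

    def new(self) -> _BoundLogger:
        return _BoundLogger(self._logger)
```

With a shim like this, `log.new().error(f"Cannot access mutations file: {args.mutations}")` behaves like an ordinary `logging` call at ERROR level, and `--log-level debug` surfaces the status messages that were previously printed unconditionally.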

