
Commit bb1a4b6

Merge pull request #32 from initstring/functions
WIP: Adding Cloud Functions
2 parents 98fe026 + a62efe2 commit bb1a4b6

6 files changed (+184, -24 lines)

README.md

Lines changed: 4 additions & 5 deletions
@@ -19,10 +19,7 @@ Currently enumerates the following:
 - Open GCP Buckets
 - Protected GCP Buckets
 - Google App Engine sites
-
-By "open" buckets/containers, I mean those that allow anonymous users to list contents. if you discover a protected bucket/container, it is still worth trying to brute force the contents with another tool.
-
-**IMPORTANT**: Azure Virtual Machine DNS records can span a lot of geo regions. To save time scanning, there is a "REGIONS" variable defined in cloudenum/azure_regions.py. You'll want to look at this file and edit it to be relevant to your own work.
+- Cloud Functions (enumerates project/regions with existing functions, then brute forces actual function names)
 
 See it in action in [Codingo](https://github.com/codingo)'s video demo [here](https://www.youtube.com/embed/pTUDJhWJ1m0).
 
@@ -43,7 +40,7 @@ The only required argument is at least one keyword. You can use the built-in fuz
 
 You can provide multiple keywords by specifying the `-k` argument multiple times.
 
-Azure Containers required two levels of brute-forcing, both handled automatically by this tool. First, by finding valid accounts (DNS). Then, by brute-forcing container names inside that account (HTTP scraping). The tool uses the same fuzzing file for both by default, but you can specificy individual files separately if you'd like.
+Keywords are mutated automatically using strings from `enum_tools/fuzz.txt` or a file you provide with the `-m` flag. Services that require a second-level of brute forcing (Azure Containers and GCP Functions) will also use `fuzz.txt` by default or a file you provide with the `-b` flag.
 
 Let's say you were researching "somecompany" whose website is "somecompany.io" that makes a product called "blockchaindoohickey". You could run the tool like this:
 
@@ -57,6 +54,8 @@ HTTP scraping and DNS lookups use 5 threads each by default. You can try increas
 cloudenum.py -k keyword -t 10
 ```
 
+**IMPORTANT**: Some resources (Azure Containers, GCP Functions) are discovered per-region. To save time scanning, there is a "REGIONS" variable defined in `cloudenum/azure_regions.py and cloudenum/gcp_regions.py` that is set by default to use only 1 region. You may want to look at these files and edit them to be relevant to your own work.
+
 **Complete Usage Details**
 ```
 usage: cloud_enum.py [-h] -k KEYWORD [-m MUTATIONS] [-b BRUTE]
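
For context, the keyword mutation described in the README hunk above works roughly like the sketch below. This is illustrative only, not the project's actual implementation: the real logic lives in cloud_enum.py, and the variable names here are assumptions.

```
# Rough sketch of keyword mutation: strings from fuzz.txt (or the -m file)
# are appended/prepended to each keyword to build candidate names.
keywords = ['somecompany']   # hypothetical -k values

with open('enum_tools/fuzz.txt', encoding='utf8', errors='ignore') as infile:
    mutations = infile.read().splitlines()

candidates = set()
for keyword in keywords:
    candidates.add(keyword)
    for mutation in mutations:
        candidates.add('{}-{}'.format(keyword, mutation))
        candidates.add('{}{}'.format(keyword, mutation))
        candidates.add('{}-{}'.format(mutation, keyword))
        candidates.add('{}{}'.format(mutation, keyword))

print('[+] Generated {} candidate names'.format(len(candidates)))
```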

enum_tools/azure_checks.py

Lines changed: 5 additions & 14 deletions
@@ -102,8 +102,10 @@ def print_container_response(reply):
 
     # Stop brute forcing accounts without permission
     if ('not authorized to perform this operation' in reply.reason or
-            'not have sufficient permissions' in reply.reason):
-        print("    [!] Breaking out early, auth errors.")
+            'not have sufficient permissions' in reply.reason or
+            'Public access is not permitted' in reply.reason or
+            'Server failed to authenticate the request' in reply.reason):
+        print("    [!] Breaking out early, auth required.")
         return 'breakout'
 
     # Stop brute forcing unsupported accounts
@@ -151,18 +153,7 @@ def brute_force_containers(storage_accounts, brute_list, threads):
             valid_accounts.append(account)
 
     # Read the brute force file into memory
-    with open(brute_list, encoding="utf8", errors="ignore") as infile:
-        names = infile.read().splitlines()
-
-    # Clean up the names to usable for containers
-    banned_chars = re.compile('[^a-z0-9-]')
-    clean_names = []
-    for name in names:
-        name = name.lower()
-        name = banned_chars.sub('', name)
-        if 63 >= len(name) >= 3:
-            if name not in clean_names:
-                clean_names.append(name)
+    clean_names = utils.get_brute(brute_list, mini=3)
 
     # Start a counter to report on elapsed time
     start_time = utils.start_timer()

enum_tools/fuzz.txt

Lines changed: 6 additions & 0 deletions
@@ -120,6 +120,7 @@ graphql
 gs
 gw
 help
+iaas
 hub
 iam
 images
@@ -131,6 +132,7 @@ ios
 iot
 jira
 js
+k8s
 kube
 kubeengine
 kubernetes
@@ -156,6 +158,7 @@ ops
 oracle
 org
 packages
+paas
 passwords
 photos
 pics
@@ -180,6 +183,7 @@ repo
 reports
 resources
 s3
+saas
 screenshots
 scripts
 sec
@@ -210,6 +214,7 @@ store
 subversion
 support
 svn
+svc
 syslog
 tasks
 teamcity
@@ -228,6 +233,7 @@ userpictures
 users
 ux
 videos
+vm
 web
 website
 wp

enum_tools/gcp_checks.py

Lines changed: 124 additions & 3 deletions
@@ -4,16 +4,22 @@
 """
 
 from enum_tools import utils
+from enum_tools import gcp_regions
 
 BANNER = '''
 ++++++++++++++++++++++++++
 google checks
 ++++++++++++++++++++++++++
 '''
 
-# Known S3 domain names
+# Known GCP domain names
 GCP_URL = 'storage.googleapis.com'
 APPSPOT_URL = 'appspot.com'
+FUNC_URL = 'cloudfunctions.net'
+
+# Hacky, I know. Used to store project/region combos that report at least
+# one cloud function, to brute force later on
+HAS_FUNCS = []
 
 def print_bucket_response(reply):
     """
@@ -69,10 +75,12 @@ def print_appspot_response(reply):
     """
     if reply.status_code == 404:
         pass
-    elif reply.status_code == 500 or reply.status_code == 503:
+    elif (str(reply.status_code)[0] == '5'):
         utils.printc("    Google App Engine app with a 50x error: {}\n"
                      .format(reply.url), 'orange')
-    elif reply.status_code == 200 or reply.status_code == 302:
+    elif (reply.status_code == 200
+          or reply.status_code == 302
+          or reply.status_code == 404):
         utils.printc("    Google App Engine app: {}\n"
                      .format(reply.url), 'green')
     else:
@@ -107,6 +115,118 @@ def check_appspot(names, threads):
     # Stop the time
     utils.stop_timer(start_time)
 
+def print_functions_response1(reply):
+    """
+    Parses the HTTP reply from the initial Cloud Functions check
+
+    This function is passed into the class object so we can view results
+    in real-time.
+    """
+    if reply.status_code == 404:
+        pass
+    elif reply.status_code == 302:
+        utils.printc("    Contains at least 1 Cloud Function: {}\n"
+                     .format(reply.url), 'green')
+        HAS_FUNCS.append(reply.url)
+    else:
+        print("    Unknown status codes being received from {}:\n"
+              "       {}: {}"
+              .format(reply.url, reply.status_code, reply.reason))
+
+def print_functions_response2(reply):
+    """
+    Parses the HTTP reply from the secondary, brute-force Cloud Functions check
+
+    This function is passed into the class object so we can view results
+    in real-time.
+    """
+    if 'accounts.google.com/ServiceLogin' in reply.url:
+        pass
+    elif reply.status_code == 403 or reply.status_code == 401:
+        utils.printc("    Auth required Cloud Function: {}\n"
+                     .format(reply.url), 'orange')
+    elif reply.status_code == 405:
+        utils.printc("    UNAUTHENTICATED Cloud Function (POST-Only): {}\n"
+                     .format(reply.url), 'green')
+    elif reply.status_code == 200 or reply.status_code == 404:
+        utils.printc("    UNAUTHENTICATED Cloud Function (GET-OK): {}\n"
+                     .format(reply.url), 'green')
+    else:
+        print("    Unknown status codes being received from {}:\n"
+              "       {}: {}"
+              .format(reply.url, reply.status_code, reply.reason))
+
+def check_functions(names, brute_list, threads):
+    """
+    Checks for Google Cloud Functions running on cloudfunctions.net
+
+    This is a two-part process. First, we want to find region/project combos
+    that have existing Cloud Functions. The URL for a function looks like this:
+    https://[ZONE]-[PROJECT-ID].cloudfunctions.net/[FUNCTION-NAME]
+
+    We look for a 302 in [ZONE]-[PROJECT-ID].cloudfunctions.net. That means
+    there are some functions defined in that region. Then, we brute force a list
+    of possible function names there.
+
+    See gcp_regions.py to define which regions to check. The tool currently
+    defaults to only 1 region, so you should really modify it for best results.
+    """
+    print("[+] Checking for project/zones with Google Cloud Functions.")
+
+    # Start a counter to report on elapsed time
+    start_time = utils.start_timer()
+
+    # Pull the regions from a config file
+    regions = gcp_regions.REGIONS
+
+    print("[*] Testing across {} regions defined in the config file"
+          .format(len(regions)))
+
+    for region in regions:
+        # Initialize the list of initial URLs to check
+        candidates = [region + '-' + name + '.' + FUNC_URL for name in names]
+
+        # Send the valid names to the batch HTTP processor
+        utils.get_url_batch(candidates, use_ssl=False,
+                            callback=print_functions_response1,
+                            threads=threads,
+                            redir=False)
+
+    # Return from function if we have not found any valid combos
+    if not HAS_FUNCS:
+        utils.stop_timer(start_time)
+        return
+
+    # If we did find something, we'll use the brute list. This will allow people
+    # to provide a separate fuzzing list if they choose.
+    print("[*] Brute-forcing function names in {} project/region combos"
+          .format(len(HAS_FUNCS)))
+
+    # Load brute list in memory, based on allowed chars/etc
+    brute_strings = utils.get_brute(brute_list)
+
+    # The global was built in a previous function. We only want to brute force
+    # project/region combos that we know have existing functions defined
+    for func in HAS_FUNCS:
+        print("[*] Brute-forcing {} function names in {}"
+              .format(len(brute_strings), func))
+        # Initialize the list of initial URLs to check. Strip out the HTTP
+        # protocol first, as that is handled in the utility
+        func = func.replace("http://", "")
+
+        # Noticed weird behaviour with functions when a slash is not appended.
+        # Works for some, but not others. However, appending a slash seems to
+        # get consistent results. Might need further validation.
+        candidates = [func + brute + '/' for brute in brute_strings]
+
+        # Send the valid names to the batch HTTP processor
+        utils.get_url_batch(candidates, use_ssl=False,
+                            callback=print_functions_response2,
+                            threads=threads)
+
+    # Stop the time
+    utils.stop_timer(start_time)
+
 def run_all(names, args):
     """
     Function is called by main program
@@ -115,3 +235,4 @@ def run_all(names, args):
 
     check_gcp_buckets(names, args.threads)
     check_appspot(names, args.threads)
+    check_functions(names, args.brute, args.threads)
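
To make the two-step flow in check_functions() above concrete, here is a minimal, standalone sketch of the same idea using the `requests` library directly. The keyword, region, and function names are hypothetical, and cloud_enum itself routes these requests through utils.get_url_batch() rather than calling requests like this.

```
import requests

FUNC_URL = 'cloudfunctions.net'
regions = ['us-central1']          # see enum_tools/gcp_regions.py
keywords = ['someproject']         # hypothetical mutated keywords
func_names = ['api', 'webhook']    # hypothetical brute-force words

# Step 1: find region/project combos that host at least one Cloud Function.
# A bare 302 on [region]-[project].cloudfunctions.net signals existing
# functions, which is why redirects are disabled for this pass.
has_funcs = []
for region in regions:
    for word in keywords:
        base = 'http://{}-{}.{}'.format(region, word, FUNC_URL)
        reply = requests.get(base, allow_redirects=False)
        if reply.status_code == 302:
            has_funcs.append(base)

# Step 2: brute force function names only where step 1 found something.
for base in has_funcs:
    for name in func_names:
        # Trailing slash gives more consistent results (see comment in the diff)
        reply = requests.get('{}/{}/'.format(base, name))
        if 'accounts.google.com/ServiceLogin' in reply.url:
            continue  # function exists but requires auth
        if reply.status_code in (200, 404):
            print('Possible unauthenticated function: {}/{}'.format(base, name))
```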

enum_tools/gcp_regions.py

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+"""
+File used to track the DNS regions for GCP resources.
+"""
+
+# Some enumeration tasks will need to go through the complete list of
+# possible DNS names for each region. You may want to modify this file to
+# use the regions meaningful to you.
+#
+# Whatever is listed in the last instance of 'REGIONS' below is what the tool
+# will use.
+
+
+# Here is the list I get when running `gcloud functions regions list`
+REGIONS = ['us-central1', 'us-east1', 'us-east4', 'us-west2', 'us-west3',
+           'us-west4', 'europe-west1', 'europe-west2', 'europe-west3',
+           'europe-west6', 'asia-east2', 'asia-northeast1', 'asia-northeast2',
+           'asia-northeast3', 'asia-south1', 'asia-southeast2',
+           'northamerica-northeast1', 'southamerica-east1',
+           'australia-southeast1']
+
+
+# And here I am limiting the search by overwriting this variable:
+REGIONS = ['us-central1',]

enum_tools/utils.py

Lines changed: 22 additions & 2 deletions
@@ -34,7 +34,7 @@ def init_logfile(logfile):
         log_writer.write("\n\n#### CLOUD_ENUM {} ####\n"
                          .format(now))
 
-def get_url_batch(url_list, use_ssl=False, callback='', threads=5):
+def get_url_batch(url_list, use_ssl=False, callback='', threads=5, redir=True):
     """
     Processes a list of URLs, sending the results back to the calling
     function in real-time via the `callback` parameter
@@ -66,7 +66,7 @@ def get_url_batch(url_list, use_ssl=False, callback='', threads=5):
 
         # First, grab the pending async request and store it in a dict
         for url in batch:
-            batch_pending[url] = session.get(proto + url)
+            batch_pending[url] = session.get(proto + url, allow_redirects=redir)
 
         # Then, grab all the results from the queue.
         # This is where we need to catch exceptions that occur with large
@@ -212,6 +212,26 @@ def printc(text, color):
         with open(LOGFILE, 'a') as log_writer:
             log_writer.write(text.lstrip())
 
+def get_brute(brute_file, mini=1, maxi=63, banned='[^a-z0-9_-]'):
+    """
+    Generates a list of brute-force words based on length and allowed chars
+    """
+    # Read the brute force file into memory
+    with open(brute_file, encoding="utf8", errors="ignore") as infile:
+        names = infile.read().splitlines()
+
+    # Clean up the names to usable for containers
+    banned_chars = re.compile(banned)
+    clean_names = []
+    for name in names:
+        name = name.lower()
+        name = banned_chars.sub('', name)
+        if maxi >= len(name) >= mini:
+            if name not in clean_names:
+                clean_names.append(name)
+
+    return clean_names
+
 def start_timer():
     """
     Starts a timer for functions in main module
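
A short note on the new `redir` parameter, since it ties the utils.py change to the Cloud Functions check: the first-pass region scan keys off the raw 302 status, which `requests` would otherwise swallow by following the redirect. A hedged illustration (the hostname below is hypothetical):

```
import requests

url = 'http://us-central1-someproject.cloudfunctions.net'  # hypothetical

# Default behaviour follows redirects, so reply.status_code reports the final
# destination and any 302 only survives in reply.history.
followed = requests.get(url)

# With allow_redirects=False (what redir=False passes through), the 302 that
# print_functions_response1() looks for is returned directly.
raw = requests.get(url, allow_redirects=False)

print(followed.status_code, [r.status_code for r in followed.history])
print(raw.status_code)
```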
