#!/usr/bin/python
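"""
git-wild-hunt: searches GitHub's code search API for files that commonly
leak credentials, pulls their raw contents, and scans them against a set
of regexes defined in the config.
"""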
import argparse
import requests
import json
import re
import os
import sys
import datetime
import time
from tqdm import tqdm
from pathlib import Path
from modules.CustomConfigParser import CustomConfigParser
from modules import logger
VERSION = 1

def load_regexes(regexes_path):
    # load the JSON file of check-name -> regex pairs, relative to this script
    with open(os.path.join(os.path.dirname(__file__), regexes_path), 'r') as f:
        regexes = json.loads(f.read())
    return regexes
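
# Note: the regexes file is expected to map check names to patterns, e.g. a
# hypothetical entry: {"AWS Access Key ID": "AKIA[0-9A-Z]{16}"}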

def search_github(github_token, search):
    results = []
    h = {
        "Authorization": "token " + github_token,
        "User-Agent": "Git-Wild-Hunt 1.0"
    }
    # example search: extension:yml+path:.circleci+filename:config+language:YAML
    url = "https://api.github.com/search/code?per_page=100&q=" + search
    r = requests.get(url, headers=h, timeout=30)
    result = r.json()

    # first check whether we hit github's abuse rate limit
    if result.get('documentation_url') == "https://docs.github.com/en/free-pro-team@latest/rest/overview/resources-in-the-rest-api#abuse-rate-limits":
        r = requests.get('https://api.github.com/rate_limit', headers=h, timeout=30)
        timestamp = datetime.datetime.fromtimestamp(int(r.headers.get('X-RateLimit-Reset')))
        log.error("we have hit github's rate limit (resets at {0}), cooling off for 300 seconds".format(timestamp))
        for i in tqdm(range(300)):
            time.sleep(1)
        # retry the search after cooling off
        r = requests.get(url, headers=h, timeout=30)
        result = r.json()

    try:
        if result['total_count'] > 0:
            log.info("total results: {}".format(result['total_count']))
        else:
            log.error("no results found for the search: {}".format(search))
            sys.exit(1)
    except (KeyError, TypeError):
        log.error("using github search api: {0}".format(json.dumps(result, indent=2)))
        sys.exit(1)

    if 'items' in result:
        for i in result['items']:
            results.append(i)

    # check whether the results are paginated
    link = r.headers.get('link')
    if link is None:
        log.info("all done processing results")
        return results

    next_url = find_next(link)
    time.sleep(60)
    # walk the remaining pages of results
    if next_url is not None:
        results = process_pages(github_token, next_url, results)
    log.info("all done processing results")
    return results
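
# A minimal sketch of how this is driven from __main__ (hypothetical search):
#   results = search_github(github_token, "extension:yml+path:.circleci+filename:config")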

def process_pages(github_token, url, results):
    log.info("processing page: {0}".format(url))
    h = {
        "Authorization": "token " + github_token,
        "User-Agent": "Git-Wild-Hunt 1.0"
    }
    r = requests.get(url, headers=h, timeout=30)
    result = r.json()

    # first check whether we hit github's abuse rate limit
    if result.get('documentation_url') == "https://docs.github.com/en/free-pro-team@latest/rest/overview/resources-in-the-rest-api#abuse-rate-limits":
        timestamp = datetime.datetime.fromtimestamp(int(r.headers.get('X-RateLimit-Reset')))
        log.error("we have hit github's rate limits (resets at {0}), cooling off for 300 seconds".format(timestamp))
        for i in tqdm(range(300)):
            time.sleep(1)
        # retry the same page after cooling off
        return process_pages(github_token, url, results)

    # process results if we haven't hit a rate limit
    if 'items' in result:
        for i in result['items']:
            results.append(i)

    # check whether there is another page
    link = r.headers.get('link')
    if link is None:
        return results
    next_url = find_next(link)
    time.sleep(60)
    if next_url is None:
        log.info("all done processing results")
        return results
    return process_pages(github_token, next_url, results)

# given a link header from github, find the url for the next page of paginated results
def find_next(link):
    for l in link.split(','):
        a, b = l.split(';')
        if b.strip() == 'rel="next"':
            # strip the surrounding < > from the url
            return a.strip()[1:-1]
    return None
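
# Example Link header, per GitHub's pagination docs:
#   <https://api.github.com/search/code?q=...&page=2>; rel="next",
#   <https://api.github.com/search/code?q=...&page=34>; rel="last"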

def rawurl(result):
    # rewrite a github html_url into the matching raw.githubusercontent.com url
    if 'html_url' in result:
        raw_url = result['html_url']
        raw_url = raw_url.replace('https://github.com/', 'https://raw.githubusercontent.com/')
        raw_url = raw_url.replace('/blob/', '/')
        return raw_url
    return None
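
# e.g. https://github.com/owner/repo/blob/master/.circleci/config.yml
#   -> https://raw.githubusercontent.com/owner/repo/master/.circleci/config.yml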

def getcode(url):
    # fetch the raw file contents for scanning
    try:
        r = requests.get(url, timeout=10)
        return r.text
    except requests.exceptions.RequestException:
        log.error("timeout requesting url: {0} .. continuing".format(url))
        return None

def findleaks(conf, regexes):
    match_creds = []
    url = rawurl(conf)
    if url:
        code = getcode(url)
    else:
        log.error("could not get raw content for {}".format(json.dumps(conf, indent=2)))
        code = None
    if code:
        for check, regex in regexes.items():
            matches = re.findall(regex, code)
            if matches:
                # build a fresh record per check so earlier matches are not overwritten
                match_details = dict()
                match_details['url'] = url
                match_details['check'] = check
                match_details['matches'] = matches
                match_details['timestamp'] = str(datetime.datetime.utcnow().isoformat())
                # add owner data
                match_details['owner'] = conf['repository']['owner']['login']
                match_details['owner_url'] = conf['repository']['owner']['html_url']
                match_details['owner_type'] = conf['repository']['owner']['type']
                match_creds.append(match_details)
                log.warning("url: {0}\ncheck: {1} matches: {2}".format(url, check, matches))
    return match_creds
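
# Each returned record looks roughly like this (hypothetical values):
# {"url": "https://raw.githubusercontent.com/...", "check": "AWS Access Key ID",
#  "matches": ["AKIA..."], "timestamp": "2020-01-01T00:00:00",
#  "owner": "octocat", "owner_url": "https://github.com/octocat", "owner_type": "User"}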

def write_leaks(leaks, output_path):
    try:
        with open(output_path, 'a') as outfile:
            json.dump(leaks, outfile)
    except Exception as e:
        log.error("writing result file: {0}".format(str(e)))

def get_user_details(leak, github_token):
    # enrich a leak record with the owner's public github profile
    h = {"Authorization": "token " + github_token}
    url = "https://api.github.com/users/" + leak['owner']
    r = requests.get(url, headers=h, timeout=30)
    result = r.json()
    # use .get() so a missing profile field does not raise a KeyError
    leak['name'] = result.get('name')
    leak['email'] = result.get('email')
    leak['company'] = result.get('company')
    leak['blog'] = result.get('blog')
    leak['location'] = result.get('location')
    leak['twitter_username'] = result.get('twitter_username')
    leak['owner_created'] = result.get('created_at')
    leak['owner_last_updated'] = result.get('updated_at')
    leak['owner_bio'] = result.get('bio')
    return leak
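
# Enrichment uses GET https://api.github.com/users/{login}; profile fields
# such as name, email, and company may be null when a user does not expose them.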

if __name__ == "__main__":
    # grab arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--search", required=True, default="", help="search to execute")
    parser.add_argument("-c", "--config", required=False, default="git-wild-hunt.conf", help="config file path")
    parser.add_argument("-v", "--version", default=False, action="store_true", required=False,
                        help="shows current git-wild-hunt version")

    # parse them
    args = parser.parse_args()
    ARG_VERSION = args.version
    config = args.config
    search = args.search

    # resolve the config file path
    tool_config = Path(config)
    if tool_config.is_file():
        print("git-wild-hunt is using config at path {0}".format(tool_config))
        configpath = str(tool_config)
    else:
        print("ERROR: git-wild-hunt failed to find a config file at {0} ..exiting".format(tool_config))
        sys.exit(1)

    # parse config
    parser = CustomConfigParser()
    config = parser.load_conf(configpath)

    log = logger.setup_logging(config['log_path'], config['log_level'])
    log.info("INIT - git-wild-hunt v" + str(VERSION))

    if ARG_VERSION:
        log.info("version: {0}".format(VERSION))
        sys.exit(0)

    regexes = load_regexes(config['regexes'])
    github_token = config['github_token']
    if github_token == "TOKENHERE":
        print("ERROR: git-wild-hunt failed to find a github_token in the config file at {0} ..exiting".format(tool_config))
        sys.exit(1)

    # github's search api expects '+' separated qualifiers
    s = search.replace(" ", "+")
    results = search_github(github_token, s)

    # process the search results
    count = 0
    all_leaks = []
    for conf in results:
        count += 1
        if 'html_url' not in conf:
            continue
        log.info("processing potential leak #{0} on {1}".format(count, conf['html_url']))
        leaks = findleaks(conf, regexes)
        for l in leaks:
            l = get_user_details(l, github_token)
            all_leaks.append(l)

    # write output of all leaks
    write_leaks(all_leaks, config['output'])
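
# example usage (hypothetical search):
#   python git-wild-hunt.py -s "extension:yml path:.circleci filename:config language:YAML"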