@@ -25,8 +25,9 @@ def run(gx_context, gx_output):

    # If focused on a contributor, let's first make sure the contributor exists in the repository
    if contributor_scope != None:
-        if not any(contributor.get('login') in contributor_scope for contributor in gx_context.getContributors()):
-            gx_output.warn(f"One of the collaborators you specified {contributor_scope} was not found as a contributor in the repo. Quitting..")
+        if not gx_context.areContributors(contributor_scope):
+            gx_output.warn(f"One of the collaborators you specified {contributor_scope} was not found as a contributor in the repo.")
+            gx_output.warn(f"If you intend to filter results for a non-contributor, use the filter function instead (eg. -f johnDoe03). Quitting..")
            return False

    # Were we invoked to just list contributors and quit?
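Note on the new check: `gx_context.areContributors()` is not shown in this diff. A minimal sketch of the membership test it presumably wraps (function name, signature, and inputs here are assumptions for illustration, not the actual gitxray implementation):

```python
# Hypothetical sketch only -- the real helper lives on gx_context.
def are_contributors(logins, contributors):
    # `contributors` is assumed to be the list of dicts returned by
    # GitHub's /repos/{owner}/{repo}/contributors endpoint (each has 'login').
    known = {c.get('login') for c in contributors}
    return all(login in known for login in logins)

# Example: are_contributors(["johnDoe03"], [{"login": "alice"}]) returns False.
```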
@@ -72,22 +73,22 @@ def run(gx_context, gx_output):
        gx_output.c_log(f"Owned repositories: https://github.com/{contributor_login}?tab=repositories", rtype="urls")

        if contributor.get('name') != None:
-            gx_output.c_log(f"[Name: {contributor.get('name')}] obtained from the user's profile", rtype="personal")
+            gx_output.c_log(f"[Name: {contributor.get('name')}] obtained from the user's profile. ", rtype="personal")

        if contributor.get('twitter_username') != None:
-            gx_output.c_log(f"[X/Twitter account: {contributor.get('twitter_username')}] obtained from the user's profile", rtype="personal")
+            gx_output.c_log(f"[X/Twitter account: {contributor.get('twitter_username')}] obtained from the user's profile. ", rtype="personal")
        if contributor.get('bio') != None:
            bio = contributor.get('bio').replace("\r\n", " | ")
-            gx_output.c_log(f"[Bio: {bio}] obtained from the profile", rtype="personal")
+            gx_output.c_log(f"[Bio: {bio}] obtained from the profile. ", rtype="personal")
        if contributor.get('company') != None:
-            gx_output.c_log(f"[Company: {contributor.get('company')}] obtained from the user's profile", rtype="personal")
+            gx_output.c_log(f"[Company: {contributor.get('company')}] obtained from the user's profile. ", rtype="personal")
        if contributor.get('blog') != None and len(contributor.get('blog')) > 0:
-            gx_output.c_log(f"[Blog: {contributor.get('blog')}] obtained from the user's profile", rtype="personal")
+            gx_output.c_log(f"[Blog: {contributor.get('blog')}] obtained from the user's profile. ", rtype="personal")
        if contributor.get('location') != None:
-            gx_output.c_log(f"[Location: {contributor.get('location')}] obtained from the user's profile", rtype="personal")
+            gx_output.c_log(f"[Location: {contributor.get('location')}] obtained from the user's profile. ", rtype="personal")

        if contributor.get('email') != None:
-            gx_output.c_log(f"[{contributor.get('email')}] obtained from the user's profile", rtype="emails")
+            gx_output.c_log(f"[{contributor.get('email')}] obtained from the user's profile. ", rtype="emails")
            gx_context.linkIdentifier("EMAIL", [contributor.get('email')], contributor_login)

        contributor_created_at_time = gh_time.parse_date(contributor.get('created_at'))
@@ -126,6 +127,7 @@ def run(gx_context, gx_output):
        print(f"\r[{c_users_index}/{len(c_users)}] Analyzing {len(commits)} commits and any signing keys for {contributor.get('login')}" + ' ' * 40, end='', flush=True)
        for commit in commits:
            c = commit["commit"]
+
            v_reason = c["verification"]["reason"]
            if c["verification"]["verified"] == True:
                try:
@@ -162,16 +164,16 @@ def run(gx_context, gx_output):
                failed_verifications.append(c)

            if c["author"]["email"] not in contributor_emails:
-                gx_output.c_log(f"[{c['author']['email']}] obtained by parsing a commit dated {c['author']['date']}", rtype="emails")
+                gx_output.c_log(f"[{c['author']['email']}] obtained by parsing commits. ", rtype="emails")
                contributor_emails.append(c["author"]["email"])
                gx_context.linkIdentifier("EMAIL", [c["author"]["email"]], contributor_login)

-            if gh_time.parse_date(c["author"]["date"]) < contributor_created_at_time or gh_time.parse_date(c['committer']['date']) < contributor_created_at_time:
+            if gh_time.parse_date(c['author']['date']) < contributor_created_at_time:
                dates_mismatch_commits.append(c)

        if len(dates_mismatch_commits) > 0:
-            gx_output.c_log(f"WARNING: UNRELIABLE DATES in {len(dates_mismatch_commits)} commits by Contributor [{contributor_login}]. The GitHub account is newer than the commit! Unreliable historic activity or account re-use. ", rtype="commits")
-            gx_output.c_log(f"View commits with unreliable DATES here: https://github.com/{repository.get('full_name')}/commits/?author={contributor_login}&until={contributor.get('created_at')}", rtype="urls")
+            gx_output.c_log(f"WARNING: UNRELIABLE DATES (Older than Account) in {len(dates_mismatch_commits)} commits by [{contributor_login}]. Potential tampering, account re-use, or Rebase. List at: {repository.get('html_url')}/commits/?author={contributor_login}&until={contributor.get('created_at')}", rtype="commits")
+            gx_output.c_log(f"View commits with unreliable DATES here: {repository.get('html_url')}/commits/?author={contributor_login}&until={contributor.get('created_at')}", rtype="commits")
            gx_context.linkIdentifier("DATE_MISMATCH_COMMITS", [len(dates_mismatch_commits)], contributor_login)

        # PGP Signature attributes: We have precise Key IDs used in signatures + details on signature creation time and algorithm
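For context, the reworked date check only flags commits whose author date predates the account's `created_at`. A self-contained sketch of the same comparison, assuming `gh_time.parse_date` behaves like ISO-8601 parsing into timezone-aware datetimes (the commit data below is made up):

```python
from datetime import datetime

def parse_gh_date(value):
    # GitHub returns ISO-8601 timestamps such as "2021-06-01T00:00:00Z".
    return datetime.fromisoformat(value.replace("Z", "+00:00"))

account_created_at = parse_gh_date("2021-06-01T00:00:00Z")
commit = {"author": {"date": "2019-03-14T12:00:00Z"}}  # hypothetical commit data

# A commit that claims to predate the account suggests tampering, rebasing,
# or account re-use -- the condition the diff above now checks for.
if parse_gh_date(commit["author"]["date"]) < account_created_at:
    print("unreliable commit date")
```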
@@ -210,7 +212,7 @@ def run(gx_context, gx_output):
        if pgp_keys != None and len(pgp_keys) > 0:
            primary_key_ids = [key.get('key_id') for key in pgp_keys]
            gx_output.c_log(f"{len(pgp_keys)} Primary PGP Keys in this contributor's profile: {str(primary_key_ids)}", rtype="keys")
-            gx_output.c_log(f"PGP Keys: https://api.github.com/users/{contributor_login}/gpg_keys", rtype="urls")
+            gx_output.c_log(f"PGP Keys: https://api.github.com/users/{contributor_login}/gpg_keys", rtype="keys")

            for primary_key in pgp_keys:
                # Let's parse and drain info from raw_key fields in primary keys
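The key IDs logged here come from the public `gpg_keys` endpoint. Purely for illustration (gitxray goes through its own `gh_api` wrapper, not `requests`, and the login below is made up), fetching and summarizing them might look like:

```python
import requests  # illustration only; not how gitxray talks to the API

login = "octocat"  # hypothetical contributor login
resp = requests.get(
    f"https://api.github.com/users/{login}/gpg_keys",
    headers={"Accept": "application/vnd.github+json"},
)
resp.raise_for_status()

pgp_keys = resp.json()
primary_key_ids = [key.get("key_id") for key in pgp_keys]
print(f"{len(pgp_keys)} Primary PGP Keys: {primary_key_ids}")
```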
@@ -284,7 +286,7 @@ def run(gx_context, gx_output):
        ssh_signing_keys = gh_api.fetch_ssh_signing_keys(contributor_login)
        if len(ssh_signing_keys) > 0:
            gx_output.c_log(f"{len(ssh_signing_keys)} SSH Keys used for Signatures in this contributor's profile", rtype="keys")
-            gx_output.c_log(f"SSH Signing Keys: https://api.github.com/users/{contributor_login}/ssh_signing_keys", rtype="urls")
+            gx_output.c_log(f"SSH Signing Keys: https://api.github.com/users/{contributor_login}/ssh_signing_keys", rtype="keys")

            for ssh_signing_key in ssh_signing_keys:
                algorithm = gx_ugly_ssh_parser.ugly_inhouse_ssh_key(ssh_signing_key.get('key'))
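The `gx_ugly_ssh_parser.ugly_inhouse_ssh_key()` call above extracts the key's algorithm; its internals are not part of this diff. A rough sketch of how that can be done for an OpenSSH public key (my assumption of the approach, not the project's code):

```python
import base64
import struct

def ssh_key_algorithm(key_text):
    # An OpenSSH public key reads "ssh-ed25519 AAAA... comment"; the base64
    # blob starts with a 4-byte big-endian length followed by the algorithm name.
    blob = base64.b64decode(key_text.split()[1])
    name_len = struct.unpack(">I", blob[:4])[0]
    return blob[4:4 + name_len].decode()

# Build a dummy ed25519-style key just to exercise the function.
dummy_blob = struct.pack(">I", 11) + b"ssh-ed25519" + struct.pack(">I", 32) + bytes(32)
dummy_key = "ssh-ed25519 " + base64.b64encode(dummy_blob).decode() + " demo@example"
print(ssh_key_algorithm(dummy_key))  # -> ssh-ed25519
```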
@@ -298,7 +300,7 @@ def run(gx_context, gx_output):
        ssh_auth_keys = gh_api.fetch_ssh_auth_keys(contributor_login)
        if len(ssh_auth_keys) > 0:
            gx_output.c_log(f"{len(ssh_auth_keys)} SSH Authentication Keys in this contributor's profile", rtype="keys")
-            gx_output.c_log(f"SSH Authentication Keys: https://api.github.com/users/{contributor_login}/keys", rtype="urls")
+            gx_output.c_log(f"SSH Authentication Keys: https://api.github.com/users/{contributor_login}/keys", rtype="keys")

            # We don't keep track of duplicate/cloned keys for authentication SSH keys because GitHub won't allow them
            # https://docs.github.com/en/authentication/troubleshooting-ssh/error-key-already-in-use
@@ -307,7 +309,7 @@ def run(gx_context, gx_output):
                algorithm = f"of type [{algorithm}] " if algorithm != None else ""
                gx_output.c_log(f"SSH Authentication Key ID [{ssh_auth_key.get('id')}] {algorithm}in profile.", rtype="keys")

-        gx_output.c_log(f"All commits (for this Repo): https://github.com/{repository.get('full_name')}/commits/?author={contributor_login}", rtype="urls")
+        gx_output.c_log(f"All commits (for this Repo): {repository.get('html_url')}/commits/?author={contributor_login}", rtype="commits")
        # Unique key ids for now only holds keys we've extracted from commit signatures
        if len(unique_pgp_keyids) > 0:
            # https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#constructing-a-search-query