@@ -40,6 +40,7 @@ def log_event(message, log_file=None):
     if log_file:
         log_file.write(full_message + "\n")
 
+
 # Constants
 MAX_CHERRY_PICK_SCAN = 50
 PR_CACHE = {}  # Cache for PR details to speed up multiple rounds referencing same PRs
@@ -236,9 +237,7 @@ def get_prs_from_log(log_output, prs_base=None, log_file=None):
         if details:
             # Check if it's a cherry-pick round PR - scan deep to identify meta-PRs
             is_meta_pr = (
-                "cherry pick" in subject.lower()
-                or "cherry-pick" in subject.lower()
-                or "cherrypick" in subject.lower()
+                "cherry pick" in subject.lower() or "cherry-pick" in subject.lower() or "cherrypick" in subject.lower()
             )
 
             if is_meta_pr and commit_count < MAX_CHERRY_PICK_SCAN:
@@ -252,9 +251,7 @@ def get_prs_from_log(log_output, prs_base=None, log_file=None):
                 if commits_output:
                     commits_data = json.loads(commits_output)
                     for commit in commits_data.get("commits", []):
-                        all_extracted_nums.extend(
-                            extract_pr_numbers(commit.get("messageHeadline", ""), strict=True)
-                        )
+                        all_extracted_nums.extend(extract_pr_numbers(commit.get("messageHeadline", ""), strict=True))
                         all_extracted_nums.extend(extract_pr_numbers(commit.get("messageBody", ""), strict=True))
 
                 # Filter and Normalize
@@ -287,7 +284,10 @@ def get_prs_from_log(log_output, prs_base=None, log_file=None):
                             }
                         else:
                             # FALLBACK: Use Meta-PR authors if sub-PR fetch fails
-                            log_event(f" - Warning: Fetch failed for PR #{op_num_str}, using meta-PR authors fallback.", log_file)
+                            log_event(
+                                f" - Warning: Fetch failed for PR #{op_num_str}, using meta-PR authors fallback.",
+                                log_file,
+                            )
                             meta_authors = extract_authors_from_pr(details)
                             all_prs[op_num_str] = {
                                 "title": f"Original PR #{op_num_str} (details missing)",
@@ -296,7 +296,7 @@ def get_prs_from_log(log_output, prs_base=None, log_file=None):
                                 "cherry_pick_pr": pr_num_str,
                             }
                 else:
-                    log_event(f" - No sub-PRs found, treating meta-PR as a normal PR.", log_file)
+                    log_event(" - No sub-PRs found, treating meta-PR as a normal PR.", log_file)
                     all_prs[pr_num_str] = {
                         "title": details["title"],
                         "authors": list(extract_authors_from_pr(details)),
@@ -315,7 +315,10 @@ def get_prs_from_log(log_output, prs_base=None, log_file=None):
             # Not a PR OR PR detail fetch failed (e.g. it was an issue or deleted PR)
             # Use git commit author as the reliable fallback
             if pr_num_str:
-                log_event(f" - PR #{pr_num_str} lookup failed (possibly issue or deleted). Falling back to commit author.", log_file)
+                log_event(
+                    f" - PR #{pr_num_str} lookup failed (possibly issue or deleted). Falling back to commit author.",
+                    log_file,
+                )
             authors = extract_authors_from_commit(commit_id)
             if authors:
                 log_event(f" - Added commit {commit_id} with authors: {list(authors)}", log_file)
@@ -384,7 +387,7 @@ def main():
 
     # Consolidation Pass
     consolidated_contributors = {}  # login_lower -> count
-    display_names = {} # login_lower -> original_casing
+    display_names = {}  # login_lower -> original_casing
     raw_contributors = {}  # login_lower -> count
 
     for contributor, count in contributors.items():
@@ -408,7 +411,7 @@ def main():
     log_event("\n--- Summary ---", log_file)
     # Prefix only identified github logins (no spaces) and format as markdown links
     output_users = []
-    for login_lower, c in sorted_contributors:
+    for login_lower, _login in sorted_contributors:
         u = display_names[login_lower]
         if " " not in u:
             output_users.append(f"[@{u}](https://github.com/{u})")
@@ -444,7 +447,10 @@ def main():
             row["authors"] = "; ".join(sorted(unique_authors.values(), key=lambda x: x.lower()))
             writer.writerow(row)
 
-    log_event(f"\nDetailed information written to {csv_path}. Total human contributors: {len(consolidated_contributors)}", log_file)
+    log_event(
+        f"\nDetailed information written to {csv_path}. Total human contributors: {len(consolidated_contributors)}",
+        log_file,
+    )
 
 
 if __name__ == "__main__":