@@ -27,6 +27,39 @@ def copy_transform(src, dst, transformer):
2727 with open (dst , 'w' ) as file :
2828 file .write (content )
2929
def deduplicate_summary(summary_path):
    """Remove lines containing duplicate file references from SUMMARY.md.

    mdBook v0.5+ rejects books whose SUMMARY.md maps two entries to the
    same source file, so any line whose markdown link target (ignoring a
    ``#fragment`` suffix) has already been emitted is dropped entirely.

    Args:
        summary_path: Path to a SUMMARY.md file. Silently a no-op when
            the file does not exist.
    """
    if not os.path.exists(summary_path):
        return

    with open(summary_path, 'r', encoding='utf-8') as f:
        lines = f.readlines()

    link_pattern = re.compile(r'\[([^\]]+)\]\(([^)]+)\)')
    seen_paths = set()
    new_lines = []

    for line in lines:
        # Strip any '#fragment' so anchor links inside a file count as
        # references to that same file.
        paths = [path.split('#')[0] for _text, path in link_pattern.findall(line)]

        if any(p in seen_paths for p in paths):
            # Skip this line entirely instead of commenting it out.
            # Do NOT record this line's other paths: since the line is not
            # emitted, those files have not actually been referenced yet.
            # (Previously they were marked seen before the duplicate was
            # detected, which could wrongly drop later, first-time lines.)
            continue

        seen_paths.update(paths)
        new_lines.append(line)

    # Only rewrite the file when something was actually removed, so a
    # clean SUMMARY.md keeps its content and mtime untouched.
    if len(new_lines) != len(lines):
        with open(summary_path, 'w', encoding='utf-8') as f:
            f.writelines(new_lines)
62+
3063def export_version (ref , name ):
3164 docs_spec = f'{ ref } :docs'
3265
@@ -46,6 +79,12 @@ def export_version(ref, name):
4679 subprocess .check_output (('tar' , 'xvf' , '-' , '-C' , src_dir ), stdin = ps .stdout )
4780 ps .wait ()
4881
82+ # Deduplicate SUMMARY.md for old versions with duplicate paths
83+ # mdBook v0.5+ enforces unique paths, but old release tags may have duplicates
84+ # This preprocessing step removes duplicate entries before mdBook sees them
85+ summary_path = os .path .join (src_dir , 'SUMMARY.md' )
86+ deduplicate_summary (summary_path )
87+
4988 # copy README.md to <version>/src/
5089 # rewrite absolute URLs to relative mdBook URLs
5190 copy_transform ('README.md' , os .path .join (src_dir , 'README.md' ), lambda content : re .sub (r'\(https://docs.ue4ss.com/dev/([^)#]+)\.html(#[^)]*)?\)' , r'(\1.md\2)' , content ))
0 commit comments