fix: ToC generation so it doesn't include itself or duplicate the end marker (#4388)

Turns out the ToC was being generated from content that still included the existing ToC block, which threw off the comparison against the expected ToC and sometimes caused the file to be rewritten on every run.

Also fixed the suffix slice so `<!-- End ToC -->` doesn't get duplicated when the new ToC is inserted.

Should behave nicely now: no extra rewrites, no doubled markers.
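For illustration, here is a minimal, self-contained sketch of the idea behind the fix, assuming `<!-- Begin ToC -->` as the opening marker and a toy README; neither of those is taken from the actual script, only `<!-- End ToC -->` appears in the fix itself.

```python
# Minimal sketch of the self-inclusion fix (not the actual script).
# The "<!-- Begin ToC -->" marker text and the sample README below are
# assumptions for illustration only.
SAMPLE = """\
# Demo

<!-- Begin ToC -->

- [Old entry](#old-entry)

<!-- End ToC -->

## Usage
"""

lines = SAMPLE.splitlines()
begin_idx = lines.index("<!-- Begin ToC -->")
end_idx = lines.index("<!-- End ToC -->")

# Old behaviour: the full file, existing ToC included, was fed to the
# generator, so the output depended on the stale ToC it was about to replace
# and the comparison could keep failing.
# New behaviour: drop everything from the begin marker through the end marker
# before generating, so the expected ToC depends only on the surrounding
# content.
toc_source = lines[:begin_idx] + lines[end_idx + 1 :]
assert all(not l.lstrip().startswith("- [") for l in toc_source)
print("\n".join(toc_source))
```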

Co-authored-by: Eric Traut <etraut@openai.com>
Author: wizard
Date: 2025-11-06 01:52:51 +03:00
Committed by: GitHub
Parent: 86c149ae8e
Commit: 4a1a7f9685


@@ -87,8 +87,9 @@ def check_or_fix(readme_path: Path, fix: bool) -> int:
     # extract current ToC list items
     current_block = lines[begin_idx + 1 : end_idx]
     current = [l for l in current_block if l.lstrip().startswith("- [")]
-    # generate expected ToC
-    expected = generate_toc_lines(content)
+    # generate expected ToC from content without current ToC
+    toc_content = lines[:begin_idx] + lines[end_idx+1:]
+    expected = generate_toc_lines("\n".join(toc_content))
     if current == expected:
         return 0
     if not fix:
@@ -108,7 +109,7 @@ def check_or_fix(readme_path: Path, fix: bool) -> int:
         return 1
     # rebuild file with updated ToC
     prefix = lines[: begin_idx + 1]
-    suffix = lines[end_idx:]
+    suffix = lines[end_idx+1:]
     new_lines = prefix + [""] + expected + [""] + suffix
     readme_path.write_text("\n".join(new_lines) + "\n", encoding="utf-8")
     print(f"Updated ToC in {readme_path}.")