
Commit f568e7a

docs: update TOC generation workflow and enhance script for dynamic directory handling
1 parent: 44af260

File tree (5 files changed: +60, -46 lines)

- .github/workflows/build-toc.yaml
- .scripts/idxtool.py
- prompts/gpts/TOC.md
- prompts/official-product/TOC.md
- prompts/opensource-prj/TOC.md

.github/workflows/build-toc.yaml (+16, -31)

@@ -1,5 +1,9 @@
 name: Generate TOC on PR Merge or Push
 
+# This workflow automatically updates the TOC.md files in the repository
+# whenever there's a PR merge or direct push to the main branch.
+# It ensures that the table of contents stays up-to-date without manual intervention.
+
 on:
   # Trigger on PR merge
   pull_request_target:
@@ -13,45 +17,26 @@ on:
       - main
 
 jobs:
-  build_toc_on_pr_merge:
-    if: github.event_name == 'pull_request_target' && github.event.pull_request.merged == true
+  build_toc:
+    # Only run on PR merge or direct push to main
+    if: github.event_name == 'push' || (github.event_name == 'pull_request_target' && github.event.pull_request.merged == true)
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
+        with:
+          # Fetch all history for allowing proper git operations
+          fetch-depth: 0
 
       - name: Set up Python 3
         uses: actions/setup-python@v3
         with:
           python-version: '3.x'
 
-      - name: Run TOC generation script
+      - name: Install dependencies
         run: |
-          chmod +x .scripts/idxtool.py
-          python3 .scripts/idxtool.py --toc
-
-      - name: Commit TOC updates
-        run: |
-          git config --global user.name 'LouisShark'
-          git config --global user.email '[email protected]'
-          git add .
-          git commit -m "docs: Update TOC.md" || echo "No changes to commit"
-          git pull --rebase
-          git push origin HEAD:main
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
-  build_toc_on_push:
-    if: github.event_name == 'push'
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
-
-      - name: Set up Python 3
-        uses: actions/setup-python@v3
-        with:
-          python-version: '3.x'
+          python -m pip install --upgrade pip
+          if [ -f .scripts/requirements.txt ]; then pip install -r .scripts/requirements.txt; fi
 
       - name: Run TOC generation script
         run: |
@@ -62,9 +47,9 @@ jobs:
         run: |
          git config --global user.name 'LouisShark'
          git config --global user.email '[email protected]'
-          git add .
-          git commit -m "docs: Update TOC.md" || echo "No changes to commit"
-          git pull --rebase
+          git add TOC.md prompts/*/TOC.md
+          git commit -m "docs: Update TOC.md files" || echo "No changes to commit"
+          git pull --rebase origin main
          git push origin HEAD:main
         env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
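
The two event-specific jobs (`build_toc_on_pr_merge` and `build_toc_on_push`) are consolidated into a single `build_toc` job gated by one `if:` expression. As a rough illustration of how that condition behaves (a Python rendering for readability only; the actual gating is the YAML expression above):

```python
# Hypothetical Python rendering of the workflow's combined `if:` condition.
# The real check is evaluated by GitHub Actions, not by this code.

def should_run(event_name: str, pr_merged: bool = False) -> bool:
    # github.event_name == 'push'
    #   || (github.event_name == 'pull_request_target'
    #       && github.event.pull_request.merged == true)
    return event_name == "push" or (
        event_name == "pull_request_target" and pr_merged
    )

assert should_run("push")                                      # push event (branch filter limits it to main)
assert should_run("pull_request_target", pr_merged=True)       # merged PR
assert not should_run("pull_request_target", pr_merged=False)  # PR closed without merging
assert not should_run("workflow_dispatch")                     # any other event
```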

.scripts/idxtool.py (+41, -12)

@@ -96,7 +96,7 @@ def rebuild_toc(toc_out: str = '') -> Tuple[bool, str]:
 
     # Write a header for the TOC file
     out = []
-    out.append("# ChatGPT System Prompts - Table of Contents\n\n")
+    out.append("# ChatGPT System Prompts \n\n")
     out.append("This document contains a table of contents for the ChatGPT System Prompts repository.\n\n")
 
     # Add links to TOC.md files in prompts directory subdirectories
@@ -197,21 +197,37 @@ def find_gptfile(keyword, verbose=True):
 
 def generate_toc_for_prompts_dirs() -> Tuple[bool, str]:
     """
-    Generates a single TOC.md file for each of the three main directories under prompts:
-    gpts, official-product, and opensource-prj.
-    For gpts directory, uses the original GPT-specific TOC generation logic.
-    For other directories, includes all markdown files in the directory and its subdirectories.
+    Generates a single TOC.md file for each directory under prompts:
+    - For the gpts directory, uses the original GPT-specific TOC generation logic.
+    - For all other directories (including newly added ones), uses the generic recursive logic.
+
+    This function automatically detects all subdirectories under prompts, ensuring future-proof
+    extensibility without requiring code changes when new directories are added.
     """
     prompts_base_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'prompts'))
     if not os.path.exists(prompts_base_path):
         return (False, f"Prompts directory '{prompts_base_path}' does not exist.")
 
-    print(f"Generating TOC.md files for main directories under '{prompts_base_path}'")
+    print(f"Generating TOC.md files for all directories under '{prompts_base_path}'")
     success = True
     messages = []
 
-    # Main directories we want to process
-    main_dirs = ["gpts", "official-product", "opensource-prj"]
+    # Dynamically discover all directories under prompts/
+    try:
+        all_dirs = [d for d in os.listdir(prompts_base_path)
+                    if os.path.isdir(os.path.join(prompts_base_path, d))]
+    except Exception as e:
+        return (False, f"Error scanning prompts directory: {str(e)}")
+
+    if not all_dirs:
+        return (False, "No subdirectories found under prompts/")
+
+    # Define which directory needs special GPT-specific handling
+    # If you need to change the behavior, you only need to change this constant
+    SPECIAL_DIR = "gpts"
+
+    # Track if special directory was found and processed
+    special_dir_processed = False
 
     def collect_files_recursively(dir_path, base_path=None):
         """
@@ -278,8 +294,16 @@ def collect_files_recursively(dir_path, base_path=None):
         return result
 
     def generate_gpts_toc(dir_path):
-        """Generate TOC.md for gpts directory using the original GPT-specific logic.
-        The file is completely regenerated, not preserving any existing content."""
+        """
+        Generate TOC.md for gpts directory using the original GPT-specific logic.
+        The file is completely regenerated, not preserving any existing content.
+
+        Args:
+            dir_path: Path to the gpts directory
+
+        Returns:
+            A tuple (success, message) indicating success/failure and a descriptive message
+        """
         toc_path = os.path.join(dir_path, TOC_FILENAME)
         try:
             with open(toc_path, 'w', encoding='utf-8') as toc_file:
@@ -321,14 +345,15 @@ def gpts_sorter(key):
             return (False, f"Error generating TOC.md for 'gpts': {str(e)}")
 
     # Process each top-level directory under prompts/
-    for dirname in main_dirs:
+    for dirname in sorted(all_dirs):  # Sort for consistent processing order
         dir_path = os.path.join(prompts_base_path, dirname)
         if not os.path.isdir(dir_path):
             messages.append(f"Directory '{dirname}' does not exist, skipping")
             continue
 
         # For gpts directory, use the original GPT-specific logic
-        if dirname == "gpts":
+        if dirname == SPECIAL_DIR:
+            special_dir_processed = True
             ok, msg = generate_gpts_toc(dir_path)
             success = success and ok
             messages.append(msg)
@@ -398,6 +423,10 @@ def gpts_sorter(key):
             success = False
             messages.append(f"Error generating TOC.md for '{dirname}': {str(e)}")
 
+    # Warn if special directory was expected but not found
+    if not special_dir_processed and SPECIAL_DIR in all_dirs:
+        messages.append(f"Warning: Special directory '{SPECIAL_DIR}' was found but could not be processed")
+
     result_message = "\n".join(messages)
     return (success, result_message)
 
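
The hunks above show only fragments of `generate_toc_for_prompts_dirs`. A minimal sketch of the discover-then-dispatch pattern the change introduces (not the repository's actual code; `generate_special_toc` and `generate_generic_toc` are hypothetical stand-ins for the script's gpts-specific and generic generators):

```python
import os

SPECIAL_DIR = "gpts"  # the one directory that keeps its GPT-specific TOC logic

def generate_special_toc(dir_path: str) -> tuple:
    # Stand-in for the gpts-specific generator in idxtool.py.
    return True, f"special TOC generated for {dir_path}"

def generate_generic_toc(dir_path: str) -> tuple:
    # Stand-in for the generic recursive generator.
    return True, f"generic TOC generated for {dir_path}"

def build_all_tocs(prompts_base_path: str) -> tuple:
    # Discover every subdirectory instead of hard-coding a fixed list,
    # so new prompt directories are picked up without code changes.
    all_dirs = [d for d in os.listdir(prompts_base_path)
                if os.path.isdir(os.path.join(prompts_base_path, d))]
    if not all_dirs:
        return False, "No subdirectories found under prompts/"

    success, messages = True, []
    for dirname in sorted(all_dirs):  # stable, predictable processing order
        dir_path = os.path.join(prompts_base_path, dirname)
        handler = generate_special_toc if dirname == SPECIAL_DIR else generate_generic_toc
        ok, msg = handler(dir_path)
        success = success and ok
        messages.append(msg)
    return success, "\n".join(messages)

# Example: build_all_tocs("prompts") returns (overall_success, per-directory messages).
```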

prompts/gpts/TOC.md (+1, -1)

@@ -1,4 +1,4 @@
-# gpts
+# gpts - Table of Contents
 
 ## GPTs (1107 total)
 
prompts/official-product/TOC.md (+1, -1)

@@ -1,4 +1,4 @@
-# official-product
+# official-product - Table of Contents
 
 ## Subdirectories
 
prompts/opensource-prj/TOC.md (+1, -1)

@@ -1,4 +1,4 @@
-# opensource-prj
+# opensource-prj - Table of Contents
 
 - [Claude_Sentience](./Claude_Sentience.md)
 - [RestGPT](./RestGPT.md)
