|
#!/usr/bin/env python3 |
|
"""Publish sanitized ilsaux documentation as multiple GitHub gists. |
|
|
|
Breaks the documentation into category-based gists to avoid GitHub's |
|
rendering limits, then creates a master TOC gist linking them all. |
|
|
|
Requires: gh CLI authenticated with gist scope. |
|
|
|
Usage: |
|
python3 scripts/ilsaux/publish-ilsaux-gists.py [--dry-run] [--delete-old] |
|
""" |
|
|
|
import json |
|
import os |
|
import re |
|
import subprocess |
|
import sys |
|
|
|
# Resolve paths relative to this script so it works from any CWD.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Repository root: two levels up from scripts/ilsaux/.
WORKSPACE = os.path.dirname(os.path.dirname(SCRIPT_DIR))
# Directory where sanitize-for-gist.py stages the redacted documentation copy.
STAGING_DIR = "/tmp/ilsaux-gist"
# Persisted record of created gist IDs/URLs (read by load_manifest, written by save_manifest).
MANIFEST_PATH = os.path.join(SCRIPT_DIR, "gist-manifest.json")

# GitHub username for constructing gist URLs
GITHUB_USER = "rayvoelker"

# Old single gist to optionally delete (via --delete-old)
OLD_SINGLE_GIST_ID = "cce2e74ff232c461e6c6b0e9a620a24f"

# Gist group definitions. Each dict supports these keys (see collect_group_files):
#   name        - group identifier, used as the manifest key
#   description - gist description shown on GitHub
#   source_dirs - directories (relative to the staging dir) to scan for files
#   prefix      - prepended to each filename inside the gist
#   exclude     - optional set of filenames to skip
#   only        - optional allow-list; when present, only these filenames are taken
#   extra_files - optional individual files (relative to the staging dir) to add
GIST_GROUPS = [
    {
        "name": "reports",
        "description": "ilsaux Report Documentation (52 reports) - CHPL Sierra ILS",
        "source_dirs": ["docs/reports"],
        "prefix": "reports--",
    },
    {
        "name": "modules",
        "description": "ilsaux Sierra:: Perl Module Documentation (16 modules) - CHPL",
        "source_dirs": ["docs/modules"],
        "prefix": "modules--",
    },
    {
        "name": "framework",
        "description": "ilsaux Framework & Archive Plan - CHPL Sierra ILS",
        "source_dirs": ["docs/framework"],
        "extra_files": ["docs/archive-plan.md"],
        "prefix": "framework--",
    },
    {
        # Small manifests: everything in docs/manifests except the two large files below.
        "name": "manifests-small",
        "description": "ilsaux Manifests (CSVs & Text) - CHPL Sierra ILS",
        "source_dirs": ["docs/manifests"],
        "prefix": "manifests--",
        "exclude": {"file-manifest.csv", "script-content.json"},
    },
    {
        # Large manifests: split out so they don't hit GitHub's gist rendering limits.
        "name": "manifests-large",
        "description": "ilsaux Large Manifests (file listing & script content) - CHPL",
        "source_dirs": ["docs/manifests"],
        "prefix": "manifests--",
        "only": {"file-manifest.csv", "script-content.json"},
    },
    {
        "name": "scripts",
        "description": "ilsaux Documentation Generator Scripts - CHPL Sierra ILS",
        "source_dirs": ["scripts"],
        "prefix": "scripts--",
    },
]

# Placeholder used in sub-gist READMEs before the master TOC URL exists;
# back-patched with the real URL in phase 7 of main().
MASTER_TOC_PLACEHOLDER = "{{MASTER_TOC_URL}}"
|
|
|
|
|
def run_sanitizer():
    """Invoke sanitize-for-gist.py and return the staging directory it produced.

    The sanitizer writes progress to stderr (forwarded here unchanged) and
    prints the staging directory path on stdout. Exits the whole process if
    the sanitizer fails or the reported directory does not exist.
    """
    sanitizer = os.path.join(SCRIPT_DIR, "sanitize-for-gist.py")
    proc = subprocess.run(
        [sys.executable, sanitizer],
        capture_output=True,
        text=True,
    )
    # Pass the sanitizer's own status output straight through.
    sys.stderr.write(proc.stderr)
    if proc.returncode != 0:
        print("ERROR: Sanitization failed.", file=sys.stderr)
        sys.exit(1)
    staging_path = proc.stdout.strip()
    if not os.path.isdir(staging_path):
        print(f"ERROR: Staging directory not found: {staging_path}", file=sys.stderr)
        sys.exit(1)
    return staging_path
|
|
|
|
|
def collect_group_files(group, staging_dir):
    """Gather the files belonging to one gist group.

    Scans each of the group's ``source_dirs`` under *staging_dir*, applying
    the optional ``only`` allow-list and ``exclude`` deny-list, then appends
    any ``extra_files`` verbatim. Returns ``{gist_filename: local_path}``
    where each gist filename carries the group's ``prefix``.
    """
    prefix = group.get("prefix", "")
    exclude = group.get("exclude", set())
    only = group.get("only")
    collected = {}

    for rel_dir in group.get("source_dirs", []):
        abs_dir = os.path.join(staging_dir, rel_dir)
        if not os.path.isdir(abs_dir):
            print(f" WARNING: {abs_dir} not found", file=sys.stderr)
            continue
        for entry in sorted(os.listdir(abs_dir)):
            entry_path = os.path.join(abs_dir, entry)
            # Skip subdirectories; only plain files go into a gist.
            if not os.path.isfile(entry_path):
                continue
            if only is not None and entry not in only:
                continue
            if entry in exclude:
                continue
            collected[f"{prefix}{entry}"] = entry_path

    # Individually-listed files outside the scanned directories.
    for rel_path in group.get("extra_files", []):
        abs_path = os.path.join(staging_dir, rel_path)
        if os.path.isfile(abs_path):
            collected[f"{prefix}{os.path.basename(abs_path)}"] = abs_path

    return collected
|
|
|
|
|
def filename_to_anchor(filename):
    """Return GitHub's gist anchor slug ("#file-...") for *filename*.

    GitHub lowercases the name, turns dots into hyphens, collapses runs of
    hyphens into one, and trims hyphens from both ends.
    """
    slug = re.sub(r"-+", "-", filename.lower().replace(".", "-")).strip("-")
    return f"#file-{slug}"
|
|
|
|
|
def gist_file_url(gist_url, filename):
    """Return a deep link to *filename* inside the gist at *gist_url*."""
    return gist_url + filename_to_anchor(filename)
|
|
|
|
|
def generate_group_readme(group, files, master_toc_url=None):
    """Render the 00-README.md markdown for one sub-gist.

    Links back to the master TOC (or the placeholder token when the TOC URL
    is not yet known) and lists every file in the gist with an anchor link.
    Returns the markdown as a single string.
    """
    toc_url = master_toc_url if master_toc_url else MASTER_TOC_PLACEHOLDER

    header = [
        f"# {group['description']}",
        "",
        f"**Category:** {group['name']}",
        f"**Files:** {len(files)}",
        f"**Master Index:** [{toc_url}]({toc_url})",
        "",
        "---",
        "",
        "## Files in This Gist",
        "",
    ]

    # One bullet per file, deep-linked via GitHub's anchor scheme.
    entries = [f"- [{fname}]({filename_to_anchor(fname)})" for fname in sorted(files)]

    return "\n".join(header + entries + [""])
|
|
|
|
|
def load_manifest():
    """Return the saved gist manifest, or a fresh empty structure if absent."""
    if not os.path.exists(MANIFEST_PATH):
        return {"version": 1, "groups": {}, "old_single_gist_id": OLD_SINGLE_GIST_ID}
    with open(MANIFEST_PATH) as fh:
        return json.load(fh)
|
|
|
|
|
def save_manifest(manifest):
    """Write *manifest* to disk as pretty-printed JSON with a trailing newline."""
    serialized = json.dumps(manifest, indent=2) + "\n"
    with open(MANIFEST_PATH, "w") as fh:
        fh.write(serialized)
|
|
|
|
|
def gist_exists(gist_id):
    """Return True when the GitHub API can fetch the gist (via `gh api`)."""
    completed = subprocess.run(
        ["gh", "api", f"/gists/{gist_id}", "--silent"],
        capture_output=True,
    )
    # gh exits non-zero for missing/inaccessible gists.
    return completed.returncode == 0
|
|
|
|
|
def create_gist(files_dict, description, dry_run=False):
    """Create a new public gist via `gh gist create`.

    *files_dict* maps gist filenames to either local file paths (copied as-is)
    or literal content strings (written out). Returns ``(gist_id, gist_url)``,
    or ``(None, None)`` when the gh invocation fails. With *dry_run* no API
    call is made and sentinel values are returned.
    """
    if dry_run:
        print(f" [DRY RUN] Would create gist: {description}", file=sys.stderr)
        print(f" [DRY RUN] Files: {len(files_dict)}", file=sys.stderr)
        return "dry-run-id", "https://gist.github.com/dry-run"

    import shutil
    import tempfile

    # gh gist create wants real files on disk, so stage everything in a
    # throwaway directory first.
    with tempfile.TemporaryDirectory() as workdir:
        staged = []
        for gist_name, content_or_path in files_dict.items():
            target = os.path.join(workdir, gist_name)
            if isinstance(content_or_path, str) and os.path.isfile(content_or_path):
                shutil.copy2(content_or_path, target)  # value is a path on disk
            else:
                with open(target, "w") as fh:
                    fh.write(content_or_path)  # value is literal content
            staged.append(target)

        completed = subprocess.run(
            ["gh", "gist", "create", "--public", "--desc", description] + staged,
            capture_output=True,
            text=True,
        )

    if completed.returncode != 0:
        print(f"ERROR: gh gist create failed: {completed.stderr}", file=sys.stderr)
        return None, None

    gist_url = completed.stdout.strip()
    # gh prints the gist URL; its last path segment is the gist ID.
    return gist_url.rstrip("/").split("/")[-1], gist_url
|
|
|
|
|
def update_gist(gist_id, files_dict, description=None, dry_run=False):
    """PATCH an existing gist's files (and optionally its description).

    *files_dict* values may be local file paths (contents are read in) or
    literal content strings. Returns True on success, False on API failure.
    With *dry_run* no API call is made and True is returned.
    """
    if dry_run:
        print(f" [DRY RUN] Would update gist {gist_id}", file=sys.stderr)
        print(f" [DRY RUN] Files: {len(files_dict)}", file=sys.stderr)
        return True

    # Assemble the JSON body expected by the GitHub "update a gist" endpoint.
    body = {"files": {}}
    if description:
        body["description"] = description

    for gist_name, content_or_path in files_dict.items():
        if isinstance(content_or_path, str) and os.path.isfile(content_or_path):
            # errors="replace" keeps oddly-encoded bytes from aborting the upload.
            with open(content_or_path, "r", errors="replace") as fh:
                text = fh.read()
        else:
            text = content_or_path
        body["files"][gist_name] = {"content": text}

    completed = subprocess.run(
        ["gh", "api", "--method", "PATCH", f"/gists/{gist_id}", "--input", "-"],
        input=json.dumps(body),
        capture_output=True,
        text=True,
    )

    if completed.returncode != 0:
        print(f"ERROR: Failed to update gist {gist_id}: {completed.stderr}", file=sys.stderr)
        return False
    return True
|
|
|
|
|
def create_or_update_gist(gist_id, files_dict, description, dry_run=False):
    """Update *gist_id* in place when it still exists; otherwise create anew.

    Returns ``(gist_id, gist_url, created_new)``. ``created_new`` is False
    only when an in-place update succeeded; a failed update falls through to
    creating a replacement gist.
    """
    if gist_id and gist_exists(gist_id):
        if update_gist(gist_id, files_dict, description, dry_run):
            return gist_id, f"https://gist.github.com/{GITHUB_USER}/{gist_id}", False
        print(f" Update failed for {gist_id}, will recreate", file=sys.stderr)

    fresh_id, fresh_url = create_gist(files_dict, description, dry_run)
    return fresh_id, fresh_url, True
|
|
|
|
|
def generate_master_toc(group_data, staging_dir):
    """Generate the master 00-INDEX.md with real gist URLs and deep links.

    group_data: {group_name: {"gist_id", "gist_url", "file_count"}} for every
    group in GIST_GROUPS (all six names in group_order below must be present).
    staging_dir: sanitized staging tree containing docs/00-INDEX.md.
    Returns the full markdown document as a string (trailing newline included).
    Exits the process if the staged index file is missing.
    """
    # Read the original 00-INDEX.md as a base for content; its tables are
    # mined by the _rewrite_* helpers below to rebuild rows with deep links.
    original_index = os.path.join(staging_dir, "docs/00-INDEX.md")
    if not os.path.exists(original_index):
        print("ERROR: docs/00-INDEX.md not found in staging", file=sys.stderr)
        sys.exit(1)

    with open(original_index) as f:
        original_content = f.read()

    # Now rewrite the index with multi-gist links
    lines = []
    lines.append("# ilsaux -- ILS Auxiliary Server Documentation")
    lines.append("")
    lines.append("**System:** Sierra ILS report automation server at Cincinnati & Hamilton County Public Library (CHPL)")
    lines.append("**Contents:** 94 documentation files covering 51 reports, 16 Perl modules, cron framework, and migration plan")
    lines.append("**Credentials:** All sensitive values replaced with `[REDACTED-*]` markers (see bottom of this file)")
    lines.append("")
    lines.append("---")
    lines.append("")
    lines.append("## Documentation Gists")
    lines.append("")
    lines.append("This documentation is split across multiple gists to stay within GitHub's rendering limits.")
    lines.append("")
    lines.append("| Category | Files | Description | Link |")
    lines.append("|----------|-------|-------------|------|")

    # Fixed presentation order and human-readable labels for the gist table.
    group_order = ["reports", "modules", "framework", "manifests-small", "manifests-large", "scripts"]
    group_labels = {
        "reports": ("Reports", "52 report docs (51 reports + template)"),
        "modules": ("Modules", "16 Sierra:: Perl module docs"),
        "framework": ("Framework", "Cron framework, config format, archive plan"),
        "manifests-small": ("Manifests (Small)", "7 renderable CSVs and text files"),
        "manifests-large": ("Manifests (Large)", "Full file listing + script content JSON"),
        "scripts": ("Scripts", "13 Python/bash generator scripts"),
    }

    for gname in group_order:
        gd = group_data[gname]
        label, desc = group_labels[gname]
        url = gd["gist_url"]
        count = gd["file_count"]
        lines.append(f"| **{label}** | {count} | {desc} | [View gist]({url}) |")

    lines.append("")
    lines.append("---")
    lines.append("")

    # Per-group gist URLs used to build deep links in the sections below.
    reports_url = group_data["reports"]["gist_url"]
    modules_url = group_data["modules"]["gist_url"]
    framework_url = group_data["framework"]["gist_url"]
    manifests_small_url = group_data["manifests-small"]["gist_url"]
    manifests_large_url = group_data["manifests-large"]["gist_url"]
    scripts_url = group_data["scripts"]["gist_url"]

    # Rewrite the Active Reports section with deep links
    lines.append("## Active Reports -- Quick Reference")
    lines.append("")
    lines.append("### Shelf-List Reports (6 -- HIGH PRIORITY)")
    lines.append("")
    lines.append("These are the highest-value reports, actively used for collection management.")
    lines.append("")
    lines.append("| Report | Full Name | Schedule | Last Run | Doc File |")
    lines.append("|--------|-----------|----------|----------|----------|")

    # Parse original content for report tables: rows are extracted from the
    # staged index and re-emitted with links into the reports gist.
    _rewrite_report_tables(original_content, lines, reports_url)

    lines.append("")
    lines.append("---")
    lines.append("")

    # Sierra:: Modules section
    lines.append("## Sierra:: Modules (16)")
    lines.append("")
    lines.append("Custom Perl modules in `Modules/Sierra/` providing database access, location mapping, and ILS integration.")
    lines.append("")
    lines.append("| Module | Purpose | Used By | Doc File |")
    lines.append("|--------|---------|---------|----------|")

    _rewrite_module_table(original_content, lines, modules_url)

    lines.append("")
    lines.append("---")
    lines.append("")

    # Manifest files section: rows route to the small or large manifests gist.
    lines.append("## Manifest Files (9)")
    lines.append("")
    lines.append("Machine-readable data files generated by the analysis scripts.")
    lines.append("")
    lines.append("| File | Format | Contents | Gist |")
    lines.append("|------|--------|----------|------|")

    _rewrite_manifest_table(original_content, lines, manifests_small_url, manifests_large_url)

    lines.append("")
    lines.append("---")
    lines.append("")

    # Scripts section (tables are hard-coded inside the helper).
    lines.append("## Scripts (13)")
    lines.append("")
    lines.append("Python scripts (stdlib-only, rerunnable) that generated this documentation from the live ilsaux server.")
    lines.append("")

    _rewrite_scripts_section(original_content, lines, scripts_url)

    lines.append("")
    lines.append("---")
    lines.append("")

    # Framework section
    lines.append("## Framework & Architecture")
    lines.append("")
    lines.append("All reports follow the same execution pattern through `generic-cron.sh`:")
    lines.append("")
    lines.append("```")
    lines.append("cron schedule -> <report>-cron.sh -> generic-cron.sh -> perl ./$SOURCEFILE -> Sierra::DB -> PostgreSQL")
    lines.append("```")
    lines.append("")
    fw_cron = gist_file_url(framework_url, "framework--generic-cron-framework.md")
    fw_cfg = gist_file_url(framework_url, "framework--config-file-format.md")
    lines.append(f"See [generic-cron-framework.md]({fw_cron}) for the full execution flow.")
    lines.append(f"See [config-file-format.md]({fw_cfg}) for the Config::Simple `.cfg` credential format.")
    lines.append("")
    lines.append("---")
    lines.append("")

    # Migration section
    lines.append("## Migration & Archive Plan")
    lines.append("")
    archive_link = gist_file_url(framework_url, "framework--archive-plan.md")
    lines.append(f"See [archive-plan.md]({archive_link}) for the full classification and migration priorities.")
    lines.append("")
    lines.append("**Key numbers:**")
    lines.append("- **6** active-critical shelf-list reports (migrate first)")
    lines.append("- **16** other active reports (evaluate for migration)")
    lines.append("- **2** inactive-recent (review with stakeholders)")
    lines.append("- **27** obsolete (archive as historical record)")
    lines.append("- **49.6 GB** total server size; 65.9% is active report data")
    lines.append("- **316** credential references that need rotation before any migration")
    lines.append("")
    lines.append("---")
    lines.append("")

    # Credential safety note
    lines.append("## Credential Safety Note")
    lines.append("")
    sanitize_link = gist_file_url(scripts_url, "scripts--sanitize-for-gist.py")
    lines.append(f"All sensitive values in these gists have been replaced by the sanitizer ([sanitize-for-gist.py]({sanitize_link})):")
    lines.append("")
    lines.append("| Marker | Meaning |")
    lines.append("|--------|---------|")
    lines.append("| `[REDACTED-PASSWORD]` | Database or service password |")
    lines.append("| `[REDACTED-USER]` | Database username or service account |")
    lines.append("| `[REDACTED-HOST]` | Internal hostname or domain (*.plch.net, *.iii.com, etc.) |")
    lines.append("| `[REDACTED-EMAIL]` | Internal email address |")
    lines.append("| `[INTERNAL-HOST]` | Short internal hostname reference |")
    lines.append("")
    cred_link = gist_file_url(manifests_small_url, "manifests--credential-locations.csv")
    lines.append(f"The [credential-locations.csv]({cred_link}) file lists where credentials appear (file + line + type) but contains **no actual credential values**.")

    return "\n".join(lines) + "\n"
|
|
|
|
|
def _rewrite_report_tables(original, lines, reports_url):
    """Extract report table rows from *original* and append them to *lines*
    with doc links rewritten to deep-link into the reports gist.

    Also inserts the "Other Active Reports" sub-table header when the scan
    crosses from the shelf-list table into the second table, and closes the
    section with a note about inactive/obsolete reports.

    Fixes: the gist filename was previously built from the literal text
    f"reports--(unknown)" (an f-string with no placeholder), discarding the
    captured filename and breaking every report deep link. Also removes the
    dead, self-described-broken archive_link computation and the unused
    in_shelf flag.
    """
    # Match table rows like: | slitemdata | Item Data ... | ... | [reports--slitemdata.md](...) |
    pattern = re.compile(
        r"^\| (\S+) \| (.+?) \| (.+?) \| (.+?) \| \[reports--(\S+?)\]\([^)]*\) \|$",
        re.MULTILINE,
    )

    found_other = False

    for m in pattern.finditer(original):
        report, full_name, schedule, last_run, filename = m.groups()
        gist_fname = f"reports--{filename}"
        deep_link = gist_file_url(reports_url, gist_fname)

        # Detect transition from shelf-list to other reports: the second
        # table's heading appears shortly before its first data row.
        preceding = original[max(0, m.start() - 200):m.start()]
        if "Other Active Reports" in preceding and not found_other:
            found_other = True
            lines.append("")
            lines.append("### Other Active Reports (16)")
            lines.append("")
            lines.append("| Report | Full Name | Schedule | Last Run | Doc File |")
            lines.append("|--------|-----------|----------|----------|----------|")

        lines.append(f"| {report} | {full_name} | {schedule} | {last_run} | [{gist_fname}]({deep_link}) |")

    # Add note about inactive
    lines.append("")
    lines.append("**Inactive and obsolete reports** (2 inactive-recent + 27 obsolete) are classified in the Framework gist.")
|
|
|
|
|
def _rewrite_module_table(original, lines, modules_url):
    """Extract Sierra:: module table rows from *original* and append them to
    *lines* with doc links rewritten to deep-link into the modules gist.

    Fixes: the gist filename was previously the literal text
    f"modules--(unknown)" (an f-string with no placeholder), discarding the
    captured filename and breaking every module deep link.
    """
    pattern = re.compile(
        r"^\| (Sierra::\S+) \| (.+?) \| (.+?) \| \[modules--(\S+?)\]\([^)]*\) \|$",
        re.MULTILINE,
    )
    for m in pattern.finditer(original):
        module, purpose, used_by, filename = m.groups()
        gist_fname = f"modules--{filename}"
        deep_link = gist_file_url(modules_url, gist_fname)
        lines.append(f"| {module} | {purpose} | {used_by} | [{gist_fname}]({deep_link}) |")
|
|
|
|
|
def _rewrite_manifest_table(original, lines, small_url, large_url):
    """Extract manifest table rows from *original* and append them to *lines*,
    routing the two oversized manifests to the "large" gist and everything
    else to the "small" gist (matching the manifests-large group's "only" set).

    Fixes: the gist filename was previously the literal text
    f"manifests--(unknown)" (an f-string with no placeholder), discarding the
    captured filename and breaking every manifest deep link.
    """
    pattern = re.compile(
        r"^\| \[manifests--(\S+?)\]\([^)]*\) \| (\S+) \| (.+?) \|$",
        re.MULTILINE,
    )

    large_files = {"file-manifest.csv", "script-content.json"}

    for m in pattern.finditer(original):
        filename, fmt, contents = m.groups()
        gist_fname = f"manifests--{filename}"
        url = large_url if filename in large_files else small_url
        deep_link = gist_file_url(url, gist_fname)
        gist_label = "Large" if filename in large_files else "Small"
        lines.append(f"| [{gist_fname}]({deep_link}) | {fmt} | {contents} | {gist_label} |")
|
|
|
|
|
def _rewrite_scripts_section(original, lines, scripts_url):
    """Append the three Scripts sub-tables to *lines* with deep links into the
    scripts gist.

    *original* is accepted for signature parity with the other _rewrite_*
    helpers but is not consulted; the script inventory is hard-coded here.
    """

    def emit_rows(rows):
        # One markdown table row per (script, detail) pair, deep-linked.
        for script, detail in rows:
            fname = f"scripts--{script}"
            lines.append(f"| [{fname}]({gist_file_url(scripts_url, fname)}) | {detail} |")

    lines.extend([
        "### Manifest Generators (7)",
        "",
        "| Script | Output |",
        "|--------|--------|",
    ])
    emit_rows([
        ("manifest-tree.py", "`file-manifest.csv`, `directory-tree.txt`"),
        ("manifest-perl-deps.py", "`perl-dependencies.csv`"),
        ("manifest-script-content.py", "`script-content.json`"),
        ("manifest-cron.py", "`cron-schedule.csv`"),
        ("manifest-report-status.py", "`report-status.csv`"),
        ("manifest-git.py", "`git-summaries.csv`"),
        ("manifest-summary.py", "`summary-report.txt`"),
    ])

    lines.extend([
        "",
        "### Documentation Generators (3)",
        "",
        "| Script | Output |",
        "|--------|--------|",
    ])
    emit_rows([
        ("generate-report-docs.py", "51 report docs in `reports--*.md`"),
        ("generate-module-docs.py", "16 module docs in `modules--*.md`"),
        ("generate-framework-doc.py", "Framework docs"),
    ])

    lines.extend([
        "",
        "### Utilities (3)",
        "",
        "| Script | Purpose |",
        "|--------|---------|",
    ])
    emit_rows([
        ("generate-archive-plan.py", "Generates archive-plan.md (migration classification and priorities)"),
        ("sanitize-for-gist.py", "Redacts credentials and internal hostnames for safe publishing"),
        ("publish-ilsaux-gist.sh", "Original single-gist publisher (kept for reference)"),
    ])
|
|
|
|
|
def delete_gist(gist_id, dry_run=False):
    """Delete the gist *gist_id* via `gh gist delete`; return success as bool."""
    if dry_run:
        print(f" [DRY RUN] Would delete gist {gist_id}", file=sys.stderr)
        return True

    completed = subprocess.run(
        ["gh", "gist", "delete", gist_id],
        capture_output=True,
        text=True,
    )
    if completed.returncode == 0:
        return True
    print(f"ERROR: Failed to delete gist {gist_id}: {completed.stderr}", file=sys.stderr)
    return False
|
|
|
|
|
def main():
    """Orchestrate the eight publishing phases.

    1. sanitize docs, 2. group files, 3. generate sub-gist READMEs,
    4. create/update sub-gists, 5. build master TOC, 6. publish TOC gist,
    7. back-patch READMEs with the real TOC URL, 8. persist the manifest.
    Optionally deletes the old single gist (--delete-old). With --dry-run no
    GitHub API calls are made and no files are written.
    """
    import argparse
    parser = argparse.ArgumentParser(description="Publish ilsaux docs as multiple gists")
    parser.add_argument("--dry-run", action="store_true", help="Show what would happen without API calls")
    parser.add_argument("--delete-old", action="store_true", help="Delete the old single gist after publish")
    args = parser.parse_args()

    dry_run = args.dry_run

    print("=== ilsaux Multi-Gist Publisher ===\n")

    # Phase 1: Sanitize (exits the process on failure)
    print("[1/8] Sanitizing documentation ...")
    staging_dir = run_sanitizer()
    print(f" Staging: {staging_dir}\n")

    # Phase 2: Group files per GIST_GROUPS definition
    print("[2/8] Grouping files ...")
    grouped = {}
    for group in GIST_GROUPS:
        name = group["name"]
        files = collect_group_files(group, staging_dir)
        grouped[name] = {"group": group, "files": files}
        print(f" {name}: {len(files)} files")
    print()

    # Phase 3: Generate per-gist 00-README.md (with placeholder TOC URL,
    # since the master TOC gist does not exist yet; fixed up in phase 7)
    print("[3/8] Generating per-gist README files ...")
    for name, data in grouped.items():
        readme_content = generate_group_readme(data["group"], data["files"])
        data["files"]["00-README.md"] = readme_content
        print(f" {name}: 00-README.md added ({len(data['files'])} total)")
    print()

    # Phase 4: Create or update sub-gists, reusing IDs from a prior manifest
    print("[4/8] Creating/updating sub-gists ...")
    manifest = load_manifest()
    group_data = {}

    for name, data in grouped.items():
        existing_id = manifest.get("groups", {}).get(name, {}).get("gist_id")
        desc = data["group"]["description"]

        gist_id, gist_url, created = create_or_update_gist(
            existing_id, data["files"], desc, dry_run
        )

        if not gist_id:
            print(f"ERROR: Failed to create/update gist for {name}", file=sys.stderr)
            sys.exit(1)

        action = "Created" if created else "Updated"
        print(f" {action} {name}: {gist_url}")

        group_data[name] = {
            "gist_id": gist_id,
            "gist_url": gist_url,
            "file_count": len(data["files"]),
        }
    print()

    # Phase 5: Generate master TOC from the now-known sub-gist URLs
    print("[5/8] Generating master TOC ...")
    master_toc_content = generate_master_toc(group_data, staging_dir)
    if dry_run:
        print(f" [DRY RUN] Master TOC: {len(master_toc_content)} bytes")
    print()

    # Phase 6: Create or update master TOC gist
    print("[6/8] Creating/updating master TOC gist ...")
    master_id = manifest.get("master_toc_gist_id")
    master_files = {"00-INDEX.md": master_toc_content}
    master_desc = "ilsaux ILS Auxiliary Server - Master Documentation Index (CHPL)"

    master_gist_id, master_gist_url, master_created = create_or_update_gist(
        master_id, master_files, master_desc, dry_run
    )

    if not master_gist_id:
        print("ERROR: Failed to create/update master TOC gist", file=sys.stderr)
        sys.exit(1)

    action = "Created" if master_created else "Updated"
    print(f" {action} master TOC: {master_gist_url}\n")

    # Phase 7: Back-patch sub-gist READMEs with the real master TOC URL
    print("[7/8] Back-patching sub-gist READMEs with master TOC URL ...")
    for name, data in grouped.items():
        readme = generate_group_readme(
            data["group"], data["files"], master_toc_url=master_gist_url
        )
        gist_id = group_data[name]["gist_id"]

        if dry_run:
            print(f" [DRY RUN] Would patch {name} 00-README.md")
        else:
            # Only the README file is patched; the other files are untouched.
            ok = update_gist(gist_id, {"00-README.md": readme}, dry_run=False)
            if ok:
                print(f" Patched {name}")
            else:
                print(f" WARNING: Failed to patch {name} README", file=sys.stderr)
    print()

    # Phase 8: Save manifest so the next run updates these same gists in place
    print("[8/8] Saving manifest ...")
    manifest["version"] = 1
    manifest["master_toc_gist_id"] = master_gist_id
    manifest["master_toc_gist_url"] = master_gist_url
    manifest["old_single_gist_id"] = OLD_SINGLE_GIST_ID
    manifest["groups"] = group_data

    if dry_run:
        print(f" [DRY RUN] Would save manifest to {MANIFEST_PATH}")
        print(f" [DRY RUN] Manifest content:")
        print(json.dumps(manifest, indent=2))
    else:
        save_manifest(manifest)
        print(f" Saved: {MANIFEST_PATH}")
    print()

    # Optional: delete old gist (superseded single-gist publication)
    if args.delete_old:
        print(f"Deleting old single gist {OLD_SINGLE_GIST_ID} ...")
        if delete_gist(OLD_SINGLE_GIST_ID, dry_run):
            print(" Deleted.")
        else:
            print(" WARNING: Could not delete old gist.", file=sys.stderr)
        print()

    # Summary
    print("=== DONE ===")
    print(f"Master TOC: {master_gist_url}")
    for name in ["reports", "modules", "framework", "manifests-small", "manifests-large", "scripts"]:
        gd = group_data[name]
        print(f" {name} ({gd['file_count']} files): {gd['gist_url']}")
    if not dry_run:
        print(f"\nManifest saved to: {MANIFEST_PATH}")
        print("Commit it with: git add scripts/ilsaux/gist-manifest.json")
|
|
|
|
|
# Script entry point: run only when executed directly, not when imported.
if __name__ == "__main__":
    main()