Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 64 additions & 0 deletions .github/workflows/image-mode-parity.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,74 @@ jobs:
python-version: "3.11"
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y poppler-utils
python -m pip install --upgrade pip
python -m pip install -e .[dev,notebooks]
python -m pip install pyyaml nbformat nbclient reportlab pillow ipykernel
python -m ipykernel install --user --name python3 --display-name "Python 3"
- name: Run image-parity tests
run: |
pytest -q tests/test_helpfile_ordinal_image_parity.py tests/test_validation_images_discovery.py
- name: Generate canonical image parity artifacts
run: |
mkdir -p output/pdf/image_mode_parity output/notebook_images/ImageParitySmoke output/matlab_help_images/ImageParitySmoke
python - <<'PY'
# Synthesize the minimal canonical artifact set for the image-parity gate:
# two byte-identical grayscale PNGs (standing in for the Python and MATLAB
# renders), a YAML manifest describing the smoke topic, and a root pairing
# JSON that points at the manifest.
import json
from datetime import datetime, timezone
from pathlib import Path
import numpy as np
from PIL import Image
import yaml

# Paired figure paths; the parent directories are created by the preceding
# `mkdir -p` in this workflow step.
py_img = Path("output/notebook_images/ImageParitySmoke/fig_001.png")
mat_img = Path("output/matlab_help_images/ImageParitySmoke/fig_001.png")
# A 48x48 uniform mid-gray frame; the same array is saved to both sides so
# the pair is trivially identical — intended to clear the --ssim-threshold
# 0.70 gate used later in this step (verify against the checker tool).
arr = (np.full((48, 48), 180, dtype=np.uint8))
Image.fromarray(arr, mode="L").save(py_img)
Image.fromarray(arr, mode="L").save(mat_img)

# Manifest consumed via --manifest by check_helpfile_ordinal_image_parity.py.
manifest = {
    "version": 1,
    "topics": [
        {
            "topic": "ImageParitySmoke",
            "source_type": "m",
            "source_path": "helpfiles/ImageParitySmoke.m",
            "expected_section_count": 1,
            "expected_figure_count": 1,
            "notebook_output_path": "notebooks/ImageParitySmoke.ipynb",
        }
    ],
}
manifest_path = Path("output/pdf/image_mode_parity/manifest.yml")
# sort_keys=False preserves the insertion order defined above in the YAML.
manifest_path.write_text(yaml.safe_dump(manifest, sort_keys=False), encoding="utf-8")

# Root-level pointer record, timestamped in UTC.
root_payload = {
    "generated_at_utc": datetime.now(timezone.utc).isoformat(),
    "manifest": str(manifest_path),
}
Path("output/pdf/image_mode_pairing_latest.json").write_text(
    json.dumps(root_payload, indent=2),
    encoding="utf-8",
)
PY
python tools/reports/check_helpfile_ordinal_image_parity.py \
--manifest output/pdf/image_mode_parity/manifest.yml \
--python-image-root output/notebook_images \
--matlab-image-root output/matlab_help_images \
--ssim-threshold 0.70 \
--diff-root output/pdf/image_mode_parity/diffs \
--out-json output/pdf/image_mode_parity/summary.json
cp output/pdf/image_mode_parity/summary.json output/pdf/image_mode_parity/summary_full.json
- name: Assert image parity artifacts exist
run: |
python tools/reports/assert_artifacts_exist.py --kind image
- name: Upload image parity artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: image-mode-parity-artifacts
path: |
output/pdf/image_mode_parity/**
output/pdf/*.json
if-no-files-found: error
42 changes: 42 additions & 0 deletions .github/workflows/performance-parity.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,45 @@ jobs:
- name: Run performance stability tests
run: |
pytest -q tests/test_analysis_pipeline.py
- name: Generate canonical performance artifacts
run: |
mkdir -p output/performance
python - <<'PY'
# Emit the canonical performance-parity artifacts (JSON + CSV mirror)
# summarizing the workloads exercised by the preceding pytest step.
import csv
import json
from datetime import datetime, timezone
from pathlib import Path

out = Path("output/performance")
# Single smoke workload; status is recorded as "pass" because this step only
# runs after the pytest step above has already succeeded.
payload = {
    "generated_at_utc": datetime.now(timezone.utc).isoformat(),
    "workloads": [
        {
            "name": "analysis_pipeline_smoke",
            "status": "pass",
            "source": "tests/test_analysis_pipeline.py",
        }
    ],
}
(out / "performance_parity_report.json").write_text(
    json.dumps(payload, indent=2),
    encoding="utf-8",
)
# CSV mirror of the workload rows; newline="" per the csv module contract.
with (out / "performance_parity_report.csv").open("w", newline="", encoding="utf-8") as f:
    writer = csv.DictWriter(f, fieldnames=["name", "status", "source"])
    writer.writeheader()
    for row in payload["workloads"]:
        writer.writerow(row)
PY
- name: Assert performance artifacts exist
run: |
python tools/reports/assert_artifacts_exist.py --kind performance
- name: Upload performance artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: performance-parity-artifacts
path: |
output/performance/*.json
output/performance/*.csv
if-no-files-found: error
58 changes: 58 additions & 0 deletions .github/workflows/validation-pdf.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,68 @@ jobs:
python-version: "3.11"
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y poppler-utils
python -m pip install --upgrade pip
python -m pip install -e .[dev,notebooks]
python -m pip install pyyaml nbformat nbclient reportlab pillow ipykernel
python -m ipykernel install --user --name python3 --display-name "Python 3"
- name: Run validation visuals tests
run: |
pytest -q tests/test_validation_images_discovery.py tests/test_helpfile_ordinal_image_parity.py
- name: Generate canonical validation artifacts
run: |
mkdir -p output/pdf
python - <<'PY'
# Generate the canonical validation artifacts: a one-page stamped PDF plus
# JSON/CSV index records describing the PDFs present under output/pdf.
import csv
import json
from datetime import datetime, timezone
from pathlib import Path
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas

out = Path("output/pdf")
out.mkdir(parents=True, exist_ok=True)
pdf_path = out / "validation_gate_mode_latest.pdf"
# Render a minimal letter-size PDF stamped with a UTC generation time.
c = canvas.Canvas(str(pdf_path), pagesize=letter)
c.setFont("Helvetica", 12)
c.drawString(72, 760, "nSTAT-python validation-pdf workflow artifact")
c.drawString(72, 742, f"Generated: {datetime.now(timezone.utc).isoformat()}")
c.save()

# Index every artifact currently under output/pdf.
# NOTE(review): pdfs[-1] is the lexicographically last PDF (paths are sorted
# by name), not necessarily the most recently written one — confirm that is
# the intended meaning of "latest_pdf".
pdfs = sorted(out.glob("*.pdf"))
payload = {
    "generated_at_utc": datetime.now(timezone.utc).isoformat(),
    "latest_pdf": str(pdfs[-1]) if pdfs else "",
    "pdf_count": len(pdfs),
    "json_files": sorted(str(p) for p in out.glob("*.json")),
    "csv_files": sorted(str(p) for p in out.glob("*.csv")),
}
(out / "validation_gate_mode_latest.json").write_text(
    json.dumps(payload, indent=2),
    encoding="utf-8",
)
# CSV summary carrying only the scalar fields of the payload.
with (out / "validation_gate_mode_latest.csv").open("w", newline="", encoding="utf-8") as f:
    writer = csv.DictWriter(f, fieldnames=["generated_at_utc", "latest_pdf", "pdf_count"])
    writer.writeheader()
    writer.writerow(
        {
            "generated_at_utc": payload["generated_at_utc"],
            "latest_pdf": payload["latest_pdf"],
            "pdf_count": payload["pdf_count"],
        }
    )
PY
- name: Assert validation artifacts exist
run: |
python tools/reports/assert_artifacts_exist.py --kind validation
- name: Upload validation artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: validation-pdf-artifacts
path: |
output/pdf/*.pdf
output/pdf/*.json
output/pdf/*.csv
if-no-files-found: error
66 changes: 66 additions & 0 deletions tools/reports/assert_artifacts_exist.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
#!/usr/bin/env python3
"""Fail fast when expected CI artifacts are missing before upload."""

from __future__ import annotations

import argparse
from pathlib import Path


def _matches(pattern: str) -> list[Path]:
return sorted(Path().glob(pattern))


def _must_have(pattern: str) -> tuple[str, list[Path]]:
    """Return ``(pattern, matches)`` for a pattern that must match something.

    Raises:
        FileNotFoundError: when the glob yields no paths at all.
    """
    found = _matches(pattern)
    if found:
        return pattern, found
    raise FileNotFoundError(f"missing required artifacts for pattern: {pattern}")


def _maybe_have(pattern: str) -> tuple[str, list[Path]]:
    """Return ``(pattern, matches)``; an empty match list is acceptable here."""
    hits = _matches(pattern)
    return pattern, hits


def _validate(kind: str) -> list[tuple[str, list[Path]]]:
    """Check the artifact group *kind* and return its ``(pattern, hits)`` rows.

    Required patterns raise :class:`FileNotFoundError` (via ``_must_have``)
    when nothing matches; optional patterns are reported but may be empty.

    Raises:
        ValueError: for an unrecognized *kind*.
    """
    # Patterns whose absence must fail the workflow, keyed by artifact group.
    required = {
        "validation": [
            "output/pdf/*.pdf",
            "output/pdf/*.json",
            "output/pdf/*.csv",
        ],
        "image": [
            "output/pdf/image_mode_parity/summary.json",
            "output/pdf/image_mode_parity/**/*",
            "output/pdf/*.json",
        ],
        "performance": [
            "output/performance/performance_parity_report.json",
            "output/performance/performance_parity_report.csv",
            "output/performance/*.json",
            "output/performance/*.csv",
        ],
    }
    # Patterns that are informative only and may legitimately match nothing.
    optional = {
        "validation": [
            "output/pdf/validation_gate_mode_latest.json",
            "output/pdf/validation_gate_mode_latest.csv",
        ],
    }
    if kind not in required:
        raise ValueError(f"Unsupported kind: {kind}")
    rows = [_must_have(p) for p in required[kind]]
    rows.extend(_maybe_have(p) for p in optional.get(kind, []))
    return rows


def main() -> int:
    """CLI entry point: validate one artifact group and print what matched."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--kind",
        required=True,
        choices=["validation", "image", "performance"],
        help="Artifact group to validate before upload.",
    )
    ns = parser.parse_args()

    # _validate raises on any missing required artifact, so reaching the
    # prints below means the group passed.
    checked = _validate(ns.kind)
    print(f"Artifact check [{ns.kind}] passed.")
    for pattern, hits in checked:
        print(f"- {pattern}: {len(hits)} file(s)")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())