From 2ed27b51b22d85dd356812f521b14890b10c6e38 Mon Sep 17 00:00:00 2001 From: Michael Kantor <6068672+kantorcodes@users.noreply.github.com> Date: Sat, 4 Apr 2026 18:16:10 -0400 Subject: [PATCH 1/2] feat: support multi-plugin marketplace repos Signed-off-by: Michael Kantor <6068672+kantorcodes@users.noreply.github.com> --- README.md | 10 ++ action/README.md | 24 +-- action/action.yml | 2 +- schemas/scan-result.v1.json | 89 +++++++++-- schemas/verify-result.v1.json | 72 +++++++-- src/codex_plugin_scanner/action_runner.py | 46 ++++-- src/codex_plugin_scanner/cli.py | 36 ++++- .../marketplace_support.py | 19 ++- src/codex_plugin_scanner/models.py | 14 ++ src/codex_plugin_scanner/repo_detect.py | 137 ++++++++++++++++ src/codex_plugin_scanner/reporting.py | 51 +++++- src/codex_plugin_scanner/scanner.py | 150 ++++++++++++++---- src/codex_plugin_scanner/verification.py | 124 ++++++++++++++- .../.agents/plugins/marketplace.json | 43 +++++ .../alpha-plugin/.codex-plugin/plugin.json | 18 +++ .../plugins/alpha-plugin/.codexignore | 1 + .../plugins/alpha-plugin/LICENSE | 3 + .../plugins/alpha-plugin/README.md | 1 + .../plugins/alpha-plugin/SECURITY.md | 1 + .../alpha-plugin/skills/example/SKILL.md | 6 + .../beta-plugin/.codex-plugin/plugin.json | 6 + .../beta-plugin/skills/example/SKILL.md | 6 + tests/test_action_runner.py | 37 +++++ tests/test_cli.py | 9 ++ tests/test_scanner.py | 13 ++ tests/test_schema_contracts.py | 16 ++ tests/test_verification.py | 13 ++ 27 files changed, 849 insertions(+), 98 deletions(-) create mode 100644 src/codex_plugin_scanner/repo_detect.py create mode 100644 tests/fixtures/multi-plugin-repo/.agents/plugins/marketplace.json create mode 100644 tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codex-plugin/plugin.json create mode 100644 tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codexignore create mode 100644 tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/LICENSE create mode 100644 
tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/README.md create mode 100644 tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/SECURITY.md create mode 100644 tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/skills/example/SKILL.md create mode 100644 tests/fixtures/multi-plugin-repo/plugins/beta-plugin/.codex-plugin/plugin.json create mode 100644 tests/fixtures/multi-plugin-repo/plugins/beta-plugin/skills/example/SKILL.md diff --git a/README.md b/README.md index c45f50e..5e3e004 100644 --- a/README.md +++ b/README.md @@ -31,6 +31,8 @@ pipx run codex-plugin-scanner verify . min_score: 80 ``` +If your repository uses a Codex marketplace root like `.agents/plugins/marketplace.json`, keep `plugin_dir: "."`. The scanner will discover local `./plugins/...` entries automatically, scan each local plugin manifest, and skip remote marketplace entries instead of treating the repo root as a single plugin. + ## Use After `$plugin-creator` `codex-plugin-scanner` is designed as the quality gate between plugin creation and distribution: @@ -120,6 +122,9 @@ codex-plugin-scanner ./my-plugin --cisco-skill-scan on --cisco-policy strict # Summary scan (legacy form still works) codex-plugin-scanner scan ./my-plugin --format json --profile public-marketplace +# Scan a multi-plugin repo from the marketplace root +codex-plugin-scanner scan . --format json + # Rule-oriented lint (with optional mechanical fixes) codex-plugin-scanner lint ./my-plugin --list-rules codex-plugin-scanner lint ./my-plugin --explain README_MISSING @@ -127,6 +132,7 @@ codex-plugin-scanner lint ./my-plugin --fix --profile strict-security # Runtime readiness verification codex-plugin-scanner verify ./my-plugin --format json +codex-plugin-scanner verify . 
--format json codex-plugin-scanner verify ./my-plugin --online --format text # Artifact-backed submission gate @@ -148,6 +154,8 @@ The scanner follows the current Codex plugin packaging conventions more closely: `lint --fix` preserves or adds the documented `./` prefixes instead of stripping them away. +For repo-scoped marketplaces, `scan`, `lint`, `verify`, and `doctor` can target the repository root directly. `submit` remains intentionally single-plugin so the emitted artifact points at one concrete plugin package. + ## Config + Baseline Example ```toml @@ -252,6 +260,8 @@ jobs: upload_sarif: true ``` +For a multi-plugin repo, the same workflow can stay pointed at `plugin_dir: "."` as long as the repository has `.agents/plugins/marketplace.json` with local `./plugins/...` entries. + Local pre-commit style hook: ```yaml diff --git a/action/README.md b/action/README.md index 3626a8d..d3dfbc3 100644 --- a/action/README.md +++ b/action/README.md @@ -8,18 +8,20 @@ This README is intentionally root-ready for a dedicated GitHub Marketplace actio ```yaml - name: Scan Codex Plugin - uses: your-org/hol-codex-plugin-scanner-action@v1 + uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 with: plugin_dir: "./my-plugin" min_score: 70 fail_on_severity: high ``` +If your repository exposes multiple plugins from `.agents/plugins/marketplace.json`, keep `plugin_dir: "."`. The action will discover local `./plugins/...` entries automatically, scan each local plugin, and skip remote marketplace entries. 
+ ## Inputs | Input | Description | Default | |-------|-------------|---------| -| `plugin_dir` | Path to the plugin directory to scan | `.` | +| `plugin_dir` | Path to a single plugin directory or a repo marketplace root | `.` | | `mode` | Execution mode: `scan`, `lint`, `verify`, or `submit` | `scan` | | `format` | Output format: `text`, `json`, `markdown`, `sarif` | `text` | | `output` | Write report to this file path | `""` | @@ -78,7 +80,7 @@ Mode notes: ### Basic scan with minimum score gate ```yaml -- uses: your-org/hol-codex-plugin-scanner-action@v1 +- uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 with: plugin_dir: "." min_score: 70 @@ -96,7 +98,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 - - uses: your-org/hol-codex-plugin-scanner-action@v1 + - uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 with: plugin_dir: "." mode: scan @@ -105,10 +107,12 @@ jobs: upload_sarif: true ``` +This `plugin_dir: "."` pattern is correct for both single-plugin repositories and multi-plugin marketplace repositories. When `.agents/plugins/marketplace.json` exists, the action switches into repository mode and scans each local plugin entry declared under `./plugins/...`. + ### With Cisco skill scanning ```yaml -- uses: your-org/hol-codex-plugin-scanner-action@v1 +- uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 with: plugin_dir: "." cisco_skill_scan: on @@ -120,7 +124,7 @@ The action installs the scanner with its published `cisco` extra enabled, so the ### Export registry payload for Codex ecosystem automation ```yaml -- uses: your-org/hol-codex-plugin-scanner-action@v1 +- uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 id: scan with: plugin_dir: "." @@ -153,7 +157,7 @@ jobs: - name: Scan plugin and submit if eligible id: scan - uses: your-org/hol-codex-plugin-scanner-action@v1 + uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 with: plugin_dir: "." 
min_score: 80 @@ -172,7 +176,7 @@ Use a fine-grained token with `issues:write` on `hashgraph-online/awesome-codex- ### Markdown report as PR comment ```yaml -- uses: your-org/hol-codex-plugin-scanner-action@v1 +- uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 id: scan with: plugin_dir: "." @@ -214,12 +218,12 @@ The source bundle for this action lives in the main scanner repository under `ac Set `mode` to one of `scan`, `lint`, `verify`, or `submit`. ```yaml -- uses: your-org/hol-codex-plugin-scanner-action@v1 +- uses: hashgraph-online/hol-codex-plugin-scanner-action@v1 with: mode: verify plugin_dir: "." ``` -For `submit` mode, use `registry_payload_output` to control artifact path. +For `submit` mode, point `plugin_dir` at one concrete plugin directory. Repository-mode discovery is supported for `scan`, `lint`, and `verify`, but `submit` intentionally remains single-plugin. For `scan` mode, set `upload_sarif: true` to emit and upload SARIF automatically instead of wiring a separate upload step by hand. diff --git a/action/action.yml b/action/action.yml index f627ae3..9b1820d 100644 --- a/action/action.yml +++ b/action/action.yml @@ -8,7 +8,7 @@ inputs: required: false default: "scan" plugin_dir: - description: "Path to the plugin directory to scan (default: repository root)" + description: "Path to a single plugin directory or a repo marketplace root (default: repository root)" required: false default: "." 
format: diff --git a/schemas/scan-result.v1.json b/schemas/scan-result.v1.json index eab8ab1..5f15984 100644 --- a/schemas/scan-result.v1.json +++ b/schemas/scan-result.v1.json @@ -9,6 +9,7 @@ "profile", "policy_pass", "verify_pass", + "scope", "score", "raw_score", "effective_score", @@ -37,6 +38,10 @@ "verify_pass": { "type": "boolean" }, + "scope": { + "type": "string", + "enum": ["plugin", "repository"] + }, "score": { "type": "integer", "minimum": 0, @@ -95,6 +100,32 @@ "pluginDir": { "type": "string", "minLength": 1 + }, + "repository": { + "type": "object", + "additionalProperties": false, + "required": ["marketplaceFile", "localPluginCount"], + "properties": { + "marketplaceFile": { + "type": ["string", "null"] + }, + "localPluginCount": { + "type": "integer", + "minimum": 0 + } + } + }, + "plugins": { + "type": "array", + "items": { + "$ref": "#/$defs/pluginSummary" + } + }, + "skippedTargets": { + "type": "array", + "items": { + "$ref": "#/$defs/skippedTarget" + } } }, "$defs": { @@ -131,7 +162,6 @@ }, "findingRef": { "type": "object", - "additionalProperties": false, "required": ["ruleId", "severity", "title", "description", "source"], "properties": { "ruleId": {"type": "string", "minLength": 1}, @@ -145,16 +175,20 @@ } }, "finding": { - "allOf": [ - {"$ref": "#/$defs/findingRef"}, - { - "type": "object", - "required": ["category"], - "properties": { - "category": {"type": "string", "minLength": 1} - } - } - ] + "type": "object", + "additionalProperties": false, + "required": ["ruleId", "severity", "title", "description", "source", "category"], + "properties": { + "ruleId": {"type": "string", "minLength": 1}, + "severity": {"$ref": "#/$defs/severity"}, + "title": {"type": "string", "minLength": 1}, + "description": {"type": "string", "minLength": 1}, + "remediation": {"type": ["string", "null"]}, + "filePath": {"type": ["string", "null"]}, + "lineNumber": {"type": ["integer", "null"], "minimum": 1}, + "source": {"type": "string", "minLength": 1}, + 
"category": {"type": "string", "minLength": 1} + } }, "check": { "type": "object", @@ -185,6 +219,39 @@ "items": {"$ref": "#/$defs/check"} } } + }, + "pluginSummary": { + "type": "object", + "additionalProperties": false, + "required": ["name", "pluginDir", "score", "grade", "summary"], + "properties": { + "name": {"type": "string", "minLength": 1}, + "pluginDir": {"type": "string", "minLength": 1}, + "score": {"type": "integer", "minimum": 0, "maximum": 100}, + "grade": {"type": "string", "enum": ["A", "B", "C", "D", "F"]}, + "summary": { + "type": "object", + "additionalProperties": false, + "required": ["findings", "integrations"], + "properties": { + "findings": {"$ref": "#/$defs/severityCounts"}, + "integrations": { + "type": "array", + "items": {"$ref": "#/$defs/integration"} + } + } + } + } + }, + "skippedTarget": { + "type": "object", + "additionalProperties": false, + "required": ["name", "reason", "sourcePath"], + "properties": { + "name": {"type": "string", "minLength": 1}, + "reason": {"type": "string", "minLength": 1}, + "sourcePath": {"type": ["string", "null"]} + } } } } diff --git a/schemas/verify-result.v1.json b/schemas/verify-result.v1.json index c1ea771..05ea5f9 100644 --- a/schemas/verify-result.v1.json +++ b/schemas/verify-result.v1.json @@ -3,7 +3,7 @@ "title": "VerifyResultV1", "type": "object", "additionalProperties": false, - "required": ["verify_pass", "workspace", "cases"], + "required": ["verify_pass", "workspace", "scope", "cases"], "properties": { "verify_pass": { "type": "boolean" @@ -12,19 +12,69 @@ "type": "string", "minLength": 1 }, + "scope": { + "type": "string", + "enum": ["plugin", "repository"] + }, + "repository": { + "type": "object", + "additionalProperties": false, + "required": ["marketplaceFile", "localPluginCount"], + "properties": { + "marketplaceFile": {"type": ["string", "null"]}, + "localPluginCount": {"type": "integer", "minimum": 0} + } + }, + "plugins": { + "type": "array", + "items": { + "$ref": 
"#/$defs/pluginVerifySummary" + } + }, + "skippedTargets": { + "type": "array", + "items": { + "$ref": "#/$defs/skippedTarget" + } + }, "cases": { "type": "array", "items": { - "type": "object", - "additionalProperties": false, - "required": ["component", "name", "passed", "message", "classification"], - "properties": { - "component": {"type": "string", "minLength": 1}, - "name": {"type": "string", "minLength": 1}, - "passed": {"type": "boolean"}, - "message": {"type": "string"}, - "classification": {"type": "string", "minLength": 1} - } + "$ref": "#/$defs/case" + } + } + }, + "$defs": { + "case": { + "type": "object", + "additionalProperties": false, + "required": ["component", "name", "passed", "message", "classification"], + "properties": { + "component": {"type": "string", "minLength": 1}, + "name": {"type": "string", "minLength": 1}, + "passed": {"type": "boolean"}, + "message": {"type": "string"}, + "classification": {"type": "string", "minLength": 1} + } + }, + "pluginVerifySummary": { + "type": "object", + "additionalProperties": false, + "required": ["name", "workspace", "verify_pass"], + "properties": { + "name": {"type": ["string", "null"]}, + "workspace": {"type": "string", "minLength": 1}, + "verify_pass": {"type": "boolean"} + } + }, + "skippedTarget": { + "type": "object", + "additionalProperties": false, + "required": ["name", "reason", "sourcePath"], + "properties": { + "name": {"type": "string", "minLength": 1}, + "reason": {"type": "string", "minLength": 1}, + "sourcePath": {"type": ["string", "null"]} } } } diff --git a/src/codex_plugin_scanner/action_runner.py b/src/codex_plugin_scanner/action_runner.py index 8cddfd8..4c10980 100644 --- a/src/codex_plugin_scanner/action_runner.py +++ b/src/codex_plugin_scanner/action_runner.py @@ -22,7 +22,7 @@ find_existing_submission_issue, resolve_submission_metadata, ) -from .verification import verify_plugin +from .verification import build_verification_payload, verify_plugin def _parse_csv(value: str) -> 
tuple[str, ...]: @@ -98,20 +98,7 @@ def _render_scan_output(result, *, output_format: str, profile: str, policy_pass def _render_verify_output(verification, *, output_format: str) -> str: - payload = { - "verify_pass": verification.verify_pass, - "workspace": verification.workspace, - "cases": [ - { - "component": case.component, - "name": case.name, - "passed": case.passed, - "message": case.message, - "classification": case.classification, - } - for case in verification.cases - ], - } + payload = build_verification_payload(verification) if output_format == "json": return json.dumps(payload, indent=2) return _build_verification_text(payload) @@ -154,8 +141,16 @@ def _build_step_summary_lines( submission_issues: list[SubmissionIssue], submission_eligible: bool, verify_pass: bool | None = None, + scope: str = "plugin", + local_plugin_count: int | None = None, + skipped_target_count: int | None = None, ) -> tuple[str, ...]: lines = ["## HOL Codex Plugin Scanner", "", f"- Mode: {mode}"] + lines.append(f"- Scope: {scope}") + if local_plugin_count is not None: + lines.append(f"- Local plugins scanned: {local_plugin_count}") + if skipped_target_count is not None: + lines.append(f"- Skipped marketplace entries: {skipped_target_count}") if score: lines.append(f"- Score: {score}/100") if grade: @@ -233,6 +228,9 @@ def main() -> int: "submission_issue_numbers": "", } verify_pass_for_summary: bool | None = None + scan_scope = "plugin" + local_plugin_count: int | None = None + skipped_target_count: int | None = None if mode in {"scan", "lint", "submit"}: args = _build_scan_args( @@ -249,6 +247,10 @@ def main() -> int: args, Path(plugin_dir).resolve(), ) + scan_scope = getattr(result, "scope", "plugin") + if scan_scope == "repository": + local_plugin_count = len(result.plugin_results) + skipped_target_count = len(result.skipped_targets) rendered = "" artifact_path = "" verification = None @@ -274,6 +276,13 @@ def main() -> int: policy_pass=policy_eval.policy_pass, ) else: + if 
scan_scope != "plugin": + print( + "Submission mode requires a single plugin directory. " + "Point plugin_dir at one plugin instead of a repo marketplace root.", + file=sys.stderr, + ) + return 1 verification = verify_plugin(Path(plugin_dir).resolve(), online=online) artifact_path = output_path or "plugin-quality.json" artifact = build_quality_artifact( @@ -404,6 +413,10 @@ def main() -> int: elif mode == "verify": verification = verify_plugin(Path(plugin_dir).resolve(), online=online) + scan_scope = getattr(verification, "scope", "plugin") + if scan_scope == "repository": + local_plugin_count = len(verification.plugin_results) + skipped_target_count = len(verification.skipped_targets) rendered = _render_verify_output(verification, output_format=output_format) verify_pass_for_summary = verification.verify_pass if output_path: @@ -438,6 +451,9 @@ def main() -> int: submission_issues=submission_issues, submission_eligible=submission_eligible, verify_pass=verify_pass_for_summary, + scope=scan_scope, + local_plugin_count=local_plugin_count, + skipped_target_count=skipped_target_count, ), ) diff --git a/src/codex_plugin_scanner/cli.py b/src/codex_plugin_scanner/cli.py index 9f25a6a..a893a81 100644 --- a/src/codex_plugin_scanner/cli.py +++ b/src/codex_plugin_scanner/cli.py @@ -20,12 +20,32 @@ from .rules import get_rule_spec, list_rule_specs from .scanner import scan_plugin from .suppressions import apply_severity_overrides, apply_suppressions, compute_effective_score -from .verification import build_doctor_report, verify_plugin +from .verification import build_doctor_report, build_verification_payload, verify_plugin from .version import __version__ def _build_plain_text(result) -> str: - lines = [f"馃敆 Codex Plugin Scanner v{__version__}", f"Scanning: {result.plugin_dir}", ""] + if getattr(result, "scope", "plugin") == "repository": + lines = [ + f"馃敆 Codex Plugin Scanner v{__version__}", + f"Scanning repository: {result.plugin_dir}", + f"Marketplace: 
{result.marketplace_file or 'not found'}", + f"Local plugins scanned: {len(result.plugin_results)}", + f"Skipped marketplace entries: {len(result.skipped_targets)}", + "", + "Per-plugin scores:", + ] + for plugin in result.plugin_results: + plugin_name = plugin.plugin_name or Path(plugin.plugin_dir).name + lines.append(f" - {plugin_name}: {plugin.score}/100 ({plugin.grade})") + if result.skipped_targets: + lines += ["", "Skipped entries:"] + for skipped in result.skipped_targets: + source_path = f" [{skipped.source_path}]" if skipped.source_path else "" + lines.append(f" - {skipped.name}{source_path}: {skipped.reason}") + lines.append("") + else: + lines = [f"馃敆 Codex Plugin Scanner v{__version__}", f"Scanning: {result.plugin_dir}", ""] for category in result.categories: cat_score = sum(c.points for c in category.checks) cat_max = sum(c.max_points for c in category.checks) @@ -323,11 +343,7 @@ def _run_verify(args: argparse.Namespace) -> int: print(f'Error: "{resolved}" is not a directory.', file=sys.stderr) return 1 verification = verify_plugin(resolved, online=args.online) - payload = { - "verify_pass": verification.verify_pass, - "workspace": verification.workspace, - "cases": [asdict(case) for case in verification.cases], - } + payload = build_verification_payload(verification) if args.format == "json": print(json.dumps(payload, indent=2)) else: @@ -344,6 +360,12 @@ def _run_submit(args: argparse.Namespace) -> int: raw_result, result, profile, policy_eval, _effective_score = _scan_with_policy(args, resolved) except ConfigError: return 1 + if getattr(result, "scope", "plugin") != "plugin": + print( + "Submission requires a single plugin directory. 
Target one plugin path instead of a repo marketplace root.", + file=sys.stderr, + ) + return 1 verification = verify_plugin(resolved, online=args.online) min_score = args.min_score if args.min_score is not None else POLICY_PROFILES[profile].min_score if result.score < min_score or not policy_eval.policy_pass or not verification.verify_pass: diff --git a/src/codex_plugin_scanner/marketplace_support.py b/src/codex_plugin_scanner/marketplace_support.py index fd56d71..b175ca6 100644 --- a/src/codex_plugin_scanner/marketplace_support.py +++ b/src/codex_plugin_scanner/marketplace_support.py @@ -69,10 +69,12 @@ def extract_marketplace_source(plugin: dict) -> tuple[str | None, str | None]: def source_path_is_safe(context: MarketplaceContext, source_path: str) -> bool: - return is_safe_relative_path(context.marketplace_root, source_path, require_prefix=True) + return is_safe_relative_path(context.repo_root, source_path, require_prefix=True) def source_reference_is_safe(context: MarketplaceContext, source_ref: str) -> bool: + if source_ref == "local": + return True if is_remote_reference(source_ref): return True if urlparse(source_ref).scheme: @@ -84,12 +86,15 @@ def validate_marketplace_path_requirements(context: MarketplaceContext, plugin: source_ref, source_path = extract_marketplace_source(plugin) if source_ref is None: return 'missing "source.source"' - if source_path is None: - return 'missing "source.path"' - if not is_dot_relative_path(source_path): - return f'"source.path" must start with "./": {source_path}' - if not source_path_is_safe(context, source_path): - return f'"source.path" escapes the marketplace root: {source_path}' if not source_reference_is_safe(context, source_ref): return f'"source.source" is unsafe: {source_ref}' + if is_remote_reference(source_ref): + if source_path is None: + return None + elif source_path is None: + return 'missing "source.path"' + if source_path is not None and not is_dot_relative_path(source_path): + return f'"source.path" 
must start with "./": {source_path}' + if source_path is not None and not source_path_is_safe(context, source_path): + return f'"source.path" escapes the repository root: {source_path}' return None diff --git a/src/codex_plugin_scanner/models.py b/src/codex_plugin_scanner/models.py index 8e43dd9..d0a1dcd 100644 --- a/src/codex_plugin_scanner/models.py +++ b/src/codex_plugin_scanner/models.py @@ -80,6 +80,15 @@ class ScanOptions: cisco_policy: str = "balanced" +@dataclass(frozen=True, slots=True) +class ScanSkipTarget: + """A marketplace entry that was not scanned as a local plugin target.""" + + name: str + reason: str + source_path: str | None = None + + @dataclass(frozen=True, slots=True) class ScanResult: """Full result of scanning a plugin directory.""" @@ -92,6 +101,11 @@ class ScanResult: findings: tuple[Finding, ...] = () severity_counts: dict[str, int] = field(default_factory=dict) integrations: tuple[IntegrationResult, ...] = () + scope: str = "plugin" + plugin_name: str | None = None + plugin_results: tuple[ScanResult, ...] = () + skipped_targets: tuple[ScanSkipTarget, ...] 
= () + marketplace_file: str | None = None def get_grade(score: int) -> str: diff --git a/src/codex_plugin_scanner/repo_detect.py b/src/codex_plugin_scanner/repo_detect.py new file mode 100644 index 0000000..3f301db --- /dev/null +++ b/src/codex_plugin_scanner/repo_detect.py @@ -0,0 +1,137 @@ +"""Repository layout detection for plugin and marketplace roots.""" + +from __future__ import annotations + +import json +from dataclasses import dataclass +from pathlib import Path + +from .checks.manifest import load_manifest +from .marketplace_support import ( + extract_marketplace_source, + load_marketplace_context, + validate_marketplace_path_requirements, +) +from .models import ScanSkipTarget + + +@dataclass(frozen=True, slots=True) +class LocalPluginTarget: + """A local plugin discovered from a repo marketplace.""" + + name: str + plugin_dir: Path + source_path: str + + +@dataclass(frozen=True, slots=True) +class ScanDiscovery: + """Resolved scan targets for a path.""" + + scope: str + root_dir: Path + marketplace_file: Path | None = None + local_plugins: tuple[LocalPluginTarget, ...] = () + skipped_targets: tuple[ScanSkipTarget, ...] 
= () + + +def _manifest_name(plugin_dir: Path) -> str | None: + manifest = load_manifest(plugin_dir) + name = manifest.get("name") if isinstance(manifest, dict) else None + return name if isinstance(name, str) and name else None + + +def discover_scan_targets(target_dir: str | Path) -> ScanDiscovery: + """Detect whether a path is a single plugin or a repo marketplace root.""" + + resolved = Path(target_dir).resolve() + manifest_path = resolved / ".codex-plugin" / "plugin.json" + if manifest_path.exists(): + return ScanDiscovery( + scope="plugin", + root_dir=resolved, + local_plugins=( + LocalPluginTarget( + name=_manifest_name(resolved) or resolved.name, + plugin_dir=resolved, + source_path="./", + ), + ), + ) + + try: + context = load_marketplace_context(resolved) + except (json.JSONDecodeError, OSError, ValueError): + context = None + + if context is None: + return ScanDiscovery(scope="plugin", root_dir=resolved) + + plugins = context.payload.get("plugins") + if not isinstance(plugins, list): + return ScanDiscovery( + scope="repository", + root_dir=resolved, + marketplace_file=context.file_path, + skipped_targets=(ScanSkipTarget(name="plugins", reason="marketplace plugins array missing"),), + ) + + local_plugins: list[LocalPluginTarget] = [] + skipped_targets: list[ScanSkipTarget] = [] + for index, plugin in enumerate(plugins): + entry_name = f"plugin[{index}]" + if not isinstance(plugin, dict): + skipped_targets.append(ScanSkipTarget(name=entry_name, reason="marketplace entry is not an object")) + continue + + name = plugin.get("name") + if isinstance(name, str) and name: + entry_name = name + + issue = validate_marketplace_path_requirements(context, plugin) + if issue is not None: + source_ref, source_path = extract_marketplace_source(plugin) + if source_ref and source_ref != "local": + skipped_targets.append( + ScanSkipTarget( + name=entry_name, + reason=f"non-local marketplace source: {source_ref}", + source_path=source_path, + ) + ) + else: + 
skipped_targets.append(ScanSkipTarget(name=entry_name, reason=issue, source_path=source_path)) + continue + + _source_ref, source_path = extract_marketplace_source(plugin) + if source_path is None: + skipped_targets.append(ScanSkipTarget(name=entry_name, reason='missing "source.path"')) + continue + + plugin_dir = (context.repo_root / source_path).resolve() + plugin_manifest = plugin_dir / ".codex-plugin" / "plugin.json" + if not plugin_manifest.exists(): + skipped_targets.append( + ScanSkipTarget( + name=entry_name, + reason="local plugin manifest not found", + source_path=source_path, + ) + ) + continue + + local_plugins.append( + LocalPluginTarget( + name=_manifest_name(plugin_dir) or entry_name, + plugin_dir=plugin_dir, + source_path=source_path, + ) + ) + + return ScanDiscovery( + scope="repository", + root_dir=resolved, + marketplace_file=context.file_path, + local_plugins=tuple(local_plugins), + skipped_targets=tuple(skipped_targets), + ) diff --git a/src/codex_plugin_scanner/reporting.py b/src/codex_plugin_scanner/reporting.py index 0294cd9..f7cb23f 100644 --- a/src/codex_plugin_scanner/reporting.py +++ b/src/codex_plugin_scanner/reporting.py @@ -23,12 +23,13 @@ def build_json_payload( ) -> dict[str, object]: """Convert a scan result into a JSON-serializable payload.""" - return { + payload = { "schema_version": "scan-result.v1", "tool_version": __version__, "profile": profile, "policy_pass": policy_pass, "verify_pass": verify_pass, + "scope": result.scope, "score": result.score, "raw_score": result.score if raw_score is None else raw_score, "effective_score": result.score if effective_score is None else effective_score, @@ -95,6 +96,42 @@ def build_json_payload( "timestamp": result.timestamp, "pluginDir": result.plugin_dir, } + if result.scope == "repository": + payload["repository"] = { + "marketplaceFile": result.marketplace_file, + "localPluginCount": len(result.plugin_results), + } + payload["plugins"] = [ + { + "name": plugin.plugin_name or 
plugin.plugin_dir.rsplit("/", 1)[-1], + "pluginDir": plugin.plugin_dir, + "score": plugin.score, + "grade": plugin.grade, + "summary": { + "findings": plugin.severity_counts, + "integrations": [ + { + "name": integration.name, + "status": integration.status, + "message": integration.message, + "findingsCount": integration.findings_count, + "metadata": integration.metadata, + } + for integration in plugin.integrations + ], + }, + } + for plugin in result.plugin_results + ] + payload["skippedTargets"] = [ + { + "name": skipped.name, + "reason": skipped.reason, + "sourcePath": skipped.source_path, + } + for skipped in result.skipped_targets + ] + return payload def format_json( @@ -127,7 +164,7 @@ def format_markdown(result: ScanResult) -> str: lines = [ "# Codex Plugin Scanner Report", "", - f"- Plugin: `{result.plugin_dir}`", + f"- {'Repository' if result.scope == 'repository' else 'Plugin'}: `{result.plugin_dir}`", f"- Score: **{result.score}/100**", f"- Grade: **{result.grade} - {GRADE_LABELS.get(result.grade, 'Unknown')}**", "", @@ -137,6 +174,16 @@ def format_markdown(result: ScanResult) -> str: for severity in Severity: lines.append(f"- {severity.value.title()}: {result.severity_counts.get(severity.value, 0)}") + if result.scope == "repository": + lines += ["", "## Local Plugins", ""] + for plugin in result.plugin_results: + lines.append(f"- **{plugin.plugin_name or plugin.plugin_dir}**: {plugin.score}/100 ({plugin.grade})") + if result.skipped_targets: + lines += ["", "## Skipped Marketplace Entries", ""] + for skipped in result.skipped_targets: + source_path = f" (`{skipped.source_path}`)" if skipped.source_path else "" + lines.append(f"- **{skipped.name}**{source_path}: {skipped.reason}") + lines += ["", "## Categories", ""] for category in result.categories: category_score = sum(check.points for check in category.checks) diff --git a/src/codex_plugin_scanner/scanner.py b/src/codex_plugin_scanner/scanner.py index 2311081..821f5a6 100644 --- 
a/src/codex_plugin_scanner/scanner.py +++ b/src/codex_plugin_scanner/scanner.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import replace from datetime import datetime, timezone from pathlib import Path @@ -15,17 +16,18 @@ from .integrations.cisco_skill_scanner import CiscoIntegrationStatus from .models import ( CategoryResult, + CheckResult, + Finding, IntegrationResult, ScanOptions, ScanResult, build_severity_counts, get_grade, ) +from .repo_detect import LocalPluginTarget, discover_scan_targets -def _build_integration_results( - skill_security_context, -) -> tuple[IntegrationResult, ...]: +def _build_integration_results(skill_security_context) -> tuple[IntegrationResult, ...]: if skill_security_context.skip_message: return ( IntegrationResult( @@ -59,47 +61,133 @@ def _build_integration_results( ) -def scan_plugin(plugin_dir: str | Path, options: ScanOptions | None = None) -> ScanResult: - """Scan a Codex plugin directory and return a scored result. +def _score_categories(categories: tuple[CategoryResult, ...]) -> int: + earned_points = sum(check.points for category in categories for check in category.checks) + max_points = sum(check.max_points for category in categories for check in category.checks) + return 100 if max_points == 0 else round((earned_points / max_points) * 100) - Args: - plugin_dir: Path to the plugin directory to scan. - Returns: - ScanResult with score 0-100, grade A-F, and per-category breakdowns. 
- """ - resolved = Path(plugin_dir).resolve() - scan_options = options or ScanOptions() - skill_security_context = resolve_skill_security_context(resolved, scan_options) +def _rebase_finding(finding: Finding, plugin_dir: Path, repo_root: Path) -> Finding: + if not finding.file_path: + return finding + rebased_path = (plugin_dir / finding.file_path).resolve().relative_to(repo_root).as_posix() + return replace(finding, file_path=rebased_path) + + +def _rebase_check_result(check: CheckResult, plugin_dir: Path, repo_root: Path) -> CheckResult: + return replace( + check, + findings=tuple(_rebase_finding(finding, plugin_dir, repo_root) for finding in check.findings), + ) + + +def _rebase_plugin_result(plugin_result: ScanResult, plugin_target: LocalPluginTarget, repo_root: Path) -> ScanResult: + rebased_categories = tuple( + CategoryResult( + name=category.name, + checks=tuple(_rebase_check_result(check, plugin_target.plugin_dir, repo_root) for check in category.checks), + ) + for category in plugin_result.categories + ) + rebased_integrations = tuple( + replace(integration, name=f"{plugin_target.name} / {integration.name}") + for integration in plugin_result.integrations + ) + rebased_findings = tuple( + _rebase_finding(finding, plugin_target.plugin_dir, repo_root) for finding in plugin_result.findings + ) + return replace( + plugin_result, + categories=rebased_categories, + findings=rebased_findings, + severity_counts=build_severity_counts(rebased_findings), + integrations=rebased_integrations, + plugin_name=plugin_target.name, + ) + +def _scan_single_plugin(plugin_dir: Path, options: ScanOptions) -> ScanResult: + skill_security_context = resolve_skill_security_context(plugin_dir, options) categories: list[CategoryResult] = [ - CategoryResult(name="Manifest Validation", checks=run_manifest_checks(resolved)), - CategoryResult(name="Security", checks=run_security_checks(resolved)), - CategoryResult(name="Operational Security", 
checks=run_operational_security_checks(resolved)), - CategoryResult(name="Best Practices", checks=run_best_practice_checks(resolved)), - CategoryResult(name="Marketplace", checks=run_marketplace_checks(resolved)), + CategoryResult(name="Manifest Validation", checks=run_manifest_checks(plugin_dir)), + CategoryResult(name="Security", checks=run_security_checks(plugin_dir)), + CategoryResult(name="Operational Security", checks=run_operational_security_checks(plugin_dir)), + CategoryResult(name="Best Practices", checks=run_best_practice_checks(plugin_dir)), + CategoryResult(name="Marketplace", checks=run_marketplace_checks(plugin_dir)), CategoryResult( name="Skill Security", - checks=run_skill_security_checks(resolved, scan_options, skill_security_context), + checks=run_skill_security_checks(plugin_dir, options, skill_security_context), ), - CategoryResult(name="Code Quality", checks=run_code_quality_checks(resolved)), + CategoryResult(name="Code Quality", checks=run_code_quality_checks(plugin_dir)), ] - earned_points = sum(check.points for category in categories for check in category.checks) - max_points = sum(check.max_points for category in categories for check in category.checks) - score = 100 if max_points == 0 else round((earned_points / max_points) * 100) - grade = get_grade(score) + score = _score_categories(tuple(categories)) findings = tuple(finding for category in categories for check in category.checks for finding in check.findings) - severity_counts = build_severity_counts(findings) - integrations = _build_integration_results(skill_security_context) - return ScanResult( score=score, - grade=grade, + grade=get_grade(score), categories=tuple(categories), timestamp=datetime.now(timezone.utc).isoformat(), - plugin_dir=str(resolved), + plugin_dir=str(plugin_dir), findings=findings, - severity_counts=severity_counts, - integrations=integrations, + severity_counts=build_severity_counts(findings), + integrations=_build_integration_results(skill_security_context), 
+        scope="plugin",
     )
+
+
+def _build_repository_categories(
+    repo_root: Path,
+    plugin_results: tuple[ScanResult, ...],
+) -> tuple[CategoryResult, ...]:
+    categories: list[CategoryResult] = [
+        CategoryResult(name="Repository Marketplace", checks=run_marketplace_checks(repo_root)),
+        CategoryResult(name="Repository Operational Security", checks=run_operational_security_checks(repo_root)),
+    ]
+    for plugin_result in plugin_results:
+        for category in plugin_result.categories:
+            if category.name in {"Marketplace", "Operational Security"}:
+                continue
+            plugin_name = plugin_result.plugin_name or Path(plugin_result.plugin_dir).name
+            categories.append(CategoryResult(name=f"{plugin_name} · {category.name}", checks=category.checks))
+    return tuple(categories)
+
+
+def _scan_repository(repo_root: Path, options: ScanOptions) -> ScanResult:
+    discovery = discover_scan_targets(repo_root)
+    plugin_results = tuple(
+        _rebase_plugin_result(_scan_single_plugin(target.plugin_dir, options), target, repo_root)
+        for target in discovery.local_plugins
+    )
+    categories = _build_repository_categories(repo_root, plugin_results)
+    findings = tuple(finding for category in categories for check in category.checks for finding in check.findings)
+    repo_scores = [plugin.score for plugin in plugin_results]
+    repo_category_score = _score_categories(categories[:2]) if categories[:2] else 100
+    if categories[:2]:
+        repo_scores.append(repo_category_score)
+    score = min(repo_scores) if repo_scores else 0
+    return ScanResult(
+        score=score,
+        grade=get_grade(score),
+        categories=categories,
+        timestamp=datetime.now(timezone.utc).isoformat(),
+        plugin_dir=str(repo_root),
+        findings=findings,
+        severity_counts=build_severity_counts(findings),
+        integrations=tuple(integration for plugin in plugin_results for integration in plugin.integrations),
+        scope="repository",
+        plugin_results=plugin_results,
+        skipped_targets=discovery.skipped_targets,
+        marketplace_file=str(discovery.marketplace_file) if
discovery.marketplace_file else None, + ) + + +def scan_plugin(plugin_dir: str | Path, options: ScanOptions | None = None) -> ScanResult: + """Scan a Codex plugin directory or repo marketplace root.""" + + resolved = Path(plugin_dir).resolve() + scan_options = options or ScanOptions() + discovery = discover_scan_targets(resolved) + if discovery.scope == "repository": + return _scan_repository(resolved, scan_options) + return _scan_single_plugin(resolved, scan_options) diff --git a/src/codex_plugin_scanner/verification.py b/src/codex_plugin_scanner/verification.py index 9a7604e..d5c2677 100644 --- a/src/codex_plugin_scanner/verification.py +++ b/src/codex_plugin_scanner/verification.py @@ -12,7 +12,7 @@ import urllib.parse import urllib.request from contextlib import suppress -from dataclasses import dataclass +from dataclasses import dataclass, replace from pathlib import Path from . import __version__ @@ -24,7 +24,9 @@ marketplace_label, validate_marketplace_path_requirements, ) +from .models import ScanSkipTarget from .path_support import is_safe_relative_path +from .repo_detect import discover_scan_targets MARKDOWN_LINK_RE = re.compile(r"\[[^]]+\]\(([^)]+)\)") INTERFACE_REQUIRED_FIELDS = ( @@ -61,6 +63,51 @@ class VerificationResult: cases: tuple[VerificationCase, ...] workspace: str traces: tuple[RuntimeTrace, ...] = () + scope: str = "plugin" + plugin_name: str | None = None + plugin_results: tuple[VerificationResult, ...] = () + skipped_targets: tuple[ScanSkipTarget, ...] 
= ()
+    marketplace_file: str | None = None
+
+
+def build_verification_payload(result: VerificationResult) -> dict[str, object]:
+    payload: dict[str, object] = {
+        "verify_pass": result.verify_pass,
+        "workspace": result.workspace,
+        "scope": result.scope,
+        "cases": [
+            {
+                "component": case.component,
+                "name": case.name,
+                "passed": case.passed,
+                "message": case.message,
+                "classification": case.classification,
+            }
+            for case in result.cases
+        ],
+    }
+    if result.scope == "repository":
+        payload["repository"] = {
+            "marketplaceFile": result.marketplace_file,
+            "localPluginCount": len(result.plugin_results),
+        }
+        payload["plugins"] = [
+            {
+                "name": plugin.plugin_name,
+                "workspace": plugin.workspace,
+                "verify_pass": plugin.verify_pass,
+            }
+            for plugin in result.plugin_results
+        ]
+        payload["skippedTargets"] = [
+            {
+                "name": skipped.name,
+                "reason": skipped.reason,
+                "sourcePath": skipped.source_path,
+            }
+            for skipped in result.skipped_targets
+        ]
+    return payload
 
 
 def _read_json(path: Path) -> dict | list | None:
@@ -778,8 +825,8 @@ def _check_assets(plugin_dir: Path) -> list[VerificationCase]:
     ]
 
 
-def verify_plugin(plugin_dir: str | Path, *, online: bool = False) -> VerificationResult:
-    resolved = Path(plugin_dir).resolve()
+def _verify_single_plugin(plugin_dir: Path, *, online: bool) -> VerificationResult:
+    resolved = plugin_dir.resolve()
     mcp_cases, traces = _check_mcp(resolved, online=online)
     cases: list[VerificationCase] = [
         *_check_manifest(resolved),
@@ -794,7 +841,78 @@ def verify_plugin(plugin_dir: str | Path, *, online: bool = False) -> Verificati
         cases=tuple(cases),
         workspace=str(resolved),
         traces=tuple(traces),
+        scope="plugin",
+    )
+
+
+def _prefixed_cases(plugin_name: str, cases: tuple[VerificationCase, ...]) -> tuple[VerificationCase, ...]:
+    return tuple(
+        VerificationCase(
+            component=case.component,
+            name=f"{plugin_name} · {case.name}",
+            passed=case.passed,
+            message=case.message,
+            classification=case.classification,
+        )
+        for case in
cases
+    )
+
+
+def _prefixed_traces(plugin_name: str, traces: tuple[RuntimeTrace, ...]) -> tuple[RuntimeTrace, ...]:
+    return tuple(
+        RuntimeTrace(
+            component=trace.component,
+            name=f"{plugin_name} · {trace.name}",
+            command=trace.command,
+            returncode=trace.returncode,
+            stdout=trace.stdout,
+            stderr=trace.stderr,
+            timed_out=trace.timed_out,
+        )
+        for trace in traces
+    )
+
+
+def _verify_repository(repo_root: Path, *, online: bool) -> VerificationResult:
+    discovery = discover_scan_targets(repo_root)
+    marketplace_cases = tuple(_check_marketplace(repo_root))
+    plugin_results = tuple(
+        replace(
+            _verify_single_plugin(target.plugin_dir, online=online),
+            plugin_name=target.name,
+        )
+        for target in discovery.local_plugins
+    )
+    prefixed_plugin_cases = tuple(
+        case
+        for plugin_result in plugin_results
+        for case in _prefixed_cases(plugin_result.plugin_name or "plugin", plugin_result.cases)
+    )
+    prefixed_plugin_traces = tuple(
+        trace
+        for plugin_result in plugin_results
+        for trace in _prefixed_traces(plugin_result.plugin_name or "plugin", plugin_result.traces)
+    )
+    cases = marketplace_cases + prefixed_plugin_cases
+    verify_pass = all(case.passed for case in cases) and bool(plugin_results)
+    return VerificationResult(
+        verify_pass=verify_pass,
+        cases=cases,
+        workspace=str(repo_root),
+        traces=prefixed_plugin_traces,
+        scope="repository",
+        plugin_results=plugin_results,
+        skipped_targets=discovery.skipped_targets,
+        marketplace_file=str(discovery.marketplace_file) if discovery.marketplace_file else None,
+    )
+
+
+def verify_plugin(plugin_dir: str | Path, *, online: bool = False) -> VerificationResult:
+    resolved = Path(plugin_dir).resolve()
+    discovery = discover_scan_targets(resolved)
+    if discovery.scope == "repository":
+        return _verify_repository(resolved, online=online)
+    return _verify_single_plugin(resolved, online=online)
 
 
 def build_doctor_report(plugin_dir: str | Path, component: str) -> dict[str, object]:
diff --git
a/tests/fixtures/multi-plugin-repo/.agents/plugins/marketplace.json b/tests/fixtures/multi-plugin-repo/.agents/plugins/marketplace.json new file mode 100644 index 0000000..8175bb4 --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/.agents/plugins/marketplace.json @@ -0,0 +1,43 @@ +{ + "name": "local-example-plugins", + "interface": { + "displayName": "Local Example Plugins" + }, + "plugins": [ + { + "name": "alpha-plugin", + "source": { + "source": "local", + "path": "./plugins/alpha-plugin" + }, + "policy": { + "installation": "AVAILABLE", + "authentication": "ON_INSTALL" + }, + "category": "Productivity" + }, + { + "name": "beta-plugin", + "source": { + "source": "local", + "path": "./plugins/beta-plugin" + }, + "policy": { + "installation": "AVAILABLE", + "authentication": "ON_INSTALL" + }, + "category": "Productivity" + }, + { + "name": "remote-plugin", + "source": { + "source": "https://example.com/plugins/remote-plugin" + }, + "policy": { + "installation": "AVAILABLE", + "authentication": "ON_INSTALL" + }, + "category": "Productivity" + } + ] +} diff --git a/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codex-plugin/plugin.json b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codex-plugin/plugin.json new file mode 100644 index 0000000..4b70a42 --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codex-plugin/plugin.json @@ -0,0 +1,18 @@ +{ + "name": "alpha-plugin", + "version": "1.0.0", + "description": "Alpha plugin for repository-mode scanning.", + "author": "HOL", + "repository": "https://github.com/hashgraph-online/alpha-plugin", + "license": "Apache-2.0", + "skills": "./skills/", + "interface": { + "displayName": "Alpha Plugin", + "shortDescription": "Alpha plugin", + "developerName": "HOL", + "category": "Productivity", + "websiteURL": "https://hol.org", + "privacyPolicyURL": "https://hol.org/privacy", + "termsOfServiceURL": "https://hol.org/terms" + } +} diff --git 
a/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codexignore b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codexignore new file mode 100644 index 0000000..849ddff --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/.codexignore @@ -0,0 +1 @@ +dist/ diff --git a/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/LICENSE b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/LICENSE new file mode 100644 index 0000000..3f8673d --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/LICENSE @@ -0,0 +1,3 @@ +Apache License +Version 2.0, January 2004 +https://www.apache.org/licenses/LICENSE-2.0 diff --git a/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/README.md b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/README.md new file mode 100644 index 0000000..134ed07 --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/README.md @@ -0,0 +1 @@ +# Alpha Plugin diff --git a/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/SECURITY.md b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/SECURITY.md new file mode 100644 index 0000000..8dbb2f9 --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/SECURITY.md @@ -0,0 +1 @@ +# Security diff --git a/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/skills/example/SKILL.md b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/skills/example/SKILL.md new file mode 100644 index 0000000..fd91d43 --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/alpha-plugin/skills/example/SKILL.md @@ -0,0 +1,6 @@ +--- +name: example +description: Example repository plugin skill. +--- + +Use the alpha plugin skill. 
diff --git a/tests/fixtures/multi-plugin-repo/plugins/beta-plugin/.codex-plugin/plugin.json b/tests/fixtures/multi-plugin-repo/plugins/beta-plugin/.codex-plugin/plugin.json new file mode 100644 index 0000000..6710b97 --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/beta-plugin/.codex-plugin/plugin.json @@ -0,0 +1,6 @@ +{ + "name": "beta-plugin", + "version": "1.0.0", + "description": "Beta plugin with intentionally incomplete metadata.", + "skills": "./skills/" +} diff --git a/tests/fixtures/multi-plugin-repo/plugins/beta-plugin/skills/example/SKILL.md b/tests/fixtures/multi-plugin-repo/plugins/beta-plugin/skills/example/SKILL.md new file mode 100644 index 0000000..75df565 --- /dev/null +++ b/tests/fixtures/multi-plugin-repo/plugins/beta-plugin/skills/example/SKILL.md @@ -0,0 +1,6 @@ +--- +name: example +description: Example repository plugin skill. +--- + +Use the beta plugin skill. diff --git a/tests/test_action_runner.py b/tests/test_action_runner.py index e760bab..ea8329d 100644 --- a/tests/test_action_runner.py +++ b/tests/test_action_runner.py @@ -142,3 +142,40 @@ def test_action_runner_verify_mode_writes_human_report(monkeypatch, tmp_path, ca assert "mode=verify" in github_output.read_text(encoding="utf-8") assert "verify_pass=true" in github_output.read_text(encoding="utf-8") assert "Report written to" in capsys.readouterr().out + + +def test_action_runner_repository_scan_defaults_to_marketplace_root(monkeypatch, tmp_path) -> None: + output_path = tmp_path / "github-output.txt" + summary_path = tmp_path / "step-summary.md" + + monkeypatch.setenv("PLUGIN_DIR", str(FIXTURES / "multi-plugin-repo")) + monkeypatch.setenv("FORMAT", "json") + monkeypatch.setenv("OUTPUT", "") + monkeypatch.setenv("MIN_SCORE", "0") + monkeypatch.setenv("FAIL_ON", "none") + monkeypatch.setenv("CISCO_SCAN", "off") + monkeypatch.setenv("CISCO_POLICY", "balanced") + monkeypatch.setenv("SUBMISSION_ENABLED", "false") + monkeypatch.setenv("SUBMISSION_SCORE_THRESHOLD", "80") + 
monkeypatch.setenv("SUBMISSION_REPOS", "hashgraph-online/awesome-codex-plugins") + monkeypatch.setenv("SUBMISSION_TOKEN", "") + monkeypatch.setenv("SUBMISSION_LABELS", "plugin-submission") + monkeypatch.setenv("SUBMISSION_CATEGORY", "Community Plugins") + monkeypatch.setenv("SUBMISSION_PLUGIN_NAME", "") + monkeypatch.setenv("SUBMISSION_PLUGIN_URL", "") + monkeypatch.setenv("SUBMISSION_PLUGIN_DESCRIPTION", "") + monkeypatch.setenv("SUBMISSION_AUTHOR", "") + monkeypatch.setenv("WRITE_STEP_SUMMARY", "true") + monkeypatch.setenv("REGISTRY_PAYLOAD_OUTPUT", "") + monkeypatch.setenv("GITHUB_OUTPUT", str(output_path)) + monkeypatch.setenv("GITHUB_STEP_SUMMARY", str(summary_path)) + + exit_code = main() + + assert exit_code == 0 + summary_text = summary_path.read_text(encoding="utf-8") + assert "- Scope: repository" in summary_text + assert "- Local plugins scanned: 2" in summary_text + assert "- Skipped marketplace entries: 1" in summary_text + output_lines = output_path.read_text(encoding="utf-8").splitlines() + assert any(line.startswith("score=") for line in output_lines) diff --git a/tests/test_cli.py b/tests/test_cli.py index a380cc5..adf1630 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -289,6 +289,15 @@ def test_submit_writes_artifact(self, tmp_path): parsed = json.loads(artifact.read_text(encoding="utf-8")) assert parsed["schema_version"] == "plugin-quality.v1" + def test_scan_json_reports_repository_scope_for_marketplace_repo(self, capsys): + rc = main(["scan", str(FIXTURES / "multi-plugin-repo"), "--format", "json"]) + assert rc == 0 + parsed = json.loads(capsys.readouterr().out) + assert parsed["scope"] == "repository" + assert parsed["repository"]["localPluginCount"] == 2 + assert len(parsed["plugins"]) == 2 + assert parsed["skippedTargets"][0]["name"] == "remote-plugin" + def test_submit_blocks_on_verify_fail(self, tmp_path): artifact = tmp_path / "plugin-quality.json" rc = main(["submit", str(FIXTURES / "bad-plugin"), "--attest", str(artifact)]) 
diff --git a/tests/test_scanner.py b/tests/test_scanner.py
index d22ab4f..68f1c7d 100644
--- a/tests/test_scanner.py
+++ b/tests/test_scanner.py
@@ -112,3 +112,16 @@ def test_with_marketplace_plugin(self):
         result = scan_plugin(FIXTURES / "with-marketplace")
         mp_cat = next(c for c in result.categories if c.name == "Marketplace")
         assert sum(c.points for c in mp_cat.checks) == 15
+
+    def test_marketplace_repo_scans_all_local_plugins(self):
+        result = scan_plugin(FIXTURES / "multi-plugin-repo")
+
+        assert result.scope == "repository"
+        assert result.score < 100
+        assert result.grade in {"A", "B", "C", "D", "F"}
+        assert len(result.plugin_results) == 2
+        assert {plugin.plugin_name for plugin in result.plugin_results} == {"alpha-plugin", "beta-plugin"}
+        assert any(category.name.startswith("alpha-plugin · ") for category in result.categories)
+        assert any(category.name.startswith("beta-plugin · ") for category in result.categories)
+        assert any(category.name == "Repository Marketplace" for category in result.categories)
+        assert any(skip.name == "remote-plugin" for skip in result.skipped_targets)
diff --git a/tests/test_schema_contracts.py b/tests/test_schema_contracts.py
index 03d1335..ac6a36b 100644
--- a/tests/test_schema_contracts.py
+++ b/tests/test_schema_contracts.py
@@ -25,6 +25,14 @@ def test_scan_output_matches_schema_required_keys(capsys):
     validate(instance=payload, schema=schema)
 
 
+def test_repository_scan_output_matches_schema_required_keys(capsys):
+    rc = main(["scan", str(FIXTURES / "multi-plugin-repo"), "--format", "json"])
+    assert rc == 0
+    payload = json.loads(capsys.readouterr().out)
+    schema = json.loads((ROOT / "schemas" / "scan-result.v1.json").read_text(encoding="utf-8"))
+    validate(instance=payload, schema=schema)
+
+
 def test_verify_output_matches_schema_required_keys(capsys):
     rc = main(["verify", str(FIXTURES / "good-plugin"), "--format", "json"])
     assert rc == 0
@@ -33,6 +41,14 @@
validate(instance=payload, schema=schema)
 
 
+def test_repository_verify_output_matches_schema_required_keys(capsys):
+    rc = main(["verify", str(FIXTURES / "multi-plugin-repo"), "--format", "json"])
+    assert rc == 1
+    payload = json.loads(capsys.readouterr().out)
+    schema = json.loads((ROOT / "schemas" / "verify-result.v1.json").read_text(encoding="utf-8"))
+    validate(instance=payload, schema=schema)
+
+
 def test_submit_artifact_matches_schema_required_keys(tmp_path):
     artifact = tmp_path / "plugin-quality.json"
     rc = main(["submit", str(FIXTURES / "good-plugin"), "--attest", str(artifact)])
diff --git a/tests/test_verification.py b/tests/test_verification.py
index 10fdcd5..404d749 100644
--- a/tests/test_verification.py
+++ b/tests/test_verification.py
@@ -29,6 +29,19 @@ def test_verify_plugin_handles_non_object_marketplace_payload(tmp_path: Path):
     assert any(case.component == "marketplace" and case.classification == "schema" for case in result.cases)
 
 
+def test_verify_plugin_marketplace_repo_checks_all_local_plugins():
+    fixtures = Path(__file__).parent / "fixtures"
+    result = verify_plugin(fixtures / "multi-plugin-repo")
+
+    assert result.scope == "repository"
+    assert result.verify_pass is False
+    assert len(result.plugin_results) == 2
+    assert {plugin.plugin_name for plugin in result.plugin_results} == {"alpha-plugin", "beta-plugin"}
+    assert any(case.name.startswith("alpha-plugin · ") for case in result.cases)
+    assert any(case.name.startswith("beta-plugin · ") for case in result.cases)
+    assert any(skip.name == "remote-plugin" for skip in result.skipped_targets)
+
+
 def test_verify_plugin_reports_real_workspace_path() -> None:
     result = verify_plugin(FIXTURES / "good-plugin")
     assert Path(result.workspace).exists()
From e82267dd4a69e76bd7d5781a6b4eee56bcf83388 Mon Sep 17 00:00:00 2001
From: Michael Kantor <6068672+kantorcodes@users.noreply.github.com>
Date: Sat, 4 Apr 2026 18:30:02 -0400
Subject: [PATCH 2/2] fix: handle remote marketplace entries cleanly
Signed-off-by: Michael Kantor <6068672+kantorcodes@users.noreply.github.com> --- schemas/scan-result.v1.json | 1 + src/codex_plugin_scanner/repo_detect.py | 24 ++++++++++++------------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/schemas/scan-result.v1.json b/schemas/scan-result.v1.json index 5f15984..b0eab87 100644 --- a/schemas/scan-result.v1.json +++ b/schemas/scan-result.v1.json @@ -162,6 +162,7 @@ }, "findingRef": { "type": "object", + "additionalProperties": false, "required": ["ruleId", "severity", "title", "description", "source"], "properties": { "ruleId": {"type": "string", "minLength": 1}, diff --git a/src/codex_plugin_scanner/repo_detect.py b/src/codex_plugin_scanner/repo_detect.py index 3f301db..7eb669c 100644 --- a/src/codex_plugin_scanner/repo_detect.py +++ b/src/codex_plugin_scanner/repo_detect.py @@ -88,22 +88,22 @@ def discover_scan_targets(target_dir: str | Path) -> ScanDiscovery: if isinstance(name, str) and name: entry_name = name + source_ref, source_path = extract_marketplace_source(plugin) + if source_ref and source_ref != "local": + skipped_targets.append( + ScanSkipTarget( + name=entry_name, + reason=f"non-local marketplace source: {source_ref}", + source_path=source_path, + ) + ) + continue + issue = validate_marketplace_path_requirements(context, plugin) if issue is not None: - source_ref, source_path = extract_marketplace_source(plugin) - if source_ref and source_ref != "local": - skipped_targets.append( - ScanSkipTarget( - name=entry_name, - reason=f"non-local marketplace source: {source_ref}", - source_path=source_path, - ) - ) - else: - skipped_targets.append(ScanSkipTarget(name=entry_name, reason=issue, source_path=source_path)) + skipped_targets.append(ScanSkipTarget(name=entry_name, reason=issue, source_path=source_path)) continue - _source_ref, source_path = extract_marketplace_source(plugin) if source_path is None: skipped_targets.append(ScanSkipTarget(name=entry_name, reason='missing "source.path"')) continue