From 829fed65a71b66eec51d557a9c175f21ed4399d0 Mon Sep 17 00:00:00 2001
From: Michael Kantor <6068672+kantorcodes@users.noreply.github.com>
Date: Thu, 2 Apr 2026 21:08:44 -0700
Subject: [PATCH 1/2] feat: align scanner with codex marketplace spec
Signed-off-by: Michael Kantor <6068672+kantorcodes@users.noreply.github.com>
---
README.md | 37 +-
action/README.md | 31 +-
action/action.yml | 22 ++
docs/codex-spec-alignment-todo.md | 86 +++++
docs/prd-v2-codex-spec-alignment.md | 102 +++++
src/codex_plugin_scanner/action_runner.py | 12 +
src/codex_plugin_scanner/checks/manifest.py | 118 ++----
.../checks/manifest_support.py | 61 +++
.../checks/marketplace.py | 347 ++++++++++--------
src/codex_plugin_scanner/lint_fixes.py | 70 +++-
.../marketplace_support.py | 95 +++++
src/codex_plugin_scanner/path_support.py | 46 +++
src/codex_plugin_scanner/verification.py | 197 +++++++---
.../good-plugin/.codex-plugin/plugin.json | 2 +-
.../with-marketplace/marketplace.json | 1 +
tests/test_action_bundle.py | 17 +-
tests/test_action_runner.py | 3 +
tests/test_config.py | 4 +-
tests/test_lint_fixes.py | 70 ++++
tests/test_manifest.py | 39 ++
tests/test_marketplace.py | 107 ++++++
tests/test_verification.py | 99 ++++-
22 files changed, 1248 insertions(+), 318 deletions(-)
create mode 100644 docs/codex-spec-alignment-todo.md
create mode 100644 docs/prd-v2-codex-spec-alignment.md
create mode 100644 src/codex_plugin_scanner/checks/manifest_support.py
create mode 100644 src/codex_plugin_scanner/marketplace_support.py
create mode 100644 src/codex_plugin_scanner/path_support.py
create mode 100644 tests/test_lint_fixes.py
diff --git a/README.md b/README.md
index dd829e1..cf1d3d3 100644
--- a/README.md
+++ b/README.md
@@ -89,7 +89,7 @@ The scanner evaluates only the surfaces a plugin actually exposes, then normaliz
| Security | 24 | `SECURITY.md`, `LICENSE`, hardcoded secret detection, dangerous MCP commands, MCP transport hardening, risky approval defaults |
| Operational Security | 20 | SHA-pinned GitHub Actions, `write-all`, privileged untrusted checkout patterns, Dependabot, dependency lockfiles |
| Best Practices | 15 | `README.md`, skills directory, `SKILL.md` frontmatter, committed `.env`, `.codexignore` |
-| Marketplace | 15 | `marketplace.json` validity, policy fields, safe source paths |
+| Marketplace | 15 | `.agents/plugins/marketplace.json` validity, legacy `marketplace.json` compatibility, policy fields, safe source paths |
| Skill Security | 15 | Cisco integration status, elevated skill findings, analyzability |
| Code Quality | 10 | `eval`, `new Function`, shell-injection patterns |
@@ -125,6 +125,7 @@ codex-plugin-scanner lint ./my-plugin --fix --profile strict-security
# Runtime readiness verification
codex-plugin-scanner verify ./my-plugin --format json
+codex-plugin-scanner verify ./my-plugin --online --format text
# Artifact-backed submission gate
codex-plugin-scanner submit ./my-plugin --profile public-marketplace --attest dist/plugin-quality.json
@@ -133,6 +134,18 @@ codex-plugin-scanner submit ./my-plugin --profile public-marketplace --attest di
codex-plugin-scanner doctor ./my-plugin --component mcp --bundle dist/doctor.zip
```
+## Codex Spec Alignment
+
+The scanner follows the current Codex plugin packaging conventions more closely:
+
+- local manifest paths should use `./` prefixes
+- `.agents/plugins/marketplace.json` is the preferred marketplace manifest location
+- root `marketplace.json` is still supported in compatibility mode
+- `interface` metadata no longer requires an undocumented `type` field
+- `verify` performs an MCP initialize handshake before probing declared capabilities
+
+`lint --fix` preserves or adds the documented `./` prefixes instead of stripping them away.
+
## Config + Baseline Example
```toml
@@ -217,12 +230,24 @@ The scanner currently detects or validates:
Add the scanner to a plugin repository CI job:
```yaml
-- name: Install scanner
- run: pip install codex-plugin-scanner
+permissions:
+ contents: read
+ security-events: write
-- name: Scan plugin
- run: codex-plugin-scanner ./my-plugin --fail-on-severity high --format sarif --output codex-plugin-scanner.sarif
- continue-on-error: true
+jobs:
+ scan-plugin:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ - uses: hashgraph-online/hol-codex-plugin-scanner-action@v1
+ with:
+ plugin_dir: "."
+ mode: scan
+ profile: public-marketplace
+ min_score: 80
+ fail_on_severity: high
+ format: sarif
+ upload_sarif: true
```
Local pre-commit style hook:
diff --git a/action/README.md b/action/README.md
index a9e72a5..054f0c4 100644
--- a/action/README.md
+++ b/action/README.md
@@ -20,12 +20,15 @@ This README is intentionally root-ready for a dedicated GitHub Marketplace actio
| Input | Description | Default |
|-------|-------------|---------|
| `plugin_dir` | Path to the plugin directory to scan | `.` |
+| `mode` | Execution mode: `scan`, `lint`, `verify`, or `submit` | `scan` |
| `format` | Output format: `text`, `json`, `markdown`, `sarif` | `text` |
| `output` | Write report to this file path | `""` |
| `profile` | Policy profile: `default`, `public-marketplace`, or `strict-security` | `default` |
| `config` | Optional path to `.codex-plugin-scanner.toml` | `""` |
| `baseline` | Optional path to a baseline suppression file | `""` |
| `online` | Enable live network probing for `verify` mode | `false` |
+| `upload_sarif` | Upload the generated SARIF report to GitHub code scanning when `mode: scan` | `false` |
+| `sarif_category` | SARIF category used during GitHub code scanning upload | `codex-plugin-scanner` |
| `write_step_summary` | Write a concise markdown summary to the GitHub Actions job summary | `true` |
| `registry_payload_output` | Write a machine-readable Codex ecosystem payload JSON file for registry or awesome-list automation | `""` |
| `min_score` | Fail if score is below this threshold (0-100) | `0` |
@@ -51,6 +54,8 @@ This README is intentionally root-ready for a dedicated GitHub Marketplace actio
| `score` | Numeric score (0-100) |
| `grade` | Letter grade (A-F) |
| `grade_label` | Human-readable grade label |
+| `policy_pass` | `true` when the selected policy profile passed |
+| `verify_pass` | `true` when runtime verification passed |
| `max_severity` | Highest finding severity, or `none` |
| `findings_total` | Total number of findings across all severities |
| `report_path` | Path to the rendered report file, if `output` was set |
@@ -82,12 +87,22 @@ Mode notes:
### SARIF output for GitHub Code Scanning
```yaml
-- uses: your-org/hol-codex-plugin-scanner-action@v1
- with:
- plugin_dir: "."
- format: sarif
- output: codex-plugin-scanner.sarif
- fail_on_severity: high
+permissions:
+ contents: read
+ security-events: write
+
+jobs:
+ scan:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+ - uses: your-org/hol-codex-plugin-scanner-action@v1
+ with:
+ plugin_dir: "."
+ mode: scan
+ format: sarif
+ fail_on_severity: high
+ upload_sarif: true
```
### With Cisco skill scanning
@@ -109,7 +124,7 @@ Mode notes:
with:
plugin_dir: "."
format: sarif
- output: codex-plugin-scanner.sarif
+ upload_sarif: true
registry_payload_output: codex-plugin-registry-payload.json
- name: Show trust signals
@@ -205,3 +220,5 @@ Set `mode` to one of `scan`, `lint`, `verify`, or `submit`.
```
For `submit` mode, use `registry_payload_output` to control artifact path.
+
+For `scan` mode, set `upload_sarif: true` to emit and upload SARIF automatically instead of wiring a separate upload step by hand.
diff --git a/action/action.yml b/action/action.yml
index 0cad71f..e25d376 100644
--- a/action/action.yml
+++ b/action/action.yml
@@ -35,6 +35,14 @@ inputs:
description: "Enable live network probing for verify mode"
required: false
default: "false"
+ upload_sarif:
+ description: "Upload the generated SARIF report to GitHub code scanning. Requires security-events: write in the calling workflow."
+ required: false
+ default: "false"
+ sarif_category:
+ description: "SARIF category used when upload_sarif is enabled"
+ required: false
+ default: "codex-plugin-scanner"
write_step_summary:
description: "Write a concise markdown summary to the GitHub Actions job summary"
required: false
@@ -114,6 +122,12 @@ outputs:
grade_label:
description: "The human-readable grade label"
value: ${{ steps.scan.outputs.grade_label }}
+ policy_pass:
+ description: "Whether the selected policy profile passed"
+ value: ${{ steps.scan.outputs.policy_pass }}
+ verify_pass:
+ description: "Whether runtime verification passed"
+ value: ${{ steps.scan.outputs.verify_pass }}
max_severity:
description: "The most severe finding in the scan result, or none"
value: ${{ steps.scan.outputs.max_severity }}
@@ -184,6 +198,7 @@ runs:
CONFIG: ${{ inputs.config }}
BASELINE: ${{ inputs.baseline }}
ONLINE: ${{ inputs.online }}
+ UPLOAD_SARIF: ${{ inputs.upload_sarif }}
WRITE_STEP_SUMMARY: ${{ inputs.write_step_summary }}
REGISTRY_PAYLOAD_OUTPUT: ${{ inputs.registry_payload_output }}
MIN_SCORE: ${{ inputs.min_score }}
@@ -202,3 +217,10 @@ runs:
SUBMISSION_AUTHOR: ${{ inputs.submission_author }}
GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }}
run: python3 -m codex_plugin_scanner.action_runner
+
+ - name: Upload SARIF
+ if: ${{ inputs.upload_sarif == 'true' && inputs.mode == 'scan' && steps.scan.outputs.report_path != '' }}
+ uses: github/codeql-action/upload-sarif@51f77329afa6477de8c49fc9c7046c15b9a4e79d
+ with:
+ sarif_file: ${{ steps.scan.outputs.report_path }}
+ category: ${{ inputs.sarif_category }}
diff --git a/docs/codex-spec-alignment-todo.md b/docs/codex-spec-alignment-todo.md
new file mode 100644
index 0000000..a4e06b1
--- /dev/null
+++ b/docs/codex-spec-alignment-todo.md
@@ -0,0 +1,86 @@
+# Codex Spec Alignment Todo
+
+## Implementation workstreams
+
+### Marketplace model and compatibility
+
+- Add a shared marketplace loader that resolves the preferred path `.agents/plugins/marketplace.json` and a deprecated fallback `marketplace.json`.
+- Normalize marketplace context around two roots:
+ - repository root
+ - marketplace file parent directory
+- Replace string-only `plugin.source` handling with support for `source.source` and `source.path`.
+- Add validation helpers for:
+ - required `plugins[].policy.installation`
+ - required `plugins[].policy.authentication`
+ - required `plugins[].category`
+ - optional marketplace `interface.displayName`
+ - `source.path` `./` prefix
+ - `source.path` staying within the marketplace root
+- Keep legacy root marketplace parsing but mark it as compatibility mode in messages/findings.
+
+### Manifest and autofix alignment
+
+- Remove `interface.type` from the required interface metadata set.
+- Centralize path normalization logic so manifest and marketplace checks share the same `./`-prefixed relative-path policy.
+- Update autofix behavior to:
+ - preserve existing valid `./` prefixes
+ - add `./` for eligible local paths in plugin and marketplace JSON
+ - avoid mutating remote URLs or non-path string fields
+
+### MCP verification lifecycle
+
+- Introduce a small JSON-RPC transport helper for newline-delimited stdio MCP sessions.
+- Send `initialize` with:
+ - `protocolVersion`
+ - `capabilities`
+ - `clientInfo`
+- Parse the initialize result and, when successful, send `notifications/initialized`.
+- Probe optional capabilities with:
+ - `tools/list`
+ - `resources/list`
+ - `prompts/list`
+- Record requests and responses in runtime traces for `doctor`.
+- Preserve strict timeouts and guaranteed subprocess cleanup.
+
+### Action ergonomics
+
+- Extend `action/action.yml` inputs with:
+ - `upload_sarif`
+ - `sarif_category`
+- Extend Action outputs with:
+ - `policy_pass`
+ - `verify_pass`
+- Update `action_runner.py` to emit those outputs and write a default SARIF path when upload is requested.
+- Add a conditional `github/codeql-action/upload-sarif` step pinned by SHA.
+
+### Documentation and fixtures
+
+- Update README examples for:
+ - `.agents/plugins/marketplace.json`
+ - `lint --fix`
+ - SARIF upload usage
+- Add or update fixtures for:
+ - valid Codex marketplace repo layout
+ - legacy marketplace compatibility
+ - MCP stdio stub server handshake
+
+## Test plan
+
+- `tests/test_marketplace.py`
+ - preferred marketplace path
+ - legacy fallback
+ - `source.path` prefix and containment
+ - required category and policy fields
+- `tests/test_manifest.py`
+ - interface metadata passes without `type`
+- `tests/test_verification.py`
+ - MCP initialize + initialized
+ - capability enumeration traces
+ - doctor bundle includes real lifecycle traces
+- `tests/test_cli.py`
+ - `lint --fix` preserves or adds `./`
+- `tests/test_action_runner.py`
+ - SARIF upload path preparation
+ - `policy_pass` and `verify_pass` outputs
+- `tests/test_action_bundle.py`
+ - Action metadata and new upload inputs
diff --git a/docs/prd-v2-codex-spec-alignment.md b/docs/prd-v2-codex-spec-alignment.md
new file mode 100644
index 0000000..b380d95
--- /dev/null
+++ b/docs/prd-v2-codex-spec-alignment.md
@@ -0,0 +1,102 @@
+# PRD: v2 Codex Spec Alignment and Runtime Hardening
+
+## Summary
+
+`codex-plugin-scanner` should become the default readiness gate for Codex plugins by aligning its validation and verification logic to the current Codex plugin and marketplace conventions, hardening MCP runtime verification, and reducing GitHub Action adoption friction.
+
+This release is intentionally scoped to the highest-confidence gaps identified in [code-scanner-research.md](./code-scanner-research.md):
+
+- first-class support for Codex repo marketplaces at `.agents/plugins/marketplace.json`
+- path semantics that preserve documented `./`-prefixed relative paths instead of stripping them
+- manifest validation that stops requiring undocumented `interface.type`
+- MCP stdio verification that performs a lifecycle-compliant initialize flow
+- GitHub Action ergonomics for SARIF upload and policy/verification outputs
+
+## Problem
+
+The scanner already has strong policy, scoring, suppression, and CI foundations, but it still diverges from the current Codex packaging contract in a few critical ways:
+
+- marketplace validation assumes a legacy root `marketplace.json` shape instead of the Codex repo marketplace path and schema
+- safe autofix rewrites documented `./` path prefixes away
+- runtime MCP verification sends an empty `initialize` payload and does not send `notifications/initialized`
+- Action adoption still requires manual SARIF wiring even though SARIF is a first-class output
+
+Those gaps create false negatives, false positives, and trust issues for plugin authors who are following the docs correctly.
+
+## Goals
+
+- Validate Codex marketplaces at `.agents/plugins/marketplace.json` using the documented `plugins[].source.path` object shape.
+- Keep legacy root `marketplace.json` support only as a compatibility fallback with an explicit deprecation signal.
+- Preserve and autofix documented `./`-prefixed relative paths for manifest and marketplace references.
+- Perform a protocol-grade MCP stdio initialize flow and capture richer traces for `doctor`.
+- Let plugin authors opt into SARIF upload directly from the scanner Action with least-privilege guidance.
+
+## Non-goals
+
+- Replacing `$plugin-creator`
+- Adding network-on-by-default verification
+- Reworking the existing score model or registry artifact schemas beyond what is required for this spec-alignment release
+- Building a generic MCP remote inspector beyond safe reachability and stdio lifecycle verification
+
+## Users
+
+- Codex plugin authors shipping repository-local plugins
+- Teams maintaining repo-local marketplaces of plugins
+- Registry maintainers consuming scanner artifacts and SARIF
+
+## Scope
+
+### 1. Marketplace spec alignment
+
+- Default marketplace location becomes `.agents/plugins/marketplace.json`.
+- Validation accepts:
+ - `name: string`
+ - optional `interface.displayName`
+ - `plugins: []`
+ - each plugin entry with:
+ - `source: { source: string, path: string }`
+ - `policy.installation`
+ - `policy.authentication`
+ - `category`
+- `source.path` must:
+ - start with `./`
+ - resolve inside the marketplace root
+- legacy root `marketplace.json` remains supported in v2 with a compatibility warning path in validation/verification/docs.
+
+### 2. Manifest and path semantics
+
+- `interface.type` is no longer required for publishability.
+- interface asset paths and manifest-declared local paths must preserve `./` prefixes where Codex expects them.
+- autofix upgrades eligible local paths to documented `./` form instead of stripping prefixes.
+
+### 3. MCP verification hardening
+
+- stdio MCP verification sends:
+ - `initialize` with `protocolVersion`, `capabilities`, and `clientInfo`
+ - `notifications/initialized`
+ - optional capability probes for `tools/list`, `resources/list`, and `prompts/list` when declared by the server
+- traces recorded for `doctor` include the full request/response sequence and timeout classification.
+
+### 4. Action ergonomics
+
+- composite Action adds optional SARIF upload support:
+ - `upload_sarif`
+ - `sarif_category`
+- Action outputs explicitly include `policy_pass` and `verify_pass`.
+- docs provide the required `security-events: write` guidance for SARIF upload.
+
+## Acceptance criteria
+
+- A plugin repo with `.agents/plugins/marketplace.json` and `./`-prefixed `source.path` passes marketplace checks.
+- `lint --fix` never strips a valid `./` prefix from manifest or marketplace paths.
+- A manifest with a publishable `interface` object but no `interface.type` passes interface metadata checks.
+- MCP stdio verification records a successful initialize + initialized exchange against a compliant stub server.
+- `doctor --bundle` contains real stdio trace output for the MCP lifecycle exchange.
+- The GitHub Action can scan in SARIF mode and optionally upload the generated SARIF when permissions are present.
+
+## Verification plan
+
+- unit tests for marketplace parsing, path normalization, manifest interface metadata, MCP lifecycle traces, and Action runner outputs
+- CLI tests covering `lint --fix`, `verify`, and `doctor --bundle`
+- targeted Action tests for `upload_sarif`, `policy_pass`, and `verify_pass`
+- full `pytest`, `ruff check`, `ruff format --check`, and `python -m build`
diff --git a/src/codex_plugin_scanner/action_runner.py b/src/codex_plugin_scanner/action_runner.py
index 2167798..8cddfd8 100644
--- a/src/codex_plugin_scanner/action_runner.py
+++ b/src/codex_plugin_scanner/action_runner.py
@@ -183,6 +183,7 @@ def main() -> int:
output_path = _read_env("OUTPUT")
write_step_summary = _read_bool_env("WRITE_STEP_SUMMARY", default=True)
registry_payload_output = _read_env("REGISTRY_PAYLOAD_OUTPUT")
+ upload_sarif = _read_bool_env("UPLOAD_SARIF")
profile = _read_env("PROFILE", "default")
config = _read_env("CONFIG")
baseline = _read_env("BASELINE")
@@ -220,6 +221,8 @@ def main() -> int:
"score": "",
"grade": "",
"grade_label": "",
+ "policy_pass": "",
+ "verify_pass": "",
"max_severity": "",
"findings_total": "",
"report_path": "",
@@ -250,6 +253,12 @@ def main() -> int:
artifact_path = ""
verification = None
if mode == "scan":
+ if upload_sarif:
+ if output_format != "sarif":
+ print("upload_sarif requires format=sarif.", file=sys.stderr)
+ return 1
+ if not output_path:
+ output_path = "codex-plugin-scanner.sarif"
rendered = _render_scan_output(
result,
output_format=output_format,
@@ -296,6 +305,8 @@ def main() -> int:
"score": str(result.score),
"grade": result.grade,
"grade_label": GRADE_LABELS.get(result.grade, "Unknown"),
+ "policy_pass": "true" if policy_eval.policy_pass else "false",
+ "verify_pass": "true" if verification is not None and verification.verify_pass else "",
"max_severity": max_severity(result.findings).value if result.findings else "none",
"findings_total": str(sum(result.severity_counts.values())),
}
@@ -403,6 +414,7 @@ def main() -> int:
else:
print(rendered)
return_code = 1 if not verification.verify_pass else 0
+ output_values["verify_pass"] = "true" if verification.verify_pass else "false"
else:
print(f"Unsupported mode: {mode}", file=sys.stderr)
return 1
diff --git a/src/codex_plugin_scanner/checks/manifest.py b/src/codex_plugin_scanner/checks/manifest.py
index bbb51cf..be8266d 100644
--- a/src/codex_plugin_scanner/checks/manifest.py
+++ b/src/codex_plugin_scanner/checks/manifest.py
@@ -6,15 +6,22 @@
import re
from pathlib import Path
-from ..models import CheckResult, Finding, Severity
+from ..models import CheckResult, Severity
+from .manifest_support import (
+ HEX_COLOR_RE,
+ INTERFACE_ASSET_FIELDS,
+ INTERFACE_METADATA_FIELDS,
+ INTERFACE_URL_FIELDS,
+ RECOMMENDED_FIELDS,
+ interface_asset_paths,
+ is_https_url,
+ load_interface,
+ manifest_finding,
+ safe_manifest_path,
+)
SEMVER_RE = re.compile(r"^\d+\.\d+\.\d+$")
KEBAB_RE = re.compile(r"^[a-z0-9]+(-[a-z0-9]+)*$")
-RECOMMENDED_FIELDS = ("author", "homepage", "repository", "license", "keywords")
-INTERFACE_METADATA_FIELDS = ("type", "displayName", "shortDescription", "longDescription", "developerName", "category")
-INTERFACE_URL_FIELDS = ("websiteURL", "privacyPolicyURL", "termsOfServiceURL")
-INTERFACE_ASSET_FIELDS = ("composerIcon", "logo")
-HEX_COLOR_RE = re.compile(r"^#[0-9A-Fa-f]{6}$")
def load_manifest(plugin_dir: Path) -> dict | None:
@@ -27,54 +34,12 @@ def load_manifest(plugin_dir: Path) -> dict | None:
return None
-def _manifest_finding(
- rule_id: str,
- title: str,
- description: str,
- remediation: str,
- *,
- severity: Severity = Severity.MEDIUM,
-) -> Finding:
- return Finding(
- rule_id=rule_id,
- severity=severity,
- category="manifest-validation",
- title=title,
- description=description,
- remediation=remediation,
- file_path=".codex-plugin/plugin.json",
- )
-
-
-def _is_safe_relative_path(plugin_dir: Path, value: str) -> bool:
- candidate = Path(value)
- if candidate.is_absolute():
- return False
- resolved = (plugin_dir / candidate).resolve()
- try:
- resolved.relative_to(plugin_dir.resolve())
- except ValueError:
- return False
- return True
-
-
-def _load_interface(manifest: dict | None) -> dict | None:
- if manifest is None:
- return None
- interface = manifest.get("interface")
- if interface is None:
- return None
- if isinstance(interface, dict):
- return interface
- return {}
-
-
def check_plugin_json_exists(plugin_dir: Path) -> CheckResult:
exists = (plugin_dir / ".codex-plugin" / "plugin.json").exists()
findings = ()
if not exists:
findings = (
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_MISSING",
"plugin.json is missing",
"Codex plugins must declare .codex-plugin/plugin.json.",
@@ -104,7 +69,7 @@ def check_valid_json(plugin_dir: Path) -> CheckResult:
max_points=4,
message="plugin.json is not valid JSON",
findings=(
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_INVALID",
"plugin.json is invalid JSON",
"The plugin manifest could not be parsed.",
@@ -124,7 +89,7 @@ def check_required_fields(plugin_dir: Path) -> CheckResult:
max_points=5,
message="Cannot parse plugin.json",
findings=(
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_REQUIRED_FIELDS_UNCHECKED",
"Required fields could not be validated",
"Required manifest fields cannot be validated until plugin.json parses cleanly.",
@@ -143,7 +108,7 @@ def check_required_fields(plugin_dir: Path) -> CheckResult:
message="All required fields (name, version, description) are present.",
)
findings = tuple(
- _manifest_finding(
+ manifest_finding(
f"PLUGIN_JSON_MISSING_{field.upper()}",
f'Manifest field "{field}" is missing',
f'The manifest does not define a valid string for "{field}".',
@@ -187,7 +152,7 @@ def check_semver(plugin_dir: Path) -> CheckResult:
max_points=3,
message=f'Version "{version}" does not follow semver (expected X.Y.Z).',
findings=(
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_BAD_SEMVER",
"Manifest version is not semver",
f'The version "{version}" does not match the documented semver format.',
@@ -224,7 +189,7 @@ def check_kebab_case(plugin_dir: Path) -> CheckResult:
max_points=2,
message=f'Name "{name}" should be kebab-case.',
findings=(
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_BAD_NAME",
"Manifest name is not kebab-case",
f'The plugin name "{name}" does not follow the recommended kebab-case style.',
@@ -270,7 +235,7 @@ def check_recommended_metadata(plugin_dir: Path) -> CheckResult:
)
findings = tuple(
- _manifest_finding(
+ manifest_finding(
f"PLUGIN_JSON_RECOMMENDED_{field.upper()}",
f'Recommended field "{field}" is missing',
f'The manifest is missing the documented recommended field "{field}".',
@@ -291,7 +256,7 @@ def check_recommended_metadata(plugin_dir: Path) -> CheckResult:
def check_interface_metadata(plugin_dir: Path) -> CheckResult:
manifest = load_manifest(plugin_dir)
- interface = _load_interface(manifest)
+ interface = load_interface(manifest)
if interface is None:
return CheckResult(
name="Interface metadata complete if declared",
@@ -310,7 +275,7 @@ def check_interface_metadata(plugin_dir: Path) -> CheckResult:
max_points=3,
message="Manifest interface must be a JSON object.",
findings=(
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_INTERFACE_INVALID",
"Manifest interface is not a JSON object",
'The "interface" field must be an object when it is declared.',
@@ -358,7 +323,7 @@ def check_interface_metadata(plugin_dir: Path) -> CheckResult:
)
findings = tuple(
- _manifest_finding(
+ manifest_finding(
f"PLUGIN_JSON_INTERFACE_{field.upper()}",
f'Interface field "{field}" is missing or invalid',
f'The interface object is missing a valid "{field}" value.',
@@ -377,21 +342,9 @@ def check_interface_metadata(plugin_dir: Path) -> CheckResult:
)
-def _is_https_url(value: object) -> bool:
- return isinstance(value, str) and value.startswith("https://")
-
-
-def _interface_asset_paths(value: object) -> list[str]:
- if isinstance(value, str):
- return [value]
- if isinstance(value, list):
- return [item for item in value if isinstance(item, str)]
- return []
-
-
def check_interface_assets(plugin_dir: Path) -> CheckResult:
manifest = load_manifest(plugin_dir)
- interface = _load_interface(manifest)
+ interface = load_interface(manifest)
if interface is None:
return CheckResult(
name="Interface links and assets valid if declared",
@@ -410,7 +363,7 @@ def check_interface_assets(plugin_dir: Path) -> CheckResult:
max_points=3,
message="Manifest interface must be a JSON object.",
findings=(
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_INTERFACE_INVALID",
"Manifest interface is not a JSON object",
'The "interface" field must be an object when it is declared.',
@@ -421,24 +374,27 @@ def check_interface_assets(plugin_dir: Path) -> CheckResult:
issues: list[str] = []
for field in INTERFACE_URL_FIELDS:
- if not _is_https_url(interface.get(field)):
+ if not is_https_url(interface.get(field)):
issues.append(field)
for field in INTERFACE_ASSET_FIELDS:
value = interface.get(field)
if (
not isinstance(value, str)
- or not _is_safe_relative_path(plugin_dir, value)
+ or not safe_manifest_path(plugin_dir, value, require_exists=True)
or not (plugin_dir / value).exists()
):
issues.append(field)
- screenshots = _interface_asset_paths(interface.get("screenshots"))
+ screenshots = interface_asset_paths(interface.get("screenshots"))
if not screenshots:
issues.append("screenshots")
else:
for screenshot in screenshots:
- if not _is_safe_relative_path(plugin_dir, screenshot) or not (plugin_dir / screenshot).exists():
+ if (
+ not safe_manifest_path(plugin_dir, screenshot, require_exists=True)
+ or not (plugin_dir / screenshot).exists()
+ ):
issues.append("screenshots")
break
@@ -452,7 +408,7 @@ def check_interface_assets(plugin_dir: Path) -> CheckResult:
)
findings = tuple(
- _manifest_finding(
+ manifest_finding(
f"PLUGIN_JSON_INTERFACE_ASSET_{field.upper()}",
f'Interface asset or URL "{field}" is invalid',
f'The interface field "{field}" must use HTTPS or point to a safe in-repo asset.',
@@ -484,13 +440,15 @@ def check_declared_paths_safe(plugin_dir: Path) -> CheckResult:
unsafe: list[str] = []
skills_path = manifest.get("skills")
- if isinstance(skills_path, str) and not _is_safe_relative_path(plugin_dir, skills_path):
+ if isinstance(skills_path, str) and not safe_manifest_path(plugin_dir, skills_path):
unsafe.append(f"skills={skills_path}")
apps = manifest.get("apps")
+ if isinstance(apps, str):
+ apps = [apps]
if isinstance(apps, list):
for app in apps:
- if isinstance(app, str) and not _is_safe_relative_path(plugin_dir, app):
+ if isinstance(app, str) and not safe_manifest_path(plugin_dir, app):
unsafe.append(f"apps={app}")
if not unsafe:
@@ -503,7 +461,7 @@ def check_declared_paths_safe(plugin_dir: Path) -> CheckResult:
)
findings = tuple(
- _manifest_finding(
+ manifest_finding(
"PLUGIN_JSON_UNSAFE_PATH",
"Manifest declares an unsafe path",
f'The manifest path "{entry}" resolves outside the plugin directory or is absolute.',
diff --git a/src/codex_plugin_scanner/checks/manifest_support.py b/src/codex_plugin_scanner/checks/manifest_support.py
new file mode 100644
index 0000000..603006d
--- /dev/null
+++ b/src/codex_plugin_scanner/checks/manifest_support.py
@@ -0,0 +1,61 @@
+"""Manifest helper constants and functions."""
+
+from __future__ import annotations
+
+import re
+from pathlib import Path
+
+from ..models import Finding, Severity
+from ..path_support import is_safe_relative_path
+
+RECOMMENDED_FIELDS = ("author", "homepage", "repository", "license", "keywords")
+INTERFACE_METADATA_FIELDS = ("displayName", "shortDescription", "longDescription", "developerName", "category")
+INTERFACE_URL_FIELDS = ("websiteURL", "privacyPolicyURL", "termsOfServiceURL")
+INTERFACE_ASSET_FIELDS = ("composerIcon", "logo")
+HEX_COLOR_RE = re.compile(r"^#[0-9A-Fa-f]{6}$")
+
+
+def manifest_finding(
+ rule_id: str,
+ title: str,
+ description: str,
+ remediation: str,
+ *,
+ severity: Severity = Severity.MEDIUM,
+) -> Finding:
+ return Finding(
+ rule_id=rule_id,
+ severity=severity,
+ category="manifest-validation",
+ title=title,
+ description=description,
+ remediation=remediation,
+ file_path=".codex-plugin/plugin.json",
+ )
+
+
+def load_interface(manifest: dict | None) -> dict | None:
+ if manifest is None:
+ return None
+ interface = manifest.get("interface")
+ if interface is None:
+ return None
+ if isinstance(interface, dict):
+ return interface
+ return {}
+
+
+def is_https_url(value: object) -> bool:
+ return isinstance(value, str) and value.startswith("https://")
+
+
+def interface_asset_paths(value: object) -> list[str]:
+ if isinstance(value, str):
+ return [value]
+ if isinstance(value, list):
+ return [item for item in value if isinstance(item, str)]
+ return []
+
+
+def safe_manifest_path(plugin_dir: Path, value: str, *, require_exists: bool = False) -> bool:
+ return is_safe_relative_path(plugin_dir, value, require_prefix=True, require_exists=require_exists)
diff --git a/src/codex_plugin_scanner/checks/marketplace.py b/src/codex_plugin_scanner/checks/marketplace.py
index b35b498..d4c8c23 100644
--- a/src/codex_plugin_scanner/checks/marketplace.py
+++ b/src/codex_plugin_scanner/checks/marketplace.py
@@ -1,193 +1,244 @@
-"""Marketplace validation checks (15 points)."""
+"""Marketplace validation checks."""
from __future__ import annotations
import json
from pathlib import Path
-from urllib.parse import urlparse
+from ..marketplace_support import (
+ extract_marketplace_source,
+ find_marketplace_file,
+ load_marketplace_context,
+ marketplace_label,
+ source_path_is_safe,
+ source_reference_is_safe,
+ validate_marketplace_path_requirements,
+)
from ..models import CheckResult, Finding, Severity
-def _is_safe_source(plugin_dir: Path, source: str) -> bool:
- if source.startswith(("https://", "git+", "github://")):
- return True
- if urlparse(source).scheme:
- return False
- candidate = Path(source)
- if candidate.is_absolute():
- return False
- resolved = (plugin_dir / candidate).resolve()
def _marketplace_finding(rule_id: str, title: str, description: str, remediation: str, *, file_path: str) -> Finding:
    """Build a MEDIUM-severity marketplace Finding for the given manifest file."""
    # All marketplace rules report at the same severity and category; the
    # manifest location differs between the preferred and legacy layouts.
    return Finding(
        rule_id=rule_id,
        title=title,
        description=description,
        remediation=remediation,
        file_path=file_path,
        severity=Severity.MEDIUM,
        category="marketplace",
    )
+
+
def _not_applicable_result(name: str, message: str) -> CheckResult:
    """Build a passing, zero-weight CheckResult for a check that does not apply."""
    return CheckResult(
        name=name,
        message=message,
        passed=True,
        points=0,
        max_points=0,
        applicable=False,
    )
+
+
+def _load_context(plugin_dir: Path) -> tuple[object | None, str | None]:
+ marketplace_file = find_marketplace_file(plugin_dir)
+ if marketplace_file is None:
+ return None, None
+ path, _legacy = marketplace_file
try:
- resolved.relative_to(plugin_dir.resolve())
+ return load_marketplace_context(plugin_dir), str(path.relative_to(plugin_dir))
+ except json.JSONDecodeError:
+ return False, str(path.relative_to(plugin_dir))
except ValueError:
- return False
- return True
+ return False, str(path.relative_to(plugin_dir))
def check_marketplace_json(plugin_dir: Path) -> CheckResult:
- mp = plugin_dir / "marketplace.json"
- if not mp.exists():
- return CheckResult(
- name="marketplace.json valid",
- passed=True,
- points=0,
- max_points=0,
- message="No marketplace.json found, check not applicable",
- applicable=False,
- )
- try:
- data = json.loads(mp.read_text(encoding="utf-8"))
- except json.JSONDecodeError:
+ context, relative_path = _load_context(plugin_dir)
+ if context is None:
+ return _not_applicable_result("marketplace.json valid", "No marketplace manifest found, check not applicable")
+ if context is False:
+ file_path = relative_path or "marketplace.json"
return CheckResult(
name="marketplace.json valid",
passed=False,
points=0,
max_points=5,
- message="marketplace.json is not valid JSON",
+ message=f"{file_path} is not valid JSON",
findings=(
- Finding(
- rule_id="MARKETPLACE_JSON_INVALID",
- severity=Severity.MEDIUM,
- category="marketplace",
- title="marketplace.json is invalid JSON",
- description="The marketplace manifest could not be parsed.",
- remediation="Fix the JSON syntax in marketplace.json.",
- file_path="marketplace.json",
+ _marketplace_finding(
+ "MARKETPLACE_JSON_INVALID",
+ "Marketplace manifest is invalid JSON",
+ "The marketplace manifest could not be parsed.",
+ "Fix the JSON syntax in the marketplace manifest.",
+ file_path=file_path,
),
),
)
- if not data.get("name") or not isinstance(data.get("name"), str):
+ file_path = marketplace_label(context)
+ payload = context.payload
+ if not payload.get("name") or not isinstance(payload.get("name"), str):
return CheckResult(
name="marketplace.json valid",
passed=False,
points=0,
max_points=5,
- message='marketplace.json missing "name" field',
+ message=f'{file_path} missing "name" field',
findings=(
- Finding(
- rule_id="MARKETPLACE_NAME_MISSING",
- severity=Severity.LOW,
- category="marketplace",
- title='marketplace.json is missing "name"',
- description='The marketplace manifest must define a "name" field.',
- remediation='Add a string "name" field to marketplace.json.',
- file_path="marketplace.json",
+ _marketplace_finding(
+ "MARKETPLACE_NAME_MISSING",
+ "Marketplace name is missing",
+ 'The marketplace manifest must define a "name" field.',
+ 'Add a string "name" field to the marketplace manifest.',
+ file_path=file_path,
),
),
)
- if not isinstance(data.get("plugins"), list):
+
+ plugins = payload.get("plugins")
+ if not isinstance(plugins, list):
return CheckResult(
name="marketplace.json valid",
passed=False,
points=0,
max_points=5,
- message='marketplace.json missing "plugins" array',
+ message=f'{file_path} missing "plugins" array',
findings=(
- Finding(
- rule_id="MARKETPLACE_PLUGINS_MISSING",
- severity=Severity.MEDIUM,
- category="marketplace",
- title='marketplace.json is missing the "plugins" array',
- description='The marketplace manifest must declare its plugin list in a "plugins" array.',
- remediation='Add a "plugins" array to marketplace.json.',
- file_path="marketplace.json",
+ _marketplace_finding(
+ "MARKETPLACE_PLUGINS_MISSING",
+ "Marketplace plugins array is missing",
+ 'The marketplace manifest must declare its plugin list in a "plugins" array.',
+ 'Add a "plugins" array to the marketplace manifest.',
+ file_path=file_path,
),
),
)
- for i, plugin in enumerate(data["plugins"]):
- if not plugin.get("source") or not isinstance(plugin.get("source"), str):
+
+ for index, plugin in enumerate(plugins):
+ if not isinstance(plugin, dict):
return CheckResult(
name="marketplace.json valid",
passed=False,
points=0,
max_points=5,
- message=f'marketplace.json plugin[{i}] missing "source" field',
+ message=f"{file_path} plugin[{index}] must be an object",
findings=(
- Finding(
- rule_id="MARKETPLACE_SOURCE_MISSING",
- severity=Severity.MEDIUM,
- category="marketplace",
- title="Marketplace plugin source is missing",
- description=f'plugin[{i}] in marketplace.json is missing a "source" field.',
- remediation='Add a "source" string for each marketplace entry.',
- file_path="marketplace.json",
+ _marketplace_finding(
+ "MARKETPLACE_SOURCE_MISSING",
+ "Marketplace plugin entry is invalid",
+ f"plugin[{index}] in the marketplace manifest must be an object.",
+ "Replace the plugin entry with an object containing source, policy, and category fields.",
+ file_path=file_path,
),
),
)
+ if context.legacy:
+ source_ref, _source_path = extract_marketplace_source(plugin)
+ if source_ref is None:
+ return CheckResult(
+ name="marketplace.json valid",
+ passed=False,
+ points=0,
+ max_points=5,
+ message=f'{file_path} plugin[{index}] missing "source" field',
+ findings=(
+ _marketplace_finding(
+ "MARKETPLACE_SOURCE_MISSING",
+ "Marketplace plugin source is missing",
+ f'plugin[{index}] in the marketplace manifest is missing a "source" field.',
+ 'Add a "source" field for each marketplace entry.',
+ file_path=file_path,
+ ),
+ ),
+ )
+ else:
+ issue = validate_marketplace_path_requirements(context, plugin)
+ if issue is not None:
+ return CheckResult(
+ name="marketplace.json valid",
+ passed=False,
+ points=0,
+ max_points=5,
+ message=f"{file_path} plugin[{index}] {issue}",
+ findings=(
+ _marketplace_finding(
+ "MARKETPLACE_SOURCE_MISSING",
+ "Marketplace source object is incomplete",
+ f"plugin[{index}] in the marketplace manifest has an invalid source object: {issue}.",
+ 'Add a source object with both "source" and "./"-prefixed "path" fields.',
+ file_path=file_path,
+ ),
+ ),
+ )
if not plugin.get("policy") or not isinstance(plugin.get("policy"), dict):
return CheckResult(
name="marketplace.json valid",
passed=False,
points=0,
max_points=5,
- message=f'marketplace.json plugin[{i}] missing "policy" field',
+ message=f'{file_path} plugin[{index}] missing "policy" field',
findings=(
- Finding(
- rule_id="MARKETPLACE_POLICY_MISSING",
- severity=Severity.MEDIUM,
- category="marketplace",
- title="Marketplace policy is missing",
- description=f'plugin[{i}] in marketplace.json is missing a "policy" object.',
- remediation='Add a "policy" object for each marketplace entry.',
- file_path="marketplace.json",
+ _marketplace_finding(
+ "MARKETPLACE_POLICY_MISSING",
+ "Marketplace policy is missing",
+ f'plugin[{index}] in the marketplace manifest is missing a "policy" object.',
+ 'Add a "policy" object for each marketplace entry.',
+ file_path=file_path,
),
),
)
+
+ compatibility = " in compatibility mode" if context.legacy else ""
return CheckResult(
- name="marketplace.json valid", passed=True, points=5, max_points=5, message="marketplace.json is valid"
+ name="marketplace.json valid",
+ passed=True,
+ points=5,
+ max_points=5,
+ message=f"{file_path} is valid{compatibility}",
)
def check_policy_fields(plugin_dir: Path) -> CheckResult:
- mp = plugin_dir / "marketplace.json"
- if not mp.exists():
- return CheckResult(
- name="Policy fields present",
- passed=True,
- points=0,
- max_points=0,
- message="No marketplace.json found, check not applicable",
- applicable=False,
- )
- try:
- data = json.loads(mp.read_text(encoding="utf-8"))
- except json.JSONDecodeError:
- return CheckResult(
- name="Policy fields present",
- passed=True,
- points=5,
- max_points=5,
- message="Cannot parse marketplace.json, skipping check",
- )
-
- plugins = data.get("plugins", [])
- if not plugins:
+ context, relative_path = _load_context(plugin_dir)
+ if context is None:
+ return _not_applicable_result("Policy fields present", "No marketplace manifest found, check not applicable")
+ if context is False:
return CheckResult(
name="Policy fields present",
passed=True,
points=5,
max_points=5,
- message="No plugins in marketplace.json, nothing to check",
+ message=f"Cannot parse {relative_path or 'marketplace.json'}, skipping check",
)
issues: list[str] = []
- for i, plugin in enumerate(plugins):
+ for index, plugin in enumerate(context.payload.get("plugins", [])):
+ if not isinstance(plugin, dict):
+ issues.append(f"plugin[{index}] must be an object")
+ continue
policy = plugin.get("policy") or {}
- if not policy.get("installation"):
- issues.append(f"plugin[{i}]: missing policy.installation")
- if not policy.get("authentication"):
- issues.append(f"plugin[{i}]: missing policy.authentication")
+ if not isinstance(policy, dict):
+ issues.append(f"plugin[{index}] missing policy object")
+ continue
+ if not isinstance(policy.get("installation"), str) or not policy.get("installation"):
+ issues.append(f"plugin[{index}] missing policy.installation")
+ if not isinstance(policy.get("authentication"), str) or not policy.get("authentication"):
+ issues.append(f"plugin[{index}] missing policy.authentication")
+ if not isinstance(plugin.get("category"), str) or not plugin.get("category"):
+ issues.append(f"plugin[{index}] missing category")
if not issues:
+ compatibility = " in compatibility mode" if context.legacy else ""
return CheckResult(
name="Policy fields present",
passed=True,
points=5,
max_points=5,
- message="All plugins have required policy fields",
+ message=f"All marketplace policy fields are present{compatibility}",
)
+
+ file_path = marketplace_label(context)
return CheckResult(
name="Policy fields present",
passed=False,
@@ -195,14 +246,12 @@ def check_policy_fields(plugin_dir: Path) -> CheckResult:
max_points=5,
message=f"Policy issues: {', '.join(issues[:3])}",
findings=tuple(
- Finding(
- rule_id="MARKETPLACE_POLICY_FIELDS_MISSING",
- severity=Severity.MEDIUM,
- category="marketplace",
- title="Marketplace policy fields are incomplete",
- description=issue,
- remediation="Add both policy.installation and policy.authentication for each marketplace entry.",
- file_path="marketplace.json",
+ _marketplace_finding(
+ "MARKETPLACE_POLICY_FIELDS_MISSING",
+ "Marketplace policy fields are incomplete",
+ issue,
+ "Add policy.installation, policy.authentication, and category for each marketplace entry.",
+ file_path=file_path,
)
for issue in issues
),
@@ -210,44 +259,48 @@ def check_policy_fields(plugin_dir: Path) -> CheckResult:
def check_sources_safe(plugin_dir: Path) -> CheckResult:
- mp = plugin_dir / "marketplace.json"
- if not mp.exists():
- return CheckResult(
- name="Marketplace sources are safe",
- passed=True,
- points=0,
- max_points=0,
- message="No marketplace.json found, check not applicable",
- applicable=False,
+ context, _relative_path = _load_context(plugin_dir)
+ if context is None:
+ return _not_applicable_result(
+ "Marketplace sources are safe",
+ "No marketplace manifest found, check not applicable",
)
-
- try:
- data = json.loads(mp.read_text(encoding="utf-8"))
- except json.JSONDecodeError:
- return CheckResult(
- name="Marketplace sources are safe",
- passed=True,
- points=0,
- max_points=0,
- message="Cannot parse marketplace.json, skipping source safety checks",
- applicable=False,
+ if context is False:
+ return _not_applicable_result(
+ "Marketplace sources are safe",
+ "Cannot parse marketplace manifest, skipping source safety checks",
)
unsafe: list[str] = []
- for index, plugin in enumerate(data.get("plugins", [])):
- source = plugin.get("source")
- if isinstance(source, str) and not _is_safe_source(plugin_dir, source):
- unsafe.append(f"plugin[{index}]={source}")
+ for index, plugin in enumerate(context.payload.get("plugins", [])):
+ if not isinstance(plugin, dict):
+ unsafe.append(f"plugin[{index}]=invalid-entry")
+ continue
+ source_ref, source_path = extract_marketplace_source(plugin)
+ if context.legacy:
+ if source_ref is not None and not source_reference_is_safe(context, source_ref):
+ unsafe.append(f"plugin[{index}]={source_ref}")
+ continue
+ if source_ref is None or not source_reference_is_safe(context, source_ref):
+ unsafe.append(f"plugin[{index}].source.source={source_ref or 'missing'}")
+ if source_path is None:
+ unsafe.append(f"plugin[{index}].source.path=missing")
+ elif not source_path.startswith("./"):
+ unsafe.append(f'plugin[{index}].source.path must start with "./": {source_path}')
+ elif not source_path_is_safe(context, source_path):
+ unsafe.append(f"plugin[{index}].source.path escapes root: {source_path}")
if not unsafe:
+ compatibility = " in compatibility mode" if context.legacy else ""
return CheckResult(
name="Marketplace sources are safe",
passed=True,
points=5,
max_points=5,
- message="Marketplace sources are relative-safe or remote URLs.",
+ message=f"Marketplace sources are safe{compatibility}.",
)
+ file_path = marketplace_label(context)
return CheckResult(
name="Marketplace sources are safe",
passed=False,
@@ -255,14 +308,12 @@ def check_sources_safe(plugin_dir: Path) -> CheckResult:
max_points=5,
message=f"Unsafe marketplace sources detected: {', '.join(unsafe)}",
findings=tuple(
- Finding(
- rule_id="MARKETPLACE_UNSAFE_SOURCE",
- severity=Severity.MEDIUM,
- category="marketplace",
- title="Marketplace source escapes the plugin directory",
- description=f'The marketplace source "{entry}" is absolute or resolves outside the plugin directory.',
- remediation="Use a relative in-repo path or an explicit remote URL for marketplace sources.",
- file_path="marketplace.json",
+ _marketplace_finding(
+ "MARKETPLACE_UNSAFE_SOURCE",
+ "Marketplace source is unsafe",
+ entry,
+ 'Use remote HTTPS sources or "./"-prefixed in-repo paths that stay within the marketplace root.',
+ file_path=file_path,
)
for entry in unsafe
),
diff --git a/src/codex_plugin_scanner/lint_fixes.py b/src/codex_plugin_scanner/lint_fixes.py
index af1322d..3573994 100644
--- a/src/codex_plugin_scanner/lint_fixes.py
+++ b/src/codex_plugin_scanner/lint_fixes.py
@@ -6,6 +6,9 @@
from pathlib import Path
from typing import Any
+from .marketplace_support import LEGACY_MARKETPLACE_PATH, PREFERRED_MARKETPLACE_PATH
+from .path_support import normalize_codex_relative_path
+
_TEMPLATE_FILES: dict[str, str] = {
".codexignore": "# Local Codex scanner ignore list\n",
"README.md": "# Plugin\n\nDescribe your Codex plugin here.\n",
@@ -13,18 +16,60 @@
"LICENSE": "MIT License\n",
}
+_JSON_FILES = (Path(".codex-plugin/plugin.json"), PREFERRED_MARKETPLACE_PATH, LEGACY_MARKETPLACE_PATH)
+
+
+def _normalize_manifest_json(value: dict[str, Any]) -> dict[str, Any]:
+ normalized = dict(value)
+ skills = normalized.get("skills")
+ if isinstance(skills, str):
+ normalized["skills"] = normalize_codex_relative_path(skills)
-_JSON_FILES = ("plugin.json", "marketplace.json")
+ apps = normalized.get("apps")
+ if isinstance(apps, str):
+ normalized["apps"] = normalize_codex_relative_path(apps)
+ elif isinstance(apps, list):
+ normalized["apps"] = [normalize_codex_relative_path(item) if isinstance(item, str) else item for item in apps]
+ interface = normalized.get("interface")
+ if isinstance(interface, dict):
+ interface = dict(interface)
+ for key in ("composerIcon", "logo"):
+ value = interface.get(key)
+ if isinstance(value, str):
+ interface[key] = normalize_codex_relative_path(value)
+ screenshots = interface.get("screenshots")
+ if isinstance(screenshots, list):
+ interface["screenshots"] = [
+ normalize_codex_relative_path(item) if isinstance(item, str) else item for item in screenshots
+ ]
+ normalized["interface"] = interface
+ return normalized
-def _normalize_json_paths(value: Any) -> Any:
- if isinstance(value, dict):
- return {k: _normalize_json_paths(v) for k, v in value.items()}
- if isinstance(value, list):
- return [_normalize_json_paths(item) for item in value]
- if isinstance(value, str) and value.startswith("./"):
- return value[2:]
- return value
+
+def _normalize_marketplace_json(value: dict[str, Any]) -> dict[str, Any]:
+ normalized = dict(value)
+ plugins = normalized.get("plugins")
+ if not isinstance(plugins, list):
+ return normalized
+ normalized_plugins: list[Any] = []
+ for plugin in plugins:
+ if not isinstance(plugin, dict):
+ normalized_plugins.append(plugin)
+ continue
+ next_plugin = dict(plugin)
+ source = next_plugin.get("source")
+ if isinstance(source, dict):
+ source = dict(source)
+ path_value = source.get("path")
+ if isinstance(path_value, str):
+ source["path"] = normalize_codex_relative_path(path_value)
+ next_plugin["source"] = source
+ elif isinstance(source, str):
+ next_plugin["source"] = normalize_codex_relative_path(source)
+ normalized_plugins.append(next_plugin)
+ normalized["plugins"] = normalized_plugins
+ return normalized
def apply_safe_autofixes(plugin_dir: Path) -> list[str]:
@@ -46,7 +91,12 @@ def apply_safe_autofixes(plugin_dir: Path) -> list[str]:
except (json.JSONDecodeError, OSError):
continue
- normalized = _normalize_json_paths(parsed)
+ if relative_path == Path(".codex-plugin/plugin.json") and isinstance(parsed, dict):
+ normalized = _normalize_manifest_json(parsed)
+ elif relative_path in {PREFERRED_MARKETPLACE_PATH, LEGACY_MARKETPLACE_PATH} and isinstance(parsed, dict):
+ normalized = _normalize_marketplace_json(parsed)
+ else:
+ normalized = parsed
rendered = json.dumps(normalized, indent=2, sort_keys=True) + "\n"
if rendered != original:
target.write_text(rendered, encoding="utf-8")
diff --git a/src/codex_plugin_scanner/marketplace_support.py b/src/codex_plugin_scanner/marketplace_support.py
new file mode 100644
index 0000000..fd56d71
--- /dev/null
+++ b/src/codex_plugin_scanner/marketplace_support.py
@@ -0,0 +1,95 @@
+"""Marketplace discovery and schema helpers."""
+
+from __future__ import annotations
+
+import json
+from dataclasses import dataclass
+from pathlib import Path
+from urllib.parse import urlparse
+
+from .path_support import is_dot_relative_path, is_remote_reference, is_safe_relative_path
+
+PREFERRED_MARKETPLACE_PATH = Path(".agents/plugins/marketplace.json")
+LEGACY_MARKETPLACE_PATH = Path("marketplace.json")
+
+
@dataclass(frozen=True, slots=True)
class MarketplaceContext:
    """Resolved location and parsed payload of a repo's marketplace manifest."""

    # Absolute (resolved) path of the discovered marketplace manifest file.
    file_path: Path
    # Resolved repository root the discovery started from.
    repo_root: Path
    # Directory containing the manifest; "./"-prefixed source paths resolve against it.
    marketplace_root: Path
    # Parsed JSON payload of the manifest (loader guarantees a dict).
    payload: dict
    # True when the manifest was found at the legacy top-level marketplace.json.
    legacy: bool
+
+
def find_marketplace_file(repo_root: Path) -> tuple[Path, bool] | None:
    """Locate the marketplace manifest, preferring the spec path over legacy.

    Returns (path, is_legacy), or None when neither location exists.
    """
    for relative, legacy in ((PREFERRED_MARKETPLACE_PATH, False), (LEGACY_MARKETPLACE_PATH, True)):
        candidate = repo_root / relative
        if candidate.exists():
            return candidate, legacy
    return None
+
+
def load_marketplace_context(repo_root: Path) -> MarketplaceContext | None:
    """Load and parse the marketplace manifest for *repo_root*.

    Returns None when no manifest exists. Raises json.JSONDecodeError for
    malformed JSON and ValueError when the top-level value is not an object.
    """
    located = find_marketplace_file(repo_root)
    if located is None:
        return None
    file_path, legacy = located
    payload = json.loads(file_path.read_text(encoding="utf-8"))
    if not isinstance(payload, dict):
        raise ValueError("marketplace payload must be an object")
    resolved = file_path.resolve()
    return MarketplaceContext(
        file_path=resolved,
        repo_root=repo_root.resolve(),
        marketplace_root=resolved.parent,
        payload=payload,
        legacy=legacy,
    )
+
+
def marketplace_label(context: MarketplaceContext) -> str:
    """Human-readable manifest path, relative to the repository root."""
    relative = context.file_path.relative_to(context.repo_root)
    return str(relative)
+
+
+def extract_marketplace_source(plugin: dict) -> tuple[str | None, str | None]:
+ source = plugin.get("source")
+ if isinstance(source, str):
+ return source, None
+ if isinstance(source, dict):
+ source_ref = source.get("source")
+ source_path = source.get("path")
+ normalized_source_ref = source_ref if isinstance(source_ref, str) and source_ref else None
+ normalized_source_path = source_path if isinstance(source_path, str) and source_path else None
+ return normalized_source_ref, normalized_source_path
+ return None, None
+
+
def source_path_is_safe(context: MarketplaceContext, source_path: str) -> bool:
    """True if *source_path* is "./"-prefixed and stays inside the marketplace root."""
    return is_safe_relative_path(
        context.marketplace_root,
        source_path,
        require_prefix=True,
    )
+
+
def source_reference_is_safe(context: MarketplaceContext, source_ref: str) -> bool:
    """True for remote references or repo-contained relative paths."""
    if is_remote_reference(source_ref):
        return True
    # A non-remote value carrying a URL scheme (file:, ftp:, ...) is neither a
    # known remote form nor a plain local path, so reject it outright.
    if urlparse(source_ref).scheme:
        return False
    return is_safe_relative_path(context.repo_root, source_ref)
+
+
def validate_marketplace_path_requirements(context: MarketplaceContext, plugin: dict) -> str | None:
    """Validate the spec-form source object of one marketplace plugin entry.

    Returns a short human-readable issue description for the first failed
    requirement, or None when the entry is valid. Checks, in order: both
    source fields present, "./"-prefixed path, path contained in the
    marketplace root, and a safe source reference.
    """
    source_ref, source_path = extract_marketplace_source(plugin)
    if source_ref is None:
        return 'missing "source.source"'
    if source_path is None:
        return 'missing "source.path"'
    if not is_dot_relative_path(source_path):
        return f'"source.path" must start with "./": {source_path}'
    if not source_path_is_safe(context, source_path):
        return f'"source.path" escapes the marketplace root: {source_path}'
    if not source_reference_is_safe(context, source_ref):
        return f'"source.source" is unsafe: {source_ref}'
    return None
diff --git a/src/codex_plugin_scanner/path_support.py b/src/codex_plugin_scanner/path_support.py
new file mode 100644
index 0000000..f2722f1
--- /dev/null
+++ b/src/codex_plugin_scanner/path_support.py
@@ -0,0 +1,46 @@
+"""Shared path validation and normalization helpers."""
+
+from __future__ import annotations
+
+from pathlib import Path
+from urllib.parse import urlparse
+
+REMOTE_PREFIXES = ("https://", "git+", "github://")
+
+
def is_remote_reference(value: str) -> bool:
    """True when *value* names a remote source (https, git+, or github scheme)."""
    return value.startswith(("https://", "git+", "github://"))
+
+
def is_dot_relative_path(value: str) -> bool:
    """True when *value* is explicitly relative, i.e. spelled with a "./" prefix."""
    return value[:2] == "./"
+
+
def is_safe_relative_path(
    root: Path,
    value: str,
    *,
    require_prefix: bool = False,
    require_exists: bool = False,
) -> bool:
    """Check that *value* is a relative path that stays inside *root*.

    With require_prefix the value must be spelled with a leading "./"; with
    require_exists the resolved target must also exist on disk.
    """
    if require_prefix and not value.startswith("./"):
        return False
    candidate = Path(value)
    if candidate.is_absolute():
        return False
    # Resolve symlinks and ".." segments before the containment test so a
    # crafted value cannot escape the root.
    target = (root / candidate).resolve()
    try:
        target.relative_to(root.resolve())
    except ValueError:
        return False
    if require_exists and not target.exists():
        return False
    return True
+
+
def normalize_codex_relative_path(value: str) -> str:
    """Prefix bare relative paths with "./", leaving every other value alone.

    Empty strings, remote references, URL-schemed values, absolute paths, and
    already "./"- or "../"-prefixed paths are returned unchanged.
    """
    untouched = (
        not value
        or value.startswith(("https://", "git+", "github://"))
        or bool(urlparse(value).scheme)
        or Path(value).is_absolute()
        or value.startswith(("./", "../"))
    )
    return value if untouched else f"./{value}"
diff --git a/src/codex_plugin_scanner/verification.py b/src/codex_plugin_scanner/verification.py
index 9897a92..65e3fe9 100644
--- a/src/codex_plugin_scanner/verification.py
+++ b/src/codex_plugin_scanner/verification.py
@@ -13,11 +13,17 @@
from pathlib import Path
from .checks.manifest import load_manifest
-from .checks.marketplace import _is_safe_source
+from .checks.manifest_support import safe_manifest_path
+from .marketplace_support import (
+ extract_marketplace_source,
+ load_marketplace_context,
+ marketplace_label,
+ validate_marketplace_path_requirements,
+)
+from .path_support import is_safe_relative_path
MARKDOWN_LINK_RE = re.compile(r"\[[^]]+\]\(([^)]+)\)")
INTERFACE_REQUIRED_FIELDS = (
- "type",
"displayName",
"shortDescription",
"developerName",
@@ -61,15 +67,7 @@ def _read_json(path: Path) -> dict | list | None:
def _is_safe_relative_asset(plugin_dir: Path, value: str) -> bool:
    """True if *value* is an existing "./"-prefixed file path inside *plugin_dir*."""
    return is_safe_relative_path(
        plugin_dir,
        value,
        require_prefix=True,
        require_exists=True,
    )
def _check_manifest(plugin_dir: Path) -> list[VerificationCase]:
@@ -211,55 +209,63 @@ def _check_manifest(plugin_dir: Path) -> list[VerificationCase]:
def _check_marketplace(plugin_dir: Path) -> list[VerificationCase]:
- marketplace = plugin_dir / "marketplace.json"
- if not marketplace.exists():
+ try:
+ context = load_marketplace_context(plugin_dir)
+ except json.JSONDecodeError:
return [
VerificationCase(
"marketplace",
- "marketplace optional",
- True,
- "marketplace.json not present",
- "optional",
+ "marketplace manifest parses",
+ False,
+ "Invalid marketplace manifest",
+ "invalid-json",
)
]
-
- payload = _read_json(marketplace)
- if payload is None:
+ except ValueError:
return [
VerificationCase(
"marketplace",
- "marketplace.json parses",
+ "marketplace manifest shape",
False,
- "Invalid marketplace.json",
- "invalid-json",
+ "Marketplace manifest must be a JSON object",
+ "schema",
)
]
- if not isinstance(payload, dict):
+
+ if context is None:
return [
VerificationCase(
"marketplace",
- "marketplace.json shape",
- False,
- "marketplace.json must be an object",
- "schema",
+ "marketplace optional",
+ True,
+ "No marketplace manifest present",
+ "optional",
)
]
+ file_label = marketplace_label(context)
+ compatibility_message = " (legacy compatibility mode)" if context.legacy else ""
cases = [
- VerificationCase("marketplace", "marketplace.json parses", True, "marketplace.json is valid JSON"),
+ VerificationCase(
+ "marketplace",
+ "marketplace manifest parses",
+ True,
+ f"{file_label} is valid JSON{compatibility_message}",
+ "compatibility" if context.legacy else "pass",
+ )
]
- has_name = isinstance(payload.get("name"), str) and bool(payload.get("name"))
+ has_name = isinstance(context.payload.get("name"), str) and bool(context.payload.get("name"))
cases.append(
VerificationCase(
"marketplace",
"marketplace name",
has_name,
- "Marketplace name is declared" if has_name else 'marketplace.json must declare a string "name"',
- "schema" if not has_name else "pass",
+ "Marketplace name is declared" if has_name else f'{file_label} must declare a string "name"',
+ "schema" if not has_name else ("compatibility" if context.legacy else "pass"),
)
)
- plugins = payload.get("plugins")
+ plugins = context.payload.get("plugins")
if not isinstance(plugins, list) or not plugins:
cases.append(
VerificationCase(
@@ -279,11 +285,14 @@ def _check_marketplace(plugin_dir: Path) -> list[VerificationCase]:
if not isinstance(plugin, dict):
discovery_issues.append(f"plugin[{index}] must be an object")
continue
- source = plugin.get("source")
- if not isinstance(source, str) or not source:
- discovery_issues.append(f"plugin[{index}] missing source")
- elif not _is_safe_source(plugin_dir, source):
- discovery_issues.append(f"plugin[{index}] unsafe source {source}")
+ if context.legacy:
+ source_ref, _source_path = extract_marketplace_source(plugin)
+ if not source_ref:
+ discovery_issues.append(f"plugin[{index}] missing source")
+ else:
+ issue = validate_marketplace_path_requirements(context, plugin)
+ if issue is not None:
+ discovery_issues.append(f"plugin[{index}] {issue}")
policy = plugin.get("policy")
if not isinstance(policy, dict):
policy_issues.append(f"plugin[{index}] missing policy object")
@@ -292,6 +301,8 @@ def _check_marketplace(plugin_dir: Path) -> list[VerificationCase]:
policy_issues.append(f"plugin[{index}] missing policy.installation")
if not isinstance(policy.get("authentication"), str) or not policy.get("authentication"):
policy_issues.append(f"plugin[{index}] missing policy.authentication")
+ if not isinstance(plugin.get("category"), str) or not plugin.get("category"):
+ policy_issues.append(f"plugin[{index}] missing category")
cases.append(
VerificationCase(
@@ -299,7 +310,7 @@ def _check_marketplace(plugin_dir: Path) -> list[VerificationCase]:
"discovery simulation",
not discovery_issues,
"Marketplace entries are discoverable" if not discovery_issues else "; ".join(discovery_issues),
- "schema" if discovery_issues else "pass",
+ "schema" if discovery_issues else ("compatibility" if context.legacy else "pass"),
)
)
cases.append(
@@ -308,7 +319,7 @@ def _check_marketplace(plugin_dir: Path) -> list[VerificationCase]:
"policy metadata",
not policy_issues,
"Marketplace policy metadata is complete" if not policy_issues else "; ".join(policy_issues),
- "schema" if policy_issues else "pass",
+ "schema" if policy_issues else ("compatibility" if context.legacy else "pass"),
)
)
return cases
@@ -427,18 +438,74 @@ def _check_mcp_stdio(servers: dict) -> tuple[list[VerificationCase], list[Runtim
)
)
continue
+ transcript: list[str] = []
try:
- if proc.stdin:
- proc.stdin.write('{"jsonrpc":"2.0","id":1,"method":"initialize","params":{}}\n')
- proc.stdin.flush()
- stdout, stderr = proc.communicate(timeout=2)
+ if proc.stdin is None or proc.stdout is None or proc.stderr is None:
+ raise RuntimeError("stdio server did not expose all pipes")
+ initialize_request = {
+ "jsonrpc": "2.0",
+ "id": 1,
+ "method": "initialize",
+ "params": {
+ "protocolVersion": "2024-11-05",
+ "capabilities": {"tools": {}, "resources": {}, "prompts": {}},
+ "clientInfo": {"name": "codex-plugin-scanner", "version": "1.4.0"},
+ },
+ }
+ proc.stdin.write(json.dumps(initialize_request) + "\n")
+ proc.stdin.flush()
+ transcript.append("> " + json.dumps(initialize_request))
+
+ initialize_response_line = proc.stdout.readline()
+ if not initialize_response_line:
+ raise RuntimeError("server did not respond to initialize")
+ transcript.append("< " + initialize_response_line.strip())
+ initialize_response = json.loads(initialize_response_line)
+ result_payload = initialize_response.get("result")
+ if not isinstance(result_payload, dict):
+ raise RuntimeError("server returned an invalid initialize result")
+
+ initialized_notification = {"jsonrpc": "2.0", "method": "notifications/initialized", "params": {}}
+ proc.stdin.write(json.dumps(initialized_notification) + "\n")
+ proc.stdin.flush()
+ transcript.append("> " + json.dumps(initialized_notification))
+
+ capabilities = result_payload.get("capabilities")
+ probe_methods = (
+ ("tools/list", "tools"),
+ ("resources/list", "resources"),
+ ("prompts/list", "prompts"),
+ )
+ request_id = 2
+ if isinstance(capabilities, dict):
+ for method, key in probe_methods:
+ if key not in capabilities:
+ continue
+ request = {"jsonrpc": "2.0", "id": request_id, "method": method, "params": {}}
+ request_id += 1
+ proc.stdin.write(json.dumps(request) + "\n")
+ proc.stdin.flush()
+ transcript.append("> " + json.dumps(request))
+ response_line = proc.stdout.readline()
+ if not response_line:
+ raise RuntimeError(f"server did not respond to {method}")
+ transcript.append("< " + response_line.strip())
+ json.loads(response_line)
+
+ proc.stdin.close()
+ proc.wait(timeout=2)
+ stdout = proc.stdout.read()
+ stderr = proc.stderr.read()
+ transcript_output = "\n".join(transcript)
+ if stdout:
+ transcript_output = f"{transcript_output}\n{stdout}".strip()
traces.append(
RuntimeTrace(
component="mcp",
- name=f"stdio handshake:{name}",
+ name=f"stdio lifecycle:{name}",
command=tuple(command),
returncode=proc.returncode,
- stdout=stdout,
+ stdout=transcript_output,
stderr=stderr,
)
)
@@ -446,24 +513,24 @@ def _check_mcp_stdio(servers: dict) -> tuple[list[VerificationCase], list[Runtim
cases.append(
VerificationCase(
"mcp",
- f"stdio run:{name}",
+ f"stdio initialize:{name}",
False,
stderr or "non-zero exit",
"spawn-failure",
)
)
- elif "error" in stdout.lower():
+ elif "error" in transcript_output.lower():
cases.append(
VerificationCase(
"mcp",
- f"stdio handshake:{name}",
+ f"stdio initialize:{name}",
False,
- stdout.strip(),
+ transcript_output.strip(),
"protocol-failure",
)
)
else:
- cases.append(VerificationCase("mcp", f"stdio handshake:{name}", True, "initialize attempted"))
+ cases.append(VerificationCase("mcp", f"stdio initialize:{name}", True, "initialize completed"))
except subprocess.TimeoutExpired as exc:
proc.kill()
stdout = exc.stdout if isinstance(exc.stdout, str) else ""
@@ -481,15 +548,25 @@ def _check_mcp_stdio(servers: dict) -> tuple[list[VerificationCase], list[Runtim
)
cases.append(VerificationCase("mcp", f"stdio timeout:{name}", False, "process timed out", "timeout"))
except Exception as exc:
- proc.kill()
+ poll = getattr(proc, "poll", None)
+ wait = getattr(proc, "wait", None)
+ if not callable(poll) or poll() is None:
+ proc.kill()
+ if callable(wait):
+ wait(timeout=1)
+ stdout = proc.stdout.read() if proc.stdout is not None else ""
+ stderr = proc.stderr.read() if proc.stderr is not None else ""
+ transcript_output = "\n".join(transcript)
+ if stdout:
+ transcript_output = f"{transcript_output}\n{stdout}".strip()
traces.append(
RuntimeTrace(
component="mcp",
- name=f"stdio run:{name}",
+ name=f"stdio lifecycle:{name}",
command=tuple(command),
returncode=proc.returncode,
- stdout="",
- stderr=str(exc),
+ stdout=transcript_output,
+ stderr=stderr or str(exc),
)
)
cases.append(VerificationCase("mcp", f"stdio run:{name}", False, str(exc), "spawn-failure"))
@@ -539,6 +616,16 @@ def _check_skills(plugin_dir: Path) -> list[VerificationCase]:
skills_root = manifest.get("skills")
if not isinstance(skills_root, str) or not skills_root:
return [VerificationCase("skills", "skills optional", True, "No skills field declared", "optional")]
+ if not safe_manifest_path(plugin_dir, skills_root):
+ return [
+ VerificationCase(
+ "skills",
+ "skills directory",
+ False,
+ f'Skills path "{skills_root}" must stay within the plugin and start with "./"',
+ "schema",
+ )
+ ]
skills_dir = plugin_dir / skills_root
if not skills_dir.exists():
diff --git a/tests/fixtures/good-plugin/.codex-plugin/plugin.json b/tests/fixtures/good-plugin/.codex-plugin/plugin.json
index 6c1b808..5daa5d6 100644
--- a/tests/fixtures/good-plugin/.codex-plugin/plugin.json
+++ b/tests/fixtures/good-plugin/.codex-plugin/plugin.json
@@ -29,5 +29,5 @@
"logo": "./assets/logo.svg",
"screenshots": ["./assets/screenshot.svg"]
},
- "skills": "skills"
+ "skills": "./skills"
}
diff --git a/tests/fixtures/with-marketplace/marketplace.json b/tests/fixtures/with-marketplace/marketplace.json
index 3b0b5c0..a02f66f 100644
--- a/tests/fixtures/with-marketplace/marketplace.json
+++ b/tests/fixtures/with-marketplace/marketplace.json
@@ -3,6 +3,7 @@
"plugins": [
{
"source": "https://github.com/example/plugin",
+ "category": "Developer Tools",
"policy": {
"installation": "auto",
"authentication": "none"
diff --git a/tests/test_action_bundle.py b/tests/test_action_bundle.py
index b85c71d..9062c5e 100644
--- a/tests/test_action_bundle.py
+++ b/tests/test_action_bundle.py
@@ -20,8 +20,12 @@ def test_action_metadata_includes_marketplace_branding_and_fallback_install() ->
assert "config:" in action_text
assert "baseline:" in action_text
assert "online:" in action_text
+ assert "upload_sarif:" in action_text
+ assert "sarif_category:" in action_text
assert "registry_payload_output:" in action_text
assert "submission_enabled:" in action_text
+ assert "policy_pass:" in action_text
+ assert "verify_pass:" in action_text
assert "grade_label:" in action_text
assert "max_severity:" in action_text
assert "findings_total:" in action_text
@@ -29,13 +33,15 @@ def test_action_metadata_includes_marketplace_branding_and_fallback_install() ->
assert "registry_payload_path:" in action_text
assert "submission_issue_urls:" in action_text
assert "python3 -m codex_plugin_scanner.action_runner" in action_text
- assert 'MODE: ${{ inputs.mode }}' in action_text
- assert 'PROFILE: ${{ inputs.profile }}' in action_text
- assert 'CONFIG: ${{ inputs.config }}' in action_text
- assert 'BASELINE: ${{ inputs.baseline }}' in action_text
- assert 'ONLINE: ${{ inputs.online }}' in action_text
+ assert "MODE: ${{ inputs.mode }}" in action_text
+ assert "PROFILE: ${{ inputs.profile }}" in action_text
+ assert "CONFIG: ${{ inputs.config }}" in action_text
+ assert "BASELINE: ${{ inputs.baseline }}" in action_text
+ assert "ONLINE: ${{ inputs.online }}" in action_text
assert "value: ${{ steps.scan.outputs.score }}" in action_text
assert "value: ${{ steps.scan.outputs.grade }}" in action_text
+ assert "value: ${{ steps.scan.outputs.policy_pass }}" in action_text
+ assert "value: ${{ steps.scan.outputs.verify_pass }}" in action_text
assert "value: ${{ steps.scan.outputs.grade_label }}" in action_text
assert "value: ${{ steps.scan.outputs.max_severity }}" in action_text
assert "value: ${{ steps.scan.outputs.findings_total }}" in action_text
@@ -46,6 +52,7 @@ def test_action_metadata_includes_marketplace_branding_and_fallback_install() ->
assert "value: ${{ steps.scan.outputs.submission_issue_urls }}" in action_text
assert "value: ${{ steps.scan.outputs.submission_issue_numbers }}" in action_text
assert "GITHUB_STEP_SUMMARY" in action_text
+ assert "github/codeql-action/upload-sarif@" in action_text
def test_publish_workflow_attaches_marketplace_action_bundle() -> None:
diff --git a/tests/test_action_runner.py b/tests/test_action_runner.py
index 492649d..e760bab 100644
--- a/tests/test_action_runner.py
+++ b/tests/test_action_runner.py
@@ -44,6 +44,8 @@ def test_action_runner_writes_all_outputs(monkeypatch, tmp_path, capsys) -> None
assert "findings_total=0" in output_lines
assert "report_path=" in output_lines
assert "registry_payload_path=" in output_lines
+ assert "policy_pass=true" in output_lines
+ assert "verify_pass=" in output_lines
assert "submission_eligible=false" in output_lines
assert "submission_performed=false" in output_lines
assert "submission_issue_urls=" in output_lines
@@ -138,4 +140,5 @@ def test_action_runner_verify_mode_writes_human_report(monkeypatch, tmp_path, ca
assert exit_code == 0
assert "Verification: PASS" in output_path.read_text(encoding="utf-8")
assert "mode=verify" in github_output.read_text(encoding="utf-8")
+ assert "verify_pass=true" in github_output.read_text(encoding="utf-8")
assert "Report written to" in capsys.readouterr().out
diff --git a/tests/test_config.py b/tests/test_config.py
index 84d745a..bb9e032 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -42,7 +42,7 @@ def test_load_scanner_config_bad_toml(tmp_path: Path):
(tmp_path / ".codex-plugin-scanner.toml").write_text("[scanner\nprofile='x'", encoding="utf-8")
try:
load_scanner_config(tmp_path)
- assert False, "expected ConfigError"
+ raise AssertionError("expected ConfigError")
except ConfigError:
assert True
@@ -51,6 +51,6 @@ def test_load_baseline_bad_json(tmp_path: Path):
(tmp_path / "baseline.json").write_text("[not-valid-json", encoding="utf-8")
try:
load_baseline_rule_ids(tmp_path, "baseline.json")
- assert False, "expected ConfigError"
+ raise AssertionError("expected ConfigError")
except ConfigError:
assert True
diff --git a/tests/test_lint_fixes.py b/tests/test_lint_fixes.py
new file mode 100644
index 0000000..a0daa91
--- /dev/null
+++ b/tests/test_lint_fixes.py
@@ -0,0 +1,70 @@
+"""Tests for spec-aligned safe autofixes."""
+
+import json
+
+from codex_plugin_scanner.lint_fixes import apply_safe_autofixes
+
+
+def test_apply_safe_autofixes_preserves_codex_relative_paths(tmp_path):
+ plugin_manifest = tmp_path / ".codex-plugin"
+ plugin_manifest.mkdir()
+ (plugin_manifest / "plugin.json").write_text(
+ json.dumps(
+ {
+ "name": "demo-plugin",
+ "version": "1.0.0",
+ "description": "demo",
+ "skills": "skills",
+ "apps": ["apps/demo.app.json"],
+ "interface": {
+ "displayName": "Demo Plugin",
+ "shortDescription": "demo",
+ "longDescription": "demo",
+ "developerName": "HOL",
+ "category": "Developer Tools",
+ "capabilities": ["Read"],
+ "websiteURL": "https://example.com",
+ "privacyPolicyURL": "https://example.com/privacy",
+ "termsOfServiceURL": "https://example.com/terms",
+ "composerIcon": "assets/icon.svg",
+ "logo": "./assets/logo.svg",
+ "screenshots": ["assets/screenshot.svg"],
+ },
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ marketplace_dir = tmp_path / ".agents" / "plugins"
+ marketplace_dir.mkdir(parents=True)
+ (marketplace_dir / "marketplace.json").write_text(
+ json.dumps(
+ {
+ "name": "demo-marketplace",
+ "plugins": [
+ {
+ "source": {
+ "source": "https://github.com/hashgraph-online/example-plugin",
+ "path": "plugins/demo",
+ },
+ "policy": {"installation": "manual", "authentication": "none"},
+ "category": "Developer Tools",
+ }
+ ],
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ changes = apply_safe_autofixes(tmp_path)
+
+ manifest_payload = json.loads((plugin_manifest / "plugin.json").read_text(encoding="utf-8"))
+ marketplace_payload = json.loads((marketplace_dir / "marketplace.json").read_text(encoding="utf-8"))
+
+ assert changes
+ assert manifest_payload["skills"] == "./skills"
+ assert manifest_payload["apps"] == ["./apps/demo.app.json"]
+ assert manifest_payload["interface"]["composerIcon"] == "./assets/icon.svg"
+ assert manifest_payload["interface"]["logo"] == "./assets/logo.svg"
+ assert manifest_payload["interface"]["screenshots"] == ["./assets/screenshot.svg"]
+ assert marketplace_payload["plugins"][0]["source"]["path"] == "./plugins/demo"
diff --git a/tests/test_manifest.py b/tests/test_manifest.py
index de48a3c..7a4d959 100644
--- a/tests/test_manifest.py
+++ b/tests/test_manifest.py
@@ -115,6 +115,45 @@ def test_interface_assets_pass_for_good_plugin(self):
r = check_interface_assets(FIXTURES / "good-plugin")
assert r.passed and r.points == 3
+ def test_interface_metadata_does_not_require_type(self, tmp_path: Path):
+ plugin_dir = tmp_path
+ manifest_dir = plugin_dir / ".codex-plugin"
+ manifest_dir.mkdir(parents=True)
+ assets_dir = plugin_dir / "assets"
+ assets_dir.mkdir()
+ (assets_dir / "icon.svg").write_text("", encoding="utf-8")
+ (assets_dir / "logo.svg").write_text("", encoding="utf-8")
+ (assets_dir / "shot.svg").write_text("", encoding="utf-8")
+ (manifest_dir / "plugin.json").write_text(
+ """
+ {
+ "name": "interface-good",
+ "version": "1.0.0",
+ "description": "good interface metadata",
+ "interface": {
+ "displayName": "Good",
+ "shortDescription": "Good",
+ "longDescription": "Good",
+ "developerName": "HOL",
+ "category": "Security",
+ "capabilities": ["Read"],
+ "websiteURL": "https://example.com",
+ "privacyPolicyURL": "https://example.com/privacy",
+ "termsOfServiceURL": "https://example.com/terms",
+ "composerIcon": "./assets/icon.svg",
+ "logo": "./assets/logo.svg",
+ "screenshots": ["./assets/shot.svg"]
+ }
+ }
+ """,
+ encoding="utf-8",
+ )
+
+ result = check_interface_metadata(plugin_dir)
+
+ assert result.passed is True
+ assert result.points == 3
+
def test_interface_assets_fail_for_unsafe_values(self):
with tempfile.TemporaryDirectory() as tmpdir:
plugin_dir = Path(tmpdir)
diff --git a/tests/test_marketplace.py b/tests/test_marketplace.py
index be15d52..393a403 100644
--- a/tests/test_marketplace.py
+++ b/tests/test_marketplace.py
@@ -1,5 +1,6 @@
"""Tests for marketplace checks."""
+import json
import tempfile
from pathlib import Path
@@ -22,6 +23,56 @@ def test_passes_for_valid_marketplace(self):
r = check_marketplace_json(FIXTURES / "with-marketplace")
assert r.passed and r.points == 5
+ def test_passes_for_codex_marketplace_layout(self, tmp_path: Path):
+ marketplace_dir = tmp_path / ".agents" / "plugins"
+ marketplace_dir.mkdir(parents=True)
+ (marketplace_dir / "plugins" / "demo").mkdir(parents=True)
+ (marketplace_dir / "marketplace.json").write_text(
+ json.dumps(
+ {
+ "name": "demo-marketplace",
+ "interface": {"displayName": "Demo Marketplace"},
+ "plugins": [
+ {
+ "source": {
+ "source": "https://github.com/hashgraph-online/example-plugin",
+ "path": "./plugins/demo",
+ },
+ "policy": {"installation": "manual", "authentication": "none"},
+ "category": "Developer Tools",
+ }
+ ],
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ result = check_marketplace_json(tmp_path)
+
+ assert result.passed is True
+ assert result.points == 5
+
+ def test_legacy_root_marketplace_runs_in_compatibility_mode(self, tmp_path: Path):
+ (tmp_path / "marketplace.json").write_text(
+ json.dumps(
+ {
+ "name": "legacy-marketplace",
+ "plugins": [
+ {
+ "source": "https://github.com/hashgraph-online/example-plugin",
+ "policy": {"installation": "manual", "authentication": "none"},
+ }
+ ],
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ result = check_marketplace_json(tmp_path)
+
+ assert result.passed is True
+ assert "compatibility" in result.message.lower()
+
def test_fails_for_invalid_json(self):
with tempfile.TemporaryDirectory() as tmpdir:
mp = Path(tmpdir) / "marketplace.json"
@@ -68,6 +119,33 @@ def test_passes_when_all_fields_present(self):
r = check_policy_fields(FIXTURES / "with-marketplace")
assert r.passed and r.points == 5
+ def test_fails_for_missing_category_in_codex_layout(self, tmp_path: Path):
+ marketplace_dir = tmp_path / ".agents" / "plugins"
+ marketplace_dir.mkdir(parents=True)
+ (marketplace_dir / "plugins" / "demo").mkdir(parents=True)
+ (marketplace_dir / "marketplace.json").write_text(
+ json.dumps(
+ {
+ "name": "demo-marketplace",
+ "plugins": [
+ {
+ "source": {
+ "source": "https://github.com/hashgraph-online/example-plugin",
+ "path": "./plugins/demo",
+ },
+ "policy": {"installation": "manual", "authentication": "none"},
+ }
+ ],
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ result = check_policy_fields(tmp_path)
+
+ assert result.passed is False
+ assert "category" in result.message
+
def test_passes_when_empty_plugins(self):
with tempfile.TemporaryDirectory() as tmpdir:
mp = Path(tmpdir) / "marketplace.json"
@@ -123,3 +201,32 @@ def test_http_marketplace_source_is_unsafe(self):
source_check = next(check for check in results if check.name == "Marketplace sources are safe")
assert source_check.passed is False
assert "http://example.com/plugin" in source_check.message
+
+ def test_codex_marketplace_path_must_start_with_dot_slash(self, tmp_path: Path):
+ marketplace_dir = tmp_path / ".agents" / "plugins"
+ marketplace_dir.mkdir(parents=True)
+ (marketplace_dir / "plugins" / "demo").mkdir(parents=True)
+ (marketplace_dir / "marketplace.json").write_text(
+ json.dumps(
+ {
+ "name": "demo-marketplace",
+ "plugins": [
+ {
+ "source": {
+ "source": "https://github.com/hashgraph-online/example-plugin",
+ "path": "plugins/demo",
+ },
+ "policy": {"installation": "manual", "authentication": "none"},
+ "category": "Developer Tools",
+ }
+ ],
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ results = run_marketplace_checks(tmp_path)
+ source_check = next(check for check in results if check.name == "Marketplace sources are safe")
+
+ assert source_check.passed is False
+ assert "./" in source_check.message
diff --git a/tests/test_verification.py b/tests/test_verification.py
index 992de79..b3a393c 100644
--- a/tests/test_verification.py
+++ b/tests/test_verification.py
@@ -1,6 +1,8 @@
"""Tests for runtime verification engine."""
+import json
import os
+import sys
from pathlib import Path
from codex_plugin_scanner import verification as verification_module
@@ -36,7 +38,7 @@ def test_verify_plugin_reports_real_workspace_path() -> None:
def test_verify_plugin_checks_skill_frontmatter_from_manifest(tmp_path: Path):
(tmp_path / ".codex-plugin").mkdir()
(tmp_path / ".codex-plugin" / "plugin.json").write_text(
- '{"name":"demo","version":"1.0.0","description":"demo","skills":"skills"}',
+ '{"name":"demo","version":"1.0.0","description":"demo","skills":"./skills"}',
encoding="utf-8",
)
(tmp_path / "skills" / "broken").mkdir(parents=True)
@@ -51,14 +53,44 @@ def test_verify_plugin_checks_skill_frontmatter_from_manifest(tmp_path: Path):
def test_verify_plugin_stdio_inherits_process_environment(tmp_path: Path, monkeypatch):
captured_env: dict[str, str] = {}
+ class StubInput:
+ def __init__(self):
+ self.writes: list[str] = []
+
+ def write(self, payload: str):
+ self.writes.append(payload)
+ return len(payload)
+
+ def flush(self):
+ return None
+
+ def close(self):
+ return None
+
+ class StubOutput:
+ def __init__(self, lines: list[str]):
+ self._lines = lines
+
+ def readline(self):
+ return self._lines.pop(0) if self._lines else ""
+
+ def read(self):
+ return ""
+
class StubProcess:
returncode = 0
def __init__(self):
- self.stdin = None
+ self.stdin = StubInput()
+ self.stdout = StubOutput(
+ [
+ '{"jsonrpc":"2.0","id":1,"result":{"protocolVersion":"2024-11-05","capabilities":{},"serverInfo":{"name":"stub","version":"1.0.0"}}}\n'
+ ]
+ )
+ self.stderr = StubOutput([])
- def communicate(self, timeout: int):
- return "{}", ""
+ def wait(self, timeout: int):
+ return 0
def fake_popen(*args, **kwargs):
nonlocal captured_env
@@ -92,6 +124,8 @@ class StubProcess:
def __init__(self):
self.stdin = StubStdin()
+ self.stdout = None
+ self.stderr = None
def kill(self):
nonlocal killed
@@ -114,3 +148,60 @@ def test_doctor_report_filters_component():
report = build_doctor_report(FIXTURES / "good-plugin", "manifest")
assert report["component"] == "manifest"
assert isinstance(report["cases"], list)
+
+
+def test_verify_plugin_performs_mcp_initialize_lifecycle(tmp_path: Path):
+ script = """
+import json
+import sys
+
+init = json.loads(sys.stdin.readline())
+assert init["method"] == "initialize"
+assert init["params"]["protocolVersion"]
+assert init["params"]["clientInfo"]["name"] == "codex-plugin-scanner"
+sys.stdout.write(json.dumps({
+ "jsonrpc": "2.0",
+ "id": init["id"],
+ "result": {
+ "protocolVersion": init["params"]["protocolVersion"],
+ "capabilities": {"tools": {}, "resources": {}, "prompts": {}},
+ "serverInfo": {"name": "stub", "version": "1.0.0"}
+ }
+}) + "\\n")
+sys.stdout.flush()
+
+initialized = json.loads(sys.stdin.readline())
+assert initialized["method"] == "notifications/initialized"
+
+for method, key in (("tools/list", "tools"), ("resources/list", "resources"), ("prompts/list", "prompts")):
+ request = json.loads(sys.stdin.readline())
+ assert request["method"] == method
+ sys.stdout.write(json.dumps({"jsonrpc": "2.0", "id": request["id"], "result": {key: []}}) + "\\n")
+ sys.stdout.flush()
+"""
+ (tmp_path / ".codex-plugin").mkdir()
+ (tmp_path / ".codex-plugin" / "plugin.json").write_text(
+ '{"name":"mcp-demo","version":"1.0.0","description":"demo"}',
+ encoding="utf-8",
+ )
+ (tmp_path / ".mcp.json").write_text(
+ json.dumps(
+ {
+ "mcpServers": {
+ "stub": {
+ "command": sys.executable,
+ "args": ["-u", "-c", script],
+ }
+ }
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ result = verify_plugin(tmp_path)
+ report = build_doctor_report(tmp_path, "mcp")
+
+ assert result.verify_pass is True
+ assert any(case.name == "stdio initialize:stub" and case.passed for case in result.cases)
+ assert "notifications/initialized" in report["stdout_log"]
+ assert "tools/list" in report["stdout_log"]
From 879d2a415f4a0f562e73f9b6493ff1a87dc1cf06 Mon Sep 17 00:00:00 2001
From: Michael Kantor <6068672+kantorcodes@users.noreply.github.com>
Date: Thu, 2 Apr 2026 21:17:03 -0700
Subject: [PATCH 2/2] fix: harden mcp handshake reads
Signed-off-by: Michael Kantor <6068672+kantorcodes@users.noreply.github.com>
---
src/codex_plugin_scanner/verification.py | 48 ++++++++++++++++++++----
tests/test_verification.py | 6 +++
2 files changed, 46 insertions(+), 8 deletions(-)
diff --git a/src/codex_plugin_scanner/verification.py b/src/codex_plugin_scanner/verification.py
index 65e3fe9..9a7604e 100644
--- a/src/codex_plugin_scanner/verification.py
+++ b/src/codex_plugin_scanner/verification.py
@@ -4,14 +4,18 @@
import json
import os
+import queue
import re
import subprocess
+import threading
import urllib.error
import urllib.parse
import urllib.request
+from contextlib import suppress
from dataclasses import dataclass
from pathlib import Path
+from . import __version__
from .checks.manifest import load_manifest
from .checks.manifest_support import safe_manifest_path
from .marketplace_support import (
@@ -70,6 +74,26 @@ def _is_safe_relative_asset(plugin_dir: Path, value: str) -> bool:
return is_safe_relative_path(plugin_dir, value, require_prefix=True, require_exists=True)
+def _readline_with_timeout(stream, *, timeout: float, command: list[str], transcript: list[str]) -> str:
+ result_queue: queue.Queue[str | BaseException] = queue.Queue(maxsize=1)
+
+ def _reader() -> None:
+ try:
+ result_queue.put(stream.readline())
+ except BaseException as exc: # pragma: no cover - defensive worker handoff
+ result_queue.put(exc)
+
+ thread = threading.Thread(target=_reader, daemon=True)
+ thread.start()
+ try:
+ result = result_queue.get(timeout=timeout)
+ except queue.Empty as exc:
+ raise subprocess.TimeoutExpired(command, timeout, output="\n".join(transcript)) from exc
+ if isinstance(result, BaseException):
+ raise result
+ return result
+
+
def _check_manifest(plugin_dir: Path) -> list[VerificationCase]:
manifest_path = plugin_dir / ".codex-plugin" / "plugin.json"
if not manifest_path.exists():
@@ -449,14 +473,19 @@ def _check_mcp_stdio(servers: dict) -> tuple[list[VerificationCase], list[Runtim
"params": {
"protocolVersion": "2024-11-05",
"capabilities": {"tools": {}, "resources": {}, "prompts": {}},
- "clientInfo": {"name": "codex-plugin-scanner", "version": "1.4.0"},
+ "clientInfo": {"name": "codex-plugin-scanner", "version": __version__},
},
}
proc.stdin.write(json.dumps(initialize_request) + "\n")
proc.stdin.flush()
transcript.append("> " + json.dumps(initialize_request))
- initialize_response_line = proc.stdout.readline()
+ initialize_response_line = _readline_with_timeout(
+ proc.stdout,
+ timeout=2,
+ command=command,
+ transcript=transcript,
+ )
if not initialize_response_line:
raise RuntimeError("server did not respond to initialize")
transcript.append("< " + initialize_response_line.strip())
@@ -486,7 +515,12 @@ def _check_mcp_stdio(servers: dict) -> tuple[list[VerificationCase], list[Runtim
proc.stdin.write(json.dumps(request) + "\n")
proc.stdin.flush()
transcript.append("> " + json.dumps(request))
- response_line = proc.stdout.readline()
+ response_line = _readline_with_timeout(
+ proc.stdout,
+ timeout=2,
+ command=command,
+ transcript=transcript,
+ )
if not response_line:
raise RuntimeError(f"server did not respond to {method}")
transcript.append("< " + response_line.strip())
@@ -548,12 +582,10 @@ def _check_mcp_stdio(servers: dict) -> tuple[list[VerificationCase], list[Runtim
)
cases.append(VerificationCase("mcp", f"stdio timeout:{name}", False, "process timed out", "timeout"))
except Exception as exc:
- poll = getattr(proc, "poll", None)
- wait = getattr(proc, "wait", None)
- if not callable(poll) or poll() is None:
+ if proc.poll() is None:
proc.kill()
- if callable(wait):
- wait(timeout=1)
+ with suppress(Exception):
+ proc.wait(timeout=1)
stdout = proc.stdout.read() if proc.stdout is not None else ""
stderr = proc.stderr.read() if proc.stderr is not None else ""
transcript_output = "\n".join(transcript)
diff --git a/tests/test_verification.py b/tests/test_verification.py
index b3a393c..10fdcd5 100644
--- a/tests/test_verification.py
+++ b/tests/test_verification.py
@@ -131,6 +131,12 @@ def kill(self):
nonlocal killed
killed = True
+ def poll(self):
+ return None
+
+ def wait(self, timeout=None):
+ return None
+
monkeypatch.setattr(verification_module.subprocess, "Popen", lambda *args, **kwargs: StubProcess())
(tmp_path / ".mcp.json").write_text(
'{"mcpServers":{"demo":{"command":"python","args":["-c","print(1)"]}}}',