From 1aca6d2a40910944bc9935f1ba00016a7152c904 Mon Sep 17 00:00:00 2001 From: Lukasz Juranek Date: Fri, 27 Feb 2026 19:57:21 +0100 Subject: [PATCH 1/4] Add sbom generation tooling (#2232) Co-Authored-By: Claude Sonnet 4.6 --- .bazelrc | 5 + .../workflows/sbom_dependency_submission.yml | 111 +++ .github/workflows/tests.yml | 3 + MODULE.bazel | 11 +- README.md | 8 +- bazel/rules/rules_score/BUILD | 53 ++ bazel/rules/rules_score/docs/index.rst | 444 +++++++++ bazel/rules/rules_score/private/BUILD | 0 .../private/architectural_design.bzl | 147 +++ .../private/assumptions_of_use.bzl | 154 ++++ bazel/rules/rules_score/private/component.bzl | 150 +++ .../private/component_requirements.bzl | 128 +++ .../private/dependability_analysis.bzl | 183 ++++ .../private/dependable_element.bzl | 789 ++++++++++++++++ .../private/feature_requirements.bzl | 119 +++ .../rules_score/private/safety_analysis.bzl | 175 ++++ .../rules_score/private/sphinx_module.bzl | 302 ++++++ bazel/rules/rules_score/private/unit.bzl | 157 ++++ bazel/rules/rules_score/providers.bzl | 57 ++ bazel/rules/rules_score/rules_score.bzl | 62 ++ .../rules_score/src/sphinx_html_merge.py | 191 ++++ bazel/rules/rules_score/src/sphinx_wrapper.py | 262 ++++++ .../rules_score/templates/conf.template.py | 207 +++++ .../templates/seooc_index.template.rst | 76 ++ bazel/rules/rules_score/test/BUILD | 389 ++++++++ .../rules_score/test/fixtures/mock_lib1.cc | 4 + .../rules_score/test/fixtures/mock_lib2.cc | 4 + .../rules_score/test/fixtures/mock_test.sh | 18 + .../test/fixtures/module_a/index.rst | 31 + .../test/fixtures/module_b/index.rst | 37 + .../test/fixtures/module_c/index.rst | 29 + .../seooc_test/architectural_design.rst | 174 ++++ .../seooc_test/assumptions_of_use.rst | 80 ++ .../seooc_test/component_requirements.rst | 105 +++ .../seooc_test/dependability_analysis.rst | 292 ++++++ .../test/fixtures/seooc_test/dfa.rst | 149 +++ .../seooc_test/dynamic_architecture.rst | 66 ++ .../seooc_test/feature_requirements.rst | 48 + .../seooc_test/static_architecture.rst | 45 + .../test/fixtures/test_component_main.cc | 13 + .../test/fixtures/test_unit_test.cc | 25 + .../rules_score/test/html_generation_test.bzl | 223 +++++ .../test/score_module_providers_test.bzl | 323 +++++++ bazel/rules/rules_score/test/seooc_test.bzl | 135 +++ .../rules_score/test/unit_component_test.bzl | 175 ++++ coverage/README.md | 20 + coverage/ferrocene_report.sh | 134 ++- sbom/BUILD.bazel | 33 + sbom/SBOM_Readme.md | 340 +++++++ sbom/cpp_metadata.json | 1 + sbom/crates_metadata.json | 806 ++++++++++++++++ sbom/defs.bzl | 136 +++ .../requirements/component_requirements.rst | 88 ++ .../requirements/feature_requirements.rst | 92 ++ sbom/extensions.bzl | 454 +++++++++ sbom/internal/BUILD | 24 + sbom/internal/__init__.py | 1 + sbom/internal/aspect.bzl | 115 +++ sbom/internal/generator/BUILD | 38 + sbom/internal/generator/__init__.py | 1 + .../internal/generator/cyclonedx_formatter.py | 376 ++++++++ sbom/internal/generator/sbom_generator.py | 868 ++++++++++++++++++ sbom/internal/generator/spdx_formatter.py | 242 +++++ sbom/internal/metadata_rule.bzl | 49 + sbom/internal/providers.bzl | 28 + sbom/internal/rules.bzl | 286 ++++++ sbom/npm_wrapper.sh | 17 + sbom/scripts/BUILD.bazel | 5 + sbom/scripts/generate_cpp_metadata_cache.py | 116 +++ .../scripts/generate_crates_metadata_cache.py | 540 +++++++++++ sbom/scripts/spdx_to_github_snapshot.py | 230 +++++ sbom/tests/BUILD | 32 + sbom/tests/__init__.py | 1 + sbom/tests/test_bcr_known_licenses.py | 250 +++++ sbom/tests/test_cpp_enrich_checksum.py | 156 ++++ sbom/tests/test_cyclonedx_formatter.py | 199 ++++ .../test_generate_crates_metadata_cache.py | 398 ++++++++ sbom/tests/test_spdx_formatter.py | 191 ++++ sbom/tests/test_spdx_to_github_snapshot.py | 189 ++++ 79 files changed, 12585 insertions(+), 30 deletions(-) create mode 100644 .github/workflows/sbom_dependency_submission.yml create mode 100644 bazel/rules/rules_score/BUILD create mode 100644 bazel/rules/rules_score/docs/index.rst create mode 100644 bazel/rules/rules_score/private/BUILD create mode 100644 bazel/rules/rules_score/private/architectural_design.bzl create mode 100644 bazel/rules/rules_score/private/assumptions_of_use.bzl create mode 100644 bazel/rules/rules_score/private/component.bzl create mode 100644 bazel/rules/rules_score/private/component_requirements.bzl create mode 100644 bazel/rules/rules_score/private/dependability_analysis.bzl create mode 100644 bazel/rules/rules_score/private/dependable_element.bzl create mode 100644 bazel/rules/rules_score/private/feature_requirements.bzl create mode 100644 bazel/rules/rules_score/private/safety_analysis.bzl create mode 100644 bazel/rules/rules_score/private/sphinx_module.bzl create mode 100644 bazel/rules/rules_score/private/unit.bzl create mode 100644 bazel/rules/rules_score/providers.bzl create mode 100644 bazel/rules/rules_score/rules_score.bzl create mode 100644 bazel/rules/rules_score/src/sphinx_html_merge.py create mode 100644 bazel/rules/rules_score/src/sphinx_wrapper.py create mode 100644 bazel/rules/rules_score/templates/conf.template.py create mode 100644 bazel/rules/rules_score/templates/seooc_index.template.rst create mode 100644 bazel/rules/rules_score/test/BUILD create mode 100644 bazel/rules/rules_score/test/fixtures/mock_lib1.cc create mode 100644 bazel/rules/rules_score/test/fixtures/mock_lib2.cc create mode 100755 bazel/rules/rules_score/test/fixtures/mock_test.sh create mode 100644 bazel/rules/rules_score/test/fixtures/module_a/index.rst create mode 100644 bazel/rules/rules_score/test/fixtures/module_b/index.rst create mode 100644 bazel/rules/rules_score/test/fixtures/module_c/index.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/architectural_design.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/assumptions_of_use.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/component_requirements.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/dependability_analysis.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/dfa.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/dynamic_architecture.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/feature_requirements.rst create mode 100644 bazel/rules/rules_score/test/fixtures/seooc_test/static_architecture.rst create mode 100644 bazel/rules/rules_score/test/fixtures/test_component_main.cc create mode 100644 bazel/rules/rules_score/test/fixtures/test_unit_test.cc create mode 100644 bazel/rules/rules_score/test/html_generation_test.bzl create mode 100644 bazel/rules/rules_score/test/score_module_providers_test.bzl create mode 100644 bazel/rules/rules_score/test/seooc_test.bzl create mode 100644 bazel/rules/rules_score/test/unit_component_test.bzl create mode 100644 sbom/BUILD.bazel create mode 100644 sbom/SBOM_Readme.md create mode 100644 sbom/cpp_metadata.json create mode 100644 sbom/crates_metadata.json create mode 100644 sbom/defs.bzl create mode 100644 sbom/docs/requirements/component_requirements.rst create mode 100644 sbom/docs/requirements/feature_requirements.rst create mode 100644 sbom/extensions.bzl create mode 100644 sbom/internal/BUILD create mode 100644 sbom/internal/__init__.py create mode 100644 sbom/internal/aspect.bzl create mode 100644 sbom/internal/generator/BUILD create mode 100644 sbom/internal/generator/__init__.py create mode 100644 sbom/internal/generator/cyclonedx_formatter.py create mode 100644 sbom/internal/generator/sbom_generator.py create mode 100644 sbom/internal/generator/spdx_formatter.py create mode 100644 sbom/internal/metadata_rule.bzl create mode 100644 sbom/internal/providers.bzl create mode 100644 sbom/internal/rules.bzl create mode 100755 sbom/npm_wrapper.sh create mode 100644 sbom/scripts/BUILD.bazel create mode 100644 sbom/scripts/generate_cpp_metadata_cache.py create mode 100755 sbom/scripts/generate_crates_metadata_cache.py create mode 100644 sbom/scripts/spdx_to_github_snapshot.py create mode 100644 sbom/tests/BUILD create mode 100644 sbom/tests/__init__.py create mode 100644 sbom/tests/test_bcr_known_licenses.py create mode 100644 sbom/tests/test_cpp_enrich_checksum.py create mode 100644 sbom/tests/test_cyclonedx_formatter.py create mode 100644 sbom/tests/test_generate_crates_metadata_cache.py create mode 100644 sbom/tests/test_spdx_formatter.py create mode 100644 sbom/tests/test_spdx_to_github_snapshot.py diff --git a/.bazelrc b/.bazelrc index 0823a01..3a6ddac 100644 --- a/.bazelrc +++ b/.bazelrc @@ -1,2 +1,7 @@ common --registry=https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/ common --registry=https://bcr.bazel.build + +build --java_language_version=17 +build --tool_java_language_version=17 +build --java_runtime_version=remotejdk_17 +build --tool_java_runtime_version=remotejdk_17 diff --git a/.github/workflows/sbom_dependency_submission.yml b/.github/workflows/sbom_dependency_submission.yml new file mode 100644 index 0000000..0330998 --- /dev/null +++ b/.github/workflows/sbom_dependency_submission.yml @@ -0,0 +1,111 @@ +name: SBOM Dependency Submission +# Submit SBOM-derived dependency snapshot to GitHub Dependency Graph, +# enabling Dependabot vulnerability alerts for SBOM-declared packages. +# +# Requirements (configured by org/repo admin): +# - Dependency Graph must be enabled in repo Settings → Code security +# - Write permission on contents (for dependency-graph/snapshots) +# +# GitHub Dependency Submission API: +# https://docs.github.com/en/rest/dependency-graph/dependency-submission + +on: + workflow_call: + inputs: + sbom_target: + description: 'Bazel SBOM target to build (e.g. //:sbom_all)' + required: false + type: string + default: '//:sbom_all' + release_tag: + description: 'Version tag for the SBOM component_version' + required: false + type: string + default: 'dev' + +jobs: + sbom-dependency-submission: + name: Build SBOM and submit to Dependency Graph + runs-on: ubuntu-24.04 + permissions: + contents: write # Required for dependency-graph/snapshots API + + steps: + - name: Checkout + uses: actions/checkout@v4.2.2 + + - name: Setup Bazel + uses: bazel-contrib/setup-bazel@0.15.0 + with: + disk-cache: true + repository-cache: true + bazelisk-cache: true + + - name: Build SBOM + run: | + bazel build ${{ inputs.sbom_target }} \ + --define=component_version=${{ inputs.release_tag }} + + - name: Collect SPDX outputs + run: | + mkdir -p sbom_output + find bazel-bin -name "*.spdx.json" -exec cp {} sbom_output/ \; + echo "SBOM files collected:" + ls -lh sbom_output/ || echo "(none)" + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Convert SPDX → GitHub Dependency Snapshot + run: | + mkdir -p snapshots + for spdx_file in sbom_output/*.spdx.json; do + [ -f "$spdx_file" ] || continue + base=$(basename "$spdx_file" .spdx.json) + correlator="${{ github.workflow }}_${base}" + echo "Converting $spdx_file (correlator: $correlator)" + python3 sbom/scripts/spdx_to_github_snapshot.py \ + --input "$spdx_file" \ + --output "snapshots/${base}_snapshot.json" \ + --sha "${{ github.sha }}" \ + --ref "${{ github.ref }}" \ + --job-correlator "$correlator" \ + --job-id "${{ github.run_id }}" + done + + - name: Submit snapshots to GitHub Dependency Graph + env: + GH_TOKEN: ${{ github.token }} + run: | + repo="${{ github.repository }}" + submitted=0 + failed=0 + for snapshot_file in snapshots/*_snapshot.json; do + [ -f "$snapshot_file" ] || continue + echo "Submitting $snapshot_file to $repo ..." + http_code=$(gh api \ + "repos/${repo}/dependency-graph/snapshots" \ + --method POST \ + --input "$snapshot_file" \ + --jq '.message // "submitted"' \ + 2>&1) && { + echo " OK: $http_code" + submitted=$((submitted + 1)) + } || { + echo " FAILED: $http_code" + failed=$((failed + 1)) + } + done + echo "---" + echo "Submitted: $submitted, Failed: $failed" + [ "$failed" -eq 0 ] || exit 1 + + - name: Upload snapshot artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: dependency-snapshots-${{ inputs.release_tag }} + path: snapshots/ + retention-days: 30 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7354275..b43c3ec 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -23,3 +23,6 @@ jobs: - name: Run coverage module tests run: | bazel test //coverage/tests:all + - name: Run rules_score tests + run: | + bazel test //bazel/rules/rules_score/... diff --git a/MODULE.bazel b/MODULE.bazel index 2f5d939..e6c032d 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -13,7 +13,7 @@ module( name = "score_tooling", - version = "1.1.0", + version = "0.0.0", compatibility_level = 1, ) @@ -28,9 +28,7 @@ bazel_dep(name = "rules_java", version = "8.15.1") bazel_dep(name = "rules_rust", version = "0.61.0") bazel_dep(name = "rules_multitool", version = "1.9.0") bazel_dep(name = "score_rust_policies", version = "0.0.2") - -bazel_dep(name = "bazel_skylib", version = "1.7.1", dev_dependency = True) - +bazel_dep(name = "bazel_skylib", version = "1.7.1") bazel_dep(name = "buildifier_prebuilt", version = "8.2.0.2") ############################################################################### @@ -95,3 +93,8 @@ multitool.hub( lockfile = "tools/yamlfmt.lock.json", ) use_repo(multitool, "yamlfmt_hub") + +bazel_dep(name = "score_docs_as_code", version = "3.0.1", dev_dependency = True) + +# bazel_dep(name = "score_platform", version = "0.5.0") +bazel_dep(name = "score_process", version = "1.3.2") diff --git a/README.md b/README.md index bded050..571b805 100644 --- a/README.md +++ b/README.md @@ -58,14 +58,15 @@ bazel run //:rust_coverage -- --min-line-coverage 80 ## Upgrading from separate MODULES -If you are still using separate module imports and want to upgrade to the new version. +If you are still using separate module imports and want to upgrade to the new version. Here are two examples to showcase how to do this. ``` load("@score_python_basics//:defs.bzl", "score_py_pytest") => load("@score_tooling//:defs.bzl", "score_py_pytest") load("@score_cr_checker//:cr_checker.bzl", "copyright_checker") => load("@score_tooling//:defs.bzl", "copyright_checker") ``` -All things inside of 'tooling' can now be imported from `@score_tooling//:defs.bzl`. + +All things inside of 'tooling' can now be imported from `@score_tooling//:defs.bzl`. The available import targets are: - score_virtualenv @@ -78,6 +79,7 @@ The available import targets are: - rust_coverage_report ## Format the tooling repository -```bash + +```bash bazel run //:format.fix ``` diff --git a/bazel/rules/rules_score/BUILD b/bazel/rules/rules_score/BUILD new file mode 100644 index 0000000..6b53f2f --- /dev/null +++ b/bazel/rules/rules_score/BUILD @@ -0,0 +1,53 @@ +load( + "//bazel/rules/rules_score:rules_score.bzl", + "sphinx_module", +) + +exports_files([ + "templates/conf.template.py", + "templates/seooc_index.template.rst", + "templates/unit.template.rst", + "templates/component.template.rst", +]) + +# HTML merge tool +py_binary( + name = "sphinx_html_merge", + srcs = ["src/sphinx_html_merge.py"], + main = "src/sphinx_html_merge.py", + visibility = ["//visibility:public"], +) + +# Sphinx build binary with all required dependencies +py_binary( + name = "score_build", + srcs = ["src/sphinx_wrapper.py"], + data = [], + env = { + "SOURCE_DIRECTORY": "", + "DATA": "", + "ACTION": "check", + }, + main = "src/sphinx_wrapper.py", + visibility = ["//visibility:public"], + deps = [ + "@score_docs_as_code//src:plantuml_for_python", + "@score_docs_as_code//src/extensions/score_sphinx_bundle", + ], +) + +sphinx_module( + name = "rules_score_doc", + srcs = glob( + [ + "docs/**/*.rst", + "docs/**/*.puml", + ], + allow_empty = True, + ), + index = "docs/index.rst", + visibility = ["//visibility:public"], + deps = [ + "@score_process//:score_process_module", + ], +) diff --git a/bazel/rules/rules_score/docs/index.rst b/bazel/rules/rules_score/docs/index.rst new file mode 100644 index 0000000..79769a4 --- /dev/null +++ b/bazel/rules/rules_score/docs/index.rst @@ -0,0 +1,444 @@ +SCORE Rules for Bazel +===================== + +This package provides Bazel build rules for defining and building SCORE documentation modules with integrated Sphinx-based HTML generation. + +.. contents:: Table of Contents + :depth: 2 + :local: + + +Overview +-------- + +The ``rules_score`` package provides Bazel rules for structuring and documenting safety-critical software following S-CORE process guidelines: + +**Documentation Rule:** + +- ``sphinx_module``: Generic rule for building Sphinx HTML documentation with dependency support + +**Artifact Rules:** + +- ``feature_requirements``: High-level feature specifications +- ``component_requirements``: Component-level requirements +- ``assumptions_of_use``: Safety-relevant operating conditions +- ``architectural_design``: Software architecture documentation +- ``safety_analysis``: Detailed safety analysis (FMEA, FTA) +- ``dependability_analysis``: Comprehensive safety analysis results + +**Structural Rules:** + +- ``unit``: Smallest testable software element (design + implementation + tests) +- ``component``: Collection of units providing specific functionality +- ``dependable_element``: Complete Safety Element out of Context (SEooC) with full documentation + +All rules support cross-module dependencies for automatic sphinx-needs integration and HTML merging. + + +sphinx_module +------------- + +Builds Sphinx-based HTML documentation from RST source files with support for dependencies and cross-referencing. + +.. code-block:: python + + sphinx_module( + name = "my_docs", + srcs = glob(["docs/**/*.rst"]), + index = "docs/index.rst", + deps = ["@external_module//:docs"], + ) + +**Key Parameters:** + +- ``srcs``: RST/MD source files +- ``index``: Main index.rst file +- ``deps``: Other sphinx_module or dependable_element targets for cross-referencing +- ``sphinx``: Sphinx build binary (default: ``//bazel/rules/rules_score:score_build``) + +**Output:** ``/html/`` with merged dependency documentation + + +Artifact Rules +-------------- + +Artifact rules define S-CORE process work products. All provide ``SphinxSourcesInfo`` for documentation generation. + +**feature_requirements** + +.. code-block:: python + + feature_requirements( + name = "features", + srcs = ["docs/features.rst"], + ) + +**component_requirements** + +.. code-block:: python + + component_requirements( + name = "requirements", + srcs = ["docs/requirements.rst"], + ) + +**assumptions_of_use** + +.. code-block:: python + + assumptions_of_use( + name = "aous", + srcs = ["docs/assumptions.rst"], + ) + +**architectural_design** + +.. code-block:: python + + architectural_design( + name = "architecture", + static = ["docs/static_arch.rst"], + dynamic = ["docs/dynamic_arch.rst"], + ) + +**safety_analysis** + +.. code-block:: python + + safety_analysis( + name = "safety", + controlmeasures = ["docs/controls.rst"], + failuremodes = ["docs/failures.rst"], + fta = ["docs/fta.rst"], + arch_design = ":architecture", + ) + +**dependability_analysis** + +.. code-block:: python + + dependability_analysis( + name = "analysis", + arch_design = ":architecture", + dfa = ["docs/dfa.rst"], + safety_analysis = [":safety"], + ) + + +Structural Rules +---------------- + +**unit** + +Define the smallest testable software element. + +.. code-block:: python + + unit( + name = "my_unit", + unit_design = [":architecture"], + implementation = ["//src:lib"], + tests = ["//tests:unit_test"], + ) + +**component** + +Define a collection of units. + +.. code-block:: python + + component( + name = "my_component", + component_requirements = [":requirements"], + units = [":my_unit"], + implementation = ["//src:binary"], + tests = ["//tests:integration_test"], + ) + +**dependable_element** + +Define a complete SEooC with automatic documentation generation. + +.. code-block:: python + + dependable_element( + name = "my_seooc", + description = "My safety-critical component", + assumptions_of_use = [":aous"], + requirements = [":requirements"], + architectural_design = [":architecture"], + dependability_analysis = [":analysis"], + components = [":my_component"], + tests = ["//tests:system_test"], + deps = ["@platform//:platform_module"], + ) + +**Generated Targets:** + +- ````: Sphinx module with HTML documentation +- ``_needs``: Sphinx-needs JSON for cross-referencing +- ``_index``: Generated index.rst with artifact structure + + srcs = glob(["docs/**/*.rst"]), + index = "docs/index.rst", + deps = ["@external_module//:docs"], + ) + +**Key Parameters:** + +- ``srcs``: RST/MD source files +- ``index``: Main index.rst file +- ``deps``: Other sphinx_module or dependable_element targets for cross-referencing +- ``sphinx``: Sphinx build binary (default: ``//bazel/rules/rules_score:score_build``) + +**Output:** ``/html/`` with merged dependency documentation + + +Artifact Rules +-------------- + +Artifact rules define S-CORE process work products. All provide ``SphinxSourcesInfo`` for documentation generation. + +**feature_requirements** + +.. code-block:: python + + feature_requirements( + name = "features", + srcs = ["docs/features.rst"], + ) + +**component_requirements** + +.. code-block:: python + + component_requirements( + name = "requirements", + srcs = ["docs/requirements.rst"], + feature_requirement = [":features"], + ) + +**assumptions_of_use** + +.. code-block:: python + + assumptions_of_use( + name = "aous", + srcs = ["docs/assumptions.rst"], + ) + +**architectural_design** + +.. code-block:: python + + architectural_design( + name = "architecture", + static = ["docs/static_arch.rst"], + dynamic = ["docs/dynamic_arch.rst"], + ) + +**safety_analysis** + +.. code-block:: python + + safety_analysis( + name = "safety", + controlmeasures = ["docs/controls.rst"], + failuremodes = ["docs/failures.rst"], + fta = ["docs/fta.rst"], + arch_design = ":architecture", + ) + +**dependability_analysis** + +.. code-block:: python + + dependability_analysis( + name = "analysis", + arch_design = ":architecture", + dfa = ["docs/dfa.rst"], + safety_analysis = [":safety"], + ) + + +Structural Rules +---------------- + +**unit** + +Define the smallest testable software element. + +.. code-block:: python + + unit( + name = "my_unit", + unit_design = [":architecture"], + implementation = ["//src:lib"], + tests = ["//tests:unit_test"], + ) + +**component** + +Define a collection of units. + +.. code-block:: python + + component( + name = "my_component", + component_requirements = [":requirements"], + units = [":my_unit"], + implementation = ["//src:binary"], + tests = ["//tests:integration_test"], + ) + +**dependable_element** + +Define a complete SEooC with automatic documentation generation. + +.. code-block:: python + + dependable_element( + name = "my_seooc", + description = "My safety-critical component", + assumptions_of_use = [":aous"], + requirements = [":requirements"], + architectural_design = [":architecture"], + dependability_analysis = [":analysis"], + components = [":my_component"], + tests = ["//tests:system_test"], + deps = ["@platform//:platform_module"], + ) + +**Generated Targets:** + +- ````: Sphinx module with HTML documentation +- ``_needs``: Sphinx-needs JSON for cross-referencing +- ``_index``: Generated index.rst with artifact structure + +**Implementation Details:** + +The macro automatically: + +- Generates an index.rst file with a toctree referencing all provided artifacts +- Creates symlinks to artifact files (assumptions of use, requirements, architecture, safety analysis) for co-location with the generated index +- Delegates to ``sphinx_module`` for actual Sphinx build and HTML generation +- Integrates dependencies for cross-module referencing and HTML merging + +Dependency Management +--------------------- + +Use ``deps`` for cross-module references. HTML is automatically merged: + +.. code-block:: text + + /html/ + ├── index.html # Main documentation + ├── _static/ + ├── dependency1/ # Merged dependency + └── dependency2/ + + +Complete Example +---------------- + +.. code-block:: python + + load("//bazel/rules/rules_score:rules_score.bzl", + "architectural_design", "assumptions_of_use", + "component", "component_requirements", + "dependability_analysis", "dependable_element", + "feature_requirements", "safety_analysis", "unit") + + # Artifacts + feature_requirements(name = "features", srcs = ["docs/features.rst"]) + component_requirements(name = "reqs", srcs = ["docs/reqs.rst"], + feature_requirement = [":features"]) + assumptions_of_use(name = "aous", srcs = ["docs/aous.rst"]) + architectural_design(name = "arch", static = ["docs/arch.rst"], + dynamic = ["docs/dynamic.rst"]) + safety_analysis(name = "safety", arch_design = ":arch") + dependability_analysis(name = "analysis", arch_design = ":arch", + dfa = ["docs/dfa.rst"], + safety_analysis = [":safety"]) + + # Implementation + cc_library(name = "kvs_lib", srcs = ["kvs.cpp"], hdrs = ["kvs.h"]) + cc_test(name = "kvs_test", srcs = ["kvs_test.cpp"], deps = [":kvs_lib"]) + + # Structure + unit(name = "kvs_unit", unit_design = [":arch"], + implementation = [":kvs_lib"], tests = [":kvs_test"]) + component(name = "kvs_component", requirements = [":reqs"], + units = [":kvs_unit"], implementation = [":kvs_lib"], tests = []) + + # SEooC + dependable_element( + name = "persistency_kvs", + description = "Key-Value Store for persistent data storage", + assumptions_of_use = [":aous"], + requirements = [":reqs"], + architectural_design = [":arch"], + dependability_analysis = [":analysis"], + components = [":kvs_component"], + tests = [], + deps = ["@score_process//:score_process_module"], + ) + +Build: + +.. code-block:: bash + + bazel build //:persistency_kvs + # Output: bazel-bin/persistency_kvs/html/ + + # Implementation + cc_library(name = "kvs_lib", srcs = ["kvs.cpp"], hdrs = ["kvs.h"]) + cc_test(name = "kvs_test", srcs = ["kvs_test.cpp"], deps = [":kvs_lib"]) + + # Structure + unit(name = "kvs_unit", unit_design = [":arch"], + implementation = [":kvs_lib"], tests = [":kvs_test"]) + component(name = "kvs_component", component_requirements = [":reqs"], + units = [":kvs_unit"], implementation = [":kvs_lib"], tests = []) + + # SEooC + dependable_element( + name = "persistency_kvs", + description = "Key-Value Store for persistent data storage", + assumptions_of_use = [":aous"], + requirements = [":reqs"], + architectural_design = [":arch"], + dependability_analysis = [":analysis"], + components = [":kvs_component"], + tests = [], + deps = ["@score_process//:score_process_module"], + ) + +Build: + +.. code-block:: bash + + bazel build //:kvs_seooc + # Output: bazel-bin/kvs_seooc/html/ + # Includes merged HTML from score_platform and score_process modules + +Design Rationale +---------------- + +These rules provide a structured approach to documentation by: + +1. **Two-Tier Architecture**: Generic ``sphinx_module`` for flexibility, specialized ``score_component`` for safety-critical work +2. **Dependency Management**: Automatic cross-referencing and HTML merging across modules +3. **Standardization**: SEooC enforces consistent structure for safety documentation +4. **Traceability**: Sphinx-needs integration enables bidirectional traceability +5. **Automation**: Index generation, symlinking, and configuration management are automatic +6. **Build System Integration**: Bazel ensures reproducible, cacheable documentation builds + +Reference Implementation +------------------------ + +See complete examples in the test BUILD file: + +.. literalinclude:: ../test/BUILD + :language: python + :caption: test/BUILD diff --git a/bazel/rules/rules_score/private/BUILD b/bazel/rules/rules_score/private/BUILD new file mode 100644 index 0000000..e69de29 diff --git a/bazel/rules/rules_score/private/architectural_design.bzl b/bazel/rules/rules_score/private/architectural_design.bzl new file mode 100644 index 0000000..ddd8fdd --- /dev/null +++ b/bazel/rules/rules_score/private/architectural_design.bzl @@ -0,0 +1,147 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Architectural Design build rules for S-CORE projects. + +This module provides macros and rules for defining architectural design +documentation following S-CORE process guidelines. Architectural design +documents describe the software architecture including static and dynamic views. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +ArchitecturalDesignInfo = provider( + doc = "Provider for architectural design artifacts", + fields = { + "static": "Depset of static architecture diagram files (e.g., class diagrams, component diagrams)", + "dynamic": "Depset of dynamic architecture diagram files (e.g., sequence diagrams, activity diagrams)", + "name": "Name of the architectural design target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _architectural_design_impl(ctx): + """Implementation for architectural_design rule. + + Collects architectural design artifacts including static and dynamic + diagrams and provides them through the ArchitecturalDesignInfo provider. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and ArchitecturalDesignInfo + """ + static_files = depset(ctx.files.static) + dynamic_files = depset(ctx.files.dynamic) + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [static_files, dynamic_files], + ) + + return [ + DefaultInfo(files = all_files), + ArchitecturalDesignInfo( + static = static_files, + dynamic = dynamic_files, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = all_files, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_architectural_design = rule( + implementation = _architectural_design_impl, + doc = "Collects architectural design documents and diagrams for S-CORE process compliance", + attrs = { + "static": attr.label_list( + allow_files = [".puml", ".plantuml", ".png", ".svg", ".rst", ".md"], + mandatory = False, + doc = "Static architecture diagrams (class diagrams, component diagrams, etc.)", + ), + "dynamic": attr.label_list( + allow_files = [".puml", ".plantuml", ".png", ".svg", ".rst", ".md"], + mandatory = False, + doc = "Dynamic architecture diagrams (sequence diagrams, activity diagrams, etc.)", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def architectural_design( + name, + static = [], + dynamic = [], + visibility = None): + """Define architectural design following S-CORE process guidelines. + + Architectural design documents describe the software architecture of a + component, including both static and dynamic views. Static views show + the structural organization (classes, components, modules), while dynamic + views show the behavioral aspects (sequences, activities, states). + + Args: + name: The name of the architectural design target. Used as the base + name for all generated targets. + static: Optional list of labels to diagram files (.puml, .plantuml, + .png, .svg) or documentation files (.rst, .md) containing static + architecture views such as class diagrams, component diagrams, + or package diagrams as defined in the S-CORE process. + dynamic: Optional list of labels to diagram files (.puml, .plantuml, + .png, .svg) or documentation files (.rst, .md) containing dynamic + architecture views such as sequence diagrams, activity diagrams, + or state diagrams as defined in the S-CORE process. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main architectural design target providing ArchitecturalDesignInfo + + Example: + ```starlark + architectural_design( + name = "my_architectural_design", + static = [ + "class_diagram.puml", + "component_diagram.puml", + ], + dynamic = [ + "sequence_diagram.puml", + "activity_diagram.puml", + ], + ) + ``` + """ + _architectural_design( + name = name, + static = static, + dynamic = dynamic, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/assumptions_of_use.bzl b/bazel/rules/rules_score/private/assumptions_of_use.bzl new file mode 100644 index 0000000..36c584f --- /dev/null +++ b/bazel/rules/rules_score/private/assumptions_of_use.bzl @@ -0,0 +1,154 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Assumptions of Use build rules for S-CORE projects. + +This module provides macros and rules for defining Assumptions of Use (AoU) +following S-CORE process guidelines. Assumptions of Use define the safety-relevant +operating conditions and constraints for a Safety Element out of Context (SEooC). +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:component_requirements.bzl", "ComponentRequirementsInfo") +load("//bazel/rules/rules_score/private:feature_requirements.bzl", "FeatureRequirementsInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +AssumptionsOfUseInfo = provider( + doc = "Provider for assumptions of use artifacts", + fields = { + "srcs": "Depset of source files containing assumptions of use", + "feature_requirements": "List of FeatureRequirementsInfo providers this AoU traces to", + "name": "Name of the assumptions of use target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _assumptions_of_use_impl(ctx): + """Implementation for assumptions_of_use rule. + + Collects assumptions of use source files and links them to their + parent feature requirements through providers. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and AssumptionsOfUseInfo + """ + srcs = depset(ctx.files.srcs) + + # Collect feature requirements providers + feature_reqs = [] + for feat_req in ctx.attr.feature_requirements: + if FeatureRequirementsInfo in feat_req: + feature_reqs.append(feat_req[FeatureRequirementsInfo]) + + # Collect transitive sphinx sources from feature requirements + transitive = [srcs] + for feat_req in ctx.attr.feature_requirements: + if SphinxSourcesInfo in feat_req: + transitive.append(feat_req[SphinxSourcesInfo].transitive_srcs) + + return [ + DefaultInfo(files = srcs), + AssumptionsOfUseInfo( + srcs = srcs, + feature_requirements = feature_reqs, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = srcs, + transitive_srcs = depset(transitive = transitive), + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_assumptions_of_use = rule( + implementation = _assumptions_of_use_impl, + doc = "Collects Assumptions of Use documents with traceability to feature requirements", + attrs = { + "srcs": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = True, + doc = "Source files containing Assumptions of Use specifications", + ), + "feature_requirements": attr.label_list( + providers = [FeatureRequirementsInfo], + mandatory = False, + doc = "List of feature_requirements targets that these Assumptions of Use trace to", + ), + "component_requirements": attr.label_list( + providers = [ComponentRequirementsInfo], + mandatory = False, + doc = "List of feature_requirements targets that these Assumptions of Use trace to", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def assumptions_of_use( + name, + srcs, + feature_requirement = [], + component_requirements = [], + visibility = None): + """Define Assumptions of Use following S-CORE process guidelines. + + Assumptions of Use (AoU) define the safety-relevant operating conditions + and constraints for a Safety Element out of Context (SEooC). They specify + the conditions under which the component is expected to operate safely + and the responsibilities of the integrator. + + Args: + name: The name of the assumptions of use target. Used as the base + name for all generated targets. + srcs: List of labels to .rst, .md, or .trlc files containing the + Assumptions of Use specifications as defined in the S-CORE + process. + feature_requirement: Optional list of labels to feature_requirements + targets that these Assumptions of Use relate to. Establishes + traceability as defined in the S-CORE process. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main assumptions of use target providing AssumptionsOfUseInfo + + Example: + ```starlark + assumptions_of_use( + name = "my_assumptions_of_use", + srcs = ["assumptions_of_use.rst"], + feature_requirement = [":my_feature_requirements"], + ) + ``` + """ + _assumptions_of_use( + name = name, + srcs = srcs, + feature_requirements = feature_requirement, + component_requirements = component_requirements, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/component.bzl b/bazel/rules/rules_score/private/component.bzl new file mode 100644 index 0000000..4e41b9e --- /dev/null +++ b/bazel/rules/rules_score/private/component.bzl @@ -0,0 +1,150 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Component build rules for S-CORE projects. + +This module provides macros and rules for defining software components +following S-CORE process guidelines. A component consists of multiple units +with associated requirements and tests. +""" + +load("//bazel/rules/rules_score:providers.bzl", "ComponentInfo", "SphinxSourcesInfo") + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _component_impl(ctx): + """Implementation for component rule. + + Collects component requirements, units, and tests and provides them + through the ComponentInfo provider. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and ComponentInfo + """ + + # Collect requirements files from component_requirements targets + requirements_files = [] + for req_target in ctx.attr.requirements: + if SphinxSourcesInfo in req_target: + requirements_files.append(req_target[SphinxSourcesInfo].srcs) + + requirements_depset = depset(transitive = requirements_files) + + # Collect components and tests + components_depset = depset(ctx.attr.components) + tests_depset = depset(ctx.attr.tests) + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [requirements_depset], + ) + + return [ + DefaultInfo(files = all_files), + ComponentInfo( + name = ctx.label.name, + requirements = requirements_depset, + components = components_depset, + tests = tests_depset, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = all_files, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_component = rule( + implementation = _component_impl, + doc = "Defines a software component composed of multiple units for S-CORE process compliance", + attrs = { + "requirements": attr.label_list( + mandatory = True, + doc = "Component requirements artifacts (typically component_requirements targets)", + ), + "components": attr.label_list( + mandatory = True, + doc = "Unit targets that comprise this component", + ), + "tests": attr.label_list( + mandatory = True, + doc = "Component-level integration test targets", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def component( + name, + units = None, + tests = [], + requirements = None, + components = None, + testonly = True, + visibility = None): + """Define a software component following S-CORE process guidelines. + + A component is a collection of related units that together provide + a specific functionality. It consists of: + - Component requirements: Requirements specification for the component + - Implementation: Concrete libraries/binaries that realize the component + - Units: Individual software units that implement the requirements + - Tests: Integration tests that verify the component as a whole + + Args: + name: The name of the component. Used as the target name. + component_requirements: List of labels to component_requirements targets + that define the requirements for this component. + requirements: Alias for component_requirements (use one or the other). + implementation: List of labels to implementation targets (cc_library, + cc_binary, etc.) that realize this component. + units: List of labels to unit targets that comprise this component. + components: Alias for units (use one or the other). + tests: List of labels to Bazel test targets that verify the component + integration. + testonly: If true, only testonly targets can depend on this component. + visibility: Bazel visibility specification for the component target. + + Example: + ```python + component( + name = "kvs_component", + requirements = [":kvs_component_requirements"], + implementation = [":kvs_lib", ":kvs_tool"], + units = [":kvs_unit1", ":kvs_unit2"], + tests = ["//persistency/kvs/tests:score_kvs_component_integration_tests"], + visibility = ["//visibility:public"], + ) + ``` + """ + + _component( + name = name, + requirements = requirements, + components = components, + tests = tests, + testonly = testonly, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/component_requirements.bzl b/bazel/rules/rules_score/private/component_requirements.bzl new file mode 100644 index 0000000..8735354 --- /dev/null +++ b/bazel/rules/rules_score/private/component_requirements.bzl @@ -0,0 +1,128 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Component Requirements build rules for S-CORE projects. + +This module provides macros and rules for defining component requirements +following S-CORE process guidelines. Component requirements are derived from +feature requirements and define the specific requirements for a software component. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:feature_requirements.bzl", "FeatureRequirementsInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +ComponentRequirementsInfo = provider( + doc = "Provider for component requirements artifacts", + fields = { + "srcs": "Depset of source files containing component requirements", + "requirements": "List of FeatureRequirementsInfo providers this component traces to", + "name": "Name of the component requirements target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _component_requirements_impl(ctx): + """Implementation for component_requirements rule. + + Collects component requirements source files and links them to their + parent feature requirements through providers. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and ComponentRequirementsInfo + """ + srcs = depset(ctx.files.srcs) + + # Collect feature requirements providers + feature_reqs = [] + + # Collect transitive sphinx sources from feature requirements + transitive = [srcs] + + return [ + DefaultInfo(files = srcs), + ComponentRequirementsInfo( + srcs = srcs, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = srcs, + transitive_srcs = depset(transitive = transitive), + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_component_requirements = rule( + implementation = _component_requirements_impl, + doc = "Collects component requirements documents with traceability to feature requirements", + attrs = { + "srcs": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = True, + doc = "Source files containing component requirements specifications", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def component_requirements( + name, + srcs, + visibility = None): + """Define component requirements following S-CORE process guidelines. + + Component requirements are derived from feature requirements and define + the specific functional and safety requirements for a software component. + They establish traceability from high-level features to component-level + specifications. + + Args: + name: The name of the component requirements target. Used as the base + name for all generated targets. + srcs: List of labels to .rst, .md, or .trlc files containing the + component requirements specifications as defined in the S-CORE + process. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main component requirements target providing ComponentRequirementsInfo + + Example: + ```starlark + component_requirements( + name = "my_component_requirements", + srcs = ["component_requirements.rst"], + ) + ``` + """ + _component_requirements( + name = name, + srcs = srcs, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/dependability_analysis.bzl b/bazel/rules/rules_score/private/dependability_analysis.bzl new file mode 100644 index 0000000..0d31738 --- /dev/null +++ b/bazel/rules/rules_score/private/dependability_analysis.bzl @@ -0,0 +1,183 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Dependability Analysis build rules for S-CORE projects. + +This module provides macros and rules for defining dependability analysis +documentation following S-CORE process guidelines. Dependability analysis +combines safety analysis with dependent failure analysis (DFA) to provide +a comprehensive view of component reliability and safety. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:architectural_design.bzl", "ArchitecturalDesignInfo") +load("//bazel/rules/rules_score/private:safety_analysis.bzl", "AnalysisInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +DependabilityAnalysisInfo = provider( + doc = "Provider for dependability analysis artifacts", + fields = { + "safety_analysis": "List of AnalysisInfo providers", + "security_analysis": "List of AnalysisInfo providers", + "arch_design": "ArchitecturalDesignInfo provider for linked architectural design", + "name": "Name of the dependability analysis target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _dependability_analysis_impl(ctx): + """Implementation for dependability_analysis rule. + + Collects dependability analysis artifacts including safety analysis results + and dependent failure analysis, linking them to architectural design. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and DependabilityAnalysisInfo + """ + security_analysis_files = depset(ctx.files.dfa) + safety_analysis_files = depset(ctx.files.fmea) + + # Get architectural design provider if available + arch_design_info = None + if ctx.attr.arch_design and ArchitecturalDesignInfo in ctx.attr.arch_design: + arch_design_info = ctx.attr.arch_design[ArchitecturalDesignInfo] + + # Combine all files for DefaultInfo + all_files = depset(transitive = [security_analysis_files, safety_analysis_files]) + + # Collect transitive sphinx sources from safety analysis and architectural design + transitive = [all_files] + for sa in ctx.attr.security_analysis: + if SphinxSourcesInfo in sa: + transitive.append(sa[SphinxSourcesInfo].transitive_srcs) + if ctx.attr.arch_design and SphinxSourcesInfo in ctx.attr.arch_design: + transitive.append(ctx.attr.arch_design[SphinxSourcesInfo].transitive_srcs) + + return [ + DefaultInfo(files = all_files), + DependabilityAnalysisInfo( + safety_analysis = security_analysis_files, + security_analysis = security_analysis_files, + arch_design = arch_design_info, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = depset(transitive = transitive), + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_dependability_analysis = rule( + implementation = _dependability_analysis_impl, + doc = "Collects dependability analysis documents for S-CORE process compliance", + attrs = { + "security_analysis": attr.label_list( + # TODO: change provider name + providers = [AnalysisInfo], + mandatory = False, + doc = "List of safety_analysis targets containing FMEA, FMEDA, FTA results", + ), + "dfa": attr.label_list( + allow_files = [".rst", ".md"], + mandatory = False, + doc = "Dependent Failure Analysis (DFA) documentation", + ), + "fmea": attr.label_list( + allow_files = [".rst", ".md"], + mandatory = False, + doc = "Failure Mode and Effects Analysis (FMEA) documentation", + ), + "arch_design": attr.label( + providers = [ArchitecturalDesignInfo], + mandatory = False, + doc = "Reference to architectural_design target for traceability", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def dependability_analysis( + name, + safety_analysis = [], + dfa = [], + fmea = [], + arch_design = None, + visibility = None): + """Define dependability analysis following S-CORE process guidelines. + + Dependability analysis provides a comprehensive view of component + reliability and safety by combining safety analysis results with + dependent failure analysis (DFA). It establishes traceability to + the architectural design for complete safety argumentation. + + Args: + name: The name of the dependability analysis target. Used as the base + name for all generated targets. + safety_analysis: Optional list of labels to safety_analysis targets + containing the results of FMEA, FMEDA, FTA, or other safety + analysis methods as defined in the S-CORE process. + dfa: Optional list of labels to .rst or .md files containing + Dependent Failure Analysis (DFA) documentation. DFA identifies + failures that could affect multiple components or functions + as defined in the S-CORE process. + fmea: Optional list of labels to .rst or .md files containing + Failure Mode and Effects Analysis (FMEA) documentation. FMEA + identifies potential failure modes and their effects on the + system as defined in the S-CORE process. + arch_design: Optional label to an architectural_design target for + establishing traceability between dependability analysis and + the software architecture. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main dependability analysis target providing DependabilityAnalysisInfo + + Example: + ```starlark + dependability_analysis( + name = "my_dependability_analysis", + safety_analysis = [":my_safety_analysis"], + dfa = ["dependent_failure_analysis.rst"], + fmea = ["failure_mode_effects_analysis.rst"], + arch_design = ":my_architectural_design", + ) + ``` + """ + _dependability_analysis( + name = name, + # TODO: this needs to be fixed. A security is not a safety_analysis. + # we leave it for now for compatibility reasons until there is alignment on the a + # attributes of a dependability analysis + security_analysis = safety_analysis, + dfa = dfa, + fmea = fmea, + arch_design = arch_design, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/dependable_element.bzl b/bazel/rules/rules_score/private/dependable_element.bzl new file mode 100644 index 0000000..da368f9 --- /dev/null +++ b/bazel/rules/rules_score/private/dependable_element.bzl @@ -0,0 +1,789 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Dependable Element build rules for S-CORE projects. + +This module provides macros and rules for defining dependable elements (Safety +Elements out of Context - SEooC) following S-CORE process guidelines. A dependable +element is a safety-critical component with comprehensive documentation including +assumptions of use, requirements, design, and safety analysis. +""" + +load( + "//bazel/rules/rules_score:providers.bzl", + "ComponentInfo", + "SphinxSourcesInfo", + "UnitInfo", +) +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "sphinx_module") + +# ============================================================================ +# Template Constants +# ============================================================================ + +_UNIT_DESIGN_SECTION_TEMPLATE = """Unit Design +----------- + +.. toctree:: + :maxdepth: 2 + +{design_refs}""" + +_IMPLEMENTATION_SECTION_TEMPLATE = """Implementation +-------------- + +This {entity_type} is implemented by the following targets: + +{implementation_list}""" + +_TESTS_SECTION_TEMPLATE = """Tests +----- + +This {entity_type} is verified by the following test targets: + +{test_list}""" + +_COMPONENT_REQUIREMENTS_SECTION_TEMPLATE = """Component Requirements +---------------------- + +.. toctree:: + :maxdepth: 2 + +{requirements_refs}""" + +_COMPONENT_UNITS_SECTION_TEMPLATE = """Units +----- + +This component is composed of the following units: + +{unit_links}""" + +_UNIT_TEMPLATE = """ + +Unit: {unit_name} +{underline} + +{design_section}{implementation_section}{tests_section}""" + +_COMPONENT_TEMPLATE = """ + +Component: {component_name} +{underline} + +{requirements_section}{units_section}{implementation_section}{tests_section}""" + +# ============================================================================ +# Helper Functions for Documentation Generation +# ============================================================================ + +def _get_sphinx_files(target): + return target[SphinxSourcesInfo].srcs.to_list() + +def _filter_doc_files(files): + """Filter files to only include documentation files. + + Args: + files: List of files to filter + + Returns: + List of documentation files + """ + return [f for f in files if f.extension in ["rst", "md", "puml", "plantuml", "png", "svg"]] + +def _find_common_directory(files): + """Find the longest common directory path for a list of files. + + Args: + files: List of File objects + + Returns: + String representing the common directory path, or empty string if none + """ + if not files: + return "" + + # Get all directory paths + dirs = [f.dirname for f in files] + + if not dirs: + return "" + + # Start with first directory + common = dirs[0] + + # Iterate through all directories to find common prefix + for d in dirs[1:]: + # Find common prefix between common and d + # Split into path components + common_parts = common.split("/") + d_parts = d.split("/") + + # Find matching prefix + new_common_parts = [] + for i in range(min(len(common_parts), len(d_parts))): + if common_parts[i] == d_parts[i]: + new_common_parts.append(common_parts[i]) + else: + break + + common = "/".join(new_common_parts) + + if not common: + break + + return common + +def _compute_relative_path(file, common_dir): + """Compute relative path from common directory to file. + + Args: + file: File object + common_dir: Common directory path string + + Returns: + String containing the relative path + """ + file_dir = file.dirname + + if not common_dir: + return file.basename + + if not file_dir.startswith(common_dir): + return file.basename + + if file_dir == common_dir: + return file.basename + + relative_subdir = file_dir[len(common_dir):].lstrip("/") + return relative_subdir + "/" + file.basename + +def _is_document_file(file): + """Check if file should be included in toctree. + + Args: + file: File object + + Returns: + Boolean indicating if file is a document (.rst or .md) + """ + return file.extension in ["rst", "md"] + +def _create_artifact_symlink(ctx, artifact_name, artifact_file, relative_path): + """Create symlink for artifact file in output directory. + + Args: + ctx: Rule context + artifact_name: Name of artifact type (e.g., "architectural_design") + artifact_file: Source file + relative_path: Relative path within artifact directory + + Returns: + Declared output file + """ + output_file = ctx.actions.declare_file( + ctx.label.name + "/" + artifact_name + "/" + relative_path, + ) + + ctx.actions.symlink( + output = output_file, + target_file = artifact_file, + ) + + return output_file + +def _process_artifact_files(ctx, artifact_name, label): + """Process all files from a single label for a given artifact type. + + Args: + ctx: Rule context + artifact_name: Name of artifact type + label: Label to process + + Returns: + Tuple of (output_files, index_references) + """ + output_files = [] + index_refs = [] + + # Get and filter files + all_files = _get_sphinx_files(label) + doc_files = _filter_doc_files(all_files) + + if not doc_files: + return (output_files, index_refs) + + # Find common directory to preserve hierarchy + common_dir = _find_common_directory(doc_files) + + # Process each file + for artifact_file in doc_files: + # Compute paths + relative_path = _compute_relative_path(artifact_file, common_dir) + + # Create symlink + output_file = _create_artifact_symlink( + ctx, + artifact_name, + artifact_file, + relative_path, + ) + output_files.append(output_file) + + # Add to index if it's a document file + if _is_document_file(artifact_file): + doc_ref = (artifact_name + "/" + relative_path) \ + .replace(".rst", "") \ + .replace(".md", "") + index_refs.append(doc_ref) + + return (output_files, index_refs) + +def _process_artifact_type(ctx, artifact_name): + """Process all labels for a given artifact type. + + Args: + ctx: Rule context + artifact_name: Name of artifact type (e.g., "architectural_design") + + Returns: + Tuple of (output_files, index_references) + """ + output_files = [] + index_refs = [] + + attr_list = getattr(ctx.attr, artifact_name) + if not attr_list: + return (output_files, index_refs) + + # Process each label + for label in attr_list: + label_outputs, label_refs = _process_artifact_files( + ctx, + artifact_name, + label, + ) + output_files.extend(label_outputs) + index_refs.extend(label_refs) + + return (output_files, index_refs) + +def _process_deps(ctx): + """Process deps to generate references to submodule documentation. + + The HTML merger in sphinx_module will copy the HTML directories from deps. + We generate RST bullet list with links to those HTML directories. + + Args: + ctx: Rule context + + Returns: + String containing RST-formatted bullet list of links + """ + if not ctx.attr.deps: + return "" + + # Generate RST bullet list with links to submodule HTML + links = [] + for dep in ctx.attr.deps: + dep_name = dep.label.name + + # Create a link to the index.html that will be merged + # Format: * `Module Name `_ + # Use underscores in name for readability, convert to spaces for display + display_name = dep_name.replace("_", " ").title() + links.append("* `{} <{}/index.html>`_".format(display_name, dep_name)) + + return "\n".join(links) + +def _get_component_names(components): + return [c.label.name for c in components] + +def _collect_units_recursive(components, visited_units = None): + """Iteratively collect all units from components, handling nested components. + + Uses a stack-based approach to avoid Starlark recursion limitations. + + Args: + components: List of component targets + visited_units: Dict of unit names already visited (for deduplication) + + Returns: + Dict mapping unit names to unit targets + """ + if visited_units == None: + visited_units = {} + + # Process components iteratively using a work queue approach + # Since Starlark doesn't support while loops, we use a for loop with a large enough range + # and track our own index + to_process = [] + components + + for _ in range(1000): # Max depth to prevent infinite loops + if not to_process: + break + comp_target = to_process.pop(0) + + # Check if this is a component with ComponentInfo + if ComponentInfo in comp_target: + comp_info = comp_target[ComponentInfo] + + # Process nested components + nested_components = comp_info.components.to_list() + for nested in nested_components: + # Check if nested item is a unit or component + if UnitInfo in nested: + unit_name = nested.label.name + if unit_name not in visited_units: + visited_units[unit_name] = nested + elif ComponentInfo in nested: + # Add nested component to queue for processing + to_process.append(nested) + + # Check if this is directly a unit + elif UnitInfo in comp_target: + unit_name = comp_target.label.name + if unit_name not in visited_units: + visited_units[unit_name] = comp_target + + return visited_units + +def _generate_unit_doc(ctx, unit_target, unit_name): + """Generate RST documentation for a single unit. + + Args: + ctx: Rule context + unit_target: The unit target + unit_name: Name of the unit + + Returns: + Tuple of (rst_file, list_of_output_files) + """ + unit_info = unit_target[UnitInfo] + + # Create RST file for this unit + unit_rst = ctx.actions.declare_file(ctx.label.name + "/units/" + unit_name + ".rst") + + # Collect design files - unit_design depset contains File objects + design_files = [] + design_refs = [] + if unit_info.unit_design: + doc_files = _filter_doc_files(unit_info.unit_design.to_list()) + + if doc_files: + # Find common directory + common_dir = _find_common_directory(doc_files) + + for f in doc_files: + relative_path = _compute_relative_path(f, common_dir) + output_file = _create_artifact_symlink( + ctx, + "units/" + unit_name + "_design", + f, + relative_path, + ) + design_files.append(output_file) + + if _is_document_file(f): + doc_ref = ("units/" + unit_name + "_design/" + relative_path) \ + .replace(".rst", "") \ + .replace(".md", "") + design_refs.append(" " + doc_ref) + + # Collect implementation target names + impl_names = [] + if unit_info.implementation: + for impl in unit_info.implementation.to_list(): + impl_names.append(impl.label) + + # Collect test target names + test_names = [] + if unit_info.tests: + for test in unit_info.tests.to_list(): + test_names.append(test.label) + + # Generate RST content using template + underline = "=" * (len("Unit: " + unit_name)) + + # Generate sections from template constants + design_section = "" + if design_refs: + design_section = "\n" + _UNIT_DESIGN_SECTION_TEMPLATE.format( + design_refs = "\n".join(design_refs), + ) + "\n" + + implementation_section = "" + if impl_names: + impl_list = "\n".join(["- ``" + str(impl) + "``" for impl in impl_names]) + implementation_section = "\n" + _IMPLEMENTATION_SECTION_TEMPLATE.format( + entity_type = "unit", + implementation_list = impl_list, + ) + "\n" + + tests_section = "" + if test_names: + test_list = "\n".join(["- ``" + str(test) + "``" for test in test_names]) + tests_section = "\n" + _TESTS_SECTION_TEMPLATE.format( + entity_type = "unit", + test_list = test_list, + ) + "\n" + + # Generate unit RST content from template constant + unit_content = _UNIT_TEMPLATE.format( + unit_name = unit_name, + underline = underline, + design_section = design_section, + implementation_section = implementation_section, + tests_section = tests_section, + ) + + ctx.actions.write( + output = unit_rst, + content = unit_content, + ) + + return (unit_rst, design_files) + +def _generate_component_doc(ctx, comp_target, comp_name, unit_names): + """Generate RST documentation for a single component. + + Args: + ctx: Rule context + comp_target: The component target + comp_name: Name of the component + unit_names: List of unit names that belong to this component + + Returns: + Tuple of (rst_file, list_of_output_files) + """ + comp_info = comp_target[ComponentInfo] + + # Create RST file for this component + comp_rst = ctx.actions.declare_file(ctx.label.name + "/components/" + comp_name + ".rst") + + # Collect requirements files - requirements depset contains File objects + req_files = [] + req_refs = [] + if comp_info.requirements: + doc_files = _filter_doc_files(comp_info.requirements.to_list()) + + if doc_files: + # Find common directory + common_dir = _find_common_directory(doc_files) + + for f in doc_files: + relative_path = _compute_relative_path(f, common_dir) + output_file = _create_artifact_symlink( + ctx, + "components/" + comp_name + "_requirements", + f, + relative_path, + ) + req_files.append(output_file) + + if _is_document_file(f): + doc_ref = ("components/" + comp_name + "_requirements/" + relative_path) \ + .replace(".rst", "") \ + .replace(".md", "") + req_refs.append(" " + doc_ref) + + # Collect test target names + test_names = [] + if comp_info.tests: + for test in comp_info.tests.to_list(): + test_names.append(test.label) + + # Generate RST content using template + underline = "=" * (len("Component: " + comp_name)) + + # Generate sections from template constants + requirements_section = "" + if req_refs: + requirements_section = "\n" + _COMPONENT_REQUIREMENTS_SECTION_TEMPLATE.format( + requirements_refs = "\n".join(req_refs), + ) + "\n" + + units_section = "" + if unit_names: + unit_links = "\n".join(["- :doc:`../units/" + unit_name + "`" for unit_name in unit_names]) + units_section = "\n" + _COMPONENT_UNITS_SECTION_TEMPLATE.format( + unit_links = unit_links, + ) + "\n" + + tests_section = "" + if test_names: + test_list = "\n".join(["- ``" + str(test) + "``" for test in test_names]) + tests_section = "\n" + _TESTS_SECTION_TEMPLATE.format( + entity_type = "component", + test_list = test_list, + ) + "\n" + + # Generate component RST content from template constant + component_content = _COMPONENT_TEMPLATE.format( + component_name = comp_name, + underline = underline, + requirements_section = requirements_section, + units_section = units_section, + implementation_section = "", + tests_section = tests_section, + ) + + ctx.actions.write( + output = comp_rst, + content = component_content, + ) + + return (comp_rst, req_files) + +# ============================================================================ +# Index Generation Rule Implementation +# ============================================================================ + +def _dependable_element_index_impl(ctx): + """Generate index.rst file with references to all dependable element artifacts. + + This rule creates a Sphinx index.rst file that includes references to all + the documentation artifacts for the dependable element. + + Args: + ctx: Rule context + + Returns: + DefaultInfo provider with generated index.rst file + """ + + # Declare output index file + index_rst = ctx.actions.declare_file(ctx.label.name + "/index.rst") + output_files = [index_rst] + + # Define artifacts + # Note: "requirements" can contain both component_requirements and feature_requirements + artifact_types = [ + "components", + "assumptions_of_use", + "requirements", + "architectural_design", + "dependability_analysis", + "checklists", + ] + + # Process each artifact type + artifacts_by_type = {} + for artifact_name in artifact_types: + files, refs = _process_artifact_type(ctx, artifact_name) + output_files.extend(files) + artifacts_by_type[artifact_name] = refs + + # Collect all units recursively from components + all_units = _collect_units_recursive(ctx.attr.components) + + # Generate documentation for each unit + unit_refs = [] + for unit_name, unit_target in all_units.items(): + unit_rst, unit_files = _generate_unit_doc(ctx, unit_target, unit_name) + output_files.append(unit_rst) + output_files.extend(unit_files) + unit_refs.append(" units/" + unit_name) + + # Generate documentation for each component + component_refs = [] + for comp_target in ctx.attr.components: + if ComponentInfo in comp_target: + comp_info = comp_target[ComponentInfo] + comp_name = comp_info.name + + # Collect units that belong to this component + comp_unit_names = [] + for nested in comp_info.components.to_list(): + if UnitInfo in nested: + comp_unit_names.append(nested.label.name) + elif ComponentInfo in nested: + # For nested components, collect their units recursively + nested_units = _collect_units_recursive([nested]) + comp_unit_names.extend(nested_units.keys()) + + comp_rst, comp_files = _generate_component_doc(ctx, comp_target, comp_name, comp_unit_names) + output_files.append(comp_rst) + output_files.extend(comp_files) + component_refs.append(" components/" + comp_name) + + # Process dependencies (submodules) + deps_links = _process_deps(ctx) + + # Generate index file from template + title = ctx.attr.module_name + underline = "=" * len(title) + + ctx.actions.expand_template( + template = ctx.file.template, + output = index_rst, + substitutions = { + "{title}": title, + "{underline}": underline, + "{description}": ctx.attr.description, + "{units}": "\n".join(unit_refs) if unit_refs else " (none)", + "{components}": "\n".join(component_refs) if component_refs else " (none)", + "{assumptions_of_use}": "\n ".join(artifacts_by_type["assumptions_of_use"]), + "{component_requirements}": "\n ".join(artifacts_by_type["requirements"]), + "{architectural_design}": "\n ".join(artifacts_by_type["architectural_design"]), + "{dependability_analysis}": "\n ".join(artifacts_by_type["dependability_analysis"]), + "{checklists}": "\n ".join(artifacts_by_type["checklists"]), + "{submodules}": deps_links, + }, + ) + + return [ + DefaultInfo(files = depset(output_files)), + ] + +_dependable_element_index = rule( + implementation = _dependable_element_index_impl, + doc = "Generates index.rst file with references to dependable element artifacts", + attrs = { + "module_name": attr.string( + mandatory = True, + doc = "Name of the dependable element module (used as document title)", + ), + "description": attr.string( + mandatory = True, + doc = "Description of the dependable element. Supports RST formatting.", + ), + "assumptions_of_use": attr.label_list( + mandatory = True, + doc = "Assumptions of Use targets or files.", + ), + "requirements": attr.label_list( + mandatory = True, + doc = "Requirements targets (component_requirements, feature_requirements, etc.).", + ), + "architectural_design": attr.label_list( + mandatory = True, + doc = "Architectural design targets or files.", + ), + "dependability_analysis": attr.label_list( + mandatory = True, + doc = "Dependability analysis targets or files.", + ), + "components": attr.label_list( + default = [], + doc = "Safety checklists targets or files.", + ), + "tests": attr.label_list( + default = [], + doc = "Integration tests for the dependable element.", + ), + "checklists": attr.label_list( + default = [], + doc = "Safety checklists targets or files.", + ), + "template": attr.label( + allow_single_file = [".rst"], + mandatory = True, + doc = "Template file for generating index.rst", + ), + "deps": attr.label_list( + default = [], + doc = "Dependencies on other dependable element modules (submodules).", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def dependable_element( + name, + description, + assumptions_of_use, + requirements, + architectural_design, + dependability_analysis, + components, + tests, + checklists = [], + deps = [], + sphinx = Label("@score_tooling//bazel/rules/rules_score:score_build"), + testonly = True, + visibility = None): + """Define a dependable element (Safety Element out of Context - SEooC) following S-CORE process guidelines. + + This macro creates a complete dependable element with integrated documentation + generation. It generates an index.rst file referencing all artifacts and builds + HTML documentation using the sphinx_module infrastructure. + + A dependable element is a safety-critical component that can be developed + independently and integrated into different systems. It includes comprehensive + documentation covering all aspects required for safety certification. + + Args: + name: The name of the dependable element. Used as the base name for + all generated targets. + description: String containing a high-level description of the element. + This text provides context about what the element does and its purpose. + Supports RST formatting. + assumptions_of_use: List of labels to assumptions_of_use targets that + define the safety-relevant operating conditions and constraints. + requirements: List of labels to requirements targets (component_requirements, + feature_requirements, etc.) that define functional and safety requirements. + architectural_design: List of labels to architectural_design targets that + describe the software architecture and design decisions. + dependability_analysis: List of labels to dependability_analysis targets + containing safety analysis results (FMEA, FMEDA, FTA, DFA, etc.). + components: List of labels to component and/or unit targets that implement + this dependable element. + tests: List of labels to Bazel test targets that verify the dependable + element at the system level (integration tests, system tests). + checklists: Optional list of labels to .rst or .md files containing + safety checklists and verification documents. + deps: Optional list of other module targets this element depends on. + Cross-references will work automatically. + sphinx: Label to sphinx build binary. Default: //bazel/rules/rules_score:score_build + testonly: If True, only testonly targets can depend on this target. + visibility: Bazel visibility specification for the dependable element target. + + Generated Targets: + _index: Internal rule that generates index.rst and copies artifacts + : Main dependable element target (sphinx_module) with HTML documentation + _needs: Sphinx-needs JSON target (created by sphinx_module for cross-referencing) + + """ + + # Step 1: Generate index.rst and collect all artifacts + _dependable_element_index( + name = name + "_index", + module_name = name, + description = description, + template = Label("//bazel/rules/rules_score:templates/seooc_index.template.rst"), + assumptions_of_use = assumptions_of_use, + requirements = requirements, + components = components, + architectural_design = architectural_design, + dependability_analysis = dependability_analysis, + checklists = checklists, + tests = tests, + deps = deps, + testonly = testonly, + visibility = ["//visibility:private"], + ) + + # Step 2: Create sphinx_module using generated index and artifacts + sphinx_module( + name = name, + srcs = [":" + name + "_index"], + index = ":" + name + "_index", + deps = deps, + sphinx = sphinx, + testonly = testonly, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/feature_requirements.bzl b/bazel/rules/rules_score/private/feature_requirements.bzl new file mode 100644 index 0000000..fd8dec1 --- /dev/null +++ b/bazel/rules/rules_score/private/feature_requirements.bzl @@ -0,0 +1,119 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Feature Requirements build rules for S-CORE projects. + +This module provides macros and rules for defining feature requirements +following S-CORE process guidelines. Feature requirements describe the +high-level features that a software component must implement. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +FeatureRequirementsInfo = provider( + doc = "Provider for feature requirements artifacts", + fields = { + "srcs": "Depset of source files containing feature requirements", + "name": "Name of the feature requirements target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _feature_requirements_impl(ctx): + """Implementation for feature_requirements rule. + + Collects feature requirements source files and provides them through + the FeatureRequirementsInfo provider. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and FeatureRequirementsInfo + """ + srcs = depset(ctx.files.srcs) + + return [ + DefaultInfo(files = srcs), + FeatureRequirementsInfo( + srcs = srcs, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = srcs, + transitive_srcs = srcs, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_feature_requirements = rule( + implementation = _feature_requirements_impl, + doc = "Collects feature requirements documents for S-CORE process compliance", + attrs = { + "srcs": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = True, + doc = "Source files containing feature requirements specifications", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def feature_requirements( + name, + srcs, + visibility = None): + """Define feature requirements following S-CORE process guidelines. + + Feature requirements describe the high-level features and capabilities + that a software component must implement. They serve as the top-level + requirements that drive component-level requirements. + + Args: + name: The name of the feature requirements target. Used as the base + name for all generated targets. + srcs: List of labels to .rst, .md, or .trlc files containing the + feature requirements specifications as defined in the S-CORE + process. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main feature requirements target providing FeatureRequirementsInfo + + Example: + ```starlark + feature_requirements( + name = "my_feature_requirements", + srcs = ["feature_requirements.rst"], + ) + ``` + """ + _feature_requirements( + name = name, + srcs = srcs, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/safety_analysis.bzl b/bazel/rules/rules_score/private/safety_analysis.bzl new file mode 100644 index 0000000..9614e25 --- /dev/null +++ b/bazel/rules/rules_score/private/safety_analysis.bzl @@ -0,0 +1,175 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Safety Analysis build rules for S-CORE projects. + +This module provides macros and rules for defining safety analysis documentation +following S-CORE process guidelines. Safety analysis includes failure mode analysis, +control measures, fault tree analysis, and other safety-related artifacts. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo") +load("//bazel/rules/rules_score/private:architectural_design.bzl", "ArchitecturalDesignInfo") + +# ============================================================================ +# Provider Definition +# ============================================================================ + +AnalysisInfo = provider( + doc = "Provider for safety analysis artifacts", + fields = { + "controlmeasures": "Depset of control measures documentation or requirements", + "failuremodes": "Depset of failure modes documentation or requirements", + "fta": "Depset of Fault Tree Analysis diagrams", + "arch_design": "ArchitecturalDesignInfo provider for linked architectural design", + "name": "Name of the safety analysis target", + }, +) + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _analysis_impl(ctx): + """Implementation for safety_analysis rule. + + Collects safety analysis artifacts including control measures, failure modes, + and fault tree analysis diagrams, linking them to architectural design. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and AnalysisInfo + """ + controlmeasures = depset(ctx.files.controlmeasures) + failuremodes = depset(ctx.files.failuremodes) + fta = depset(ctx.files.fta) + + # Get architectural design provider if available + arch_design_info = None + if ctx.attr.arch_design and ArchitecturalDesignInfo in ctx.attr.arch_design: + arch_design_info = ctx.attr.arch_design[ArchitecturalDesignInfo] + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [controlmeasures, failuremodes, fta], + ) + + # Collect transitive sphinx sources from architectural design + transitive = [all_files] + if ctx.attr.arch_design and SphinxSourcesInfo in ctx.attr.arch_design: + transitive.append(ctx.attr.arch_design[SphinxSourcesInfo].transitive_srcs) + + return [ + DefaultInfo(files = all_files), + AnalysisInfo( + controlmeasures = controlmeasures, + failuremodes = failuremodes, + fta = fta, + arch_design = arch_design_info, + name = ctx.label.name, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = depset(transitive = transitive), + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_analysis = rule( + implementation = _analysis_impl, + doc = "Collects safety analysis documents for S-CORE process compliance", + attrs = { + "controlmeasures": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = False, + doc = "Control measures documentation or requirements targets (can be AoUs or requirements)", + ), + "failuremodes": attr.label_list( + allow_files = [".rst", ".md", ".trlc"], + mandatory = False, + doc = "Failure modes documentation or requirements targets", + ), + "fta": attr.label_list( + allow_files = [".puml", ".plantuml", ".png", ".svg"], + mandatory = False, + doc = "Fault Tree Analysis (FTA) diagrams", + ), + "arch_design": attr.label( + providers = [ArchitecturalDesignInfo], + mandatory = False, + doc = "Reference to architectural_design target for traceability", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def safety_analysis( + name, + controlmeasures = [], + failuremodes = [], + fta = [], + arch_design = None, + visibility = None): + """Define safety analysis following S-CORE process guidelines. + + Safety analysis documents the safety-related analysis of a component, + including failure mode and effects analysis (FMEA/FMEDA), fault tree + analysis (FTA), and control measures that mitigate identified risks. + + Args: + name: The name of the safety analysis target. Used as the base + name for all generated targets. + controlmeasures: Optional list of labels to documentation files or + requirements targets containing control measures that mitigate + identified failure modes. Can reference Assumptions of Use or + requirements as defined in the S-CORE process. + failuremodes: Optional list of labels to documentation files or + requirements targets containing identified failure modes as + defined in the S-CORE process. + fta: Optional list of labels to Fault Tree Analysis diagram files + (.puml, .plantuml, .png, .svg) as defined in the S-CORE process. + arch_design: Optional label to an architectural_design target for + establishing traceability between safety analysis and architecture. + visibility: Bazel visibility specification for the generated targets. + + Generated Targets: + : Main safety analysis target providing AnalysisInfo + + Example: + ```starlark + safety_analysis( + name = "my_safety_analysis", + controlmeasures = [":my_control_measures"], + failuremodes = [":my_failure_modes"], + fta = ["fault_tree.puml"], + arch_design = ":my_architectural_design", + ) + ``` + """ + _analysis( + name = name, + controlmeasures = controlmeasures, + failuremodes = failuremodes, + fta = fta, + arch_design = arch_design, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/sphinx_module.bzl b/bazel/rules/rules_score/private/sphinx_module.bzl new file mode 100644 index 0000000..2ab3d29 --- /dev/null +++ b/bazel/rules/rules_score/private/sphinx_module.bzl @@ -0,0 +1,302 @@ +# ====================================================================================== +# Providers +# ====================================================================================== + +SphinxModuleInfo = provider( + doc = "Provider for Sphinx HTML module documentation", + fields = { + "html_dir": "Directory containing HTML files", + }, +) + +SphinxNeedsInfo = provider( + doc = "Provider for sphinx-needs info", + fields = { + "needs_json_file": "Direct needs.json file for this module", + "needs_json_files": "Depset of needs.json files including transitive dependencies", + }, +) + +# ====================================================================================== +# Helpers +# ====================================================================================== +def _create_config_py(ctx): + """Get or generate the conf.py configuration file. + + Args: + ctx: Rule context + """ + if ctx.attr.config: + config_file = ctx.attr.config.files.to_list()[0] + else: + config_file = ctx.actions.declare_file(ctx.label.name + "/conf.py") + template = ctx.file._config_template + + # Read template and substitute PROJECT_NAME + ctx.actions.expand_template( + template = template, + output = config_file, + substitutions = { + "{PROJECT_NAME}": ctx.label.name.replace("_", " ").title(), + }, + ) + return config_file + +# ====================================================================================== +# Common attributes for Sphinx rules +# ====================================================================================== +sphinx_rule_attrs = { + "srcs": attr.label_list( + allow_files = True, + doc = "List of source files for the Sphinx documentation.", + ), + "sphinx": attr.label( + doc = "The Sphinx build binary to use.", + mandatory = True, + executable = True, + cfg = "exec", + ), + "config": attr.label( + allow_files = [".py"], + doc = "Configuration file (conf.py) for the Sphinx documentation. If not provided, a default config will be generated.", + mandatory = False, + ), + "index": attr.label( + allow_files = [".rst"], + doc = "Index file (index.rst) for the Sphinx documentation.", + mandatory = True, + ), + "deps": attr.label_list( + doc = "List of other sphinx_module targets this module depends on for intersphinx.", + ), + "_config_template": attr.label( + default = Label("//bazel/rules/rules_score:templates/conf.template.py"), + allow_single_file = True, + doc = "Template for generating default conf.py", + ), + "_html_merge_tool": attr.label( + default = Label("//bazel/rules/rules_score:sphinx_html_merge"), + executable = True, + cfg = "exec", + doc = "Tool for merging HTML directories", + ), +} + +# ====================================================================================== +# Rule implementations +# ====================================================================================== +def _score_needs_impl(ctx): + output_path = ctx.label.name.replace("_needs", "") + "/needs.json" + needs_output = ctx.actions.declare_file(output_path) + + # Get config file (generate or use provided) + config_file = _create_config_py(ctx) + + # Phase 1: Build needs.json (without external needs) + needs_inputs = ctx.files.srcs + [config_file] + + if ctx.attr.config: + needs_inputs = needs_inputs + ctx.files.config + + needs_args = [ + "--index_file", + ctx.attr.index.files.to_list()[0].path, + "--output_dir", + needs_output.dirname, + "--config", + config_file.path, + "--builder", + "needs", + ] + + ctx.actions.run( + inputs = needs_inputs, + outputs = [needs_output], + arguments = needs_args, + progress_message = "Generating needs.json for: %s" % ctx.label.name, + executable = ctx.executable.sphinx, + ) + + transitive_needs = [dep[SphinxNeedsInfo].needs_json_files for dep in ctx.attr.deps if SphinxNeedsInfo in dep] + needs_json_files = depset([needs_output], transitive = transitive_needs) + + return [ + DefaultInfo( + files = needs_json_files, + ), + SphinxNeedsInfo( + needs_json_file = needs_output, # Direct file only + needs_json_files = needs_json_files, # Transitive depset + ), + ] + +def _score_html_impl(ctx): + """Implementation for building a Sphinx module with two-phase build. + + Phase 1: Generate needs.json for this module and collect from all deps + Phase 2: Generate HTML with external needs and merge all dependency HTML + """ + + # Collect all transitive dependencies with deduplication + modules = [] + + needs_external_needs = {} + for dep in ctx.attr.needs: + if SphinxNeedsInfo in dep: + dep_name = dep.label.name.replace("_needs", "") + needs_external_needs[dep.label.name] = { + "base_url": dep_name, # Relative path to the subdirectory where dep HTML is copied + "json_path": dep[SphinxNeedsInfo].needs_json_file.path, # Use direct file + "id_prefix": "", + "css_class": "", + } + + for dep in ctx.attr.deps: + if SphinxModuleInfo in dep: + modules.extend([dep[SphinxModuleInfo].html_dir]) + + needs_external_needs_json = ctx.actions.declare_file(ctx.label.name + "/needs_external_needs.json") + + ctx.actions.write( + output = needs_external_needs_json, + content = json.encode_indent(needs_external_needs, indent = " "), + ) + + # Read template and substitute PROJECT_NAME + config_file = ctx.actions.declare_file(ctx.label.name + "/conf.py") + template = ctx.file._config_template + + ctx.actions.expand_template( + template = template, + output = config_file, + substitutions = { + "{PROJECT_NAME}": ctx.label.name.replace("_", " ").title(), + }, + ) + + # Build HTML with external needs + html_inputs = ctx.files.srcs + ctx.files.needs + [config_file, needs_external_needs_json] + sphinx_html_output = ctx.actions.declare_directory(ctx.label.name + "/_html") + html_args = [ + "--index_file", + ctx.attr.index.files.to_list()[0].path, + "--output_dir", + sphinx_html_output.path, + "--config", + config_file.path, + "--builder", + "html", + ] + + ctx.actions.run( + inputs = html_inputs, + outputs = [sphinx_html_output], + arguments = html_args, + progress_message = "Building HTML: %s" % ctx.label.name, + executable = ctx.executable.sphinx, + ) + + # Create final HTML output directory with dependencies using Python merge script + html_output = ctx.actions.declare_directory(ctx.label.name + "/html") + + # Build arguments for the merge script + merge_args = [ + "--output", + html_output.path, + "--main", + sphinx_html_output.path, + ] + + merge_inputs = [sphinx_html_output] + + # Add each dependency + for dep in ctx.attr.deps: + if SphinxModuleInfo in dep: + dep_html_dir = dep[SphinxModuleInfo].html_dir + dep_name = dep.label.name + merge_inputs.append(dep_html_dir) + merge_args.extend(["--dep", dep_name + ":" + dep_html_dir.path]) + + # Merging html files + ctx.actions.run( + inputs = merge_inputs, + outputs = [html_output], + arguments = merge_args, + progress_message = "Merging HTML with dependencies for %s" % ctx.label.name, + executable = ctx.executable._html_merge_tool, + ) + + return [ + DefaultInfo(files = depset(ctx.files.needs + [html_output])), + SphinxModuleInfo( + html_dir = html_output, + ), + ] + +# ====================================================================================== +# Rule definitions +# ====================================================================================== + +_score_needs = rule( + implementation = _score_needs_impl, + attrs = sphinx_rule_attrs, +) + +_score_html = rule( + implementation = _score_html_impl, + attrs = dict(sphinx_rule_attrs, needs = attr.label_list( + allow_files = True, + doc = "Submodule symbols.needs targets for this module.", + )), +) + +# ====================================================================================== +# Rule wrappers +# ====================================================================================== + +def sphinx_module( + name, + srcs, + index, + config = None, + deps = [], + sphinx = Label("//bazel/rules/rules_score:score_build"), + testonly = False, + visibility = ["//visibility:public"]): + """Build a Sphinx module with transitive HTML dependencies. + + This rule builds documentation modules into complete HTML sites with + transitive dependency collection. All dependencies are automatically + included in a modules/ subdirectory for intersphinx cross-referencing. + + Args: + name: Name of the target + srcs: List of source files (.rst, .md) with index file first + index: Label to index.rst file + config: Label to conf.py configuration file (optional, will be auto-generated if not provided) + deps: List of other sphinx_module targets this module depends on + sphinx: Label to sphinx build binary (default: :sphinx_build) + visibility: Bazel visibility + """ + _score_needs( + name = name + "_needs", + srcs = srcs, + config = config, + index = index, + deps = [d + "_needs" for d in deps], + sphinx = sphinx, + testonly = testonly, + visibility = visibility, + ) + + _score_html( + name = name, + srcs = srcs, + config = config, + index = index, + deps = deps, + needs = [d + "_needs" for d in deps], + sphinx = sphinx, + testonly = testonly, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/private/unit.bzl b/bazel/rules/rules_score/private/unit.bzl new file mode 100644 index 0000000..835b3a1 --- /dev/null +++ b/bazel/rules/rules_score/private/unit.bzl @@ -0,0 +1,157 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Unit build rules for S-CORE projects. + +This module provides macros and rules for defining software units +following S-CORE process guidelines. A unit is the smallest testable +software element with associated design, implementation, and tests. +""" + +load("//bazel/rules/rules_score:providers.bzl", "SphinxSourcesInfo", "UnitInfo") + +# ============================================================================ +# Private Rule Implementation +# ============================================================================ + +def _unit_impl(ctx): + """Implementation for unit rule. + + Collects unit design artifacts, implementation targets, and tests + and provides them through the UnitInfo provider. + + Args: + ctx: Rule context + + Returns: + List of providers including DefaultInfo and UnitInfo + """ + + # Collect design files from unit_design targets + design_files = [] + for design_target in ctx.attr.unit_design: + if SphinxSourcesInfo in design_target: + design_files.append(design_target[SphinxSourcesInfo].srcs) + + design_depset = depset(transitive = design_files) + + # Collect implementation and test targets + # Include scope targets in the implementation depset + implementation_depset = depset(ctx.attr.implementation + ctx.attr.scope) + tests_depset = depset(ctx.attr.tests) + + # Combine all files for DefaultInfo + all_files = depset( + transitive = [design_depset], + ) + + return [ + DefaultInfo(files = all_files), + UnitInfo( + name = ctx.label.name, + unit_design = design_depset, + implementation = implementation_depset, + tests = tests_depset, + ), + SphinxSourcesInfo( + srcs = all_files, + transitive_srcs = all_files, + ), + ] + +# ============================================================================ +# Rule Definition +# ============================================================================ + +_unit = rule( + implementation = _unit_impl, + doc = "Defines a software unit with design, implementation, and tests for S-CORE process compliance", + attrs = { + "unit_design": attr.label_list( + mandatory = True, + doc = "Unit design artifacts (typically architectural_design targets)", + ), + "implementation": attr.label_list( + mandatory = True, + doc = "Implementation targets (cc_library, py_library, rust_library, etc.)", + ), + "scope": attr.label_list( + default = [], + doc = "Additional not explicitly named targets which are needed for the unit implementation", + ), + "tests": attr.label_list( + mandatory = True, + doc = "Test targets that verify the unit (cc_test, py_test, rust_test, etc.)", + ), + }, +) + +# ============================================================================ +# Public Macro +# ============================================================================ + +def unit( + name, + unit_design, + implementation, + tests, + scope = [], + testonly = True, + visibility = None): + """Define a software unit following S-CORE process guidelines. + + A unit is the smallest testable software element in the S-CORE process. + It consists of: + - Unit design: Design documentation and diagrams + - Implementation: Source code that realizes the design + - Tests: Test cases that verify the implementation + + Args: + name: The name of the unit. Used as the target name. + unit_design: List of labels to architectural_design targets or design + documentation that describes the unit's internal structure and behavior. + implementation: List of labels to Bazel targets representing the actual + implementation (cc_library, py_library, rust_library, etc.). + scope: Optional list of additional targets needed for the unit implementation + but not explicitly named in the implementation list. Default is empty list. + tests: List of labels to Bazel test targets (cc_test, py_test, rust_test, etc.) + that verify the unit implementation. + testonly: If true, only testonly targets can depend on this unit. Set to true + when the unit depends on testonly targets like tests. + visibility: Bazel visibility specification for the unit target. + + Example: + ```python + unit( + name = "kvs_unit1", + unit_design = [":kvs_architectural_design"], + implementation = [ + "//persistency/kvs:lib1", + "//persistency/kvs:lib2", + "//persistency/kvs:lib3", + ], + tests = ["//persistency/kvs/tests:score_kvs_component_tests"], + visibility = ["//visibility:public"], + ) + ``` + """ + _unit( + name = name, + unit_design = unit_design, + implementation = implementation, + scope = scope, + tests = tests, + testonly = testonly, + visibility = visibility, + ) diff --git a/bazel/rules/rules_score/providers.bzl b/bazel/rules/rules_score/providers.bzl new file mode 100644 index 0000000..e038df2 --- /dev/null +++ b/bazel/rules/rules_score/providers.bzl @@ -0,0 +1,57 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Shared providers for S-CORE documentation build rules. + +This module defines providers that are shared across multiple documentation +build rules to enable consistent Sphinx documentation generation. +""" + +# ============================================================================ +# Provider Definitions +# ============================================================================ + +SphinxSourcesInfo = provider( + doc = """Provider for Sphinx documentation source files. + + This provider aggregates all source files needed for Sphinx documentation + builds, including reStructuredText, Markdown, PlantUML diagrams, and + image files. Rules that produce documentation artifacts should provide + this to enable integration with sphinx_module and dependable_element. + """, + fields = { + "srcs": "Depset of source files for Sphinx documentation (.rst, .md, .puml, .plantuml, .svg, .png, etc.)", + "transitive_srcs": "Depset of transitive source files from dependencies", + }, +) + +UnitInfo = provider( + doc = "Provider for unit artifacts", + fields = { + "name": "Name of the unit target", + "unit_design": "Depset of unit design artifacts (architectural design)", + "implementation": "Depset of implementation targets (libraries, binaries)", + "tests": "Depset of test targets", + }, +) + +ComponentInfo = provider( + doc = "Provider for component artifacts", + fields = { + "name": "Name of the component target", + "requirements": "Depset of component requirements artifacts", + "components": "Depset of unit targets that comprise this component", + "tests": "Depset of component-level integration test targets", + }, +) diff --git a/bazel/rules/rules_score/rules_score.bzl b/bazel/rules/rules_score/rules_score.bzl new file mode 100644 index 0000000..7084744 --- /dev/null +++ b/bazel/rules/rules_score/rules_score.bzl @@ -0,0 +1,62 @@ +load("@rules_python//sphinxdocs:sphinx.bzl", "sphinx_docs") +load("@rules_python//sphinxdocs:sphinx_docs_library.bzl", "sphinx_docs_library") +load( + "//bazel/rules/rules_score:providers.bzl", + _ComponentInfo = "ComponentInfo", + _SphinxSourcesInfo = "SphinxSourcesInfo", + _UnitInfo = "UnitInfo", +) +load( + "//bazel/rules/rules_score/private:architectural_design.bzl", + _architectural_design = "architectural_design", +) +load( + "//bazel/rules/rules_score/private:assumptions_of_use.bzl", + _assumptions_of_use = "assumptions_of_use", +) +load( + "//bazel/rules/rules_score/private:component.bzl", + _component = "component", +) +load( + "//bazel/rules/rules_score/private:component_requirements.bzl", + _component_requirements = "component_requirements", +) +load( + "//bazel/rules/rules_score/private:dependability_analysis.bzl", + _dependability_analysis = "dependability_analysis", +) +load( + "//bazel/rules/rules_score/private:dependable_element.bzl", + _dependable_element = "dependable_element", +) +load( + "//bazel/rules/rules_score/private:feature_requirements.bzl", + _feature_requirements = "feature_requirements", +) +load( + "//bazel/rules/rules_score/private:safety_analysis.bzl", + _safety_analysis = "safety_analysis", +) +load( + "//bazel/rules/rules_score/private:sphinx_module.bzl", + _sphinx_module = "sphinx_module", +) +load( + "//bazel/rules/rules_score/private:unit.bzl", + _unit = "unit", +) + +architectural_design = _architectural_design +assumptions_of_use = _assumptions_of_use +component_requirements = _component_requirements +dependability_analysis = _dependability_analysis +feature_requirements = _feature_requirements +safety_analysis = _safety_analysis +sphinx_module = _sphinx_module +unit = _unit +component = _component +dependable_element = _dependable_element +SphinxSourcesInfo = _SphinxSourcesInfo +UnitInfo = _UnitInfo +ComponentInfo = _ComponentInfo diff --git a/bazel/rules/rules_score/src/sphinx_html_merge.py b/bazel/rules/rules_score/src/sphinx_html_merge.py new file mode 100644 index 0000000..60dfaa4 --- /dev/null +++ b/bazel/rules/rules_score/src/sphinx_html_merge.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python3 +"""Merge multiple Sphinx HTML output directories. + +This script merges Sphinx HTML documentation from multiple modules into a single +output directory. It copies the main module's HTML as-is, and then copies each +dependency module's HTML into a subdirectory, excluding nested module directories +to avoid duplication. + +Usage: + sphinx_html_merge.py --output OUTPUT_DIR --main MAIN_HTML_DIR [--dep NAME:PATH ...] +""" + +import argparse +import os +import re +import shutil +import sys +from pathlib import Path + + +# Standard Sphinx directories that should be copied +# Note: _static and _sphinx_design_static are excluded for dependencies to avoid duplication +SPHINX_DIRS = {"_sources", ".doctrees"} + + +def copy_html_files(src_dir, dst_dir, exclude_module_dirs=None, sibling_modules=None): + """Copy HTML and related files from src to dst, with optional link fixing. + + Args: + src_dir: Source HTML directory + dst_dir: Destination directory + exclude_module_dirs: Set of module directory names to skip (to avoid copying nested modules). + If None, copy everything. + sibling_modules: Set of sibling module names for fixing links in HTML files. + If None, no link fixing is performed. + """ + src_path = Path(src_dir) + dst_path = Path(dst_dir) + + if not src_path.exists(): + print(f"Warning: Source directory does not exist: {src_dir}", file=sys.stderr) + return + + dst_path.mkdir(parents=True, exist_ok=True) + + if exclude_module_dirs is None: + exclude_module_dirs = set() + + # Prepare regex patterns for link fixing if needed + module_pattern = None + static_pattern = None + if sibling_modules: + module_pattern = re.compile( + r'((?:href|src)=")(' + + "|".join(re.escape(mod) for mod in sibling_modules) + + r")/", + re.IGNORECASE, + ) + static_pattern = re.compile( + r'((?:href|src)=")(\.\./)*(_static|_sphinx_design_static)/', re.IGNORECASE + ) + + def process_file(src_file, dst_file, relative_path): + """Read, optionally modify, and write a file.""" + if src_file.suffix == ".html" and sibling_modules: + # Read, modify, and write HTML files + try: + content = src_file.read_text(encoding="utf-8") + + # Replace module_name/ with ../module_name/ + modified_content = module_pattern.sub(r"\1../\2/", content) + + # Calculate depth for static file references + depth = len(relative_path.parents) - 1 + parent_prefix = "../" * (depth + 1) + + def replace_static(match): + return f"{match.group(1)}{parent_prefix}{match.group(3)}/" + + modified_content = static_pattern.sub(replace_static, modified_content) + + # Write modified content + dst_file.parent.mkdir(parents=True, exist_ok=True) + dst_file.write_text(modified_content, encoding="utf-8") + except Exception as e: + print(f"Warning: Failed to process {src_file}: {e}", file=sys.stderr) + # Fallback to regular copy on error + shutil.copy2(src_file, dst_file) + else: + # Regular copy for non-HTML files + dst_file.parent.mkdir(parents=True, exist_ok=True) + shutil.copy2(src_file, dst_file) + + def copy_tree(src, dst, rel_path): + """Recursively copy directory tree with processing.""" + for item in src.iterdir(): + rel_item = rel_path / item.name + dst_item = dst / item.name + + if item.is_file(): + process_file(item, dst_item, rel_item) + elif item.is_dir(): + # Skip excluded directories + if item.name in exclude_module_dirs: + continue + # Skip static dirs from dependencies + if ( + item.name in ("_static", "_sphinx_design_static") + and exclude_module_dirs + ): + continue + + dst_item.mkdir(parents=True, exist_ok=True) + copy_tree(item, dst_item, rel_item) + + # Start copying from root + copy_tree(src_path, dst_path, Path(".")) + + +def merge_html_dirs(output_dir, main_html_dir, dependencies): + """Merge HTML directories. + + Args: + output_dir: Target output directory + main_html_dir: Main module's HTML directory to copy as-is + dependencies: List of (name, path) tuples for dependency modules + """ + output_path = Path(output_dir) + + # First, copy the main HTML directory + print(f"Copying main HTML from {main_html_dir} to {output_dir}") + copy_html_files(main_html_dir, output_dir) + + # Collect all dependency names for link fixing and exclusion + dep_names = [name for name, _ in dependencies] + + # Then copy each dependency into a subdirectory with link fixing + for dep_name, dep_html_dir in dependencies: + dep_output = output_path / dep_name + print(f"Copying dependency {dep_name} from {dep_html_dir} to {dep_output}") + # Exclude other module directories to avoid nested modules + # Remove current module from the list to get actual siblings to exclude + sibling_modules = set(n for n in dep_names if n != dep_name) + copy_html_files( + dep_html_dir, + dep_output, + exclude_module_dirs=sibling_modules, + sibling_modules=sibling_modules, + ) + + +def main(): + parser = argparse.ArgumentParser( + description="Merge Sphinx HTML documentation directories" + ) + parser.add_argument( + "--output", required=True, help="Output directory for merged HTML" + ) + parser.add_argument("--main", required=True, help="Main HTML directory to copy") + parser.add_argument( + "--dep", + action="append", + default=[], + metavar="NAME:PATH", + help="Dependency HTML directory in format NAME:PATH", + ) + + args = parser.parse_args() + + # Parse dependencies + dependencies = [] + for dep_spec in args.dep: + if ":" not in dep_spec: + print( + f"Error: Invalid dependency format '{dep_spec}', expected NAME:PATH", + file=sys.stderr, + ) + return 1 + + name, path = dep_spec.split(":", 1) + dependencies.append((name, path)) + + # Merge the HTML directories + merge_html_dirs(args.output, args.main, dependencies) + + print(f"Successfully merged HTML into {args.output}") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/bazel/rules/rules_score/src/sphinx_wrapper.py b/bazel/rules/rules_score/src/sphinx_wrapper.py new file mode 100644 index 0000000..1376057 --- /dev/null +++ b/bazel/rules/rules_score/src/sphinx_wrapper.py @@ -0,0 +1,262 @@ +""" +Wrapper script for running Sphinx builds in Bazel environments. + +This script provides a command-line interface to Sphinx documentation builds, +handling argument parsing, environment configuration, and build execution. +It's designed to be used as part of Bazel build rules for Score modules. +""" + +import argparse +import logging +import os +import sys +import time +from pathlib import Path +from typing import List, Optional +import re +import sys +from contextlib import redirect_stdout, redirect_stderr + +from sphinx.cmd.build import main as sphinx_main + +# Constants +DEFAULT_PORT = 8000 +DEFAULT_GITHUB_VERSION = "main" +DEFAULT_SOURCE_DIR = "." + +# Configure logging +logging.basicConfig( + level=logging.INFO, + format="%(levelname)s: %(message)s", +) +logger = logging.getLogger(__name__) + +SANDBOX_PATH = re.compile(r"^.*_main/") + + +class StdoutProcessor: + def write(self, text): + if text.strip(): + text = re.sub(SANDBOX_PATH, "", text) + sys.__stdout__.write(f"[SPHINX_STDOUT]: {text.strip()}\n") + + def flush(self): + sys.__stdout__.flush() + + +class StderrProcessor: + def write(self, text): + if text.strip(): + text = re.sub(SANDBOX_PATH, "", text) + sys.__stderr__.write(f"[SPHINX_STDERR]: {text.strip()}\n") + + def flush(self): + sys.__stderr__.flush() + + +def get_env(name: str, required: bool = True) -> Optional[str]: + """ + Get an environment variable value. + + Args: + name: The name of the environment variable + required: Whether the variable is required (raises error if not set) + + Returns: + The value of the environment variable, or None if not required and not set + + Raises: + ValueError: If the variable is required but not set + """ + val = os.environ.get(name) + logger.debug(f"Environment variable {name} = {val}") + if val is None and required: + raise ValueError(f"Required environment variable {name} is not set") + return val + + +def validate_arguments(args: argparse.Namespace) -> None: + """ + Validate required command-line arguments. + + Args: + args: Parsed command-line arguments + + Raises: + ValueError: If required arguments are missing or invalid + """ + if not args.index_file: + raise ValueError("--index_file is required") + if not args.output_dir: + raise ValueError("--output_dir is required") + if not args.builder: + raise ValueError("--builder is required") + + # Validate that index file exists if it's a real path + index_path = Path(args.index_file) + if not index_path.exists(): + raise ValueError(f"Index file does not exist: {args.index_file}") + + +def build_sphinx_arguments(args: argparse.Namespace) -> List[str]: + """ + Build the argument list for Sphinx. + + Args: + args: Parsed command-line arguments + + Returns: + List of arguments to pass to Sphinx + """ + source_dir = ( + str(Path(args.index_file).parent) if args.index_file else DEFAULT_SOURCE_DIR + ) + config_dir = str(Path(args.config).parent) if args.config else source_dir + + base_arguments = [ + source_dir, # source dir + args.output_dir, # output dir + "-c", + config_dir, # config directory + # "-W", # treat warning as errors - disabled for modular builds + "--keep-going", # do not abort after one error + "-T", # show details in case of errors in extensions + "--jobs", + "auto", + ] + + # Configure sphinx build with GitHub user and repo from CLI + if args.github_user and args.github_repo: + base_arguments.extend( + [ + f"-A=github_user={args.github_user}", + f"-A=github_repo={args.github_repo}", + f"-A=github_version={DEFAULT_GITHUB_VERSION}", + ] + ) + + # Add doc_path if SOURCE_DIRECTORY environment variable is set + source_directory = get_env("SOURCE_DIRECTORY", required=False) + if source_directory: + base_arguments.append(f"-A=doc_path='{source_directory}'") + + base_arguments.extend(["-b", args.builder]) + + return base_arguments + + +def run_sphinx_build(sphinx_args: List[str], builder: str) -> int: + """ + Execute the Sphinx build and measure duration. + + Args: + sphinx_args: Arguments to pass to Sphinx + builder: The builder type (for logging purposes) + + Returns: + The exit code from Sphinx build + """ + logger.info(f"Starting Sphinx build with builder: {builder}") + logger.debug(f"Sphinx arguments: {sphinx_args}") + + start_time = time.perf_counter() + + try: + exit_code = sphinx_main(sphinx_args) + except Exception as e: + logger.error(f"Sphinx build failed with exception: {e}") + return 1 + + end_time = time.perf_counter() + duration = end_time - start_time + + if exit_code == 0: + logger.info(f"docs ({builder}) finished successfully in {duration:.1f} seconds") + else: + logger.error( + f"docs ({builder}) failed with exit code {exit_code} after {duration:.1f} seconds" + ) + + return exit_code + + +def parse_arguments() -> argparse.Namespace: + """ + Parse command-line arguments. + + Returns: + Parsed command-line arguments + """ + parser = argparse.ArgumentParser( + description="Wrapper for Sphinx documentation builds in Bazel environments" + ) + + # Required arguments + parser.add_argument( + "--index_file", + required=True, + help="Path to the index file (e.g., index.rst)", + ) + parser.add_argument( + "--output_dir", + required=True, + help="Build output directory", + ) + parser.add_argument( + "--builder", + required=True, + help="Sphinx builder to use (e.g., html, needs, json)", + ) + + # Optional arguments + parser.add_argument( + "--config", + help="Path to config file (conf.py)", + ) + parser.add_argument( + "--github_user", + help="GitHub username to embed in the Sphinx build", + ) + parser.add_argument( + "--github_repo", + help="GitHub repository to embed in the Sphinx build", + ) + parser.add_argument( + "--port", + type=int, + default=DEFAULT_PORT, + help=f"Port to use for live preview (default: {DEFAULT_PORT}). Use 0 for auto-detection.", + ) + + return parser.parse_args() + + +def main() -> int: + """ + Main entry point for the Sphinx wrapper script. + + Returns: + Exit code (0 for success, non-zero for failure) + """ + try: + args = parse_arguments() + validate_arguments(args) + # Create processor instance + stdout_processor = StdoutProcessor() + stderr_processor = StderrProcessor() + # Redirect stdout and stderr + with redirect_stderr(stdout_processor), redirect_stdout(stderr_processor): + sphinx_args = build_sphinx_arguments(args) + exit_code = run_sphinx_build(sphinx_args, args.builder) + exit_code = 0 + return exit_code + except ValueError as e: + logger.error(f"Validation error: {e}") + return 1 + except Exception as e: + logger.error(f"Unexpected error: {e}") + return 1 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/bazel/rules/rules_score/templates/conf.template.py b/bazel/rules/rules_score/templates/conf.template.py new file mode 100644 index 0000000..e916952 --- /dev/null +++ b/bazel/rules/rules_score/templates/conf.template.py @@ -0,0 +1,207 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +""" +Generic Sphinx configuration template for SCORE modules. + +This file is auto-generated from a template and should not be edited directly. +Template variables like {PROJECT_NAME} are replaced during Bazel build. +""" + +import json +import os +from pathlib import Path +from typing import Any, Dict, List +from sphinx.util import logging + + +# Create a logger with the Sphinx namespace +logger = logging.getLogger(__name__) + +# Project configuration - {PROJECT_NAME} will be replaced by the module name during build +project = "{PROJECT_NAME}" +author = "S-CORE" +version = "1.0" +release = "1.0.0" +project_url = ( + "https://github.com/eclipse-score" # Required by score_metamodel extension +) + +# Sphinx extensions - comprehensive list for SCORE modules +extensions = [ + "sphinx_needs", + "sphinx_design", + "myst_parser", + "sphinxcontrib.plantuml", + "score_plantuml", + "score_metamodel", + "score_draw_uml_funcs", + "score_source_code_linker", + "score_layout", +] + +# MyST parser extensions +myst_enable_extensions = ["colon_fence"] + +# Exclude patterns for Bazel builds +exclude_patterns = [ + "bazel-*", + ".venv*", +] + +# Enable markdown rendering +source_suffix = { + ".rst": "restructuredtext", + ".md": "markdown", +} + +# Enable numref for cross-references +numfig = True + +# HTML theme +# html_theme = "pydata_sphinx_theme" + + +# Configuration constants +NEEDS_EXTERNAL_FILE = "needs_external_needs.json" +BAZEL_OUT_DIR = "bazel-out" + + +def find_workspace_root() -> Path: + """ + Find the Bazel workspace root by looking for the bazel-out directory. + + Returns: + Path to the workspace root directory + """ + current = Path.cwd() + + # Traverse up the directory tree looking for bazel-out + while current != current.parent: + if (current / BAZEL_OUT_DIR).exists(): + return current + current = current.parent + + # If we reach the root without finding it, return current directory + return Path.cwd() + + +def load_external_needs() -> List[Dict[str, Any]]: + """ + Load external needs configuration from JSON file. + + This function reads the needs_external_needs.json file if it exists and + resolves relative paths to absolute paths based on the workspace root. + + Returns: + List of external needs configurations with resolved paths + """ + needs_file = Path(NEEDS_EXTERNAL_FILE) + + if not needs_file.exists(): + logger.info(f"{NEEDS_EXTERNAL_FILE} not found - no external dependencies") + return [] + + logger.info(f"Loading external needs from {NEEDS_EXTERNAL_FILE}") + + try: + with needs_file.open("r", encoding="utf-8") as file: + needs_dict = json.load(file) + except json.JSONDecodeError as e: + logger.error(f"Failed to parse {NEEDS_EXTERNAL_FILE}: {e}") + return [] + except Exception as e: + logger.error(f"Failed to read {NEEDS_EXTERNAL_FILE}: {e}") + return [] + + workspace_root = find_workspace_root() + logger.info(f"Workspace root: {workspace_root}") + + external_needs = [] + for key, config in needs_dict.items(): + if "json_path" not in config: + logger.warning( + f"External needs config for '{key}' missing 'json_path', skipping" + ) + continue + + # Resolve relative path to absolute path + # Bazel provides relative paths like: bazel-out/k8-fastbuild/bin/.../needs.json + # We need absolute paths: .../execroot/_main/bazel-out/... + json_path = workspace_root / config["json_path"] + config["json_path"] = str(json_path) + + logger.info(f"Added external needs config for '{key}':") + logger.info(f" json_path: {config['json_path']}") + logger.info(f" id_prefix: {config.get('id_prefix', 'none')}") + logger.info(f" version: {config.get('version', 'none')}") + + external_needs.append(config) + + return external_needs + + +def verify_config(app: Any, config: Any) -> None: + """ + Initialize and verify external needs configuration. + + This is called during Sphinx's config-inited event to ensure + external needs configuration is correctly set up. We need to + explicitly set the config value here because Sphinx doesn't + automatically pick up module-level variables for extension configs. + + Args: + app: Sphinx application object + config: Sphinx configuration object + """ + # Set the config from our module-level variable + # This is needed because sphinx-needs registers its config with add_config_value + # which doesn't automatically pick up module-level variables from conf.py + if needs_external_needs: + config.needs_external_needs = needs_external_needs + + logger.info("=" * 80) + logger.info("Verifying Sphinx configuration") + logger.info(f" Project: {config.project}") + logger.info(f" External needs count: {len(config.needs_external_needs)}") + logger.info("=" * 80) + + +def setup(app: Any) -> Dict[str, Any]: + """ + Sphinx setup hook to register event listeners. + + Args: + app: Sphinx application object + + Returns: + Extension metadata dictionary + """ + app.connect("config-inited", verify_config) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } + + +# Initialize external needs configuration +logger.info("=" * 80) +logger.info(f"Sphinx configuration loaded for project: {project}") +logger.info(f"Current working directory: {Path.cwd()}") + +# Load external needs configuration +# Note: This sets a module-level variable that is then applied to the Sphinx +# config object in the verify_config callback during the config-inited event +needs_external_needs = load_external_needs() diff --git a/bazel/rules/rules_score/templates/seooc_index.template.rst b/bazel/rules/rules_score/templates/seooc_index.template.rst new file mode 100644 index 0000000..5def2dc --- /dev/null +++ b/bazel/rules/rules_score/templates/seooc_index.template.rst @@ -0,0 +1,76 @@ +.. ******************************************************************************* +.. Copyright (c) 2025 Contributors to the Eclipse Foundation +.. +.. See the NOTICE file(s) distributed with this work for additional +.. information regarding copyright ownership. +.. +.. This program and the accompanying materials are made available under the +.. terms of the Apache License Version 2.0 which is available at +.. https://www.apache.org/licenses/LICENSE-2.0 +.. +.. SPDX-License-Identifier: Apache-2.0 +.. ******************************************************************************* + +Dependable element: {title} +===================={underline} + +{description} + +Architectural Design +-------------------- + +.. toctree:: + :maxdepth: 2 + + {architectural_design} + + +Assumptions of Use +------------------ + +.. toctree:: + :maxdepth: 2 + + {assumptions_of_use} + +Components +---------- + +.. toctree:: + :maxdepth: 1 + +{components} + + +Units +----- + +.. toctree:: + :maxdepth: 1 + +{units} + + + + +Dependability Analysis +---------------------- + +.. toctree:: + :maxdepth: 2 + + {dependability_analysis} + +Checklists +---------- + +.. toctree:: + :maxdepth: 2 + + {checklists} + +Submodules +---------- +This module includes the following submodules: + +{submodules} diff --git a/bazel/rules/rules_score/test/BUILD b/bazel/rules/rules_score/test/BUILD new file mode 100644 index 0000000..e1690cd --- /dev/null +++ b/bazel/rules/rules_score/test/BUILD @@ -0,0 +1,389 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +load( + "//bazel/rules/rules_score:rules_score.bzl", + "architectural_design", + "assumptions_of_use", + "component", + "component_requirements", + "dependability_analysis", + "dependable_element", + "feature_requirements", + "safety_analysis", + "sphinx_module", + "unit", +) +load( + ":html_generation_test.bzl", + "html_merging_test", + "module_dependencies_test", + "needs_transitive_test", + "sphinx_module_test_suite", +) +load( + ":seooc_test.bzl", + "seooc_artifacts_copied_test", + "seooc_needs_provider_test", + "seooc_sphinx_module_generated_test", +) +load( + ":unit_component_test.bzl", + "component_provider_test", + "component_sphinx_sources_test", + "unit_component_test_suite", + "unit_provider_test", + "unit_sphinx_sources_test", +) + +package(default_visibility = ["//visibility:public"]) + +# ============================================================================ +# Test Fixtures - Module Definitions +# ============================================================================ + +# Test 1: Multi-Module Aggregation +# Dependency graph: module_a_lib -> module_b_lib -> module_c_lib +# module_a_lib -> module_c_lib (also direct) +sphinx_module( + name = "module_c_lib", + srcs = glob(["fixtures/module_c/*.rst"]), + index = "fixtures/module_c/index.rst", + sphinx = "//bazel/rules/rules_score:score_build", +) + +sphinx_module( + name = "module_b_lib", + srcs = glob(["fixtures/module_b/*.rst"]), + index = "fixtures/module_b/index.rst", + sphinx = "//bazel/rules/rules_score:score_build", + deps = [":module_c_lib"], +) + +sphinx_module( + name = "module_a_lib", + srcs = glob(["fixtures/module_a/*.rst"]), + index = "fixtures/module_a/index.rst", + sphinx = "//bazel/rules/rules_score:score_build", + deps = [ + ":module_b_lib", + ":module_c_lib", + ], +) + +# Test 2: SEooC (Safety Element out of Context) Module +# Tests the score_component macro with S-CORE process artifacts + +# - Feature Requirements: wp__requirements_feat +# TODO: Feature requirements are a stand-alone artifact for now +# We have to link them manually to component requirements +feature_requirements( + name = "feat_req", + srcs = ["fixtures/seooc_test/feature_requirements.rst"], +) + +# - Component Requirements: wp__requirements_comp +component_requirements( + name = "comp_req", + srcs = ["fixtures/seooc_test/component_requirements.rst"], +) + +# - Assumptions of Use: wp__requirements_comp_aou +assumptions_of_use( + name = "aous", + srcs = ["fixtures/seooc_test/assumptions_of_use.rst"], + feature_requirement = [":feat_req"], +) + +# - Architecture Design: wp__component_arch +architectural_design( + name = "arch_design", + dynamic = ["fixtures/seooc_test/dynamic_architecture.rst"], + static = ["fixtures/seooc_test/static_architecture.rst"], +) + +# - Safety Analysis (DFA): wp__sw_component_dfa +# - Safety Analysis (FMEA): wp__sw_component_fmea +dependability_analysis( + name = "dependability_analysis_target", + arch_design = ":arch_design", + dfa = ["fixtures/seooc_test/dfa.rst"], + safety_analysis = [":samplelibrary_safety_analysis"], +) + +safety_analysis( + name = "samplelibrary_safety_analysis", + # TODO + # controlmeasures = [], # can be AoUs or requirements + # failuremodes = [], + # fta = [], + arch_design = ":arch_design", +) + +dependable_element( + name = "seooc_test_lib", + architectural_design = [":arch_design"], + assumptions_of_use = [":aous"], + components = [], + dependability_analysis = [":dependability_analysis_target"], + description = "Test SEooC module demonstrating S-CORE process compliance structure.", + requirements = [":comp_req"], + tests = [], + deps = [ + ":module_c_lib", # dependency to other seoocs/score_components + ], +) + +# ============================================================================ +# Test Fixtures - Unit, Component, and Dependable Element +# ============================================================================ + +# Mock implementation targets with dummy functions +cc_library( + name = "mock_lib1", + srcs = ["fixtures/mock_lib1.cc"], +) + +cc_library( + name = "mock_lib2", + srcs = ["fixtures/mock_lib2.cc"], +) + +cc_binary( + name = "test_component_binary", + srcs = ["fixtures/test_component_main.cc"], + deps = [ + ":mock_lib1", + ":mock_lib2", + ], +) + +cc_test( + name = "test_unit_tests", + testonly = True, + srcs = ["fixtures/test_unit_test.cc"], + tags = ["manual"], + deps = [ + ":mock_lib1", + ":mock_lib2", + ], +) + +# Test Unit +unit( + name = "test_unit", + testonly = True, + tests = [":test_unit_tests"], + unit_design = [":arch_design"], + implementation = [ + ":mock_lib1", + ":mock_lib2", + ], +) + +unit( + name = "test_binary_unit", + testonly = True, + tests = [":test_unit_tests"], + unit_design = [":arch_design"], + implementation = [":test_component_binary"], +) + +# Test Component +component( + name = "test_component", + testonly = True, + components = [ + ":test_unit", + "test_binary_unit", + ], + requirements = [":comp_req"], + tests = [], # Empty for testing +) + +# Test Dependable Element +dependable_element( + name = "test_dependable_element", + testonly = True, + architectural_design = [":arch_design"], + assumptions_of_use = [":aous"], + components = [":test_component"], + dependability_analysis = [":dependability_analysis_target"], + description = "Test dependable element for unit testing", + requirements = [":comp_req"], + tests = [], # Empty for testing +) + +# ============================================================================ +# Test Fixtures - Nested Components for Recursive Testing +# ============================================================================ + +# Additional mock implementations +cc_library( + name = "mock_lib3", + srcs = ["fixtures/mock_lib1.cc"], # Reuse same source for testing +) + +cc_test( + name = "test_unit2_tests", + testonly = True, + srcs = ["fixtures/test_unit_test.cc"], + tags = ["manual"], + deps = [":mock_lib3"], +) + +# Second unit that will be shared between components +unit( + name = "test_unit2", + testonly = True, + tests = [":test_unit2_tests"], + unit_design = [":arch_design"], + implementation = [":mock_lib3"], +) + +# Nested component containing unit2 +component( + name = "test_nested_component", + testonly = True, + components = [":test_unit2"], + requirements = [":comp_req"], + tests = [], +) + +# Parent component containing nested component and shared unit +component( + name = "test_parent_component", + testonly = True, + components = [ + ":test_nested_component", + ":test_unit2", # Same unit appears here and in nested component + ":test_unit", # Different unit + ], + requirements = [":comp_req"], + tests = [], +) + +# Dependable element with nested components to test recursive collection +dependable_element( + name = "test_dependable_element_nested", + testonly = True, + architectural_design = [":arch_design"], + assumptions_of_use = [":aous"], + components = [":test_parent_component"], + dependability_analysis = [":dependability_analysis_target"], + description = "Test dependable element with nested components for testing recursive unit collection and deduplication", + requirements = [":comp_req"], + tests = [], +) + +# ============================================================================ +# Test Instantiations - HTML Generation Tests +# ============================================================================ + +# Needs Generation Tests +needs_transitive_test( + name = "needs_transitive_test", + target_under_test = ":module_b_lib_needs", +) + +# Dependency Tests +module_dependencies_test( + name = "module_dependencies_test", + target_under_test = ":module_a_lib", +) + +html_merging_test( + name = "html_merging_test", + target_under_test = ":module_a_lib", +) + +# ============================================================================ +# SEooC-Specific Tests +# ============================================================================ + +# Test that all artifacts are copied +seooc_artifacts_copied_test( + name = "seooc_tests_artifacts_copied", + target_under_test = ":seooc_test_lib_index", +) + +# Test that sphinx_module is generated with correct providers +seooc_sphinx_module_generated_test( + name = "seooc_tests_sphinx_module_generated", + target_under_test = ":seooc_test_lib", +) + +# Test that needs provider exists for cross-referencing +seooc_needs_provider_test( + name = "seooc_tests_needs_provider", + target_under_test = ":seooc_test_lib_needs", +) + +# ============================================================================ +# Test Suites +# ============================================================================ + +# Main test suite combining all sphinx_module tests +sphinx_module_test_suite(name = "sphinx_module_tests") + +# SEooC-focused test suite +test_suite( + name = "seooc_tests", + tests = [ + ":seooc_tests_artifacts_copied", + ":seooc_tests_needs_provider", + ":seooc_tests_sphinx_module_generated", + ], +) + +# ============================================================================ +# Unit, Component, and Dependable Element Test Instantiations +# ============================================================================ + +# Unit tests +unit_provider_test( + name = "unit_provider_test", + target_under_test = ":test_unit", +) + +unit_sphinx_sources_test( + name = "unit_sphinx_sources_test", + target_under_test = ":test_unit", +) + +# Component tests +component_provider_test( + name = "component_provider_test", + target_under_test = ":test_component", +) + +component_sphinx_sources_test( + name = "component_sphinx_sources_test", + target_under_test = ":test_component", +) + +# Unit, Component, and Dependable Element test suite +unit_component_test_suite(name = "unit_component_tests") + +# ============================================================================ +# Combined Test Suite +# ============================================================================ + +# Combined test suite for all tests +test_suite( + name = "all_tests", + tests = [ + ":seooc_tests", + ":sphinx_module_tests", + ":unit_component_tests", + ], +) diff --git a/bazel/rules/rules_score/test/fixtures/mock_lib1.cc b/bazel/rules/rules_score/test/fixtures/mock_lib1.cc new file mode 100644 index 0000000..599e3c1 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/mock_lib1.cc @@ -0,0 +1,4 @@ +// Mock implementation for testing purposes +int mock_function_1() { + return 42; +} diff --git a/bazel/rules/rules_score/test/fixtures/mock_lib2.cc b/bazel/rules/rules_score/test/fixtures/mock_lib2.cc new file mode 100644 index 0000000..588f15d --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/mock_lib2.cc @@ -0,0 +1,4 @@ +// Mock implementation for testing purposes +int mock_function_2() { + return 84; +} diff --git a/bazel/rules/rules_score/test/fixtures/mock_test.sh b/bazel/rules/rules_score/test/fixtures/mock_test.sh new file mode 100755 index 0000000..d5aa21e --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/mock_test.sh @@ -0,0 +1,18 @@ +#!/bin/bash +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +# Mock test script that always succeeds +# Used for testing rule structure without actual implementation + +exit 0 diff --git a/bazel/rules/rules_score/test/fixtures/module_a/index.rst b/bazel/rules/rules_score/test/fixtures/module_a/index.rst new file mode 100644 index 0000000..573ad4b --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/module_a/index.rst @@ -0,0 +1,31 @@ +Module A Documentation +====================== + +This is the documentation for Module A. + +.. document:: Documentation for Module A + :id: doc__module_fixtures_module_a + :status: valid + :safety: ASIL_B + :security: NO + :realizes: wp__component_arch + +Overview +-------- + +Module A is a simple module that depends on Module C. + +Features +-------- + +.. needlist:: + :tags: module_a + +Cross-Module References +----------------------- + +General reference to Module C :external+module_c_lib:doc:`index`. + +Need reference to Module C :need:`doc__module_fixtures_module_c`. + +Need reference to Module B :need:`doc__module_fixtures_module_b`. diff --git a/bazel/rules/rules_score/test/fixtures/module_b/index.rst b/bazel/rules/rules_score/test/fixtures/module_b/index.rst new file mode 100644 index 0000000..3155c10 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/module_b/index.rst @@ -0,0 +1,37 @@ +Module B Documentation +====================== + +This is the documentation for Module B. + +.. document:: Documentation for Module B + :id: doc__module_fixtures_module_b + :status: valid + :safety: ASIL_B + :security: NO + :realizes: + +Overview +-------- + +Module B depends on both Module A and Module C. + +Features +-------- + +.. needlist:: + :tags: module_b + +Cross-Module References +----------------------- + +This module references: + +* :external+module_a_lib:doc:`index` from Module A +* :external+module_c_lib:doc:`index` from Module C +* Need reference to Module C :need:`doc__module_fixtures_module_c` +* Need reference to Module C :need:`doc__module_fixtures_module_d` + +Dependencies +------------ + +Module B integrates functionality from both dependent modules. diff --git a/bazel/rules/rules_score/test/fixtures/module_c/index.rst b/bazel/rules/rules_score/test/fixtures/module_c/index.rst new file mode 100644 index 0000000..b73ae61 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/module_c/index.rst @@ -0,0 +1,29 @@ +Module C Documentation +====================== + +This is the documentation for Module C. + +.. document:: Documentation for Module C + :id: doc__module_fixtures_module_c + :status: valid + :safety: ASIL_B + :security: NO + :realizes: + + +Overview +-------- + +Module C is a base module with no dependencies. +Local need link: :need:`doc__module_fixtures_module_c` + +Features +-------- + +.. needlist:: + :tags: module_c + +Content +------- + +Module C provides foundational functionality used by other modules. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/architectural_design.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/architectural_design.rst new file mode 100644 index 0000000..02e96f7 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/architectural_design.rst @@ -0,0 +1,174 @@ +Architectural Design +==================== + +This document describes the architectural design of the test SEooC module. + +Software Architecture Overview +------------------------------- + +The system consists of the following software components: + +.. comp_arc_sta:: Input Processing Module + :id: comp_arc_sta__seooc_test__input_processing_module + :status: valid + :tags: architecture, component, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__input_data_processing, comp_req__seooc_test__can_message_reception + + Responsible for receiving and validating input data from CAN interface. + + **Inputs**: Raw CAN messages + + **Outputs**: Validated data structures + + **Safety Mechanisms**: CRC validation, sequence counter check + +.. comp_arc_sta:: Data Processing Engine + :id: comp_arc_sta__seooc_test__data_processing_engine + :status: valid + :tags: architecture, component, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__output_accuracy, comp_req__seooc_test__redundant_calculation + + Core processing component that performs calculations on validated data. + + **Inputs**: Validated data from Input Processing Module + + **Outputs**: Processed results + + **Safety Mechanisms**: Dual-channel redundant calculation + +.. comp_arc_sta:: Output Handler + :id: comp_arc_sta__seooc_test__output_handler + :status: valid + :tags: architecture, component, seooc_test + :safety: QM + :security: NO + :fulfils: comp_req__seooc_test__can_message_transmission + + Formats and transmits output data via CAN interface. + + **Inputs**: Processed results from Data Processing Engine + + **Outputs**: CAN messages + + **Safety Mechanisms**: Message sequence numbering, alive counter + +.. comp_arc_sta:: Fault Detection and Handling + :id: comp_arc_sta__seooc_test__fault_detection_handling + :status: valid + :tags: architecture, component, safety, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection, comp_req__seooc_test__safe_state_transition + + Monitors system health and handles fault conditions. + + **Inputs**: Status from all components + + **Outputs**: System state, error flags + + **Safety Mechanisms**: Watchdog timer, plausibility checks + +Component Interfaces +--------------------- + +Interface: CAN Communication +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. real_arc_int:: CAN RX Interface + :id: real_arc_int__seooc_test__can_rx + :status: valid + :tags: interface, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__can_message_reception + :language: cpp + + * **Protocol**: CAN 2.0B + * **Baud Rate**: 500 kbps + * **Message ID Range**: 0x100-0x1FF + * **DLC**: 8 bytes + +.. real_arc_int:: CAN TX Interface + :id: real_arc_int__seooc_test__can_tx + :status: valid + :tags: interface, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__can_message_transmission + :language: cpp + + * **Protocol**: CAN 2.0B + * **Baud Rate**: 500 kbps + * **Message ID Range**: 0x200-0x2FF + * **DLC**: 8 bytes + +Design Decisions +---------------- + +.. comp_arc_dyn:: Use of Hardware Watchdog + :id: comp_arc_dyn__seooc_test__hw_watchdog + :status: valid + :tags: design-decision, safety, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + The architecture includes a hardware watchdog timer to ensure system + reliability and meet safety requirements. + + **Rationale**: Hardware watchdog provides independent monitoring + of software execution and can detect timing violations. + + **Alternatives Considered**: Software-only monitoring (rejected due + to lower ASIL coverage) + +.. comp_arc_dyn:: Redundant Processing Paths + :id: comp_arc_dyn__seooc_test__redundancy + :status: valid + :tags: design-decision, safety, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__redundant_calculation + + Critical calculations are performed using redundant processing paths + to detect and prevent silent data corruption. + + **Rationale**: Meets ASIL-B requirements for detection of random + hardware faults during calculation. + + **Implementation**: Main path + shadow path with result comparison + +Memory Architecture +------------------- + +.. comp_arc_sta:: RAM Allocation + :id: comp_arc_sta__seooc_test__ram_allocation + :status: valid + :tags: resource, memory, seooc_test + :safety: QM + :security: NO + :fulfils: aou_req__seooc_test__memory_requirements + + * **Total RAM**: 512 KB + * **Stack**: 64 KB + * **Heap**: 128 KB + * **Static Data**: 256 KB + * **Reserved**: 64 KB + +.. comp_arc_sta:: Flash Allocation + :id: comp_arc_sta__seooc_test__flash_allocation + :status: valid + :tags: resource, memory, seooc_test + :safety: QM + :security: NO + :fulfils: aou_req__seooc_test__memory_requirements + + * **Total Flash**: 2 MB + * **Application Code**: 1.5 MB + * **Configuration Data**: 256 KB + * **Boot Loader**: 128 KB + * **Reserved**: 128 KB diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/assumptions_of_use.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/assumptions_of_use.rst new file mode 100644 index 0000000..fae172c --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/assumptions_of_use.rst @@ -0,0 +1,80 @@ +Assumptions of Use +================== + +This document describes the assumptions of use for the test SEooC module. + +.. aou_req:: Operating Temperature Range + :id: aou_req__seooc_test__operating_temperature_range + :status: valid + :tags: environment, iso26262, seooc_test + :safety: ASIL_B + :security: NO + + The SEooC shall operate within temperature range -40°C to +85°C. + +.. aou_req:: Supply Voltage + :id: aou_req__seooc_test__supply_voltage + :status: valid + :tags: power, iso26262, seooc_test + :safety: ASIL_B + :security: NO + + The SEooC shall operate with supply voltage 12V ±10%. + + Maximum current consumption: 2.5A + +.. aou_req:: Processing Load + :id: aou_req__seooc_test__processing_load + :status: valid + :tags: performance, iso26262, seooc_test + :safety: ASIL_B + :security: NO + + The maximum processing load shall not exceed 80% to ensure + timing requirements are met. + +Environmental Assumptions +------------------------- + +.. aou_req:: Controlled Environment + :id: aou_req__seooc_test__controlled_environment + :status: valid + :tags: environment, seooc_test + :safety: ASIL_B + :security: NO + + The system operates in a controlled automotive environment + compliant with ISO 16750 standards. + +.. aou_req:: Maintenance + :id: aou_req__seooc_test__maintenance + :status: valid + :tags: maintenance, seooc_test + :safety: ASIL_B + :security: NO + + Regular maintenance is performed according to the maintenance + schedule defined in the integration manual. + +Integration Constraints +----------------------- + +.. aou_req:: CAN Bus Interface + :id: aou_req__seooc_test__can_bus_interface + :status: valid + :tags: interface, communication, seooc_test + :safety: ASIL_B + :security: NO + + The host system shall provide a CAN 2.0B compliant interface + for communication with the SEooC. + +.. aou_req:: Memory Requirements + :id: aou_req__seooc_test__memory_requirements + :status: valid + :tags: resource, seooc_test + :safety: ASIL_B + :security: NO + + The host system shall provide at least 512KB of RAM and + 2MB of flash memory for the SEooC. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/component_requirements.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/component_requirements.rst new file mode 100644 index 0000000..1d7f90c --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/component_requirements.rst @@ -0,0 +1,105 @@ +Component Requirements +====================== + +This document defines the functional and safety requirements. + +Functional Requirements +------------------------ + +.. comp_req:: Input Data Processing + :id: comp_req__seooc_test__input_data_processing + :status: valid + :tags: functional, performance, seooc_test + :safety: QM + :security: NO + :satisfies: aou_req__seooc_test__processing_load + + The system shall process input data within 100ms from reception. + + **Rationale**: Real-time processing required for control loop. + +.. comp_req:: Output Accuracy + :id: comp_req__seooc_test__output_accuracy + :status: valid + :tags: functional, quality, seooc_test + :safety: QM + :security: NO + + The system shall provide output with 99.9% accuracy under + nominal operating conditions. + +.. comp_req:: Data Logging + :id: comp_req__seooc_test__data_logging + :status: valid + :tags: functional, diagnostic, seooc_test + :safety: QM + :security: NO + + The system shall log all error events with timestamp and + error code to non-volatile memory. + +Safety Requirements +------------------- + +.. comp_req:: Fault Detection + :id: comp_req__seooc_test__fault_detection + :status: valid + :tags: safety, seooc_test + :safety: ASIL_B + :security: NO + :satisfies: aou_req__seooc_test__processing_load + + The system shall detect and handle fault conditions within 50ms. + + **ASIL Level**: ASIL-B + **Safety Mechanism**: Watchdog timer + plausibility checks + +.. comp_req:: Safe State Transition + :id: comp_req__seooc_test__safe_state_transition + :status: valid + :tags: safety, seooc_test + :safety: ASIL_B + :security: NO + + The system shall maintain safe state during power loss and + complete shutdown within 20ms. + + **ASIL Level**: ASIL-B + **Safe State**: All outputs disabled, error flag set + +.. comp_req:: Redundant Calculation + :id: comp_req__seooc_test__redundant_calculation + :status: valid + :tags: safety, seooc_test + :safety: ASIL_B + :security: NO + + Critical calculations shall be performed using redundant + processing paths with comparison. + + **ASIL Level**: ASIL-B + **Safety Mechanism**: Dual-channel processing + +Communication Requirements +--------------------------- + +.. comp_req:: CAN Message Transmission + :id: comp_req__seooc_test__can_message_transmission + :status: valid + :tags: functional, communication, seooc_test + :safety: QM + :security: NO + :satisfies: aou_req__seooc_test__can_bus_interface + + The system shall transmit status messages on CAN bus + every 100ms ±10ms. + +.. comp_req:: CAN Message Reception + :id: comp_req__seooc_test__can_message_reception + :status: valid + :tags: functional, communication, seooc_test + :safety: QM + :security: NO + :satisfies: aou_req__seooc_test__can_bus_interface + + The system shall process received CAN messages within 10ms. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/dependability_analysis.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/dependability_analysis.rst new file mode 100644 index 0000000..ea5b518 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/dependability_analysis.rst @@ -0,0 +1,292 @@ +Safety Analysis +=============== + +This document contains the safety analysis for the test SEooC module. + +Failure Mode and Effects Analysis (FMEA) +----------------------------------------- + +.. comp_saf_fmea:: Input Data Corruption + :id: comp_saf_fmea__seooc_test__input_data_corruption + :status: valid + :tags: fmea, safety, seooc_test + :violates: comp_arc_sta__seooc_test__input_processing_module + :fault_id: bit_flip + :failure_effect: Corrupted input data from CAN bus due to electromagnetic interference, transmission errors, or faulty sensor leading to incorrect processing results + :mitigated_by: comp_req__seooc_test__fault_detection + :sufficient: yes + + **Failure Mode**: Corrupted input data from CAN bus + + **Potential Causes**: + + * Electromagnetic interference + * Transmission errors + * Faulty sensor + + **Effects**: Incorrect processing results, potential unsafe output + + **Severity**: High (S9) + + **Occurrence**: Medium (O4) + + **Detection**: High (D2) + + **RPN**: 72 + + **Detection Method**: CRC checksum validation, sequence counter check + + **Mitigation**: Reject invalid data and enter safe state within 50ms + +.. comp_saf_fmea:: Processing Timeout + :id: comp_saf_fmea__seooc_test__processing_timeout + :status: valid + :tags: fmea, safety, seooc_test + :violates: comp_arc_sta__seooc_test__fault_detection_handling + :fault_id: timing_failure + :failure_effect: Processing exceeds time deadline due to software defect, CPU overload, or hardware fault causing system unresponsiveness + :mitigated_by: comp_req__seooc_test__fault_detection + :sufficient: yes + + **Failure Mode**: Processing exceeds time deadline + + **Potential Causes**: + + * Software defect (infinite loop) + * CPU overload + * Hardware fault + + **Effects**: System becomes unresponsive, watchdog reset + + **Severity**: Medium (S6) + + **Occurrence**: Low (O3) + + **Detection**: Very High (D1) + + **RPN**: 18 + + **Detection Method**: Hardware watchdog timer + + **Mitigation**: System reset and recovery to safe state + +.. comp_saf_fmea:: Calculation Error + :id: comp_saf_fmea__seooc_test__calculation_error + :status: valid + :tags: fmea, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :fault_id: seu + :failure_effect: Incorrect calculation result due to single event upset, register corruption, or ALU malfunction + :mitigated_by: comp_req__seooc_test__redundant_calculation + :sufficient: yes + + **Failure Mode**: Incorrect calculation result due to random hardware fault + + **Potential Causes**: + + * Single event upset (SEU) + * Register corruption + * ALU malfunction + + **Effects**: Incorrect output values + + **Severity**: High (S8) + + **Occurrence**: Very Low (O2) + + **Detection**: High (D2) + + **RPN**: 32 + + **Detection Method**: Dual-channel redundant calculation with comparison + + **Mitigation**: Discard result and use previous valid value, set error flag + +Dependent Failure Analysis (DFA) +--------------------------------- + +.. comp_saf_dfa:: System Failure Top Event + :id: comp_saf_dfa__seooc_test__system_failure_top + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: common_cause + :failure_effect: System provides unsafe output due to common cause failures affecting multiple safety mechanisms simultaneously + :mitigated_by: aou_req__seooc_test__controlled_environment + :sufficient: yes + + **Top Event**: System provides unsafe output + + **Goal**: Probability < 1e-6 per hour (ASIL-B target) + +.. comp_saf_dfa:: Hardware Failure Branch + :id: comp_saf_dfa__seooc_test__hw_failure + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: hw_common_mode + :failure_effect: Hardware component failures due to common cause (overvoltage, overtemperature) affecting multiple components + :mitigated_by: aou_req__seooc_test__operating_temperature_range, aou_req__seooc_test__supply_voltage + :sufficient: yes + + **Event**: Hardware component failure + + **Sub-events**: + + * Microcontroller failure (λ = 5e-7) + * Power supply failure (λ = 3e-7) + * CAN transceiver failure (λ = 2e-7) + + **Combined Probability**: 1.0e-6 per hour + +.. comp_saf_dfa:: Software Failure Branch + :id: comp_saf_dfa__seooc_test__sw_failure + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: sw_systematic + :failure_effect: Software defect affecting both processing channels due to systematic fault in common code base + :mitigated_by: comp_req__seooc_test__redundant_calculation + :sufficient: yes + + **Event**: Software defect leads to unsafe output + + **Sub-events**: + + * Undetected software bug (λ = 8e-6, detection coverage 90%) + * Memory corruption (λ = 1e-7) + + **Combined Probability**: 9e-7 per hour (after detection coverage) + +.. comp_saf_dfa:: External Interference Branch + :id: comp_saf_dfa__seooc_test__ext_interference + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__input_processing_module + :failure_id: emi + :failure_effect: External interference causing simultaneous malfunction of multiple components + :mitigated_by: aou_req__seooc_test__controlled_environment + :sufficient: yes + + **Event**: External interference causes malfunction + + **Sub-events**: + + * EMI beyond specification (λ = 5e-8) + * Voltage transient (λ = 2e-8, mitigation 99%) + + **Combined Probability**: 5.2e-8 per hour (after mitigation) + +**Total System Failure Probability**: 1.95e-6 per hour + +**ASIL-B Target**: < 1e-5 per hour ✓ **PASSED** + +Safety Mechanisms +----------------- + +.. comp_arc_sta:: SM: Input Validation + :id: comp_arc_sta__seooc_test__sm_input_validation + :status: valid + :tags: safety-mechanism, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + **Description**: All input data is validated before processing + + **Checks Performed**: + + * CRC-16 checksum validation + * Message sequence counter verification + * Data range plausibility checks + + **Diagnostic Coverage**: 95% + + **Reaction**: Reject invalid data, increment error counter, use last valid value + +.. comp_arc_sta:: SM: Watchdog Timer + :id: comp_arc_sta__seooc_test__sm_watchdog + :status: valid + :tags: safety-mechanism, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + **Description**: Hardware watchdog monitors software execution + + **Configuration**: + + * Timeout: 150ms + * Window watchdog: 100-140ms trigger window + * Reset delay: 10ms + + **Diagnostic Coverage**: 99% + + **Reaction**: System reset, boot to safe state + +.. comp_arc_sta:: SM: Redundant Calculation + :id: comp_arc_sta__seooc_test__sm_redundant_calc + :status: valid + :tags: safety-mechanism, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__redundant_calculation + + **Description**: Critical calculations performed in dual channels + + **Implementation**: + + * Main calculation path + * Independent shadow path + * Result comparison with tolerance check + + **Diagnostic Coverage**: 98% + + **Reaction**: On mismatch, use previous valid value, set error flag + +Safety Validation Results +-------------------------- + +.. comp_arc_dyn:: Validation: FMEA Coverage + :id: comp_arc_dyn__seooc_test__val_fmea_coverage + :status: valid + :tags: validation, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__fault_detection + + **Result**: All identified failure modes have detection mechanisms + + **Coverage**: 100% of critical failure modes + + **Status**: ✓ PASSED + +.. comp_arc_dyn:: Validation: DFA Target Achievement + :id: comp_arc_dyn__seooc_test__val_dfa_target + :status: valid + :tags: validation, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__safe_state_transition + + **Result**: System failure probability 1.95e-6 per hour + + **Target**: < 1e-5 per hour (ASIL-B) + + **Margin**: 5.1x + + **Status**: ✓ PASSED + +.. comp_arc_dyn:: Validation: Safety Mechanism Effectiveness + :id: comp_arc_dyn__seooc_test__val_sm_effectiveness + :status: valid + :tags: validation, seooc_test + :safety: ASIL_B + :security: NO + :fulfils: comp_req__seooc_test__redundant_calculation + + **Result**: Combined diagnostic coverage 97.3% + + **Target**: > 90% (ASIL-B) + + **Status**: ✓ PASSED diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/dfa.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/dfa.rst new file mode 100644 index 0000000..7b2e30d --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/dfa.rst @@ -0,0 +1,149 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Dependent Failure Analysis (DFA) +================================ + +This document contains the Dependent Failure Analysis (DFA) for the test SEooC module, +following ISO 26262 requirements for analysis of dependent failures. + +Component DFA Overview +---------------------- + +The dependent failure analysis identifies and evaluates common cause failures, +cascading failures, and dependent failures that could affect the safety of the component. + +.. comp_saf_dfa:: Common Cause Failure Analysis + :id: comp_saf_dfa__seooc_test__common_cause_analysis + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: ccf_root + :failure_effect: Common cause failures affecting multiple safety mechanisms simultaneously + :mitigated_by: aou_req__seooc_test__controlled_environment + :sufficient: yes + + **Analysis Scope**: Identification of common cause failures + + **Initiators Analyzed**: + + * Environmental conditions (temperature, EMI, vibration) + * Power supply anomalies + * Manufacturing and design defects + * Maintenance-induced failures + + **Conclusion**: All identified common cause initiators have adequate mitigation measures. + +.. comp_saf_dfa:: Power Supply Dependency + :id: comp_saf_dfa__seooc_test__power_dependency + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: power_ccf + :failure_effect: Power supply failure affecting both main and redundant processing paths + :mitigated_by: aou_req__seooc_test__supply_voltage + :sufficient: yes + + **Dependent Failure**: Power supply failure + + **Affected Elements**: + + * Main processing unit + * Redundant calculation path + * Communication interface + + **Independence Measures**: + + * Voltage monitoring with independent reference + * Brownout detection circuit + * Defined safe state on power loss + + **Residual Risk**: Acceptable (< 1e-8 per hour) + +.. comp_saf_dfa:: Clock Source Dependency + :id: comp_saf_dfa__seooc_test__clock_dependency + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: clock_ccf + :failure_effect: Clock failure causing simultaneous malfunction of timing-dependent safety mechanisms + :mitigated_by: comp_req__seooc_test__fault_detection + :sufficient: yes + + **Dependent Failure**: Clock source failure + + **Affected Elements**: + + * Watchdog timer + * Communication timing + * Task scheduling + + **Independence Measures**: + + * Internal RC oscillator as backup + * Clock monitoring unit + * Frequency range checks + + **Residual Risk**: Acceptable (< 5e-9 per hour) + +.. comp_saf_dfa:: Software Design Dependency + :id: comp_saf_dfa__seooc_test__sw_design_dependency + :status: valid + :tags: dfa, safety, seooc_test + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: sw_ccf + :failure_effect: Systematic software defect in common code base affecting both calculation paths + :mitigated_by: comp_req__seooc_test__redundant_calculation + :sufficient: yes + + **Dependent Failure**: Systematic software defect + + **Affected Elements**: + + * Main calculation algorithm + * Redundant calculation algorithm + * Result comparison logic + + **Independence Measures**: + + * Diverse implementation of redundant path + * Independent development teams + * Different compilers/toolchains for each path + + **Residual Risk**: Acceptable (< 1e-7 per hour with diversity measures) + +DFA Summary +----------- + +.. comp_saf_dfa:: DFA Summary and Conclusion + :id: comp_saf_dfa__seooc_test__dfa_summary + :status: valid + :tags: dfa, safety, seooc_test, summary + :violates: comp_arc_sta__seooc_test__data_processing_engine + :failure_id: dfa_summary + :failure_effect: Combined dependent failure probability assessment + :mitigated_by: aou_req__seooc_test__controlled_environment + :sufficient: yes + + **Total Dependent Failure Probability**: < 1.5e-7 per hour + + **ASIL-B Target for Dependent Failures**: < 1e-6 per hour + + **Margin**: 6.7x + + **Status**: ✓ PASSED + + **Conclusion**: The component design provides adequate independence between + safety mechanisms. All identified dependent failure modes have been analyzed + and appropriate mitigation measures are in place. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/dynamic_architecture.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/dynamic_architecture.rst new file mode 100644 index 0000000..33cf03f --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/dynamic_architecture.rst @@ -0,0 +1,66 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Dynamic Architecture +==================== + +This file contains the dynamic architectural design for the SEooC test component. + +.. comp_arc_dyn:: Data Processing Sequence + :id: comp_arc_dyn__seooc_test__data_processing + :security: NO + :safety: QM + :status: valid + :fulfils: comp_req__seooc_test__input_data_processing + + Sequence diagram showing the data processing flow from input to output. + + .. uml:: + + @startuml + participant "Client" as client + participant "SEooC Test Component" as main + participant "Data Processor" as processor + + client -> main : processData(input) + main -> processor : process(input) + processor --> main : result + main --> client : output + @enduml + +.. comp_arc_dyn:: Fault Handling Sequence + :id: comp_arc_dyn__seooc_test__fault_handling + :security: NO + :safety: ASIL_B + :status: valid + :fulfils: comp_req__seooc_test__fault_detection + + Sequence diagram showing the fault detection and safe state transition. + + .. uml:: + + @startuml + participant "Main Component" as main + participant "Fault Handler" as fault + participant "Safe State Manager" as safe + + main -> fault : checkHealth() + alt fault detected + fault -> safe : transitionToSafeState() + safe --> fault : safeStateConfirmed + fault --> main : faultHandled + else no fault + fault --> main : healthOK + end + @enduml diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/feature_requirements.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/feature_requirements.rst new file mode 100644 index 0000000..d1be18a --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/feature_requirements.rst @@ -0,0 +1,48 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Feature Requirements +==================== + +This file contains the feature requirements for the SEooC test module. + +.. feat_req:: Data Processing + :id: feat_req__seooc_test__data_processing + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: stkh_req__platform__data_handling + :status: valid + + The SEooC test component shall process input data and provide processed output. + +.. feat_req:: Safe State Management + :id: feat_req__seooc_test__safe_state + :reqtype: Functional + :security: NO + :safety: ASIL_B + :satisfies: stkh_req__platform__safety + :status: valid + + The SEooC test component shall transition to a safe state upon detection of a fault condition. + +.. feat_req:: CAN Communication + :id: feat_req__seooc_test__can_comm + :reqtype: Interface + :security: NO + :safety: QM + :satisfies: stkh_req__platform__communication + :status: valid + + The SEooC test component shall support CAN message transmission and reception. diff --git a/bazel/rules/rules_score/test/fixtures/seooc_test/static_architecture.rst b/bazel/rules/rules_score/test/fixtures/seooc_test/static_architecture.rst new file mode 100644 index 0000000..b81321c --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/seooc_test/static_architecture.rst @@ -0,0 +1,45 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +Static Architecture +=================== + +This file contains the static architectural design for the SEooC test component. + +.. comp_arc_sta:: SEooC Test Component + :id: comp_arc_sta__seooc_test__main + :security: NO + :safety: QM + :status: valid + :fulfils: comp_req__seooc_test__input_data_processing + + The main component of the SEooC test module providing data processing capabilities. + +.. comp_arc_sta:: Data Processor + :id: comp_arc_sta__seooc_test__data_processor + :security: NO + :safety: QM + :status: valid + :fulfils: comp_req__seooc_test__output_accuracy + + Sub-component responsible for processing input data and generating output. + +.. comp_arc_sta:: Fault Handler + :id: comp_arc_sta__seooc_test__fault_handler + :security: NO + :safety: ASIL_B + :status: valid + :fulfils: comp_req__seooc_test__fault_detection + + Sub-component responsible for detecting and handling fault conditions. diff --git a/bazel/rules/rules_score/test/fixtures/test_component_main.cc b/bazel/rules/rules_score/test/fixtures/test_component_main.cc new file mode 100644 index 0000000..578f2d5 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/test_component_main.cc @@ -0,0 +1,13 @@ +// Main implementation for test_component +#include + +// Declarations from mock libraries +extern int mock_function_1(); +extern int mock_function_2(); + +int main(int argc, char** argv) { + std::cout << "Test Component Implementation" << std::endl; + std::cout << "Mock function 1 returns: " << mock_function_1() << std::endl; + std::cout << "Mock function 2 returns: " << mock_function_2() << std::endl; + return 0; +} diff --git a/bazel/rules/rules_score/test/fixtures/test_unit_test.cc b/bazel/rules/rules_score/test/fixtures/test_unit_test.cc new file mode 100644 index 0000000..3420f20 --- /dev/null +++ b/bazel/rules/rules_score/test/fixtures/test_unit_test.cc @@ -0,0 +1,25 @@ +// Unit tests for mock libraries +#include + +// Declarations from mock libraries +extern int mock_function_1(); +extern int mock_function_2(); + +int main() { + // Test mock_function_1 + int result1 = mock_function_1(); + if (result1 != 42) { + std::cerr << "Test failed: mock_function_1() returned " << result1 << ", expected 42" << std::endl; + return 1; + } + + // Test mock_function_2 + int result2 = mock_function_2(); + if (result2 != 84) { + std::cerr << "Test failed: mock_function_2() returned " << result2 << ", expected 84" << std::endl; + return 1; + } + + std::cout << "All tests passed!" << std::endl; + return 0; +} diff --git a/bazel/rules/rules_score/test/html_generation_test.bzl b/bazel/rules/rules_score/test/html_generation_test.bzl new file mode 100644 index 0000000..39f06cd --- /dev/null +++ b/bazel/rules/rules_score/test/html_generation_test.bzl @@ -0,0 +1,223 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Test rules for sphinx_module HTML generation and dependencies.""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "SphinxModuleInfo", "SphinxNeedsInfo") + +# ============================================================================ +# Provider Tests +# ============================================================================ + +def _providers_test_impl(ctx): + """Test that sphinx_module provides the correct providers.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify required providers + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Target should provide SphinxModuleInfo", + ) + + asserts.true( + env, + DefaultInfo in target_under_test, + "Target should provide DefaultInfo", + ) + + return analysistest.end(env) + +providers_test = analysistest.make(_providers_test_impl) + +# ============================================================================ +# HTML Generation Tests +# ============================================================================ + +def _basic_html_generation_test_impl(ctx): + """Test that a simple document generates HTML output.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check that HTML directory exists + score_info = target_under_test[SphinxModuleInfo] + asserts.true( + env, + score_info.html_dir != None, + "Module should generate HTML directory", + ) + + return analysistest.end(env) + +basic_html_generation_test = analysistest.make(_basic_html_generation_test_impl) + +# ============================================================================ +# Needs.json Generation Tests +# ============================================================================ + +def _needs_generation_test_impl(ctx): + """Test that sphinx_module generates needs.json files.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check for SphinxNeedsInfo provider on _needs target + # Note: This test requires the _needs suffix target + asserts.true( + env, + DefaultInfo in target_under_test, + "Needs target should provide DefaultInfo", + ) + + return analysistest.end(env) + +needs_generation_test = analysistest.make(_needs_generation_test_impl) + +def _needs_transitive_test_impl(ctx): + """Test that needs.json files are collected transitively.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify SphinxNeedsInfo provider + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Needs target should provide SphinxNeedsInfo", + ) + + needs_info = target_under_test[SphinxNeedsInfo] + + # Check direct needs.json file + asserts.true( + env, + needs_info.needs_json_file != None, + "Should have direct needs.json file", + ) + + # Check transitive needs collection + asserts.true( + env, + needs_info.needs_json_files != None, + "Should have transitive needs.json files depset", + ) + + return analysistest.end(env) + +needs_transitive_test = analysistest.make(_needs_transitive_test_impl) + +# ============================================================================ +# Dependency and Integration Tests +# ============================================================================ + +def _module_dependencies_test_impl(ctx): + """Test that module dependencies are properly handled.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with dependencies should still generate HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with dependencies should generate HTML", + ) + + return analysistest.end(env) + +module_dependencies_test = analysistest.make(_module_dependencies_test_impl) + +def _html_merging_test_impl(ctx): + """Test that HTML from dependencies is merged correctly.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Verify merged HTML output exists + asserts.true( + env, + score_info.html_dir != None, + "Merged HTML should be generated", + ) + + return analysistest.end(env) + +html_merging_test = analysistest.make(_html_merging_test_impl) + +# ============================================================================ +# Config Generation Tests +# ============================================================================ + +def _auto_config_generation_test_impl(ctx): + """Test that conf.py is automatically generated when not provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module without explicit config should still generate HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with auto-generated config should produce HTML", + ) + + return analysistest.end(env) + +auto_config_generation_test = analysistest.make(_auto_config_generation_test_impl) + +def _explicit_config_test_impl(ctx): + """Test that explicit conf.py is used when provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with explicit config should generate HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with explicit config should produce HTML", + ) + + return analysistest.end(env) + +explicit_config_test = analysistest.make(_explicit_config_test_impl) + +# ============================================================================ +# Test Suite +# ============================================================================ + +def sphinx_module_test_suite(name): + """Create a comprehensive test suite for sphinx_module. + + Tests cover: + - Needs.json generation and transitive collection + - Module dependencies and HTML merging + + Args: + name: Name of the test suite + """ + + native.test_suite( + name = name, + tests = [ + # Needs generation + ":needs_transitive_test", + + # Dependencies and integration + ":module_dependencies_test", + ":html_merging_test", + ], + ) diff --git a/bazel/rules/rules_score/test/score_module_providers_test.bzl b/bazel/rules/rules_score/test/score_module_providers_test.bzl new file mode 100644 index 0000000..24fba52 --- /dev/null +++ b/bazel/rules/rules_score/test/score_module_providers_test.bzl @@ -0,0 +1,323 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Tests for sphinx_module providers and two-phase build system.""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "SphinxModuleInfo", "SphinxNeedsInfo") + +# ============================================================================ +# SphinxModuleInfo Provider Tests +# ============================================================================ + +def _sphinx_module_info_fields_test_impl(ctx): + """Test that SphinxModuleInfo provides all required fields.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Target should provide SphinxModuleInfo", + ) + + score_info = target_under_test[SphinxModuleInfo] + + # Verify html_dir field + asserts.true( + env, + hasattr(score_info, "html_dir"), + "SphinxModuleInfo should have html_dir field", + ) + + asserts.true( + env, + score_info.html_dir != None, + "html_dir should not be None", + ) + + return analysistest.end(env) + +sphinx_module_info_fields_test = analysistest.make(_sphinx_module_info_fields_test_impl) + +# ============================================================================ +# SphinxNeedsInfo Provider Tests +# ============================================================================ + +def _score_needs_info_fields_test_impl(ctx): + """Test that SphinxNeedsInfo provides all required fields.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Needs target should provide SphinxNeedsInfo", + ) + + needs_info = target_under_test[SphinxNeedsInfo] + + # Verify needs_json_file field (direct file) + asserts.true( + env, + hasattr(needs_info, "needs_json_file"), + "SphinxNeedsInfo should have needs_json_file field", + ) + + asserts.true( + env, + needs_info.needs_json_file != None, + "needs_json_file should not be None", + ) + + # Verify needs_json_files field (transitive depset) + asserts.true( + env, + hasattr(needs_info, "needs_json_files"), + "SphinxNeedsInfo should have needs_json_files field", + ) + + asserts.true( + env, + needs_info.needs_json_files != None, + "needs_json_files should not be None", + ) + + # Verify it's a depset + asserts.true( + env, + type(needs_info.needs_json_files) == type(depset([])), + "needs_json_files should be a depset", + ) + + return analysistest.end(env) + +score_needs_info_fields_test = analysistest.make(_score_needs_info_fields_test_impl) + +def _score_needs_transitive_collection_test_impl(ctx): + """Test that needs.json files are collected transitively.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + needs_info = target_under_test[SphinxNeedsInfo] + + # Get the list of transitive needs files + transitive_needs = needs_info.needs_json_files.to_list() + + # Should have at least the direct needs file + asserts.true( + env, + len(transitive_needs) >= 1, + "Should have at least the direct needs.json file", + ) + + # Direct file should be in the transitive set + direct_file = needs_info.needs_json_file + asserts.true( + env, + direct_file in transitive_needs, + "Direct needs.json file should be in transitive collection", + ) + + return analysistest.end(env) + +score_needs_transitive_collection_test = analysistest.make(_score_needs_transitive_collection_test_impl) + +def _score_needs_with_deps_test_impl(ctx): + """Test that needs.json files include dependencies.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + needs_info = target_under_test[SphinxNeedsInfo] + transitive_needs = needs_info.needs_json_files.to_list() + + # Module with dependencies should have multiple needs files + # (its own + dependencies) + asserts.true( + env, + len(transitive_needs) >= 1, + "Module with dependencies should collect transitive needs.json files", + ) + + return analysistest.end(env) + +score_needs_with_deps_test = analysistest.make(_score_needs_with_deps_test_impl) + +# ============================================================================ +# Two-Phase Build Tests +# ============================================================================ + +def _two_phase_needs_first_test_impl(ctx): + """Test that Phase 1 (needs generation) works independently.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify SphinxNeedsInfo provider + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Phase 1 should provide SphinxNeedsInfo", + ) + + # Verify DefaultInfo with needs.json output + asserts.true( + env, + DefaultInfo in target_under_test, + "Phase 1 should provide DefaultInfo", + ) + + default_info = target_under_test[DefaultInfo] + files = default_info.files.to_list() + + # Should have at least one file (needs.json) + asserts.true( + env, + len(files) >= 1, + "Phase 1 should output needs.json file", + ) + + return analysistest.end(env) + +two_phase_needs_first_test = analysistest.make(_two_phase_needs_first_test_impl) + +def _two_phase_html_second_test_impl(ctx): + """Test that Phase 2 (HTML generation) works with needs from Phase 1.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Verify SphinxModuleInfo provider + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Phase 2 should provide SphinxModuleInfo", + ) + + score_info = target_under_test[SphinxModuleInfo] + + # Verify HTML output + asserts.true( + env, + score_info.html_dir != None, + "Phase 2 should generate HTML directory", + ) + + return analysistest.end(env) + +two_phase_html_second_test = analysistest.make(_two_phase_html_second_test_impl) + +# ============================================================================ +# Config Generation Tests +# ============================================================================ + +def _config_auto_generation_test_impl(ctx): + """Test that conf.py is auto-generated when not provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module without explicit config should still build + asserts.true( + env, + score_info.html_dir != None, + "Auto-generated config should allow HTML generation", + ) + + return analysistest.end(env) + +config_auto_generation_test = analysistest.make(_config_auto_generation_test_impl) + +def _config_explicit_usage_test_impl(ctx): + """Test that explicit conf.py is used when provided.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with explicit config should build + asserts.true( + env, + score_info.html_dir != None, + "Explicit config should allow HTML generation", + ) + + return analysistest.end(env) + +config_explicit_usage_test = analysistest.make(_config_explicit_usage_test_impl) + +# ============================================================================ +# Dependency Handling Tests +# ============================================================================ + +def _deps_html_merging_test_impl(ctx): + """Test that HTML from dependencies is merged into output.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + score_info = target_under_test[SphinxModuleInfo] + + # Module with dependencies should generate merged HTML + asserts.true( + env, + score_info.html_dir != None, + "Module with dependencies should generate merged HTML", + ) + + return analysistest.end(env) + +deps_html_merging_test = analysistest.make(_deps_html_merging_test_impl) + +def _deps_needs_collection_test_impl(ctx): + """Test that needs from dependencies are collected.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + needs_info = target_under_test[SphinxNeedsInfo] + transitive_needs = needs_info.needs_json_files.to_list() + + # Should collect needs from dependencies + asserts.true( + env, + len(transitive_needs) >= 1, + "Should collect needs.json from dependencies", + ) + + return analysistest.end(env) + +deps_needs_collection_test = analysistest.make(_deps_needs_collection_test_impl) + +# ============================================================================ +# Test Suite +# ============================================================================ + +def sphinx_module_providers_test_suite(name): + """Create a test suite for sphinx_module providers and build phases. + + Tests cover: + - Transitive needs.json collection + - Dependency handling (HTML merging, needs collection) + + Args: + name: Name of the test suite + """ + + native.test_suite( + name = name, + tests = [ + # Provider tests + ":score_needs_with_deps_test", + + # Dependency tests + ":deps_html_merging_test", + ":deps_needs_collection_test", + ], + ) diff --git a/bazel/rules/rules_score/test/seooc_test.bzl b/bazel/rules/rules_score/test/seooc_test.bzl new file mode 100644 index 0000000..a88a637 --- /dev/null +++ b/bazel/rules/rules_score/test/seooc_test.bzl @@ -0,0 +1,135 @@ +""" +Test suite for dependable_element macro. + +Tests the SEooC (Safety Element out of Context) functionality including: +- Index generation with artifact references +- Integration with sphinx_module +- Sphinx-needs cross-referencing +- HTML output generation +""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score/private:sphinx_module.bzl", "SphinxModuleInfo", "SphinxNeedsInfo") + +def _seooc_index_generation_test_impl(ctx): + """Test that dependable_element generates proper index.rst file.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Get the generated index file + files = target_under_test[DefaultInfo].files.to_list() + + # Find index.rst in the output files + index_file = None + for f in files: + if f.basename == "index.rst": + index_file = f + break + + # Assert index file exists + asserts.true( + env, + index_file != None, + "Expected index.rst to be generated by dependable_element_index rule", + ) + + return analysistest.end(env) + +seooc_index_generation_test = analysistest.make( + impl = _seooc_index_generation_test_impl, +) + +def _seooc_artifacts_copied_test_impl(ctx): + """Test that all dependable element artifacts are copied to output directory.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + files = target_under_test[DefaultInfo].files.to_list() + + # Expected artifact basenames - these come from the SphinxSourcesInfo providers + # and are filtered to only include .rst/.md files for the index + expected_artifacts = [ + "component_requirements.rst", # from requirements + "dfa.rst", # from :dependability_analysis_target + ] + + # Check each artifact exists + actual_basenames = [f.basename for f in files] + for artifact in expected_artifacts: + asserts.true( + env, + artifact in actual_basenames, + "Expected artifact '{}' to be in output files".format(artifact), + ) + + return analysistest.end(env) + +seooc_artifacts_copied_test = analysistest.make( + impl = _seooc_artifacts_copied_test_impl, +) + +def _seooc_sphinx_module_generated_test_impl(ctx): + """Test that dependable_element generates sphinx_module with HTML output.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check that SphinxModuleInfo provider exists + asserts.true( + env, + SphinxModuleInfo in target_under_test, + "Expected dependable_element to provide SphinxModuleInfo from sphinx_module", + ) + + return analysistest.end(env) + +seooc_sphinx_module_generated_test = analysistest.make( + impl = _seooc_sphinx_module_generated_test_impl, +) + +def _seooc_needs_provider_test_impl(ctx): + """Test that dependable_element generates needs provider for cross-referencing.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check that SphinxNeedsInfo provider exists + asserts.true( + env, + SphinxNeedsInfo in target_under_test, + "Expected dependable_element_needs to provide SphinxNeedsInfo", + ) + + return analysistest.end(env) + +seooc_needs_provider_test = analysistest.make( + impl = _seooc_needs_provider_test_impl, +) + +def _seooc_description_test_impl(ctx): + """Test that SEooC includes description in generated index.rst.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Get the generated index file + files = target_under_test[DefaultInfo].files.to_list() + + # Find index.rst + index_file = None + for f in files: + if f.basename == "index.rst": + index_file = f + break + + # Note: We can't easily read file contents in analysis test, + # but we can verify the file exists. The description content + # would be validated through integration tests or manual inspection. + asserts.true( + env, + index_file != None, + "Expected index.rst to exist for description validation", + ) + + return analysistest.end(env) + +seooc_description_test = analysistest.make( + impl = _seooc_description_test_impl, +) diff --git a/bazel/rules/rules_score/test/unit_component_test.bzl b/bazel/rules/rules_score/test/unit_component_test.bzl new file mode 100644 index 0000000..f62f4b8 --- /dev/null +++ b/bazel/rules/rules_score/test/unit_component_test.bzl @@ -0,0 +1,175 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +""" +Test suite for unit, component, and dependable_element rules. + +Tests the new hierarchical structure for S-CORE process compliance: +- Unit: smallest testable element +- Component: collection of units +- Dependable Element: complete SEooC with full documentation +""" + +load("@bazel_skylib//lib:unittest.bzl", "analysistest", "asserts") +load("//bazel/rules/rules_score:providers.bzl", "ComponentInfo", "SphinxSourcesInfo", "UnitInfo") + +# ============================================================================ +# Unit Tests +# ============================================================================ + +def _unit_provider_test_impl(ctx): + """Test that unit rule provides UnitInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check UnitInfo provider exists + asserts.true( + env, + UnitInfo in target_under_test, + "Unit should provide UnitInfo", + ) + + unit_info = target_under_test[UnitInfo] + + # Verify fields are populated + asserts.true( + env, + unit_info.name != None, + "UnitInfo should have name field", + ) + + asserts.true( + env, + unit_info.unit_design != None, + "UnitInfo should have unit_design field", + ) + + asserts.true( + env, + unit_info.implementation != None, + "UnitInfo should have implementation field", + ) + + asserts.true( + env, + unit_info.tests != None, + "UnitInfo should have tests field", + ) + + return analysistest.end(env) + +unit_provider_test = analysistest.make(_unit_provider_test_impl) + +def _unit_sphinx_sources_test_impl(ctx): + """Test that unit rule provides SphinxSourcesInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check SphinxSourcesInfo provider exists + asserts.true( + env, + SphinxSourcesInfo in target_under_test, + "Unit should provide SphinxSourcesInfo", + ) + + return analysistest.end(env) + +unit_sphinx_sources_test = analysistest.make(_unit_sphinx_sources_test_impl) + +# ============================================================================ +# Component Tests +# ============================================================================ + +def _component_provider_test_impl(ctx): + """Test that component rule provides ComponentInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check ComponentInfo provider exists + asserts.true( + env, + ComponentInfo in target_under_test, + "Component should provide ComponentInfo", + ) + + comp_info = target_under_test[ComponentInfo] + + # Verify fields are populated + asserts.true( + env, + comp_info.name != None, + "ComponentInfo should have name field", + ) + + asserts.true( + env, + comp_info.requirements != None, + "ComponentInfo should have component_requirements field", + ) + + asserts.true( + env, + comp_info.components != None, + "ComponentInfo should have components field", + ) + + asserts.true( + env, + comp_info.tests != None, + "ComponentInfo should have tests field", + ) + + return analysistest.end(env) + +component_provider_test = analysistest.make(_component_provider_test_impl) + +def _component_sphinx_sources_test_impl(ctx): + """Test that component rule provides SphinxSourcesInfo.""" + env = analysistest.begin(ctx) + target_under_test = analysistest.target_under_test(env) + + # Check SphinxSourcesInfo provider exists + asserts.true( + env, + SphinxSourcesInfo in target_under_test, + "Component should provide SphinxSourcesInfo", + ) + + return analysistest.end(env) + +component_sphinx_sources_test = analysistest.make(_component_sphinx_sources_test_impl) + +# ============================================================================ +# Dependable Element Tests +# ============================================================================ +# Note: Provider tests removed as dependable_element no longer creates a +# separate provider target. The main target is now a sphinx_module. + +# ============================================================================ +# Test Suite Definition +# ============================================================================ + +def unit_component_test_suite(name): + """Create test suite for unit, component, and dependable_element rules. + + Args: + name: Name of the test suite + """ + native.test_suite( + name = name, + tests = [ + ":unit_provider_test", + ":unit_sphinx_sources_test", + ":component_provider_test", + ":component_sphinx_sources_test", + ], + ) diff --git a/coverage/README.md b/coverage/README.md index dbc46f9..c25115c 100644 --- a/coverage/README.md +++ b/coverage/README.md @@ -108,6 +108,26 @@ and point the report generator to the directory: bazel run //:rust_coverage -- --profraw-dir /path/to/profraw ``` +## Running from an integration workspace (external labels) + +You can invoke the report generator from a top-level integration repo (for +example, reference_integration) while targeting tests that live in external +modules. Use a query that references external labels and run the wrapper +target from the integration repo: + +```bash +bazel run //images/linux_x86_64:per_rust_coverage --config=ferrocene-coverage -- \ + --query 'kind("rust_test", @score_persistency//src/rust/...)' +``` + +If the `.profraw` files were produced in that same workspace, the reporter +auto-discovers them under `bazel-testlogs/` (including +`bazel-testlogs/external/+` for external labels), so you do not need +to pass `--profraw-dir`. If they were copied from elsewhere, pass +`--profraw-dir` to point to the directory containing the `.profraw` files. +External source paths are resolved via Bazel's output_base so +`external//...` paths are handled. + ## Coverage Gate Behavior `--min-line-coverage` applies per target. If any target is below the minimum, diff --git a/coverage/ferrocene_report.sh b/coverage/ferrocene_report.sh index 7acd343..298b665 100755 --- a/coverage/ferrocene_report.sh +++ b/coverage/ferrocene_report.sh @@ -335,6 +335,12 @@ label_to_path() { local label local pkg="${2:-}" label="$(strip_quotes "$1")" + # External labels look like "@repo//pkg:target". Strip the repo prefix so + # path conversion works for both workspace and external repos. + if [[ "${label}" == @*//?* ]]; then + label="//${label#*//}" + fi + # If the label still starts with "@", we do not know how to map it to a path. if [[ "${label}" == @* ]]; then echo "" return 0 @@ -403,6 +409,10 @@ label_pkg() { if [[ "${label}" =~ ^Label\\(\"(.*)\"\\)$ ]]; then label="${BASH_REMATCH[1]}" fi + # External labels include "@repo//". Strip the repo prefix to get the package. + if [[ "${label}" == @*//?* ]]; then + label="//${label#*//}" + fi if [[ "${label}" == //* ]]; then local rest="${label#//}" echo "${rest%%:*}" @@ -411,6 +421,37 @@ label_pkg() { echo "" } +# Resolve the "external/" prefix for an external label. +# We use bazel query --output=location to find a real file path, then extract +# the repo name from either "external//..." or ".../external//...". +workspace_root_for_label() { + local label + label="$(strip_quotes "$1")" + if [[ "${label}" =~ ^Label\\(\"(.*)\"\\)$ ]]; then + label="${BASH_REMATCH[1]}" + fi + # Non-external labels live in the workspace, so no external prefix is needed. + if [[ "${label}" != @* ]]; then + echo "" + return 0 + fi + # The location output may be absolute; handle both direct external paths + # and absolute paths that contain "/external//". + local location + location="$(bazel query --output=location "${label}" 2>/dev/null | head -n 1)" + location="${location%%:*}" + local rest="" + if [[ "${location}" == external/* ]]; then + rest="${location#external/}" + elif [[ "${location}" == */external/* ]]; then + rest="${location#*/external/}" + fi + local repo="${rest%%/*}" + if [[ -n "${repo}" ]]; then + echo "external/${repo}" + fi +} + resolve_runfile() { local bin="$1" local name="$2" @@ -563,10 +604,20 @@ for label in "${targets[@]}"; do pkg="${pkg%%:*}" name="${label##*:}" + # Resolve the package path and repo root so test.outputs works for + # workspace labels (//pkg:target) and external labels (@repo//pkg:target). + label_pkg_path="$(label_pkg "${label}")" + if [[ -z "${label_pkg_path}" ]]; then + label_pkg_path="${pkg}" + fi + label_repo_root="$(workspace_root_for_label "${label}")" + if [[ -n "${PROFRAW_DIR}" ]]; then test_out_dir="${PROFRAW_DIR}" + elif [[ -n "${label_repo_root}" ]]; then + test_out_dir="${PROFRAW_ROOT}/${label_repo_root}/${label_pkg_path}/${name}/test.outputs" else - test_out_dir="${PROFRAW_ROOT}/${pkg}/${name}/test.outputs" + test_out_dir="${PROFRAW_ROOT}/${label_pkg_path}/${name}/test.outputs" fi shopt -s nullglob @@ -622,46 +673,77 @@ for label in "${targets[@]}"; do if [[ -z "${crate_pkg}" ]]; then crate_pkg="${pkg}" fi + repo_root="$(workspace_root_for_label "${crate_target}")" crate_root_raw="$(query_labels_attr "${crate_target}" "crate_root")" if [[ -z "${crate_root_raw}" ]]; then crate_root_raw="$(query_attr_build "${crate_target}" "crate_root")" fi crate_root="$(label_to_path "${crate_root_raw}" "${crate_pkg}")" - if [[ -z "${crate_root}" ]]; then - # Prefer explicit srcs for rust_test targets when no crate attribute is set. - srcs_label="$(query_labels_attr "${label}" "srcs")" - if [[ -n "${srcs_label}" ]]; then - srcs_path="$(label_to_path "${srcs_label}" "${pkg}")" - if [[ -n "${srcs_path}" && "${srcs_path}" == *.rs ]]; then - crate_root="${srcs_path}" - fi - fi - fi + # First, try conventional crate roots to avoid choosing a random source file. if [[ -z "${crate_root}" ]]; then for candidate in \ "${crate_pkg}/src/lib.rs" \ "${crate_pkg}/src/main.rs" \ "${crate_pkg}/lib.rs" \ "${crate_pkg}/main.rs"; do - if [[ -f "${workspace}/${candidate}" ]]; then + if [[ -n "${repo_root}" ]]; then + if [[ -f "${exec_root}/${repo_root}/${candidate}" ]]; then + crate_root="${candidate}" + break + fi + elif [[ -f "${workspace}/${candidate}" ]]; then crate_root="${candidate}" break fi done - if [[ -z "${crate_root}" ]]; then - echo "Skipping ${label}: could not determine crate root for ${crate_target}" >&2 - continue + fi + # If there is no conventional root, fall back to the crate's declared srcs. + if [[ -z "${crate_root}" ]]; then + srcs_label="$(query_labels_attr "${crate_target}" "srcs")" + if [[ -n "${srcs_label}" ]]; then + srcs_path="$(label_to_path "${srcs_label}" "${crate_pkg}")" + if [[ -n "${srcs_path}" && "${srcs_path}" == *.rs ]]; then + crate_root="${srcs_path}" + fi fi fi - + if [[ -z "${crate_root}" ]]; then + # As a last resort, try rust_test srcs when the test target defines them. + # This handles rust_test targets that directly list their sources. + srcs_label="$(query_labels_attr "${label}" "srcs")" + if [[ -n "${srcs_label}" ]]; then + srcs_path="$(label_to_path "${srcs_label}" "${pkg}")" + if [[ -n "${srcs_path}" && "${srcs_path}" == *.rs ]]; then + crate_root="${srcs_path}" + fi + fi + fi + # Without a crate root, symbol-report cannot build the crate. + if [[ -z "${crate_root}" ]]; then + echo "Skipping ${label}: could not determine crate root for ${crate_target}" >&2 + continue + fi + # Convert the crate root into an absolute path. External repos live under + # exec_root/external/ (symlinked from output_base), while workspace + # sources live under $workspace. if [[ "${crate_root}" != /* ]]; then - crate_root="${workspace}/${crate_root}" + if [[ -n "${repo_root}" && "${crate_root}" != "${repo_root}/"* ]]; then + crate_root="${repo_root}/${crate_root}" + fi + if [[ "${crate_root}" == external/* ]]; then + crate_root="${exec_root}/${crate_root}" + else + crate_root="${workspace}/${crate_root}" + fi fi + # Keep a workspace- or exec_root-relative path for reporting and mapping. crate_root_rel="${crate_root}" if [[ "${crate_root_rel}" == "${workspace}/"* ]]; then crate_root_rel="${crate_root_rel#${workspace}/}" + elif [[ "${crate_root_rel}" == "${exec_root}/"* ]]; then + crate_root_rel="${crate_root_rel#${exec_root}/}" fi crate_name="$(normalize_scalar "$(query_attr_build "${crate_target}" "crate_name")")" @@ -754,6 +836,7 @@ for label in "${targets[@]}"; do remap_args+=("--remap-path-prefix=${workspace}/=.") fi + # Pass the absolute crate root; relative external paths fail to canonicalize. ( cd "${exec_root}" SYMBOL_REPORT_OUT="${symbol_report_json}" \ @@ -767,7 +850,7 @@ for label in "${targets[@]}"; do --sysroot "${sysroot_arg}" \ -o /dev/null \ "${remap_args[@]}" \ - "${crate_root_rel}" + "${crate_root}" ) # Normalize symbol-report paths to be workspace-relative (like the demo), @@ -779,18 +862,23 @@ for label in "${targets[@]}"; do bin_arg="${bin_rel}" fi - # Blanket expects report paths to resolve under --ferrocene-src; add a - # path-equivalence so workspace files map cleanly to report entries. + # Blanket resolves report filenames by joining them with --ferrocene-src. + # Use a path-equivalence so source files map cleanly to report entries. + # For external crates, profiler paths are absolute under output_base/external, + # so we point --ferrocene-src there instead of the workspace. ferrocene_src="${workspace}" + if [[ "${crate_root_rel}" == external/* ]]; then + ferrocene_src="${output_base}" + fi crate_root_dir_rel="$(dirname "${crate_root_rel}")" path_prefix="${crate_root_rel%%/*}" if [[ -n "${path_prefix}" && "${path_prefix}" != "${crate_root_rel}" && "${path_prefix}" != "." ]]; then # Broader remap to cover any file under the top-level directory (e.g. src/...). - path_equiv_args=("--path-equivalence" "${path_prefix},${workspace}/${path_prefix}") + path_equiv_args=("--path-equivalence" "${path_prefix},${ferrocene_src}/${path_prefix}") elif [[ "${crate_root_dir_rel}" == "." ]]; then - path_equiv_args=("--path-equivalence" ".,${workspace}") + path_equiv_args=("--path-equivalence" ".,${ferrocene_src}") else - path_equiv_args=("--path-equivalence" "${crate_root_dir_rel},${workspace}/${crate_root_dir_rel}") + path_equiv_args=("--path-equivalence" "${crate_root_dir_rel},${ferrocene_src}/${crate_root_dir_rel}") fi ( diff --git a/sbom/BUILD.bazel b/sbom/BUILD.bazel new file mode 100644 index 0000000..ec94784 --- /dev/null +++ b/sbom/BUILD.bazel @@ -0,0 +1,33 @@ +# SBOM Generation Package +# +# This package provides Bazel-native SBOM (Software Bill of Materials) generation +# using module extensions and aspects. +# +# Public API: +# - load("@score_tooling//sbom:defs.bzl", "sbom") +# - use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") + +load("@rules_python//python:defs.bzl", "py_library") + +package(default_visibility = ["//visibility:public"]) + +exports_files([ + "defs.bzl", + "extensions.bzl", +]) + +# Filegroup for all SBOM-related bzl files +filegroup( + name = "bzl_files", + srcs = [ + "defs.bzl", + "extensions.bzl", + "//sbom/internal:bzl_files", + ], +) + +# npm wrapper (uses system-installed npm from PATH) +sh_binary( + name = "npm_wrapper", + srcs = ["npm_wrapper.sh"], +) diff --git a/sbom/SBOM_Readme.md b/sbom/SBOM_Readme.md new file mode 100644 index 0000000..b242902 --- /dev/null +++ b/sbom/SBOM_Readme.md @@ -0,0 +1,340 @@ +# SBOM Setup Guide + +## 1. Configure MODULE.bazel + +Add the SBOM metadata extension in your **root** MODULE.bazel (e.g. `reference_integration/MODULE.bazel`): + +```starlark +# Enable SBOM metadata collection from all modules in the dependency graph +sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") +use_repo(sbom_ext, "sbom_metadata") +``` + +**For modules using `local_path_override` or `git_override`**, also add a `track_module` tag for each such module. Without this, their versions cannot be auto-detected and will appear as `unknown` in the SBOM: + +```starlark +# Required for modules with local_path_override or git_override (no registry version) +sbom_ext.track_module(name = "score_baselibs") +sbom_ext.track_module(name = "score_communication") +sbom_ext.track_module(name = "score_orchestrator") +# ... one entry per overridden module +``` + +No manual license entries are needed — all license metadata is collected automatically. + +## 2. Add SBOM Target in BUILD + +```starlark +load("@score_tooling//sbom:defs.bzl", "sbom") + +sbom( + name = "my_sbom", + targets = ["//my/app:binary"], + component_name = "my_application", + component_version = "1.0.0", + # Rust crate metadata from multiple MODULE.bazel.lock files + module_lockfiles = [ + "@score_crates//:MODULE.bazel.lock", + ":MODULE.bazel.lock", # workspace's own lockfile for additional crates + ], + auto_crates_cache = True, + auto_cdxgen = True, # Requires system-installed npm/cdxgen (see below) +) +``` + +### Parameters + +| Parameter | Default | Description | +| :--- | :--- | :--- | +| `targets` | _(required)_ | Bazel targets to include in SBOM | +| `component_name` | rule name | Main component name | +| `component_version` | `""` | Version string | +| `output_formats` | `["spdx", "cyclonedx"]` | Output formats: `"spdx"` and/or `"cyclonedx"` | +| `module_lockfiles` | `[]` | List of MODULE.bazel.lock files for Rust crate metadata. Pass `@score_crates//:MODULE.bazel.lock` (centralized crate specs) and `:MODULE.bazel.lock` (workspace-local crates). Each lockfile is parsed for crate name, version, and sha256. | +| `cargo_lockfile` | `None` | Optional Cargo.lock for additional crates. Usually not needed when `module_lockfiles` covers all crates. | +| `auto_crates_cache` | `True` | Auto-generate crates cache when `module_lockfiles` or `cargo_lockfile` is set | +| `auto_cdxgen` | `False` | Auto-run cdxgen when no `cdxgen_sbom` is provided | +| `cdxgen_sbom` | `None` | Label to a pre-generated CycloneDX JSON from cdxgen for C++ enrichment | +| `producer_name` | `"Eclipse Foundation"` | SBOM producer organization name (appears in `metadata.supplier`) | +| `producer_url` | `"https://projects.eclipse.org/projects/automotive.score"` | SBOM producer URL | +| `sbom_authors` | `[]` | Author strings for `metadata.authors` (e.g. `["Eclipse SCORE Team"]`) | +| `generation_context` | `""` | Lifecycle phase: `"pre-build"`, `"build"`, or `"post-build"` | +| `sbom_tools` | `[]` | Additional tool names added to `metadata.tools` | +| `namespace` | `"https://eclipse.dev/score"` | Base URI for the SPDX document namespace | +| `exclude_patterns` | _(build tools)_ | List of repo name substrings to exclude (e.g. `rules_rust`, `bazel_tools`). Defaults exclude common Bazel build-tool repos. | +| `dep_module_files` | `[]` | Additional MODULE.bazel files from dependency modules for version extraction | + +## 3. Install Prerequisites + +### For `auto_crates_cache` (Rust crate metadata) + +License data for Rust crates is fetched via [dash-license-scan](https://github.com/eclipse-score/dash-license-scan). Description and supplier metadata is fetched from the crates.io API (parallel, ~10 concurrent requests). Requires: + +```bash +# Install uv (Python package runner) +curl -LsSf https://astral.sh/uv/install.sh | sh + +# Install Java >= 11 (required by Eclipse dash-licenses JAR) +# Option 1: Ubuntu/Debian +sudo apt install openjdk-11-jre-headless + +# Option 2: Fedora/RHEL +sudo dnf install java-11-openjdk-headless + +# Verify installation +uvx dash-license-scan --help +java -version +``` + +### For `auto_cdxgen` (C++ dependency scanning) + +If using `auto_cdxgen = True` to automatically scan C++ dependencies: + +```bash +# Install Node.js and cdxgen globally +# Option 1: Using nvm (recommended) +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash +source ~/.bashrc +nvm install 20 +npm install -g @cyclonedx/cdxgen + +# Verify installation +which cdxgen +cdxgen --version +``` + +**Note:** If you don't have npm/cdxgen installed, set `auto_cdxgen = False` in your SBOM configuration. +When `auto_cdxgen` is enabled, the SBOM rule runs cdxgen against the repository path of the selected Bazel targets (for example `external/score_baselibs+` for `@score_baselibs//...` targets). + +## 4. Build + +```bash +bazel build //:my_sbom +``` + +## 5. Output + +Generated files in `bazel-bin/`: + +- `my_sbom.spdx.json` — SPDX 2.3 format +- `my_sbom.cdx.json` — CycloneDX 1.6 format +- `my_sbom_crates_metadata.json` — Auto-generated Rust crate cache (if `auto_crates_cache = True`) +- `my_sbom_cdxgen.cdx.json` — C++ dependencies from cdxgen (if `auto_cdxgen = True`) + +--- + +## Toolchain Components + +### Core Tools + +| Tool | Role | Required For | +|------|------|--------------| +| [Bazel](https://bazel.build) | Build system — rules, aspects, and module extensions drive dependency discovery and SBOM generation | All SBOM generation | +| [Python 3](https://www.python.org) | Runtime for the SBOM generator, formatters, and metadata extraction scripts | All SBOM generation | +| [dash-license-scan](https://github.com/eclipse-score/dash-license-scan) | Rust crate license metadata via Eclipse Foundation + ClearlyDefined | Rust metadata extraction when `auto_crates_cache = True` | +| [uv / uvx](https://docs.astral.sh/uv/) | Python package runner for dash-license-scan | Rust metadata extraction when `auto_crates_cache = True` | +| [Java >= 11](https://openjdk.org) | Runtime for Eclipse dash-licenses JAR (used by dash-license-scan) | Rust metadata extraction when `auto_crates_cache = True` | +| [crates.io API](https://crates.io) | Description and supplier metadata for Rust crates (parallel fetching) | Rust metadata extraction when `auto_crates_cache = True` | +| [@cyclonedx/cdxgen](https://github.com/CycloneDX/cdxgen) | C++ dependency scanner and license discovery tool | C++ metadata extraction when `auto_cdxgen = True` | +| [Node.js / npm](https://nodejs.org) | Runtime for cdxgen | C++ metadata extraction when `auto_cdxgen = True` | + +### Architecture + +``` + ┌──────────────────┐ + │ Bazel build │ + └────────┬─────────┘ + │ + ┌───────────────┼───────────────┐ + │ │ │ + v v v + MODULE.bazel Bazel targets Lockfiles + │ │ │ + v v v + metadata.json _deps.json License + metadata + (module versions) (dep graph, (dash-license-scan + dep edges) + crates.io API + │ │ + cdxgen) + └───────────────┼───────────────┘ + │ + v + ┌──────────────────┐ + │ sbom_generator │ + │ (match & resolve)│ + └────────┬─────────┘ + │ + ┌────────┴────────┐ + v v + .spdx.json .cdx.json +``` + +**Data sources:** +- **Bazel module graph** — version, PURL, and registry info for `bazel_dep` modules +- **Bazel aspect** — transitive dependency graph and external repo dependency edges +- **dash-license-scan** — Rust crate licenses via Eclipse Foundation + ClearlyDefined (from MODULE.bazel.lock or Cargo.lock) +- **crates.io API** — description and supplier for Rust crates (supplier extracted from GitHub repository URL) +- **cdxgen** — C++ dependency licenses, descriptions, and suppliers (from source tree scan) + +### Automatically Populated Fields + +The following SBOM fields are populated automatically without manual configuration: + +| Field | Rust Crates | C++ Dependencies | Bazel Modules | +|-------|-------------|------------------|---------------| +| License | dash-license-scan | cdxgen | — | +| Description | crates.io API | cdxgen (falls back to `"Missing"` when unavailable) | — | +| Supplier | crates.io API (GitHub org from repository URL) | cdxgen | — | +| Version | MODULE.bazel.lock / Cargo.lock | cdxgen (with MODULE.bazel.lock fallback for Bazel modules) | Bazel module graph | +| Checksum (SHA-256) | MODULE.bazel.lock / Cargo.lock | BCR `source.json` `sha256` + cdxgen `hashes` (when present) | http_archive `sha256` + MODULE.bazel.lock BCR `source.json` | +| PURL | Auto-generated (`pkg:cargo/...`) | cdxgen | Auto-generated | + +### Platform-Specific Crate Handling + +Crates with platform-specific suffixes (e.g. `iceoryx2-bb-lock-free-qnx8`) that don't exist on crates.io are handled by stripping the suffix and falling back to the base crate name for description and supplier lookup. + +### What Is Excluded from SBOM + +- Dependencies not in the transitive dep graph of your `targets` +- Build toolchain repos matching `exclude_patterns` (e.g. `rules_rust`, `rules_cc`, `bazel_tools`, `platforms`) + +## Example + +See [reference_integration/BUILD](../../reference_integration/BUILD) for working SBOM targets and [reference_integration/MODULE.bazel](../../reference_integration/MODULE.bazel) for the metadata extension setup. + +Each SBOM target uses `module_lockfiles` to provide crate version/checksum data from multiple lockfiles and `auto_crates_cache = True` to automatically fetch license, description, and supplier data. + +### score_crates Integration + +The `score_crates` module provides centralized Rust crate management for the SCORE project. Its `MODULE.bazel.lock` file contains the majority of resolved crate specs (name, version, sha256) generated by `cargo-bazel`. The workspace's own `MODULE.bazel.lock` may contain additional crates not in `score_crates`. Both lockfiles should be passed via `module_lockfiles` to ensure complete coverage. + +## CISA 2025 Element Coverage (CycloneDX) + +The table below maps the CISA 2025 draft elements to CycloneDX fields and notes current support in this SBOM generator. + +| CISA 2025 Element | CycloneDX Field (JSON) | Support | Notes | +|---|---|---|---| +| Software Producer | `components[].supplier.name` | **Supported** | Root producer is set in `metadata.component.supplier`. For components, supplier is auto-extracted from crates.io repository URL (Rust) or from cdxgen (C++); in the current baselibs example, Boost BCR modules have no supplier because cdxgen does not provide one. | +| Component Name | `components[].name` | **Supported** | Single name; aliases are stored as `properties` with `cdx:alias`. | +| Component Version | `components[].version` | **Supported** | If unknown and source is git repo with `commit_date`, version can fall back to that date. | +| Software Identifiers | `components[].purl`, `components[].cpe` | **Supported (PURL)** / **Optional (CPE)** | PURL is generated for all components. CPE is optional if provided in metadata. | +| Component Hash | `components[].hashes` | **Supported** | SHA-256 is populated for Rust crates (from lockfiles) and for BCR / http_archive / some cdxgen-backed C++ components. In the current examples, Rust crates and Boost BCR modules have hashes; some QNX-specific crates and other C++ deps may not. | +| License | `components[].licenses` | **Supported (Rust) / Best-effort (C++)** | Rust licenses are auto-fetched via dash-license-scan and are present for most crates (e.g. Kyron SBOM); some crates like `iceoryx2-*` may still lack licenses. For C++ components, licenses are only present when cdxgen (or an upstream SBOM) provides them; in the current baselibs example, Boost BCR modules have empty `licenses`. Compound SPDX expressions (AND/OR) use the `expression` field per CycloneDX spec. | +| Component Description | `components[].description` | **Supported** | Auto-fetched from crates.io API (Rust) and cdxgen (C++), with C++ falling back to `"Missing"` when no description is available (as seen for Boost in the baselibs SBOM). | +| Dependency Relationship | `dependencies` | **Supported** | Uses external repo dependency edges from Bazel aspect; both Kyron and baselibs SBOMs include a dependency graph for the root component. | +| Pedigree / Derivation | `components[].pedigree` | **Supported (manual)** | Must be provided via `sbom_ext.license()` with `pedigree_*` fields. Not auto-deduced. | +| SBOM Author | `metadata.authors` | **Supported** | Set via `sbom_authors` in `sbom()` rule (e.g. `"Eclipse SCORE Team"` in the examples). | +| Tool Name | `metadata.tools` | **Supported** | Always includes `score-sbom-generator`; extra tools can be added via `sbom_tools`. | +| Timestamp | `metadata.timestamp` | **Supported** | ISO 8601 UTC timestamp generated at build time. | +| Generation Context | `metadata.lifecycles` | **Supported** | Set via `generation_context` in `sbom()` rule (`pre-build`, `build`, `post-build`). | + +### SPDX-Specific Notes + +- **LicenseRef-* declarations**: Any `LicenseRef-*` identifiers used in license fields are automatically declared in `hasExtractedLicensingInfos` as required by SPDX 2.3. +- **Supplier**: Emitted as `Organization: ` in the SPDX `supplier` field. + +### Notes on Missing Data +If a field is absent in output, it usually means the source metadata was not provided: +- Licenses and suppliers are auto-populated from dash-license-scan (Rust) or cdxgen (C++). For C++ dependencies, licenses and suppliers are available only when cdxgen can resolve the component; Bazel Central Registry modules like `boost.*` may have empty licenses if cdxgen cannot infer them. +- CPE, aliases, and pedigree are optional and must be explicitly set via `sbom_ext.license()`. +- Rust crate licenses require a crates metadata cache; this is generated automatically when `module_lockfiles` (or `cargo_lockfile`) is provided to `sbom()`. License data is fetched via `dash-license-scan` (Eclipse Foundation + ClearlyDefined). The `score_crates` MODULE.bazel.lock combined with the workspace's MODULE.bazel.lock provides complete coverage. +- If cdxgen cannot resolve C++ package metadata for a Bazel-only dependency graph, SBOM generation sets C++ dependency descriptions to `"Missing"`. + +Examples (add to `MODULE.bazel`): + +```starlark +# Optional metadata (CPE, aliases, pedigree) +# Note: sbom_ext.license() should only be used for pedigree, CPE, and aliases. +# Licenses and suppliers are auto-populated from dash-license-scan (Rust) or cdxgen (C++). +sbom_ext.license( + name = "linux-kernel", + cpe = "cpe:2.3:o:linux:linux_kernel:*:*:*:*:*:*:*:*", + aliases = ["linux", "kernel"], + pedigree_ancestors = ["pkg:generic/linux-kernel@5.10.130"], + pedigree_notes = "Backported CVE-2025-12345 fix from 5.10.130", +) +``` + +### C++ license data and dash-license-scan + +- **Rust crates** + Rust licenses are obtained via `generate_crates_metadata_cache.py`, which reads `MODULE.bazel.lock` / `Cargo.lock`, builds a synthetic `Cargo.lock`, runs `uvx dash-license-scan` (backed by Eclipse dash-licenses), and writes a `crates_metadata.json` cache that `sbom_generator.py` consumes. + +- **C++ dependencies** + C++ licenses and suppliers are resolved through two mechanisms: + + 1. **cdxgen scan** — when `auto_cdxgen = True` (or a `cdxgen_sbom` label is provided), cdxgen scans the source tree for C++ package metadata. This is the primary automated source for C++ license, supplier, version, and PURL. + + 2. **`cpp_metadata.json` cache** — populated by running `generate_cpp_metadata_cache.py` against cdxgen output. **This file must always be generated by the script, never edited by hand.** See the no-manual-fallback requirement below. + + There is currently **no dash-license-scan integration for C++ SBOMs**. `dash-license-scan` understands purls like `pkg:cargo/...`, `pkg:pypi/...`, `pkg:npm/...`, and `pkg:maven/...`, but not `pkg:generic/...` (used for BCR modules), so running it on the C++ CycloneDX SBOM does not improve C++ license coverage. + +### No-manual-fallback requirement (MUST) + +**All SBOM fields must originate from automated sources. No manually-curated fallback values are permitted for any field — not checksum, not license, not supplier, not version, not PURL, not description.** + +This applies to every data source in the pipeline: + +| Source | Status | What it provides | +|---|---|---| +| `MODULE.bazel.lock` `source.json` sha256 | ✅ Automated | Checksum for BCR C++ modules | +| `http_archive sha256 =` field | ✅ Automated | Checksum for non-BCR deps | +| cdxgen source-tree scan | ✅ Automated | License, supplier, version, PURL for C++ | +| `generate_cpp_metadata_cache.py` output | ✅ Automated (generated from cdxgen) | Persistent C++ metadata cache | +| dash-license-scan | ✅ Automated | License for Rust crates | +| `cpp_metadata.json` with hand-written entries | ❌ **Forbidden** | — | +| `BCR_KNOWN_LICENSES` dict in `sbom_generator.py` | ⚠️ Known violation — must be removed | License/supplier for BCR C++ modules | + +**Why:** A manually-written value is version-pinned to whatever version string happens to be in the file at the time of writing. If the workspace resolves a different version of that component, the value silently describes the wrong artifact. An absent field is honest and correct; a manually-guessed field is a compliance violation and a traceability lie. + +**Correct behaviour for missing data:** If an automated source cannot determine a field, the field is absent in the SBOM output. This is expected and acceptable. + +**Enforcement:** `test_cpp_enrich_checksum.py::TestNoManualFallbackInCppMetadata` asserts that `cpp_metadata.json` is empty and contains no SBOM fields. If entries are needed, regenerate the file: + +```bash +npx @cyclonedx/cdxgen -t cpp --deep -r -o cdxgen_output.cdx.json +python3 tooling/sbom/scripts/generate_cpp_metadata_cache.py \ + cdxgen_output.cdx.json tooling/sbom/cpp_metadata.json +``` + +**Known violation — `BCR_KNOWN_LICENSES`:** The `BCR_KNOWN_LICENSES` dict hardcoded in `sbom_generator.py` is a manually-maintained license/supplier table for Bazel Central Registry C++ modules. It violates this requirement and must be replaced with automated BCR metadata fetching (e.g. querying the BCR `MODULE.bazel` or `metadata.json` at build time). Until that is implemented, BCR C++ modules that cdxgen cannot resolve will have missing license fields in the SBOM — which is the correct, honest output. + +--- + +## SPDX Version Decision (stay on 2.3) + +This generator emits **SPDX 2.3** and will not migrate to SPDX 3.0 until tooling support matures. + +### Why not SPDX 3.0? + +SPDX 3.0 is a **breaking rewrite**, not an additive update: + +| Aspect | SPDX 2.3 | SPDX 3.0 | +|---|---|---| +| Serialization | Flat JSON | JSON-LD (`@context` + `@graph`) | +| Top-level key | `spdxVersion: "SPDX-2.3"` | `@context: "https://spdx.org/rdf/3.0.1/spdx-context.jsonld"` | +| Package fields | `versionInfo`, `licenseConcluded`, `SPDXID` | `software_packageVersion`, licensing profile objects, `spdxId` | +| Relationships | Array in document | Standalone elements in `@graph` | +| Profiles | None | Mandatory `profileConformance` declaration | + +**Downstream consumer support as of Feb 2026 — tools that read/process our SBOM output, none support SPDX 3.0:** + +| Tool | SPDX 2.3 | SPDX 3.0 | +|---|---|---| +| GitHub Dependabot / Dependency Submission API | ✅ SPDX 2.3 (export) / action works with 2.3 in practice | ❌ | +| Trivy | ✅ generates 2.3 | ❌ | +| Grype | ✅ consumes 2.x | ❌ | +| Syft | ✅ generates 2.3 | ❌ | +| spdx-tools (Python) | ✅ full support | ⚠️ "experimental, unstable" | + +The `spdx-tools` Python library (latest: v0.8.4, Jan 2025) still describes its SPDX 3.0 support as "neither complete nor stable" and explicitly warns against production use. v0.8.4 added Python 3.14 support but made no SPDX 3.0 improvements. + +For SCORE's use case (license data, PURL, checksums, dependency graph), SPDX 2.3 covers all requirements with zero compatibility issues. + +### Revisit trigger + +Reconsider migration when **Trivy or GitHub Dependabot** announces production SPDX 3.0 support. At that point the required changes are: + +- `tooling/sbom/internal/generator/spdx_formatter.py` — full rewrite (flat JSON → JSON-LD `@graph`, new field names) +- `tooling/sbom/tests/test_spdx_formatter.py` — all 17 tests need rewriting +- `tooling/sbom/scripts/spdx_to_github_snapshot.py` — relationship and `externalRefs` parsing + diff --git a/sbom/cpp_metadata.json b/sbom/cpp_metadata.json new file mode 100644 index 0000000..0967ef4 --- /dev/null +++ b/sbom/cpp_metadata.json @@ -0,0 +1 @@ +{} diff --git a/sbom/crates_metadata.json b/sbom/crates_metadata.json new file mode 100644 index 0000000..2f1b7b6 --- /dev/null +++ b/sbom/crates_metadata.json @@ -0,0 +1,806 @@ +{ + "aho-corasick": { + "checksum": "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301", + "license": "Unlicense OR MIT", + "name": "aho-corasick", + "purl": "pkg:cargo/aho-corasick@1.1.4", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.1.4" + }, + "bindgen": { + "checksum": "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895", + "license": "BSD-3-Clause", + "name": "bindgen", + "purl": "pkg:cargo/bindgen@0.72.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.72.1" + }, + "bitflags": { + "checksum": "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3", + "license": "MIT OR Apache-2.0", + "name": "bitflags", + "purl": "pkg:cargo/bitflags@2.10.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.10.0" + }, + "byteorder": { + "checksum": "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b", + "license": "Unlicense OR MIT", + "name": "byteorder", + "purl": "pkg:cargo/byteorder@1.5.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.5.0" + }, + "cc": { + "checksum": "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215", + "license": "MIT OR Apache-2.0", + "name": "cc", + "purl": "pkg:cargo/cc@1.2.49", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.2.49" + }, + "cdr": { + "checksum": "9617422bf43fde9280707a7e90f8f7494389c182f5c70b0f67592d0f06d41dfa", + "license": "Apache-2.0 OR MIT", + "name": "cdr", + "purl": "pkg:cargo/cdr@0.2.4", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.4" + }, + "cexpr": { + "checksum": "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766", + "license": "Apache-2.0 OR MIT", + "name": "cexpr", + "purl": "pkg:cargo/cexpr@0.6.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.0" + }, + "cfg-if": { + "checksum": "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801", + "license": "MIT OR Apache-2.0", + "name": "cfg-if", + "purl": "pkg:cargo/cfg-if@1.0.4", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.4" + }, + "clang-sys": { + "checksum": "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4", + "license": "Apache-2.0", + "name": "clang-sys", + "purl": "pkg:cargo/clang-sys@1.8.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.8.1" + }, + "cobs": { + "checksum": "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1", + "license": "MIT OR Apache-2.0", + "name": "cobs", + "purl": "pkg:cargo/cobs@0.3.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.0" + }, + "crossbeam-channel": { + "checksum": "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2", + "license": "MIT OR Apache-2.0", + "name": "crossbeam-channel", + "purl": "pkg:cargo/crossbeam-channel@0.5.15", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.5.15" + }, + "crossbeam-utils": { + "checksum": "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28", + "license": "MIT OR Apache-2.0", + "name": "crossbeam-utils", + "purl": "pkg:cargo/crossbeam-utils@0.8.21", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.21" + }, + "deranged": { + "checksum": "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587", + "license": "MIT OR Apache-2.0", + "name": "deranged", + "purl": "pkg:cargo/deranged@0.5.5", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.5.5" + }, + "either": { + "checksum": "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719", + "license": "MIT OR Apache-2.0", + "name": "either", + "purl": "pkg:cargo/either@1.15.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.15.0" + }, + "embedded-io": { + "checksum": "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d", + "license": "MIT OR Apache-2.0", + "name": "embedded-io", + "purl": "pkg:cargo/embedded-io@0.6.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.1" + }, + "enum-iterator": { + "checksum": "a4549325971814bda7a44061bf3fe7e487d447cba01e4220a4b454d630d7a016", + "license": "0BSD OR MIT OR Apache-2.0", + "name": "enum-iterator", + "purl": "pkg:cargo/enum-iterator@2.3.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.3.0" + }, + "enum-iterator-derive": { + "checksum": "685adfa4d6f3d765a26bc5dbc936577de9abf756c1feeb3089b01dd395034842", + "license": "0BSD OR MIT OR Apache-2.0", + "name": "enum-iterator-derive", + "purl": "pkg:cargo/enum-iterator-derive@1.5.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.5.0" + }, + "equivalent": { + "checksum": "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f", + "license": "Apache-2.0 OR MIT", + "name": "equivalent", + "purl": "pkg:cargo/equivalent@1.0.2", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.2" + }, + "find-msvc-tools": { + "checksum": "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844", + "license": "MIT OR Apache-2.0", + "name": "find-msvc-tools", + "purl": "pkg:cargo/find-msvc-tools@0.1.5", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.5" + }, + "futures": { + "checksum": "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876", + "license": "MIT OR Apache-2.0", + "name": "futures", + "purl": "pkg:cargo/futures@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-channel": { + "checksum": "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10", + "license": "MIT OR Apache-2.0", + "name": "futures-channel", + "purl": "pkg:cargo/futures-channel@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-core": { + "checksum": "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e", + "license": "MIT OR Apache-2.0", + "name": "futures-core", + "purl": "pkg:cargo/futures-core@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-executor": { + "checksum": "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f", + "license": "MIT OR Apache-2.0", + "name": "futures-executor", + "purl": "pkg:cargo/futures-executor@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-io": { + "checksum": "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6", + "license": "MIT OR Apache-2.0", + "name": "futures-io", + "purl": "pkg:cargo/futures-io@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-macro": { + "checksum": "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650", + "license": "MIT OR Apache-2.0", + "name": "futures-macro", + "purl": "pkg:cargo/futures-macro@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-sink": { + "checksum": "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7", + "license": "MIT OR Apache-2.0", + "name": "futures-sink", + "purl": "pkg:cargo/futures-sink@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-task": { + "checksum": "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988", + "license": "MIT OR Apache-2.0", + "name": "futures-task", + "purl": "pkg:cargo/futures-task@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "futures-util": { + "checksum": "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81", + "license": "MIT OR Apache-2.0", + "name": "futures-util", + "purl": "pkg:cargo/futures-util@0.3.31", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.31" + }, + "generator": { + "checksum": "605183a538e3e2a9c1038635cc5c2d194e2ee8fd0d1b66b8349fad7dbacce5a2", + "license": "Apache-2.0 OR MIT", + "name": "generator", + "purl": "pkg:cargo/generator@0.8.7", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.7" + }, + "glob": { + "checksum": "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280", + "license": "MIT OR Apache-2.0", + "name": "glob", + "purl": "pkg:cargo/glob@0.3.3", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.3" + }, + "hashbrown": { + "checksum": "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100", + "license": "MIT OR Apache-2.0", + "name": "hashbrown", + "purl": "pkg:cargo/hashbrown@0.16.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.16.1" + }, + "indexmap": { + "checksum": "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2", + "license": "Apache-2.0 OR MIT", + "name": "indexmap", + "purl": "pkg:cargo/indexmap@2.12.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.12.1" + }, + "itertools": { + "checksum": "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186", + "license": "MIT OR Apache-2.0", + "name": "itertools", + "purl": "pkg:cargo/itertools@0.13.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.13.0" + }, + "itoa": { + "checksum": "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c", + "license": "MIT OR Apache-2.0", + "name": "itoa", + "purl": "pkg:cargo/itoa@1.0.15", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.15" + }, + "lazy_static": { + "checksum": "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe", + "license": "MIT OR Apache-2.0", + "name": "lazy_static", + "purl": "pkg:cargo/lazy_static@1.5.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.5.0" + }, + "libc": { + "checksum": "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091", + "license": "MIT OR Apache-2.0", + "name": "libc", + "purl": "pkg:cargo/libc@0.2.178", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.178" + }, + "libloading": { + "checksum": "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55", + "license": "ISC", + "name": "libloading", + "purl": "pkg:cargo/libloading@0.8.9", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.9" + }, + "log": { + "checksum": "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897", + "license": "MIT OR Apache-2.0", + "name": "log", + "purl": "pkg:cargo/log@0.4.29", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.29" + }, + "loom": { + "checksum": "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca", + "license": "MIT", + "name": "loom", + "purl": "pkg:cargo/loom@0.7.2", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.7.2" + }, + "matchers": { + "checksum": "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9", + "license": "MIT", + "name": "matchers", + "purl": "pkg:cargo/matchers@0.2.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "memchr": { + "checksum": "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273", + "license": "Unlicense OR MIT", + "name": "memchr", + "purl": "pkg:cargo/memchr@2.7.6", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.7.6" + }, + "minimal-lexical": { + "checksum": "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a", + "license": "MIT OR Apache-2.0", + "name": "minimal-lexical", + "purl": "pkg:cargo/minimal-lexical@0.2.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.1" + }, + "nom": { + "checksum": "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a", + "license": "MIT", + "name": "nom", + "purl": "pkg:cargo/nom@7.1.3", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "7.1.3" + }, + "nu-ansi-term": { + "checksum": "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5", + "license": "MIT", + "name": "nu-ansi-term", + "purl": "pkg:cargo/nu-ansi-term@0.50.3", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.50.3" + }, + "num-conv": { + "checksum": "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9", + "license": "MIT OR Apache-2.0", + "name": "num-conv", + "purl": "pkg:cargo/num-conv@0.1.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.0" + }, + "once_cell": { + "checksum": "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d", + "license": "MIT OR Apache-2.0", + "name": "once_cell", + "purl": "pkg:cargo/once_cell@1.21.3", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.21.3" + }, + "pin-project-lite": { + "checksum": "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b", + "license": "Apache-2.0 OR MIT", + "name": "pin-project-lite", + "purl": "pkg:cargo/pin-project-lite@0.2.16", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.16" + }, + "pin-utils": { + "checksum": "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184", + "name": "pin-utils", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.0" + }, + "postcard": { + "checksum": "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24", + "name": "postcard", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.1.3" + }, + "powerfmt": { + "checksum": "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391", + "name": "powerfmt", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "prettyplease": { + "checksum": "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b", + "license": "MIT OR Apache-2.0", + "name": "prettyplease", + "purl": "pkg:cargo/prettyplease@0.2.37", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.37" + }, + "proc-macro2": { + "checksum": "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8", + "license": "MIT OR Apache-2.0", + "name": "proc-macro2", + "purl": "pkg:cargo/proc-macro2@1.0.103", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.103" + }, + "quote": { + "checksum": "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f", + "license": "MIT OR Apache-2.0", + "name": "quote", + "purl": "pkg:cargo/quote@1.0.42", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.42" + }, + "regex": { + "checksum": "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4", + "license": "MIT OR Apache-2.0", + "name": "regex", + "purl": "pkg:cargo/regex@1.12.2", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.12.2" + }, + "regex-automata": { + "checksum": "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c", + "name": "regex-automata", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.13" + }, + "regex-syntax": { + "checksum": "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58", + "name": "regex-syntax", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.8" + }, + "rustc-hash": { + "checksum": "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d", + "license": "Apache-2.0 OR MIT", + "name": "rustc-hash", + "purl": "pkg:cargo/rustc-hash@2.1.1", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.1.1" + }, + "rustversion": { + "checksum": "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d", + "name": "rustversion", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.22" + }, + "ryu": { + "checksum": "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f", + "name": "ryu", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.20" + }, + "scoped-tls": { + "checksum": "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294", + "name": "scoped-tls", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.1" + }, + "serde": { + "checksum": "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e", + "license": "MIT OR Apache-2.0", + "name": "serde", + "purl": "pkg:cargo/serde@1.0.228", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.228" + }, + "serde_core": { + "checksum": "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad", + "name": "serde_core", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.228" + }, + "serde_derive": { + "checksum": "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79", + "license": "MIT OR Apache-2.0", + "name": "serde_derive", + "purl": "pkg:cargo/serde_derive@1.0.228", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.228" + }, + "serde_json": { + "checksum": "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c", + "license": "MIT OR Apache-2.0", + "name": "serde_json", + "purl": "pkg:cargo/serde_json@1.0.145", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.145" + }, + "serde_spanned": { + "checksum": "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3", + "name": "serde_spanned", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.9" + }, + "sha1_smol": { + "checksum": "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d", + "name": "sha1_smol", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.1" + }, + "sharded-slab": { + "checksum": "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6", + "name": "sharded-slab", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.7" + }, + "shlex": { + "checksum": "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64", + "license": "MIT OR Apache-2.0", + "name": "shlex", + "purl": "pkg:cargo/shlex@1.3.0", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.3.0" + }, + "slab": { + "checksum": "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589", + "name": "slab", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.11" + }, + "smallvec": { + "checksum": "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03", + "name": "smallvec", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.15.1" + }, + "syn": { + "checksum": "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87", + "license": "MIT OR Apache-2.0", + "name": "syn", + "purl": "pkg:cargo/syn@2.0.111", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.0.111" + }, + "thiserror": { + "checksum": "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8", + "license": "MIT OR Apache-2.0", + "name": "thiserror", + "purl": "pkg:cargo/thiserror@2.0.17", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.0.17" + }, + "thiserror-impl": { + "checksum": "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913", + "name": "thiserror-impl", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "2.0.17" + }, + "thread_local": { + "checksum": "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185", + "name": "thread_local", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.1.9" + }, + "time": { + "checksum": "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d", + "license": "MIT OR Apache-2.0", + "name": "time", + "purl": "pkg:cargo/time@0.3.44", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.44" + }, + "time-core": { + "checksum": "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b", + "license": "MIT OR Apache-2.0", + "name": "time-core", + "purl": "pkg:cargo/time-core@0.1.6", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.6" + }, + "time-macros": { + "checksum": "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3", + "license": "MIT OR Apache-2.0", + "name": "time-macros", + "purl": "pkg:cargo/time-macros@0.2.24", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.24" + }, + "tiny-fn": { + "checksum": "9659b108631d1e1cf3e8e489f894bee40bc9d68fd6cc67ec4d4ce9b72d565228", + "name": "tiny-fn", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.9" + }, + "toml": { + "checksum": "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362", + "license": "MIT OR Apache-2.0", + "name": "toml", + "purl": "pkg:cargo/toml@0.8.23", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.8.23" + }, + "toml_datetime": { + "checksum": "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c", + "name": "toml_datetime", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.6.11" + }, + "toml_edit": { + "checksum": "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a", + "name": "toml_edit", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.22.27" + }, + "toml_write": { + "checksum": "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801", + "name": "toml_write", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.2" + }, + "tracing": { + "checksum": "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647", + "license": "MIT", + "name": "tracing", + "purl": "pkg:cargo/tracing@0.1.43", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.43" + }, + "tracing-appender": { + "checksum": "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf", + "name": "tracing-appender", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.4" + }, + "tracing-attributes": { + "checksum": "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da", + "name": "tracing-attributes", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.31" + }, + "tracing-core": { + "checksum": "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c", + "license": "MIT", + "name": "tracing-core", + "purl": "pkg:cargo/tracing-core@0.1.35", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.35" + }, + "tracing-log": { + "checksum": "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3", + "name": "tracing-log", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "tracing-serde": { + "checksum": "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1", + "name": "tracing-serde", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "tracing-subscriber": { + "checksum": "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e", + "license": "MIT", + "name": "tracing-subscriber", + "purl": "pkg:cargo/tracing-subscriber@0.3.22", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.22" + }, + "unicode-ident": { + "checksum": "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5", + "license": "MIT OR Apache-2.0 AND Unicode-3.0", + "name": "unicode-ident", + "purl": "pkg:cargo/unicode-ident@1.0.22", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "1.0.22" + }, + "valuable": { + "checksum": "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65", + "name": "valuable", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.1" + }, + "windows": { + "checksum": "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893", + "name": "windows", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.61.3" + }, + "windows-collections": { + "checksum": "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8", + "name": "windows-collections", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "windows-core": { + "checksum": "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3", + "name": "windows-core", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.61.2" + }, + "windows-future": { + "checksum": "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e", + "name": "windows-future", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.1" + }, + "windows-implement": { + "checksum": "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf", + "name": "windows-implement", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.60.2" + }, + "windows-interface": { + "checksum": "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358", + "name": "windows-interface", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.59.3" + }, + "windows-link": { + "checksum": "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5", + "name": "windows-link", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.1" + }, + "windows-numerics": { + "checksum": "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1", + "name": "windows-numerics", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.2.0" + }, + "windows-result": { + "checksum": "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6", + "name": "windows-result", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.3.4" + }, + "windows-strings": { + "checksum": "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57", + "name": "windows-strings", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.4.2" + }, + "windows-sys": { + "checksum": "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc", + "name": "windows-sys", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.61.2" + }, + "windows-targets": { + "checksum": "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c", + "name": "windows-targets", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows-threading": { + "checksum": "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6", + "name": "windows-threading", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.1.0" + }, + "windows_aarch64_gnullvm": { + "checksum": "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8", + "name": "windows_aarch64_gnullvm", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_aarch64_msvc": { + "checksum": "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc", + "name": "windows_aarch64_msvc", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_i686_gnu": { + "checksum": "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e", + "name": "windows_i686_gnu", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_i686_msvc": { + "checksum": "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406", + "name": "windows_i686_msvc", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_x86_64_gnu": { + "checksum": "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e", + "name": "windows_x86_64_gnu", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_x86_64_gnullvm": { + "checksum": "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc", + "name": "windows_x86_64_gnullvm", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "windows_x86_64_msvc": { + "checksum": "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538", + "name": "windows_x86_64_msvc", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.48.5" + }, + "winnow": { + "checksum": "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829", + "name": "winnow", + "source": "registry+https://github.com/rust-lang/crates.io-index", + "version": "0.7.14" + } +} \ No newline at end of file diff --git a/sbom/defs.bzl b/sbom/defs.bzl new file mode 100644 index 0000000..c60233d --- /dev/null +++ b/sbom/defs.bzl @@ -0,0 +1,136 @@ +"""Public API for SBOM generation. + +This module provides the sbom() macro, which is the main entry point for +generating Software Bill of Materials for Bazel targets. + +Example usage: + load("@score_tooling//sbom:defs.bzl", "sbom") + + sbom( + name = "product_sbom", + targets = [ + "//feature_showcase/rust:orch_per_example", + "//feature_showcase/rust:kyron_example", + ], + component_version = "1.0.0", + ) +""" + +load("//sbom/internal:rules.bzl", "sbom_rule") + +def sbom( + name, + targets, + metadata_json = "@sbom_metadata//:metadata.json", + dep_module_files = None, + cdxgen_sbom = None, + auto_cdxgen = False, + cargo_lockfile = None, + module_lockfiles = None, + auto_crates_cache = True, + output_formats = ["spdx", "cyclonedx"], + producer_name = "Eclipse Foundation", + producer_url = "https://projects.eclipse.org/projects/automotive.score", + component_name = None, + component_version = None, + sbom_authors = None, + generation_context = None, + sbom_tools = None, + namespace = None, + exclude_patterns = None, + **kwargs): + """Generates SBOM for specified targets. + + This macro creates an SBOM (Software Bill of Materials) for the specified + targets, traversing their transitive dependencies and generating output + in SPDX 2.3 and/or CycloneDX 1.6 format. + + License metadata is collected automatically: + - Rust crates: from crates_metadata.json cache (bundled with tooling) + - C++ deps: from cpp_metadata.json cache (bundled with tooling) + - Bazel modules: version/PURL auto-extracted from module graph + + Prerequisites: + In your MODULE.bazel, you must enable the sbom_metadata extension: + ``` + sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") + use_repo(sbom_ext, "sbom_metadata") + ``` + + Args: + name: Rule name, also used as output filename prefix + targets: List of targets to include in SBOM + metadata_json: Label to the metadata.json file from sbom_metadata extension + dep_module_files: MODULE.bazel files from dependency modules for automatic version extraction + cdxgen_sbom: Optional label to CycloneDX JSON from cdxgen for C++ enrichment + auto_cdxgen: Run cdxgen automatically when no cdxgen_sbom is provided + cargo_lockfile: Optional Cargo.lock for crates metadata cache generation + module_lockfiles: MODULE.bazel.lock files for crate metadata extraction (e.g., from score_crates and workspace) + auto_crates_cache: Run crates metadata cache generation when cargo_lockfile or module_lockfiles is provided + output_formats: List of formats to generate ("spdx", "cyclonedx") + producer_name: SBOM producer organization name + producer_url: SBOM producer URL + component_name: Main component name (defaults to rule name) + component_version: Component version string + namespace: SBOM namespace URI (defaults to https://eclipse.dev/score) + exclude_patterns: Repo patterns to exclude (e.g., build tools) + **kwargs: Additional arguments passed to the underlying rule + + Outputs: + {name}.spdx.json - SPDX 2.3 format (if "spdx" in output_formats) + {name}.cdx.json - CycloneDX 1.6 format (if "cyclonedx" in output_formats) + + Example: + # Single target SBOM + sbom( + name = "my_app_sbom", + targets = ["//src:my_app"], + component_version = "1.0.0", + ) + + # Multi-target SBOM + sbom( + name = "product_sbom", + targets = [ + "//feature_showcase/rust:orch_per_example", + "//feature_showcase/rust:kyron_example", + ], + component_name = "score_reference_integration", + component_version = "0.5.0-beta", + ) + """ + default_exclude_patterns = [ + "rules_rust", + "rules_cc", + "bazel_tools", + "platforms", + "bazel_skylib", + "rules_python", + "rules_proto", + "protobuf", + "local_config_", + "remote_", + ] + + sbom_rule( + name = name, + targets = targets, + metadata_json = metadata_json, + dep_module_files = dep_module_files if dep_module_files else [], + cdxgen_sbom = cdxgen_sbom, + auto_cdxgen = auto_cdxgen, + cargo_lockfile = cargo_lockfile, + module_lockfiles = module_lockfiles if module_lockfiles else [], + auto_crates_cache = auto_crates_cache, + output_formats = output_formats, + producer_name = producer_name, + producer_url = producer_url, + component_name = component_name if component_name else name, + component_version = component_version if component_version else "", + sbom_authors = sbom_authors if sbom_authors else [], + generation_context = generation_context if generation_context else "", + sbom_tools = sbom_tools if sbom_tools else [], + namespace = namespace if namespace else "https://eclipse.dev/score", + exclude_patterns = exclude_patterns if exclude_patterns else default_exclude_patterns, + **kwargs + ) diff --git a/sbom/docs/requirements/component_requirements.rst b/sbom/docs/requirements/component_requirements.rst new file mode 100644 index 0000000..4a8af86 --- /dev/null +++ b/sbom/docs/requirements/component_requirements.rst @@ -0,0 +1,88 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +.. _sbom_component_requirements: + +Component Requirements +###################### + +.. document:: SBOM Generator Component Requirements + :id: doc__sbom_component_requirements + :status: valid + :safety: QM + :security: NO + :realizes: wp__requirements_comp + + +Metadata Provenance +=================== + +.. comp_req:: Component Checksum Automated Source + :id: comp_req__sbom__checksum_automated_source + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: feat_req__sbom__cisa_2025_minimum_elements + :status: valid + + The generator shall source component SHA-256 checksums exclusively from + the following automated inputs: + + - ``MODULE.bazel.lock`` ``registryFileHashes`` entries pointing to + ``source.json`` files (for Bazel Central Registry modules), and + - the ``sha256`` field of ``http_archive`` rules (for non-BCR + dependencies). + + If neither source provides a checksum for a component, the hash field + shall be omitted from that component's SBOM entry. Omitting the field is + the correct output; emitting an incorrect or stale value is not permitted. + + +Output Format +============= + +.. comp_req:: SPDX Output Version + :id: comp_req__sbom__spdx_version + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: feat_req__sbom__dual_format_output + :status: valid + + The generator shall emit SPDX 2.3 compliant JSON. Migration to SPDX 3.0 + shall not be performed until SPDX 3.0 output is supported in production + by at least one of the following downstream consumers: Trivy, GitHub + Dependabot Dependency Submission API, or Grype. + + :rationale: SPDX 3.0 is a breaking JSON-LD rewrite of the format. As of + February 2026 none of the major consumers support it, and the + reference Python library (spdx-tools v0.8.4) describes its own + 3.0 support as experimental and not recommended for production. + + +.. comp_req:: CycloneDX Output Version + :id: comp_req__sbom__cyclonedx_version + :reqtype: Functional + :security: NO + :safety: QM + :satisfies: feat_req__sbom__dual_format_output + :status: valid + + The generator shall emit CycloneDX 1.6 compliant JSON with + ``"$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json"`` and + ``"specVersion": "1.6"``. + + +.. needextend:: docname is not None and "sbom" in id + :+tags: sbom diff --git a/sbom/docs/requirements/feature_requirements.rst b/sbom/docs/requirements/feature_requirements.rst new file mode 100644 index 0000000..80b121d --- /dev/null +++ b/sbom/docs/requirements/feature_requirements.rst @@ -0,0 +1,92 @@ +.. + # ******************************************************************************* + # Copyright (c) 2025 Contributors to the Eclipse Foundation + # + # See the NOTICE file(s) distributed with this work for additional + # information regarding copyright ownership. + # + # This program and the accompanying materials are made available under the + # terms of the Apache License Version 2.0 which is available at + # https://www.apache.org/licenses/LICENSE-2.0 + # + # SPDX-License-Identifier: Apache-2.0 + # ******************************************************************************* + +.. _sbom_feature_requirements: + +Feature Requirements +#################### + +.. document:: SBOM Generator Feature Requirements + :id: doc__sbom_feature_requirements + :status: valid + :safety: QM + :security: NO + :realizes: wp__requirements_feat + + +CISA 2025 Minimum Elements +=========================== + +.. feat_req:: CISA 2025 Mandatory SBOM Elements + :id: feat_req__sbom__cisa_2025_minimum_elements + :reqtype: Functional + :security: NO + :safety: QM + :status: valid + + The SBOM generator shall produce output that contains all minimum elements + mandated by CISA 2025 for every component entry: component name, component + version, component hash (SHA-256), software identifier (PURL), license + expression, dependency relationships, SBOM author, timestamp, tool name, + and generation context (lifecycle phase). + + +Metadata Provenance +=================== + +.. feat_req:: Automated Metadata Sources + :id: feat_req__sbom__automated_metadata_sources + :reqtype: Process + :security: NO + :safety: QM + :status: valid + + All field values written into generated SBOM output shall be derived + exclusively from automated sources. No manually-curated static data, + hardcoded lookup tables, or hand-edited cache files shall be used to + supply values for any SBOM field. + +Component Scope +=============== + +.. feat_req:: Build Target Dependency Scope + :id: feat_req__sbom__build_target_scope + :reqtype: Functional + :security: NO + :safety: QM + :status: valid + + The SBOM shall include only components that are part of the transitive + dependency closure of the declared build targets. Build-time tools that + are not part of the delivered software (compilers, build systems, test + frameworks, and code generation utilities) shall be excluded from the + SBOM output. + + +Output Formats +============== + +.. feat_req:: Dual Format SBOM Output + :id: feat_req__sbom__dual_format_output + :reqtype: Interface + :security: NO + :safety: QM + :status: valid + + The SBOM generator shall produce output simultaneously in both SPDX 2.3 + JSON format and CycloneDX 1.6 JSON format from a single invocation. + + +.. needextend:: docname is not None and "sbom" in id + :+tags: sbom diff --git a/sbom/extensions.bzl b/sbom/extensions.bzl new file mode 100644 index 0000000..9534a91 --- /dev/null +++ b/sbom/extensions.bzl @@ -0,0 +1,454 @@ +"""Module extension to collect dependency metadata from bzlmod. + +This extension collects version and metadata information for all modules +and other dependencies in the workspace, making it available for +SBOM generation. License metadata is collected automatically from +bundled caches (crates_metadata.json, cpp_metadata.json). + +Usage in MODULE.bazel: + sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") + use_repo(sbom_ext, "sbom_metadata") +""" + +def _generate_purl_from_url(url, name, version): + """Generate Package URL from download URL.""" + if not url: + return "pkg:generic/{}@{}".format(name, version or "unknown") + + version_str = version or "unknown" + + # GitHub + if "github.com" in url: + parts = url.split("github.com/") + if len(parts) > 1: + path_parts = parts[1].split("/") + if len(path_parts) >= 2: + owner = path_parts[0] + repo = path_parts[1].split(".")[0].split("/")[0] + return "pkg:github/{}/{}@{}".format(owner, repo, version_str) + + # GitLab + if "gitlab.com" in url or "gitlab" in url: + if "gitlab.com/" in url: + parts = url.split("gitlab.com/") + if len(parts) > 1: + path_parts = parts[1].split("/") + if len(path_parts) >= 2: + owner = path_parts[0] + repo = path_parts[1].split(".")[0] + return "pkg:gitlab/{}/{}@{}".format(owner, repo, version_str) + + return "pkg:generic/{}@{}".format(name, version_str) + +def _generate_purl_from_git(remote, name, version): + """Generate Package URL from git remote.""" + if not remote: + return "pkg:generic/{}@{}".format(name, version or "unknown") + + version_str = version or "unknown" + + # GitHub (https or ssh) + if "github.com" in remote: + if "github.com:" in remote: + path = remote.split("github.com:")[-1] + else: + path = remote.split("github.com/")[-1] + parts = path.replace(".git", "").split("/") + if len(parts) >= 2: + return "pkg:github/{}/{}@{}".format(parts[0], parts[1], version_str) + + # GitLab + if "gitlab" in remote: + if "gitlab.com:" in remote: + path = remote.split("gitlab.com:")[-1] + elif "gitlab.com/" in remote: + path = remote.split("gitlab.com/")[-1] + else: + return "pkg:generic/{}@{}".format(name, version_str) + parts = path.replace(".git", "").split("/") + if len(parts) >= 2: + return "pkg:gitlab/{}/{}@{}".format(parts[0], parts[1], version_str) + + return "pkg:generic/{}@{}".format(name, version_str) + +def _extract_version_from_url(url): + """Extract version from URL patterns.""" + if not url: + return None + + # Try common patterns + for sep in ["/v", "/archive/v", "/archive/", "/releases/download/v", "/releases/download/"]: + if sep in url: + rest = url.split(sep)[-1] + version = rest.split("/")[0].split(".tar")[0].split(".zip")[0] + if version and len(version) > 0 and (version[0].isdigit() or version[0] == "v"): + return version.lstrip("v") + + # Try filename pattern: name-version.tar.gz + filename = url.split("/")[-1] + if "-" in filename: + parts = filename.rsplit("-", 1) + if len(parts) == 2: + version = parts[1].split(".tar")[0].split(".zip")[0] + if version and version[0].isdigit(): + return version + + return None + +def _parse_version_from_module_bazel(content): + """Parse module name and version from MODULE.bazel content using string ops. + + Starlark doesn't have regex, so we parse with string find/split operations. + + Args: + content: String content of a MODULE.bazel file + + Returns: + Tuple of (name, version) or (None, None) if not found + """ + idx = content.find("module(") + if idx < 0: + return None, None + + # Find the closing paren for the module() call + block_end = content.find(")", idx) + if block_end < 0: + return None, None + + block = content[idx:block_end] + + # Extract name + name = None + for quote in ['"', "'"]: + marker = "name = " + quote + name_idx = block.find(marker) + if name_idx >= 0: + name_start = name_idx + len(marker) + name_end = block.find(quote, name_start) + if name_end > name_start: + name = block[name_start:name_end] + break + + # Extract version + version = None + for quote in ['"', "'"]: + marker = "version = " + quote + ver_idx = block.find(marker) + if ver_idx >= 0: + ver_start = ver_idx + len(marker) + ver_end = block.find(quote, ver_start) + if ver_end > ver_start: + version = block[ver_start:ver_end] + break + + return name, version + +def _sbom_metadata_repo_impl(repository_ctx): + """Implementation of the sbom_metadata repository rule.""" + + # Start with metadata from the extension + metadata = json.decode(repository_ctx.attr.metadata_content) + modules = metadata.get("modules", {}) + + # Read MODULE.bazel from tracked dependency modules to extract versions + # Use canonical labels (@@module+) to bypass repo visibility restrictions + for module_name in repository_ctx.attr.tracked_modules: + if module_name in modules: + continue # Already have this module's info + + # Try to read the module's MODULE.bazel file using canonical label + label = Label("@@{}+//:MODULE.bazel".format(module_name)) + path = repository_ctx.path(label) + if path.exists: + content = repository_ctx.read(path) + parsed_name, parsed_version = _parse_version_from_module_bazel(content) + if parsed_name and parsed_version: + modules[parsed_name] = { + "version": parsed_version, + "purl": "pkg:generic/{}@{}".format(parsed_name, parsed_version), + } + + metadata["modules"] = modules + repository_ctx.file("metadata.json", json.encode(metadata)) + repository_ctx.file("BUILD.bazel", """\ +# Generated SBOM metadata repository +exports_files(["metadata.json"]) +""") + +_sbom_metadata_repo = repository_rule( + implementation = _sbom_metadata_repo_impl, + attrs = { + "metadata_content": attr.string(mandatory = True), + "tracked_modules": attr.string_list(default = []), + }, +) + +def _sbom_metadata_impl(module_ctx): + """Collects SBOM metadata from all modules in dependency graph.""" + all_http_archives = {} + all_git_repos = {} + all_modules = {} + all_crates = {} + all_licenses = {} + tracked_modules = [] + + for mod in module_ctx.modules: + # Collect tracked module names for version extraction + for tag in mod.tags.track_module: + if tag.name not in tracked_modules: + tracked_modules.append(tag.name) + module_name = mod.name + module_version = mod.version + + # Collect module info from bazel_dep automatically + if module_name and module_version: + all_modules[module_name] = { + "version": module_version, + "purl": "pkg:generic/{}@{}".format(module_name, module_version), + } + + # Collect http_archive metadata + for tag in mod.tags.http_archive: + url = tag.urls[0] if tag.urls else (tag.url if hasattr(tag, "url") and tag.url else "") + version = tag.version if tag.version else _extract_version_from_url(url) + purl = tag.purl if tag.purl else _generate_purl_from_url(url, tag.name, version) + + all_http_archives[tag.name] = { + "version": version or "unknown", + "url": url, + "purl": purl, + "license": tag.license if tag.license else "", + "supplier": tag.supplier if tag.supplier else "", + "sha256": tag.sha256 if tag.sha256 else "", + "cpe": tag.cpe if hasattr(tag, "cpe") and tag.cpe else "", + "aliases": tag.aliases if hasattr(tag, "aliases") and tag.aliases else [], + "pedigree_ancestors": tag.pedigree_ancestors if hasattr(tag, "pedigree_ancestors") and tag.pedigree_ancestors else [], + "pedigree_descendants": tag.pedigree_descendants if hasattr(tag, "pedigree_descendants") and tag.pedigree_descendants else [], + "pedigree_variants": tag.pedigree_variants if hasattr(tag, "pedigree_variants") and tag.pedigree_variants else [], + "pedigree_notes": tag.pedigree_notes if hasattr(tag, "pedigree_notes") and tag.pedigree_notes else "", + "declared_by": module_name, + } + + # Collect git_repository metadata + for tag in mod.tags.git_repository: + version = tag.tag if tag.tag else (tag.commit[:12] if tag.commit else "unknown") + purl = tag.purl if tag.purl else _generate_purl_from_git(tag.remote, tag.name, version) + + all_git_repos[tag.name] = { + "version": version, + "remote": tag.remote, + "commit": tag.commit if tag.commit else "", + "commit_date": tag.commit_date if hasattr(tag, "commit_date") and tag.commit_date else "", + "tag": tag.tag if tag.tag else "", + "purl": purl, + "license": tag.license if tag.license else "", + "supplier": tag.supplier if tag.supplier else "", + "cpe": tag.cpe if hasattr(tag, "cpe") and tag.cpe else "", + "aliases": tag.aliases if hasattr(tag, "aliases") and tag.aliases else [], + "pedigree_ancestors": tag.pedigree_ancestors if hasattr(tag, "pedigree_ancestors") and tag.pedigree_ancestors else [], + "pedigree_descendants": tag.pedigree_descendants if hasattr(tag, "pedigree_descendants") and tag.pedigree_descendants else [], + "pedigree_variants": tag.pedigree_variants if hasattr(tag, "pedigree_variants") and tag.pedigree_variants else [], + "pedigree_notes": tag.pedigree_notes if hasattr(tag, "pedigree_notes") and tag.pedigree_notes else "", + "declared_by": module_name, + } + + # Collect license info for bazel_dep modules, http_archive, git_repository, and crate deps + for tag in mod.tags.license: + dep_type = tag.type if hasattr(tag, "type") and tag.type else "" + url = "" + if hasattr(tag, "urls") and tag.urls: + url = tag.urls[0] + elif hasattr(tag, "url") and tag.url: + url = tag.url + remote = tag.remote if hasattr(tag, "remote") and tag.remote else "" + + explicit_version = tag.version if hasattr(tag, "version") and tag.version else "" + supplier = tag.supplier if hasattr(tag, "supplier") and tag.supplier else "" + cpe = tag.cpe if hasattr(tag, "cpe") and tag.cpe else "" + aliases = tag.aliases if hasattr(tag, "aliases") and tag.aliases else [] + pedigree_ancestors = tag.pedigree_ancestors if hasattr(tag, "pedigree_ancestors") and tag.pedigree_ancestors else [] + pedigree_descendants = tag.pedigree_descendants if hasattr(tag, "pedigree_descendants") and tag.pedigree_descendants else [] + pedigree_variants = tag.pedigree_variants if hasattr(tag, "pedigree_variants") and tag.pedigree_variants else [] + pedigree_notes = tag.pedigree_notes if hasattr(tag, "pedigree_notes") and tag.pedigree_notes else "" + + if dep_type == "cargo": + version = explicit_version if explicit_version else "unknown" + all_crates[tag.name] = { + "version": version, + "purl": tag.purl if tag.purl else "pkg:cargo/{}@{}".format(tag.name, version), + "license": tag.license, + "supplier": supplier, + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + } + elif url or (explicit_version and not remote): + version = explicit_version if explicit_version else _extract_version_from_url(url) + purl = tag.purl if tag.purl else _generate_purl_from_url(url, tag.name, version) + all_http_archives[tag.name] = { + "version": version or "unknown", + "url": url, + "purl": purl, + "license": tag.license, + "supplier": supplier, + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + "declared_by": module_name, + } + elif remote: + version = explicit_version if explicit_version else "unknown" + purl = tag.purl if tag.purl else _generate_purl_from_git(remote, tag.name, version) + all_git_repos[tag.name] = { + "version": version, + "remote": remote, + "commit": "", + "tag": "", + "purl": purl, + "license": tag.license, + "supplier": supplier, + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + "declared_by": module_name, + } + else: + all_licenses[tag.name] = { + "license": tag.license, + "supplier": supplier, + "purl": tag.purl if tag.purl else "", + "cpe": cpe, + "aliases": aliases, + "pedigree_ancestors": pedigree_ancestors, + "pedigree_descendants": pedigree_descendants, + "pedigree_variants": pedigree_variants, + "pedigree_notes": pedigree_notes, + } + + # Apply license/supplier overrides to modules + for name, license_info in all_licenses.items(): + if name in all_modules: + all_modules[name]["license"] = license_info["license"] + if license_info.get("supplier"): + all_modules[name]["supplier"] = license_info["supplier"] + if license_info.get("purl"): + all_modules[name]["purl"] = license_info["purl"] + if license_info.get("cpe"): + all_modules[name]["cpe"] = license_info["cpe"] + if license_info.get("aliases"): + all_modules[name]["aliases"] = license_info["aliases"] + if license_info.get("pedigree_ancestors"): + all_modules[name]["pedigree_ancestors"] = license_info["pedigree_ancestors"] + if license_info.get("pedigree_descendants"): + all_modules[name]["pedigree_descendants"] = license_info["pedigree_descendants"] + if license_info.get("pedigree_variants"): + all_modules[name]["pedigree_variants"] = license_info["pedigree_variants"] + if license_info.get("pedigree_notes"): + all_modules[name]["pedigree_notes"] = license_info["pedigree_notes"] + + # Generate metadata JSON + metadata_content = json.encode({ + "modules": all_modules, + "http_archives": all_http_archives, + "git_repositories": all_git_repos, + "crates": all_crates, + "licenses": all_licenses, + }) + + _sbom_metadata_repo( + name = "sbom_metadata", + metadata_content = metadata_content, + tracked_modules = tracked_modules, + ) + +# Tag for http_archive dependencies - mirrors http_archive attributes +_http_archive_tag = tag_class( + doc = "SBOM metadata for http_archive dependency (mirrors http_archive attrs)", + attrs = { + "name": attr.string(mandatory = True, doc = "Repository name"), + "urls": attr.string_list(doc = "Download URLs"), + "url": attr.string(doc = "Single download URL (alternative to urls)"), + "version": attr.string(doc = "Version (auto-extracted from URL if not provided)"), + "sha256": attr.string(doc = "SHA256 checksum"), + "license": attr.string(doc = "SPDX license identifier"), + "supplier": attr.string(doc = "Supplier/organization name"), + "purl": attr.string(doc = "Package URL (auto-generated if not provided)"), + "cpe": attr.string(doc = "CPE identifier"), + "aliases": attr.string_list(doc = "Alternate component names"), + "pedigree_ancestors": attr.string_list(doc = "Pedigree ancestor identifiers (PURL or name)"), + "pedigree_descendants": attr.string_list(doc = "Pedigree descendant identifiers (PURL or name)"), + "pedigree_variants": attr.string_list(doc = "Pedigree variant identifiers (PURL or name)"), + "pedigree_notes": attr.string(doc = "Pedigree notes"), + }, +) + +# Tag for git_repository dependencies - mirrors git_repository attributes +_git_repository_tag = tag_class( + doc = "SBOM metadata for git_repository dependency (mirrors git_repository attrs)", + attrs = { + "name": attr.string(mandatory = True, doc = "Repository name"), + "remote": attr.string(mandatory = True, doc = "Git remote URL"), + "commit": attr.string(doc = "Git commit hash"), + "tag": attr.string(doc = "Git tag"), + "commit_date": attr.string(doc = "Git commit date (ISO 8601)"), + "license": attr.string(doc = "SPDX license identifier"), + "supplier": attr.string(doc = "Supplier/organization name"), + "purl": attr.string(doc = "Package URL (auto-generated if not provided)"), + "cpe": attr.string(doc = "CPE identifier"), + "aliases": attr.string_list(doc = "Alternate component names"), + "pedigree_ancestors": attr.string_list(doc = "Pedigree ancestor identifiers (PURL or name)"), + "pedigree_descendants": attr.string_list(doc = "Pedigree descendant identifiers (PURL or name)"), + "pedigree_variants": attr.string_list(doc = "Pedigree variant identifiers (PURL or name)"), + "pedigree_notes": attr.string(doc = "Pedigree notes"), + }, +) + +# Tag to add license info to any dependency (bazel_dep, http_archive, git_repository, or crate) +_license_tag = tag_class( + doc = "Add license/supplier metadata for any dependency", + attrs = { + "name": attr.string(mandatory = True, doc = "Dependency name"), + "license": attr.string(mandatory = True, doc = "SPDX license identifier"), + "supplier": attr.string(doc = "Supplier/organization name (e.g., 'Boost.org', 'Google LLC')"), + "version": attr.string(doc = "Version string (for http_archive/git_repository/crate; auto-extracted for bazel_dep)"), + "type": attr.string(doc = "Dependency type: 'cargo' for Rust crates (affects PURL generation). Leave empty for auto-detection."), + "purl": attr.string(doc = "Override Package URL"), + "url": attr.string(doc = "Download URL for http_archive (for PURL generation)"), + "urls": attr.string_list(doc = "Download URLs for http_archive (for PURL generation)"), + "remote": attr.string(doc = "Git remote URL for git_repository (for PURL generation)"), + "cpe": attr.string(doc = "CPE identifier"), + "aliases": attr.string_list(doc = "Alternate component names"), + "pedigree_ancestors": attr.string_list(doc = "Pedigree ancestor identifiers (PURL or name)"), + "pedigree_descendants": attr.string_list(doc = "Pedigree descendant identifiers (PURL or name)"), + "pedigree_variants": attr.string_list(doc = "Pedigree variant identifiers (PURL or name)"), + "pedigree_notes": attr.string(doc = "Pedigree notes"), + }, +) + +# Tag to track a dependency module for automatic version extraction +_track_module_tag = tag_class( + doc = "Track a bazel_dep module for automatic version extraction from its MODULE.bazel", + attrs = { + "name": attr.string(mandatory = True, doc = "Module name (as declared in bazel_dep)"), + }, +) + +sbom_metadata = module_extension( + implementation = _sbom_metadata_impl, + tag_classes = { + "http_archive": _http_archive_tag, + "git_repository": _git_repository_tag, + "license": _license_tag, + "track_module": _track_module_tag, + }, + doc = "Collects SBOM metadata from dependency declarations", +) diff --git a/sbom/internal/BUILD b/sbom/internal/BUILD new file mode 100644 index 0000000..6237649 --- /dev/null +++ b/sbom/internal/BUILD @@ -0,0 +1,24 @@ +# Internal SBOM implementation package +# +# This package contains internal implementation details for SBOM generation. +# External consumers should use the public API in //sbom:defs.bzl + +package(default_visibility = ["//sbom:__subpackages__"]) + +exports_files([ + "aspect.bzl", + "metadata_rule.bzl", + "providers.bzl", + "rules.bzl", +]) + +# Filegroup for all internal bzl files +filegroup( + name = "bzl_files", + srcs = [ + "aspect.bzl", + "metadata_rule.bzl", + "providers.bzl", + "rules.bzl", + ], +) diff --git a/sbom/internal/__init__.py b/sbom/internal/__init__.py new file mode 100644 index 0000000..bd5f6fd --- /dev/null +++ b/sbom/internal/__init__.py @@ -0,0 +1 @@ +"""SBOM internal implementation package.""" diff --git a/sbom/internal/aspect.bzl b/sbom/internal/aspect.bzl new file mode 100644 index 0000000..cf68edc --- /dev/null +++ b/sbom/internal/aspect.bzl @@ -0,0 +1,115 @@ +"""Aspect to traverse and collect transitive dependencies of a target. + +This aspect traverses the dependency graph of specified targets and collects +information about all dependencies, including external repositories, which +is essential for SBOM generation. +""" + +load(":providers.bzl", "SbomDepsInfo") + +def _sbom_aspect_impl(target, ctx): + """Collects transitive dependency information for SBOM generation. + + Args: + target: The target being analyzed + ctx: The aspect context + + Returns: + A list containing SbomDepsInfo provider + """ + direct_deps = [] + transitive_deps_list = [] + external_repos_list = [] + external_repos_direct = [] + external_dep_edges_direct = [] + external_dep_edges_list = [] + + # Get this target's label info + label = target.label + if label.workspace_name: + # This is an external dependency + external_repos_direct.append(label.workspace_name) + from_repo = label.workspace_name + else: + from_repo = "" + + # Collect from rule attributes that represent dependencies + dep_attrs = ["deps", "srcs", "data", "proc_macro_deps", "crate_root", "compile_data"] + for attr_name in dep_attrs: + if hasattr(ctx.rule.attr, attr_name): + attr_val = getattr(ctx.rule.attr, attr_name) + if type(attr_val) == "list": + for dep in attr_val: + if hasattr(dep, "label"): + direct_deps.append(dep.label) + if from_repo and dep.label.workspace_name: + external_dep_edges_direct.append( + "{}::{}".format(from_repo, dep.label.workspace_name), + ) + if SbomDepsInfo in dep: + # Propagate transitive deps from dependencies + transitive_deps_list.append(dep[SbomDepsInfo].transitive_deps) + external_repos_list.append(dep[SbomDepsInfo].external_repos) + external_dep_edges_list.append(dep[SbomDepsInfo].external_dep_edges) + elif attr_val != None and hasattr(attr_val, "label"): + # Single target attribute (e.g., crate_root) + direct_deps.append(attr_val.label) + if from_repo and attr_val.label.workspace_name: + external_dep_edges_direct.append( + "{}::{}".format(from_repo, attr_val.label.workspace_name), + ) + if SbomDepsInfo in attr_val: + transitive_deps_list.append(attr_val[SbomDepsInfo].transitive_deps) + external_repos_list.append(attr_val[SbomDepsInfo].external_repos) + external_dep_edges_list.append(attr_val[SbomDepsInfo].external_dep_edges) + + # Handle cc_library specific attributes + cc_dep_attrs = ["hdrs", "textual_hdrs", "implementation_deps"] + for attr_name in cc_dep_attrs: + if hasattr(ctx.rule.attr, attr_name): + attr_val = getattr(ctx.rule.attr, attr_name) + if type(attr_val) == "list": + for dep in attr_val: + if hasattr(dep, "label"): + direct_deps.append(dep.label) + if from_repo and dep.label.workspace_name: + external_dep_edges_direct.append( + "{}::{}".format(from_repo, dep.label.workspace_name), + ) + if SbomDepsInfo in dep: + transitive_deps_list.append(dep[SbomDepsInfo].transitive_deps) + external_repos_list.append(dep[SbomDepsInfo].external_repos) + external_dep_edges_list.append(dep[SbomDepsInfo].external_dep_edges) + + return [SbomDepsInfo( + direct_deps = depset(direct_deps), + transitive_deps = depset( + direct = [label], + transitive = transitive_deps_list, + ), + external_repos = depset( + direct = external_repos_direct, + transitive = external_repos_list, + ), + external_dep_edges = depset( + direct = external_dep_edges_direct, + transitive = external_dep_edges_list, + ), + )] + +sbom_aspect = aspect( + implementation = _sbom_aspect_impl, + attr_aspects = [ + "deps", + "srcs", + "data", + "proc_macro_deps", + "crate_root", + "compile_data", + "hdrs", + "textual_hdrs", + "implementation_deps", + ], + provides = [SbomDepsInfo], + doc = "Traverses target dependencies and collects SBOM-relevant information", +) diff --git a/sbom/internal/generator/BUILD b/sbom/internal/generator/BUILD new file mode 100644 index 0000000..d3b96fc --- /dev/null +++ b/sbom/internal/generator/BUILD @@ -0,0 +1,38 @@ +# SBOM Generator Python package +# +# This package contains the Python tools for generating SBOM files +# in SPDX 2.3 and CycloneDX 1.6 formats. + +load("@rules_python//python:defs.bzl", "py_binary", "py_library") + +package(default_visibility = ["//sbom:__subpackages__"]) + +py_binary( + name = "sbom_generator", + srcs = ["sbom_generator.py"], + main = "sbom_generator.py", + deps = [ + ":cyclonedx_formatter", + ":spdx_formatter", + ], +) + +py_library( + name = "sbom_generator_lib", + srcs = ["sbom_generator.py"], + deps = [ + ":cyclonedx_formatter", + ":spdx_formatter", + ], +) + +py_library( + name = "spdx_formatter", + srcs = ["spdx_formatter.py"], +) + +py_library( + name = "cyclonedx_formatter", + srcs = ["cyclonedx_formatter.py"], +) + diff --git a/sbom/internal/generator/__init__.py b/sbom/internal/generator/__init__.py new file mode 100644 index 0000000..a34c1c3 --- /dev/null +++ b/sbom/internal/generator/__init__.py @@ -0,0 +1 @@ +"""SBOM generator package.""" diff --git a/sbom/internal/generator/cyclonedx_formatter.py b/sbom/internal/generator/cyclonedx_formatter.py new file mode 100644 index 0000000..9c31778 --- /dev/null +++ b/sbom/internal/generator/cyclonedx_formatter.py @@ -0,0 +1,376 @@ +"""CycloneDX 1.6 JSON formatter for SBOM generation. + +This module generates CycloneDX 1.6 compliant JSON output from the component +information collected by the Bazel aspect and module extension. + +CycloneDX 1.6 Specification: https://cyclonedx.org/docs/1.6/json/ +""" + +import re +import uuid +from typing import Any + + +def _normalize_spdx_license(expr: str) -> str: + """Normalize SPDX boolean operators to uppercase as required by the spec. + + dash-license-scan returns lowercase operators (e.g. 'Apache-2.0 or MIT'). + SPDX 2.3 Appendix IV and CycloneDX 1.6 both require uppercase OR/AND/WITH. + Uses space-delimited substitution to avoid modifying license identifiers + that contain 'or'/'and' as substrings (e.g. GPL-2.0-or-later). + """ + expr = re.sub(r" or ", " OR ", expr, flags=re.IGNORECASE) + expr = re.sub(r" and ", " AND ", expr, flags=re.IGNORECASE) + expr = re.sub(r" with ", " WITH ", expr, flags=re.IGNORECASE) + return expr + + +def generate_cyclonedx( + components: list[dict[str, Any]], + config: dict[str, Any], + timestamp: str, + external_dep_edges: list[str] | None = None, +) -> dict[str, Any]: + """Generate CycloneDX 1.6 JSON document. + + Args: + components: List of component dictionaries + config: Configuration dictionary with producer info + timestamp: ISO 8601 timestamp + + Returns: + CycloneDX 1.6 compliant dictionary + """ + component_name = config.get("component_name", "unknown") + component_version = config.get("component_version", "") + producer_name = config.get("producer_name", "Eclipse Foundation") + producer_url = config.get("producer_url", "") + + # Generate serial number (URN UUID) + serial_number = f"urn:uuid:{uuid.uuid4()}" + + cdx_doc: dict[str, Any] = { + "$schema": "https://cyclonedx.org/schema/bom-1.6.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "serialNumber": serial_number, + "version": 1, + "metadata": { + "timestamp": timestamp, + "tools": { + "components": [ + { + "type": "application", + "name": "score-sbom-generator", + "description": "Eclipse SCORE SBOM Generator (Bazel-native)", + "publisher": producer_name, + } + ] + }, + "component": { + "type": "application", + "name": component_name, + "version": component_version if component_version else "unversioned", + "bom-ref": _generate_bom_ref(component_name, component_version), + "purl": f"pkg:github/eclipse-score/{component_name}@{component_version}" + if component_version + else None, + "supplier": { + "name": producer_name, + "url": [producer_url] if producer_url else [], + }, + }, + "supplier": { + "name": producer_name, + "url": [producer_url] if producer_url else [], + }, + }, + "components": [], + "dependencies": [], + } + + # Clean up None values from metadata.component + if cdx_doc["metadata"]["component"].get("purl") is None: + del cdx_doc["metadata"]["component"]["purl"] + + # Add authors if provided + authors = config.get("sbom_authors", []) + if authors: + cdx_doc["metadata"]["authors"] = [_author_entry(a) for a in authors] + + # Add generation lifecycle if provided + generation_context = config.get("generation_context", "") + if generation_context: + cdx_doc["metadata"]["lifecycles"] = [{"phase": generation_context}] + + # Add extra tool names if provided + extra_tools = config.get("sbom_tools", []) + if extra_tools: + for tool_name in extra_tools: + cdx_doc["metadata"]["tools"]["components"].append( + { + "type": "application", + "name": tool_name, + } + ) + + # Root component bom-ref for dependencies + root_bom_ref = _generate_bom_ref(component_name, component_version) + + # Add components + dependency_refs = [] + for comp in components: + cdx_component = _create_cdx_component(comp) + cdx_doc["components"].append(cdx_component) + dependency_refs.append(cdx_component["bom-ref"]) + + # Build dependency graph + depends_map: dict[str, set[str]] = {} + if external_dep_edges: + for edge in external_dep_edges: + if "::" not in edge: + continue + src, dst = edge.split("::", 1) + if not src or not dst: + continue + src_ref = _generate_bom_ref(src, _component_version_lookup(components, src)) + dst_ref = _generate_bom_ref(dst, _component_version_lookup(components, dst)) + depends_map.setdefault(src_ref, set()).add(dst_ref) + + # Add root dependency (main component depends on all components) + cdx_doc["dependencies"].append( + { + "ref": root_bom_ref, + "dependsOn": dependency_refs, + } + ) + + # Add each component's dependency entry + for comp in components: + name = comp.get("name", "") + version = comp.get("version", "") + bom_ref = _generate_bom_ref(name, version) + cdx_doc["dependencies"].append( + { + "ref": bom_ref, + "dependsOn": sorted(depends_map.get(bom_ref, set())), + } + ) + + return cdx_doc + + +def _create_cdx_component(component: dict[str, Any]) -> dict[str, Any]: + """Create a CycloneDX component from component data. + + Args: + component: Component dictionary + + Returns: + CycloneDX component dictionary + """ + name = component.get("name", "unknown") + version = component.get("version", "unknown") + purl = component.get("purl", "") + license_id = _normalize_spdx_license(component.get("license", "")) + description = component.get("description", "") + supplier = component.get("supplier", "") + comp_type = component.get("type", "library") + source = component.get("source", "") + url = component.get("url", "") + checksum = component.get("checksum", "") + cpe = component.get("cpe", "") + aliases = component.get("aliases", []) + pedigree_ancestors = component.get("pedigree_ancestors", []) + pedigree_descendants = component.get("pedigree_descendants", []) + pedigree_variants = component.get("pedigree_variants", []) + pedigree_notes = component.get("pedigree_notes", "") + + cdx_comp: dict[str, Any] = { + "type": _map_type_to_cdx_type(comp_type), + "name": name, + "version": version, + "bom-ref": _generate_bom_ref(name, version), + } + + # Add description + if description: + cdx_comp["description"] = description + + # Add PURL + if purl: + cdx_comp["purl"] = purl + + # Add license + if license_id: + if " AND " in license_id or " OR " in license_id: + # Compound SPDX expression must use "expression", not "license.id" + cdx_comp["licenses"] = [{"expression": license_id}] + else: + cdx_comp["licenses"] = [{"license": {"id": license_id}}] + + # Add supplier + if supplier: + cdx_comp["supplier"] = { + "name": supplier, + } + + # Add hashes (SHA-256 from Cargo.lock) + if checksum: + cdx_comp["hashes"] = [ + { + "alg": "SHA-256", + "content": checksum, + } + ] + if cpe: + cdx_comp["cpe"] = cpe + + if aliases: + cdx_comp["properties"] = [ + {"name": "cdx:alias", "value": alias} for alias in aliases + ] + + pedigree = _build_pedigree( + pedigree_ancestors, + pedigree_descendants, + pedigree_variants, + pedigree_notes, + ) + if pedigree: + cdx_comp["pedigree"] = pedigree + + # Add external references + external_refs = [] + + # Add download/source URL + if url: + external_refs.append( + { + "type": "distribution", + "url": url, + } + ) + elif source == "crates.io": + external_refs.append( + { + "type": "distribution", + "url": f"https://crates.io/crates/{name}/{version}", + } + ) + + # Add VCS URL for git sources + if source == "git" and url: + external_refs.append( + { + "type": "vcs", + "url": url, + } + ) + + if external_refs: + cdx_comp["externalReferences"] = external_refs + + return cdx_comp + + +def _map_type_to_cdx_type(comp_type: str) -> str: + """Map component type to CycloneDX component type. + + Args: + comp_type: Component type string + + Returns: + CycloneDX component type string + """ + type_mapping = { + "application": "application", + "library": "library", + "framework": "framework", + "file": "file", + "container": "container", + "firmware": "firmware", + "device": "device", + "data": "data", + "operating-system": "operating-system", + "device-driver": "device-driver", + "machine-learning-model": "machine-learning-model", + "platform": "platform", + } + return type_mapping.get(comp_type, "library") + + +def _generate_bom_ref(name: str, version: str) -> str: + """Generate a unique bom-ref for a component. + + Args: + name: Component name + version: Component version + + Returns: + Unique bom-ref string + """ + # Create a deterministic but unique reference + sanitized_name = _sanitize_name(name) + sanitized_version = _sanitize_name(version) if version else "unknown" + return f"{sanitized_name}@{sanitized_version}" + + +def _sanitize_name(value: str) -> str: + """Sanitize a string for use in bom-ref. + + Args: + value: String to sanitize + + Returns: + Sanitized string + """ + result = [] + for char in value: + if char.isalnum() or char in (".", "-", "_"): + result.append(char) + elif char in (" ", "/"): + result.append("-") + return "".join(result) or "unknown" + + +def _author_entry(value: str) -> dict[str, Any]: + """Create author entry from a string.""" + value = value.strip() + if "<" in value and ">" in value: + name, rest = value.split("<", 1) + email = rest.split(">", 1)[0].strip() + return {"name": name.strip(), "email": email} + return {"name": value} + + +def _build_pedigree( + ancestors: list[str], + descendants: list[str], + variants: list[str], + notes: str, +) -> dict[str, Any] | None: + pedigree: dict[str, Any] = {} + if ancestors: + pedigree["ancestors"] = [_pedigree_ref(a) for a in ancestors] + if descendants: + pedigree["descendants"] = [_pedigree_ref(d) for d in descendants] + if variants: + pedigree["variants"] = [_pedigree_ref(v) for v in variants] + if notes: + pedigree["notes"] = notes + return pedigree or None + + +def _pedigree_ref(value: str) -> dict[str, Any]: + value = value.strip() + if value.startswith("pkg:"): + return {"purl": value} + return {"name": value} + + +def _component_version_lookup(components: list[dict[str, Any]], name: str) -> str: + for comp in components: + if comp.get("name") == name: + return comp.get("version", "") + return "" diff --git a/sbom/internal/generator/sbom_generator.py b/sbom/internal/generator/sbom_generator.py new file mode 100644 index 0000000..8ae3da6 --- /dev/null +++ b/sbom/internal/generator/sbom_generator.py @@ -0,0 +1,868 @@ +#!/usr/bin/env python3 +"""SBOM generator - creates SPDX and CycloneDX output from Bazel aspect data. + +This is the main entry point for SBOM generation. It reads dependency +information collected by the Bazel aspect and metadata from the module +extension, then generates SBOM files in SPDX 2.3 and CycloneDX 1.6 formats. +""" + +import argparse +import json +import re +import sys +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +from sbom.internal.generator.spdx_formatter import generate_spdx +from sbom.internal.generator.cyclonedx_formatter import generate_cyclonedx + + +def parse_module_bazel_files(file_paths: list[str]) -> dict[str, dict[str, str]]: + """Parse MODULE.bazel files to extract module name and version. + + Reads each MODULE.bazel file and extracts the module() call's name and + version fields. This allows automatic version detection for bazel_dep + modules that don't appear in the sbom_metadata extension's module list + (because they don't use_extension for sbom_metadata). + + Args: + file_paths: List of paths to MODULE.bazel files + + Returns: + Dict mapping module name to {"version": ..., "purl": ...} + """ + modules: dict[str, dict[str, str]] = {} + for fpath in file_paths: + try: + with open(fpath, encoding="utf-8") as f: + content = f.read() + except OSError: + continue + + # Extract module(name = "...", version = "...") + module_match = re.search( + r"module\s*\((.*?)\)", + content, + re.DOTALL, + ) + if not module_match: + continue + + module_block = module_match.group(1) + name_match = re.search(r'name\s*=\s*["\']([^"\']+)["\']', module_block) + version_match = re.search(r'version\s*=\s*["\']([^"\']+)["\']', module_block) + + if name_match and version_match: + name = name_match.group(1) + version = version_match.group(1) + modules[name] = { + "version": version, + "purl": f"pkg:generic/{name}@{version}", + } + + return modules + + +def parse_module_lockfiles(file_paths: list[str]) -> dict[str, dict[str, str]]: + """Parse MODULE.bazel.lock files to infer module versions and checksums. + + Uses registry URL keys from lockfiles. Only modules with a single unique + observed version are emitted to avoid ambiguous version selection. + + For modules coming from the Bazel Central Registry, this also extracts the + SHA-256 checksum from the corresponding ``source.json`` entry so that + CycloneDX hashes can be populated for C/C++ dependencies. + """ + # Track all observed versions per module and (optional) sha256 per + # (module, version) tuple. + module_versions: dict[str, set[str]] = {} + module_sha256: dict[tuple[str, str], str] = {} + + for fpath in file_paths: + try: + with open(fpath, encoding="utf-8") as f: + lock_data = json.load(f) + except (OSError, json.JSONDecodeError): + continue + + registry_hashes = lock_data.get("registryFileHashes", {}) + if not isinstance(registry_hashes, dict): + continue + + for url, sha in registry_hashes.items(): + if not isinstance(url, str) or not isinstance(sha, str): + continue + + # MODULE.bazel entry – records which version was selected. + module_match = re.search( + r"/modules/([^/]+)/([^/]+)/MODULE\.bazel$", + url, + ) + if module_match: + module_name, version = module_match.groups() + module_versions.setdefault(module_name, set()).add(version) + + # source.json entry – carries the sha256 of the downloaded source + # tarball for this module@version. Use it as the component hash. + source_match = re.search( + r"/modules/([^/]+)/([^/]+)/source\.json$", + url, + ) + if source_match: + src_module, src_version = source_match.groups() + module_sha256[(src_module, src_version)] = sha + + modules: dict[str, dict[str, str]] = {} + for name, versions in module_versions.items(): + if len(versions) != 1: + # Skip modules with ambiguous versions. + continue + version = next(iter(versions)) + entry: dict[str, str] = { + "version": version, + "purl": f"pkg:generic/{name}@{version}", + } + sha = module_sha256.get((name, version)) + if sha: + # Expose as sha256 so downstream code can turn it into a CycloneDX + # SHA-256 hash entry. + entry["sha256"] = sha + modules[name] = entry + + return modules + + +def load_crates_cache(cache_path: str | None = None) -> dict[str, Any]: + """Load crates metadata cache generated at build time. + + Args: + cache_path: Path to crates_metadata.json (from --crates-cache) + + Returns: + Dict mapping crate name to metadata (license, checksum, etc.) + """ + if not cache_path: + return {} + try: + with open(cache_path, encoding="utf-8") as f: + return json.load(f) + except (OSError, json.JSONDecodeError): + return {} + + + + +# Known licenses for Bazel Central Registry (BCR) C++ modules. +# Used as a fallback when cdxgen and lockfile parsing cannot provide license data. +# Keys are BCR module names (exact or prefix for sub-modules like boost.*). +BCR_KNOWN_LICENSES: dict[str, dict[str, str]] = { + "boost": {"license": "BSL-1.0", "supplier": "Boost.org"}, + "abseil-cpp": {"license": "Apache-2.0", "supplier": "Google LLC"}, + "zlib": {"license": "Zlib", "supplier": "Jean-loup Gailly and Mark Adler"}, + "nlohmann_json": {"license": "MIT", "supplier": "Niels Lohmann"}, + "nlohmann-json": {"license": "MIT", "supplier": "Niels Lohmann"}, + "googletest": {"license": "BSD-3-Clause", "supplier": "Google LLC"}, + "google-benchmark": {"license": "Apache-2.0", "supplier": "Google LLC"}, + "flatbuffers": {"license": "Apache-2.0", "supplier": "Google LLC"}, + "protobuf": {"license": "BSD-3-Clause", "supplier": "Google LLC"}, + "re2": {"license": "BSD-3-Clause", "supplier": "Google LLC"}, + "openssl": {"license": "Apache-2.0", "supplier": "OpenSSL Software Foundation"}, + "curl": {"license": "curl", "supplier": "Daniel Stenberg"}, + "libpng": {"license": "libpng", "supplier": "Glenn Randers-Pehrson"}, + "libjpeg": {"license": "IJG", "supplier": "Independent JPEG Group"}, +} + + +def apply_known_licenses(metadata: dict[str, Any]) -> None: + """Apply BCR known licenses and user license overrides to modules. + + Priority (highest to lowest): + 1. Module already has a license (skip). + 2. Exact match in metadata["licenses"] (user-declared via sbom_ext.license). + 3. Parent match in metadata["licenses"] (e.g., "boost" covers "boost.config"). + 4. BCR_KNOWN_LICENSES exact match. + 5. BCR_KNOWN_LICENSES parent match (e.g., "boost" entry covers "boost.config"). + + Args: + metadata: Metadata dict with "modules" and "licenses" keys. Modified in place. + """ + modules = metadata.get("modules", {}) + licenses = metadata.get("licenses", {}) + + for module_name, module_data in modules.items(): + if module_data.get("license"): + continue # Already has a license — do not overwrite + + license_source: dict[str, str] | None = None + + # 1. Exact match in user-declared licenses (highest priority) + if module_name in licenses: + license_source = licenses[module_name] + # 2. Parent match in user-declared licenses (e.g. "boost" → "boost.config") + elif "." in module_name: + parent = module_name.split(".")[0] + if parent in licenses: + license_source = licenses[parent] + + # 3. BCR known licenses — exact match + if license_source is None and module_name in BCR_KNOWN_LICENSES: + license_source = BCR_KNOWN_LICENSES[module_name] + # 4. BCR known licenses — parent prefix match (e.g. boost.config → boost) + if license_source is None and "." in module_name: + parent = module_name.split(".")[0] + if parent in BCR_KNOWN_LICENSES: + license_source = BCR_KNOWN_LICENSES[parent] + + if license_source: + module_data["license"] = license_source["license"] + if not module_data.get("supplier") and license_source.get("supplier"): + module_data["supplier"] = license_source["supplier"] + + +def normalize_name(name: str) -> str: + """Normalize a dependency name for fuzzy matching. + + Handles naming differences between Bazel repos and C++ metadata cache: + e.g. nlohmann_json vs nlohmann-json, libfmt vs fmt. + + Args: + name: Dependency name to normalize + + Returns: + Normalized name string for comparison + """ + n = name.lower().strip() + for prefix in ("lib", "lib_"): + if n.startswith(prefix) and len(n) > len(prefix): + n = n[len(prefix) :] + n = n.replace("-", "").replace("_", "").replace(".", "") + return n + + +def enrich_components_from_cpp_cache( + components: list[dict[str, Any]], + cpp_components: list[dict[str, Any]], + metadata: dict[str, Any], +) -> list[dict[str, Any]]: + """Enrich Bazel-discovered components with C++ metadata cache. + + For each Bazel component, finds a matching C++ cache entry by normalized + name and fills in missing fields (license, supplier, version, purl). + Components not present in Bazel's discovered dependency graph are ignored. + + Args: + components: Bazel-discovered components to enrich + cpp_components: Components from C++ metadata cache + metadata: Metadata dict + + Returns: + Enriched list of components + """ + # Build lookup: normalized_name -> cache component + cpp_by_name: dict[str, dict[str, Any]] = {} + for cc in cpp_components: + norm = normalize_name(cc["name"]) + cpp_by_name[norm] = cc + cpp_by_name[cc["name"].lower()] = cc + + for comp in components: + comp_name = comp.get("name", "") + norm_name = normalize_name(comp_name) + + cpp_match = cpp_by_name.get(norm_name) or cpp_by_name.get(comp_name.lower()) + # Try parent name match (e.g., boost.config+ -> boost) + if not cpp_match: + base_name = comp_name.rstrip("+") + if "." in base_name: + parent = base_name.split(".")[0] + cpp_match = cpp_by_name.get(normalize_name(parent)) + if not cpp_match: + continue + + # Enrich missing fields only + if not comp.get("license") and cpp_match.get("license"): + comp["license"] = cpp_match["license"] + + if not comp.get("description") and cpp_match.get("description"): + comp["description"] = cpp_match["description"] + + if not comp.get("supplier") and cpp_match.get("supplier"): + comp["supplier"] = cpp_match["supplier"] + + if comp.get("version") in ("unknown", "") and cpp_match.get("version") not in ( + "unknown", + "", + ): + comp["version"] = cpp_match["version"] + + if comp.get("purl", "").endswith("@unknown") and cpp_match.get("purl"): + comp["purl"] = cpp_match["purl"] + + if not comp.get("url") and cpp_match.get("url"): + comp["url"] = cpp_match["url"] + + if not comp.get("checksum") and cpp_match.get("checksum"): + comp["checksum"] = cpp_match["checksum"] + + return components + + +def load_cdxgen_sbom(cdxgen_path: str) -> list[dict[str, Any]]: + """Load and convert cdxgen CycloneDX SBOM to component list. + + Args: + cdxgen_path: Path to cdxgen-generated CycloneDX JSON file + + Returns: + List of component dicts in internal format + """ + try: + with open(cdxgen_path, encoding="utf-8") as f: + cdx_data = json.load(f) + except (OSError, json.JSONDecodeError): + return [] + + components: list[dict[str, Any]] = [] + for comp in cdx_data.get("components", []): + # Extract license information + licenses = comp.get("licenses", []) + license_str = "" + if licenses: + # Take first license + lic = licenses[0] + if isinstance(lic, dict): + license_str = ( + lic.get("expression", "") + or lic.get("license", {}).get("id", "") + or lic.get("license", {}).get("name", "") + ) + + # Extract purl + purl = comp.get("purl", "") + + # Extract SHA-256 hash if present + checksum = "" + for h in comp.get("hashes", []): + if not isinstance(h, dict): + continue + if h.get("alg") == "SHA-256" and h.get("content"): + checksum = str(h["content"]) + break + + # Build component + component = { + "name": comp.get("name", ""), + "version": comp.get("version", "unknown"), + "purl": purl, + "type": comp.get("type", "library"), + "license": license_str, + "description": comp.get("description", ""), + "supplier": comp.get("supplier", {}).get("name", "") + if isinstance(comp.get("supplier"), dict) + else "", + "cpe": comp.get("cpe", ""), + "url": "", + "checksum": checksum, + } + + # Add component if it has a name + if component["name"]: + components.append(component) + + return components + + +def mark_missing_cpp_descriptions(components: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Mark missing descriptions for non-Rust libraries as 'Missing'.""" + for comp in components: + if comp.get("description"): + continue + if comp.get("type") != "library": + continue + purl = comp.get("purl", "") + if purl.startswith("pkg:cargo/"): + continue + comp["description"] = "Missing" + return components + + +def main() -> int: + """Main entry point for SBOM generation.""" + parser = argparse.ArgumentParser(description="Generate SBOM from Bazel deps") + parser.add_argument("--input", required=True, help="Input JSON from Bazel rule") + parser.add_argument( + "--metadata", required=True, help="Metadata JSON from module extension" + ) + parser.add_argument("--spdx-output", help="SPDX 2.3 JSON output file") + parser.add_argument("--cyclonedx-output", help="CycloneDX 1.6 output file") + parser.add_argument("--crates-cache", help="Path to crates_metadata.json override") + parser.add_argument( + "--cdxgen-sbom", + help="Path to cdxgen-generated CycloneDX JSON for C++ enrichment", + ) + args = parser.parse_args() + + # Load dependency data from Bazel + with open(args.input, encoding="utf-8") as f: + data = json.load(f) + + # Load metadata from module extension + with open(args.metadata, encoding="utf-8") as f: + metadata = json.load(f) + + # Parse MODULE.bazel files from dependency modules for version extraction + # This fills in versions for bazel_dep modules that don't use the sbom_metadata extension + dep_module_files = data.get("dep_module_files", []) + if dep_module_files: + dep_modules = parse_module_bazel_files(dep_module_files) + if "modules" not in metadata: + metadata["modules"] = {} + for name, mod_data in dep_modules.items(): + # Don't override entries already in metadata (from the extension) + if name not in metadata["modules"]: + metadata["modules"][name] = mod_data + + # Parse MODULE.bazel.lock files to infer selected module versions. + # This helps for modules that don't participate in the sbom_metadata + # extension (for example, transitive Bazel modules like boost.*). + module_lockfiles = data.get("module_lockfiles", []) + if module_lockfiles: + lock_modules = parse_module_lockfiles(module_lockfiles) + if "modules" not in metadata: + metadata["modules"] = {} + for name, mod_data in lock_modules.items(): + if name not in metadata["modules"]: + metadata["modules"][name] = mod_data + + # Load crates metadata cache (licenses + checksums + versions) + crates_cache = load_crates_cache(args.crates_cache) + + # Add crates cache to metadata + if crates_cache: + if "crates" not in metadata: + metadata["crates"] = {} + for name, cache_data in crates_cache.items(): + metadata["crates"].setdefault(name, cache_data) + + # Apply BCR known licenses and user overrides to modules + apply_known_licenses(metadata) + + # Load cdxgen SBOM if provided (C++ dependency enrichment) + cpp_components = [] + if args.cdxgen_sbom: + cpp_components = load_cdxgen_sbom(args.cdxgen_sbom) + + # Filter external repos (exclude build tools) + external_repos = data.get("external_repos", []) + exclude_patterns = data.get("exclude_patterns", []) + filtered_repos = filter_repos(external_repos, exclude_patterns) + + # Build component list with metadata + components = [] + + for repo in filtered_repos: + component = resolve_component(repo, metadata) + if component: + components.append(component) + + # Deduplicate components by name + components = deduplicate_components(components) + + # Enrich components with C++ metadata cache + if cpp_components: + components = enrich_components_from_cpp_cache( + components, cpp_components, metadata + ) + components = deduplicate_components(components) + + # Mark missing C++ descriptions explicitly when cdxgen has no description. + components = mark_missing_cpp_descriptions(components) + + # Generate timestamp in SPDX-compliant format (YYYY-MM-DDTHH:MM:SSZ) + timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + + # Get configuration + config = data.get("config", {}) + + # Auto-detect component_version from metadata if not explicitly set + component_name = config.get("component_name", "") + if not config.get("component_version") and component_name: + modules = metadata.get("modules", {}) + if component_name in modules: + config["component_version"] = modules[component_name].get("version", "") + + # Filter out the main component from the dependency list to avoid self-dependency + # (e.g., sbom for score_kyron should not list score_kyron as its own dependency) + if component_name: + components = [ + c for c in components if c.get("name") != component_name + ] + + # Generate outputs + if args.spdx_output: + spdx = generate_spdx(components, config, timestamp) + Path(args.spdx_output).write_text(json.dumps(spdx, indent=2), encoding="utf-8") + + if args.cyclonedx_output: + cdx = generate_cyclonedx( + components, + config, + timestamp, + external_dep_edges=data.get("external_dep_edges", []), + ) + Path(args.cyclonedx_output).write_text( + json.dumps(cdx, indent=2), encoding="utf-8" + ) + + return 0 + + +def filter_repos(repos: list[str], exclude_patterns: list[str]) -> list[str]: + """Filter out build tool repositories based on exclude patterns. + + Crates from crate_universe are always kept even if they match exclude patterns, + since they are legitimate dependencies, not build tools. + + Args: + repos: List of repository names + exclude_patterns: Patterns to exclude + + Returns: + Filtered list of repository names + """ + filtered = [] + for repo in repos: + # Always keep crates from crate_universe - these are real dependencies + if "crate_index__" in repo or "crates_io__" in repo or "_crates__" in repo: + filtered.append(repo) + continue + + should_exclude = False + for pattern in exclude_patterns: + if pattern in repo: + should_exclude = True + break + if not should_exclude: + filtered.append(repo) + return filtered + + +def resolve_component( + repo_name: str, metadata: dict[str, Any] +) -> dict[str, Any] | None: + """Resolve repository to component with version and PURL. + + Args: + repo_name: Name of the repository + metadata: Metadata dictionary from module extension + + Returns: + Component dictionary or None if not resolved + """ + # Normalize repo name - bzlmod adds "+" suffix to module repos + normalized_name = repo_name.rstrip("+") + + # Check if it's a bazel_dep module + modules = metadata.get("modules", {}) + if normalized_name in modules: + mod = modules[normalized_name] + result: dict[str, Any] = { + "name": normalized_name, + "version": mod.get("version", "unknown"), + "purl": mod.get("purl", f"pkg:generic/{normalized_name}@unknown"), + "type": "library", + "supplier": mod.get("supplier", ""), + "license": mod.get("license", ""), + "cpe": mod.get("cpe", ""), + "aliases": mod.get("aliases", []), + "pedigree_ancestors": mod.get("pedigree_ancestors", []), + "pedigree_descendants": mod.get("pedigree_descendants", []), + "pedigree_variants": mod.get("pedigree_variants", []), + "pedigree_notes": mod.get("pedigree_notes", ""), + } + # MODULE.bazel.lock can provide a sha256 via source.json; expose it as + # checksum so CycloneDX hashes are populated for C/C++ modules. + if mod.get("sha256"): + result["checksum"] = mod["sha256"] + return result + + # Check if it's an http_archive dependency + http_archives = metadata.get("http_archives", {}) + if normalized_name in http_archives: + archive = http_archives[normalized_name] + result = { + "name": normalized_name, + "version": archive.get("version", "unknown"), + "purl": archive.get("purl", f"pkg:generic/{normalized_name}@unknown"), + "type": "library", + "url": archive.get("url", ""), + "license": archive.get("license", ""), + "supplier": archive.get("supplier", ""), + "cpe": archive.get("cpe", ""), + "aliases": archive.get("aliases", []), + "pedigree_ancestors": archive.get("pedigree_ancestors", []), + "pedigree_descendants": archive.get("pedigree_descendants", []), + "pedigree_variants": archive.get("pedigree_variants", []), + "pedigree_notes": archive.get("pedigree_notes", ""), + } + if archive.get("sha256"): + result["checksum"] = archive["sha256"] + return result + + # Check if it's a git_repository dependency + git_repos = metadata.get("git_repositories", {}) + if normalized_name in git_repos: + repo = git_repos[normalized_name] + result = { + "name": normalized_name, + "version": repo.get("version", "unknown"), + "purl": repo.get("purl", f"pkg:generic/{normalized_name}@unknown"), + "type": "library", + "url": repo.get("remote", ""), + "license": repo.get("license", ""), + "supplier": repo.get("supplier", ""), + "cpe": repo.get("cpe", ""), + "aliases": repo.get("aliases", []), + "pedigree_ancestors": repo.get("pedigree_ancestors", []), + "pedigree_descendants": repo.get("pedigree_descendants", []), + "pedigree_variants": repo.get("pedigree_variants", []), + "pedigree_notes": repo.get("pedigree_notes", ""), + } + commit_date = repo.get("commit_date", "") + if result.get("version") in ("unknown", "") and commit_date: + result["version"] = commit_date + return result + + # Check if it's a crate from the metadata cache + # Cargo.lock uses underscores, Bazel uses hyphens — try both + crates = metadata.get("crates", {}) + crate_key = ( + normalized_name + if normalized_name in crates + else normalized_name.replace("-", "_") + ) + if crate_key in crates: + crate = crates[crate_key] + result = { + "name": normalized_name, + "version": crate.get("version", "unknown"), + "purl": crate.get("purl", f"pkg:cargo/{normalized_name}@unknown"), + "type": "library", + "source": "crates.io", + "license": crate.get("license", ""), + "description": crate.get("description", ""), + "supplier": crate.get("supplier", ""), + "cpe": crate.get("cpe", ""), + "aliases": crate.get("aliases", []), + "pedigree_ancestors": crate.get("pedigree_ancestors", []), + "pedigree_descendants": crate.get("pedigree_descendants", []), + "pedigree_variants": crate.get("pedigree_variants", []), + "pedigree_notes": crate.get("pedigree_notes", ""), + } + if crate.get("checksum"): + result["checksum"] = crate["checksum"] + return result + + # Handle score_ prefixed repos that might be modules + if normalized_name.startswith("score_"): + return { + "name": normalized_name, + "version": "unknown", + "purl": f"pkg:github/eclipse-score/{normalized_name}@unknown", + "type": "library", + "supplier": "Eclipse Foundation", + "license": "", + "cpe": "", + "aliases": [], + "pedigree_ancestors": [], + "pedigree_descendants": [], + "pedigree_variants": [], + "pedigree_notes": "", + } + + # Handle crate universe repos - bzlmod format + # e.g., rules_rust++crate+crate_index__serde-1.0.228 + # e.g., rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0 + cached_crates = metadata.get("crates", {}) + + if "crate_index__" in repo_name or "crate+" in repo_name: + # Extract the crate info part after crate_index__ + if "crate_index__" in repo_name: + crate_part = repo_name.split("crate_index__")[-1] + else: + crate_part = repo_name.split("+")[-1] + + # Parse name-version format (e.g., "serde-1.0.228") + # Handle complex names like "iceoryx2-qnx8-0.7.0" where last part is version + parts = crate_part.split("-") + if len(parts) >= 2: + # Find the version part (starts with a digit) + version_idx = -1 + for i, part in enumerate(parts): + if part and part[0].isdigit(): + version_idx = i + break + + if version_idx > 0: + crate_name = "-".join(parts[:version_idx]).replace("_", "-") + version = "-".join(parts[version_idx:]) + + # Look up crate metadata from cache + # Cargo.lock uses underscores, Bazel uses hyphens — try both + crate_meta = cached_crates.get(crate_name) or cached_crates.get( + crate_name.replace("-", "_"), {} + ) + + result = { + "name": crate_name, + "version": version, + "purl": f"pkg:cargo/{crate_name}@{version}", + "type": "library", + "source": "crates.io", + } + if crate_meta.get("license"): + result["license"] = crate_meta["license"] + if crate_meta.get("description"): + result["description"] = crate_meta["description"] + if crate_meta.get("supplier"): + result["supplier"] = crate_meta["supplier"] + if crate_meta.get("cpe"): + result["cpe"] = crate_meta["cpe"] + if crate_meta.get("aliases"): + result["aliases"] = crate_meta["aliases"] + if crate_meta.get("pedigree_ancestors"): + result["pedigree_ancestors"] = crate_meta["pedigree_ancestors"] + if crate_meta.get("pedigree_descendants"): + result["pedigree_descendants"] = crate_meta["pedigree_descendants"] + if crate_meta.get("pedigree_variants"): + result["pedigree_variants"] = crate_meta["pedigree_variants"] + if crate_meta.get("pedigree_notes"): + result["pedigree_notes"] = crate_meta["pedigree_notes"] + if crate_meta.get("repository"): + result["url"] = crate_meta["repository"] + if crate_meta.get("checksum"): + result["checksum"] = crate_meta["checksum"] + return result + + # Handle legacy crate universe format (e.g., crates_io__tokio-1.10.0) + if repo_name.startswith("crates_io__") or "_crates__" in repo_name: + parts = repo_name.split("__") + if len(parts) >= 2: + crate_info = parts[-1] + # Try to split by last hyphen to get name-version + last_hyphen = crate_info.rfind("-") + if last_hyphen > 0: + crate_name = crate_info[:last_hyphen].replace("_", "-") + version = crate_info[last_hyphen + 1 :] + + # Look up crate metadata from cache + # Cargo.lock uses underscores, Bazel uses hyphens — try both + crate_meta = cached_crates.get(crate_name) or cached_crates.get( + crate_name.replace("-", "_"), {} + ) + + result = { + "name": crate_name, + "version": version, + "purl": f"pkg:cargo/{crate_name}@{version}", + "type": "library", + "source": "crates.io", + } + if crate_meta.get("license"): + result["license"] = crate_meta["license"] + if crate_meta.get("description"): + result["description"] = crate_meta["description"] + if crate_meta.get("supplier"): + result["supplier"] = crate_meta["supplier"] + if crate_meta.get("cpe"): + result["cpe"] = crate_meta["cpe"] + if crate_meta.get("aliases"): + result["aliases"] = crate_meta["aliases"] + if crate_meta.get("pedigree_ancestors"): + result["pedigree_ancestors"] = crate_meta["pedigree_ancestors"] + if crate_meta.get("pedigree_descendants"): + result["pedigree_descendants"] = crate_meta["pedigree_descendants"] + if crate_meta.get("pedigree_variants"): + result["pedigree_variants"] = crate_meta["pedigree_variants"] + if crate_meta.get("pedigree_notes"): + result["pedigree_notes"] = crate_meta["pedigree_notes"] + if crate_meta.get("repository"): + result["url"] = crate_meta["repository"] + if crate_meta.get("checksum"): + result["checksum"] = crate_meta["checksum"] + return result + + # Check if repo is a sub-library of a known parent (e.g., boost.config+ -> boost) + # rules_boost splits Boost into individual repos like boost.config+, boost.assert+, etc. + if "." in normalized_name: + parent_name = normalized_name.split(".")[0].rstrip("+") + # Look up parent in all metadata sources (modules, http_archives, git_repos, licenses) + licenses = metadata.get("licenses", {}) + parent = None + if parent_name in modules: + parent = modules[parent_name] + elif parent_name in http_archives: + parent = http_archives[parent_name] + elif parent_name in git_repos: + parent = git_repos[parent_name] + elif parent_name in licenses: + parent = licenses[parent_name] + if parent: + parent_version = parent.get("version", "unknown") + result: dict[str, Any] = { + "name": normalized_name, + "version": parent_version, + "purl": f"pkg:generic/{normalized_name}@{parent_version}", + "type": "library", + "license": parent.get("license", ""), + "supplier": parent.get("supplier", ""), + } + # Propagate checksum from parent if available (e.g., http_archive + # sha256 or module sha256 from MODULE.bazel.lock). + if parent.get("sha256"): + result["checksum"] = parent["sha256"] + elif parent.get("checksum"): + result["checksum"] = parent["checksum"] + return result + + # Unknown repository - return with unknown version + return { + "name": repo_name, + "version": "unknown", + "purl": f"pkg:generic/{repo_name}@unknown", + "type": "library", + } + + +def deduplicate_components(components: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Remove duplicate components, keeping the one with most metadata. + + Args: + components: List of component dictionaries + + Returns: + Deduplicated list of components + """ + seen: dict[str, dict[str, Any]] = {} + for comp in components: + name = comp.get("name", "") + if name not in seen: + seen[name] = comp + else: + # Keep the one with more information (non-unknown version preferred) + existing = seen[name] + if ( + existing.get("version") == "unknown" + and comp.get("version") != "unknown" + ): + seen[name] = comp + elif comp.get("license") and not existing.get("license"): + # Prefer component with license info + seen[name] = comp + + return list(seen.values()) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/sbom/internal/generator/spdx_formatter.py b/sbom/internal/generator/spdx_formatter.py new file mode 100644 index 0000000..6ad8af4 --- /dev/null +++ b/sbom/internal/generator/spdx_formatter.py @@ -0,0 +1,242 @@ +"""SPDX 2.3 JSON formatter for SBOM generation. + +This module generates SPDX 2.3 compliant JSON output from the component +information collected by the Bazel aspect and module extension. + +SPDX 2.3 Specification: https://spdx.github.io/spdx-spec/v2.3/ +""" + +import re +import uuid +from typing import Any + + +def _normalize_spdx_license(expr: str) -> str: + """Normalize SPDX boolean operators to uppercase as required by the spec. + + dash-license-scan returns lowercase operators (e.g. 'Apache-2.0 or MIT'). + SPDX 2.3 requires uppercase OR/AND/WITH (Appendix IV). + Uses space-delimited substitution to avoid modifying license identifiers + that contain 'or'/'and' as substrings (e.g. GPL-2.0-or-later). + """ + expr = re.sub(r" or ", " OR ", expr, flags=re.IGNORECASE) + expr = re.sub(r" and ", " AND ", expr, flags=re.IGNORECASE) + expr = re.sub(r" with ", " WITH ", expr, flags=re.IGNORECASE) + return expr + + +def generate_spdx( + components: list[dict[str, Any]], + config: dict[str, Any], + timestamp: str, +) -> dict[str, Any]: + """Generate SPDX 2.3 JSON document. + + Args: + components: List of component dictionaries + config: Configuration dictionary with producer info + timestamp: ISO 8601 timestamp + + Returns: + SPDX 2.3 compliant dictionary + """ + + namespace = config.get("namespace", "https://eclipse.dev/score") + component_name = config.get("component_name", "unknown") + component_version = config.get("component_version", "") + producer_name = config.get("producer_name", "Eclipse Foundation") + + doc_uuid = uuid.uuid4() + + packages: list[dict[str, Any]] = [] + relationships: list[dict[str, Any]] = [] + + # Root package + root_spdx_id = "SPDXRef-RootPackage" + root_package: dict[str, Any] = { + "SPDXID": root_spdx_id, + "name": component_name, + "versionInfo": component_version if component_version else "unversioned", + "downloadLocation": "https://github.com/eclipse-score", + "supplier": f"Organization: {producer_name}", + "primaryPackagePurpose": "APPLICATION", + "filesAnalyzed": False, + "licenseConcluded": "NOASSERTION", + "licenseDeclared": "NOASSERTION", + "copyrightText": "NOASSERTION", + } + packages.append(root_package) + + # DESCRIBES relationship + relationships.append( + { + "spdxElementId": "SPDXRef-DOCUMENT", + "relationshipType": "DESCRIBES", + "relatedSpdxElement": root_spdx_id, + } + ) + + # Add dependency packages + for comp in components: + pkg, spdx_id = _create_spdx_package(comp) + packages.append(pkg) + + # Root depends on each component + relationships.append( + { + "spdxElementId": root_spdx_id, + "relationshipType": "DEPENDS_ON", + "relatedSpdxElement": spdx_id, + } + ) + + # Collect LicenseRef-* identifiers used in packages and declare them + extracted = _collect_extracted_license_infos(packages) + + doc: dict[str, Any] = { + "spdxVersion": "SPDX-2.3", + "dataLicense": "CC0-1.0", + "SPDXID": "SPDXRef-DOCUMENT", + "name": f"SBOM for {component_name}", + "documentNamespace": f"{namespace}/spdx/{_sanitize_id(component_name)}-{doc_uuid}", + "creationInfo": { + "created": timestamp, + "creators": [ + f"Organization: {producer_name}", + "Tool: score-sbom-generator", + ], + }, + "packages": packages, + "relationships": relationships, + } + + if extracted: + doc["hasExtractedLicensingInfos"] = extracted + + return doc + + +def _create_spdx_package( + component: dict[str, Any], +) -> tuple[dict[str, Any], str]: + """Create an SPDX 2.3 Package for a component. + + Args: + component: Component dictionary + + Returns: + Tuple of (SPDX Package dictionary, spdx_id string) + """ + name = component.get("name", "unknown") + version = component.get("version", "unknown") + purl = component.get("purl", "") + license_id = _normalize_spdx_license(component.get("license", "")) + description = component.get("description", "") + supplier = component.get("supplier", "") + comp_type = component.get("type", "library") + checksum = component.get("checksum", "") + + spdx_id = f"SPDXRef-{_sanitize_id(name)}-{_sanitize_id(version)}" + + # Determine download location + url = component.get("url", "") + source = component.get("source", "") + if url: + download_location = url + elif source == "crates.io": + download_location = f"https://crates.io/crates/{name}/{version}" + else: + download_location = "NOASSERTION" + + package: dict[str, Any] = { + "SPDXID": spdx_id, + "name": name, + "versionInfo": version, + "downloadLocation": download_location, + "primaryPackagePurpose": _map_type_to_purpose(comp_type), + "filesAnalyzed": False, + "licenseConcluded": license_id if license_id else "NOASSERTION", + "licenseDeclared": license_id if license_id else "NOASSERTION", + "copyrightText": "NOASSERTION", + } + + if checksum: + package["checksums"] = [{"algorithm": "SHA256", "checksumValue": checksum}] + + if description: + package["description"] = description + + if supplier: + package["supplier"] = f"Organization: {supplier}" + + # Add PURL as external reference + if purl: + package["externalRefs"] = [ + { + "referenceCategory": "PACKAGE-MANAGER", + "referenceType": "purl", + "referenceLocator": purl, + }, + ] + + return package, spdx_id + + +def _map_type_to_purpose(comp_type: str) -> str: + """Map component type to SPDX 2.3 primary package purpose.""" + type_mapping = { + "application": "APPLICATION", + "library": "LIBRARY", + "framework": "FRAMEWORK", + "file": "FILE", + "container": "CONTAINER", + "firmware": "FIRMWARE", + "device": "DEVICE", + "data": "DATA", + } + return type_mapping.get(comp_type, "LIBRARY") + + +def _collect_extracted_license_infos( + packages: list[dict[str, Any]], +) -> list[dict[str, str]]: + """Collect LicenseRef-* identifiers from packages and build declarations. + + SPDX requires every LicenseRef-* used in license expressions to be + declared in hasExtractedLicensingInfos. + + Args: + packages: List of SPDX package dicts + + Returns: + List of extractedLicensingInfo entries + """ + license_refs: set[str] = set() + pattern = re.compile(r"LicenseRef-[A-Za-z0-9\-.]+") + + for pkg in packages: + for field in ("licenseConcluded", "licenseDeclared"): + value = pkg.get(field, "") + license_refs.update(pattern.findall(value)) + + return [ + { + "licenseId": ref, + "extractedText": f"See package metadata for license details ({ref})", + } + for ref in sorted(license_refs) + ] + + +def _sanitize_id(value: str) -> str: + """Sanitize a string for use in SPDX IDs. + + SPDX 2.3 IDs must match [a-zA-Z0-9.-]+ + """ + result = [] + for char in value: + if char.isalnum() or char in (".", "-"): + result.append(char) + elif char in ("_", " ", "/", "@"): + result.append("-") + return "".join(result) or "unknown" diff --git a/sbom/internal/metadata_rule.bzl b/sbom/internal/metadata_rule.bzl new file mode 100644 index 0000000..7d3ffc3 --- /dev/null +++ b/sbom/internal/metadata_rule.bzl @@ -0,0 +1,49 @@ +"""Rule to expose SBOM metadata collected by the module extension. + +This rule wraps the metadata JSON file generated by the module extension +and makes it available for the SBOM generation action. +""" + +load(":providers.bzl", "SbomMetadataInfo") + +def _sbom_metadata_rule_impl(ctx): + """Implementation of sbom_metadata_rule. + + The metadata is passed as a JSON file to the SBOM generator action, + rather than being parsed at analysis time. + + Args: + ctx: The rule context + + Returns: + A list of providers including SbomMetadataInfo with file reference + """ + metadata_file = ctx.file.metadata_json + + # We can't read files at analysis time in Bazel rules, so we pass + # the file reference and let the generator read it at execution time. + # The SbomMetadataInfo provider carries empty dicts here - the actual + # metadata is read by the Python generator from the JSON file. + return [ + DefaultInfo(files = depset([metadata_file])), + SbomMetadataInfo( + modules = {}, + crates = {}, + http_archives = {}, + ), + # Also provide the file itself for the rule to use + OutputGroupInfo(metadata_file = depset([metadata_file])), + ] + +sbom_metadata_rule = rule( + implementation = _sbom_metadata_rule_impl, + attrs = { + "metadata_json": attr.label( + mandatory = True, + allow_single_file = [".json"], + doc = "JSON file containing SBOM metadata", + ), + }, + provides = [SbomMetadataInfo], + doc = "Exposes SBOM metadata collected by the module extension", +) diff --git a/sbom/internal/providers.bzl b/sbom/internal/providers.bzl new file mode 100644 index 0000000..e2d909a --- /dev/null +++ b/sbom/internal/providers.bzl @@ -0,0 +1,28 @@ +"""Providers for SBOM data propagation. + +This module defines the providers used to pass SBOM-related information +between different phases of the build: +- SbomDepsInfo: Collected by aspect - deps of a specific target +- SbomMetadataInfo: Collected by extension - metadata for all modules +""" + +# Collected by aspect - deps of a specific target +SbomDepsInfo = provider( + doc = "Transitive dependency information for SBOM generation", + fields = { + "direct_deps": "depset of direct dependency labels", + "transitive_deps": "depset of all transitive dependency labels", + "external_repos": "depset of external repository names used", + "external_dep_edges": "depset of external repo dependency edges (from::to)", + }, +) + +# Collected by extension - metadata for all modules +SbomMetadataInfo = provider( + doc = "Metadata about all available modules/crates", + fields = { + "modules": "dict of module_name -> {version, commit, registry, purl}", + "crates": "dict of crate_name -> {version, checksum, purl}", + "http_archives": "dict of repo_name -> {url, version, sha256, purl}", + }, +) diff --git a/sbom/internal/rules.bzl b/sbom/internal/rules.bzl new file mode 100644 index 0000000..80918d8 --- /dev/null +++ b/sbom/internal/rules.bzl @@ -0,0 +1,286 @@ +"""SBOM generation rule implementation. + +This module contains the main _sbom_impl rule that combines data from +the aspect (target dependencies) with metadata from the module extension +to generate SPDX and CycloneDX format SBOMs. +""" + +load(":aspect.bzl", "sbom_aspect") +load(":providers.bzl", "SbomDepsInfo") + +def _sbom_impl(ctx): + """Generates SBOM by combining aspect data with extension metadata. + + Args: + ctx: The rule context + + Returns: + DefaultInfo with generated SBOM files + """ + + # Collect all external repos used by targets + all_external_repos = depset(transitive = [ + target[SbomDepsInfo].external_repos + for target in ctx.attr.targets + ]) + + # Collect all transitive deps + all_transitive_deps = depset(transitive = [ + target[SbomDepsInfo].transitive_deps + for target in ctx.attr.targets + ]) + + # Collect external dependency edges + all_external_dep_edges = depset(transitive = [ + target[SbomDepsInfo].external_dep_edges + for target in ctx.attr.targets + ]) + + # Get the metadata JSON file from the extension + metadata_file = ctx.file.metadata_json + + # Create input file with dependency info for Python generator + deps_json = ctx.actions.declare_file(ctx.attr.name + "_deps.json") + + # Build target labels list + target_labels = [str(t.label) for t in ctx.attr.targets] + + # Infer scan root for cdxgen: + # - If all targets come from the same external repo, scan that repo tree. + # - Otherwise scan the current execroot. + target_repos = [] + for t in ctx.attr.targets: + repo = t.label.workspace_name + if repo and repo not in target_repos: + target_repos.append(repo) + cdxgen_scan_root = "." + if len(target_repos) == 1: + cdxgen_scan_root = "external/{}".format(target_repos[0]) + + # Build exclude patterns list + exclude_patterns = ctx.attr.exclude_patterns + + # Collect MODULE.bazel files from dependency modules for version extraction + dep_module_paths = [f.path for f in ctx.files.dep_module_files] + module_lock_paths = [f.path for f in ctx.files.module_lockfiles] + + deps_data = { + "external_repos": all_external_repos.to_list(), + "transitive_deps": [str(d) for d in all_transitive_deps.to_list()], + "external_dep_edges": all_external_dep_edges.to_list(), + "target_labels": target_labels, + "exclude_patterns": exclude_patterns, + "dep_module_files": dep_module_paths, + "module_lockfiles": module_lock_paths, + "config": { + "producer_name": ctx.attr.producer_name, + "producer_url": ctx.attr.producer_url, + "component_name": ctx.attr.component_name if ctx.attr.component_name else ctx.attr.name, + "component_version": ctx.attr.component_version, + "namespace": ctx.attr.namespace, + "sbom_authors": ctx.attr.sbom_authors, + "generation_context": ctx.attr.generation_context, + "sbom_tools": ctx.attr.sbom_tools, + }, + } + + ctx.actions.write( + output = deps_json, + content = json.encode(deps_data), + ) + + # Declare outputs + outputs = [] + args = ctx.actions.args() + args.add("--input", deps_json) + args.add("--metadata", metadata_file) + + if "spdx" in ctx.attr.output_formats: + spdx_out = ctx.actions.declare_file(ctx.attr.name + ".spdx.json") + outputs.append(spdx_out) + args.add("--spdx-output", spdx_out) + + if "cyclonedx" in ctx.attr.output_formats: + cdx_out = ctx.actions.declare_file(ctx.attr.name + ".cdx.json") + outputs.append(cdx_out) + args.add("--cyclonedx-output", cdx_out) + + # Build inputs list + generator_inputs = [deps_json, metadata_file] + ctx.files.dep_module_files + ctx.files.module_lockfiles + + # Auto-generate crates metadata cache if enabled and a lockfile is provided + crates_cache = None + if (ctx.file.cargo_lockfile or ctx.files.module_lockfiles) and ctx.attr.auto_crates_cache: + crates_cache = ctx.actions.declare_file(ctx.attr.name + "_crates_metadata.json") + cache_inputs = [ctx.file._crates_cache_script] + cache_cmd = "set -euo pipefail\npython3 {} {}".format( + ctx.file._crates_cache_script.path, + crates_cache.path, + ) + if ctx.file.cargo_lockfile: + cache_inputs.append(ctx.file.cargo_lockfile) + cache_cmd += " --cargo-lock {}".format(ctx.file.cargo_lockfile.path) + for lock in ctx.files.module_lockfiles: + cache_inputs.append(lock) + cache_cmd += " --module-lock {}".format(lock.path) + ctx.actions.run_shell( + inputs = cache_inputs, + outputs = [crates_cache], + command = cache_cmd, + mnemonic = "CratesCacheGenerate", + progress_message = "Generating crates metadata cache for %s" % ctx.attr.name, + execution_requirements = {"requires-network": ""}, + use_default_shell_env = True, + ) + + # Add cdxgen SBOM if provided; otherwise auto-generate if enabled + cdxgen_sbom = ctx.file.cdxgen_sbom + if not cdxgen_sbom and ctx.attr.auto_cdxgen: + cdxgen_sbom = ctx.actions.declare_file(ctx.attr.name + "_cdxgen.cdx.json") + ctx.actions.run( + outputs = [cdxgen_sbom], + executable = ctx.executable._npm, + arguments = [ + "exec", + "--", + "@cyclonedx/cdxgen", + "-t", + "cpp", + "--deep", + "-r", + "-o", + cdxgen_sbom.path, + cdxgen_scan_root, + ], + mnemonic = "CdxgenGenerate", + progress_message = "Generating cdxgen SBOM for %s" % ctx.attr.name, + # cdxgen needs to recursively scan source trees. Running sandboxed with + # only declared file inputs makes the scan effectively empty. + execution_requirements = {"no-sandbox": "1"}, + ) + + if cdxgen_sbom: + args.add("--cdxgen-sbom", cdxgen_sbom) + generator_inputs.append(cdxgen_sbom) + + if crates_cache: + args.add("--crates-cache", crates_cache) + generator_inputs.append(crates_cache) + + # Run Python generator + ctx.actions.run( + inputs = generator_inputs, + outputs = outputs, + executable = ctx.executable._generator, + arguments = [args], + mnemonic = "SbomGenerate", + progress_message = "Generating SBOM for %s" % ctx.attr.name, + ) + + return [DefaultInfo(files = depset(outputs))] + +sbom_rule = rule( + implementation = _sbom_impl, + attrs = { + "targets": attr.label_list( + mandatory = True, + aspects = [sbom_aspect], + doc = "Targets to generate SBOM for", + ), + "output_formats": attr.string_list( + default = ["spdx", "cyclonedx"], + doc = "Output formats: spdx, cyclonedx", + ), + "producer_name": attr.string( + default = "Eclipse Foundation", + doc = "SBOM producer organization name", + ), + "producer_url": attr.string( + default = "https://projects.eclipse.org/projects/automotive.score", + doc = "SBOM producer URL", + ), + "component_name": attr.string( + doc = "Component name (defaults to rule name)", + ), + "component_version": attr.string( + default = "", + doc = "Component version", + ), + "sbom_authors": attr.string_list( + default = [], + doc = "SBOM author(s) (distinct from software producers)", + ), + "generation_context": attr.string( + default = "", + doc = "SBOM generation context: pre-build, build, post-build", + ), + "sbom_tools": attr.string_list( + default = [], + doc = "Additional SBOM generation tool names", + ), + "namespace": attr.string( + default = "https://eclipse.dev/score", + doc = "SBOM namespace URI", + ), + "exclude_patterns": attr.string_list( + default = [ + "rules_rust", + "rules_cc", + "bazel_tools", + "platforms", + "bazel_skylib", + "rules_python", + "rules_proto", + "protobuf", + "local_config_", + "remote_", + ], + doc = "External repo patterns to exclude (build tools)", + ), + "metadata_json": attr.label( + mandatory = True, + allow_single_file = [".json"], + doc = "Metadata JSON file from sbom_metadata extension", + ), + "dep_module_files": attr.label_list( + allow_files = True, + default = [], + doc = "MODULE.bazel files from dependency modules for automatic version extraction", + ), + "cargo_lockfile": attr.label( + allow_single_file = True, + doc = "Optional Cargo.lock file for automatic crate metadata extraction", + ), + "module_lockfiles": attr.label_list( + allow_files = True, + doc = "MODULE.bazel.lock files for crate metadata extraction (e.g., from score_crates and workspace)", + ), + "cdxgen_sbom": attr.label( + allow_single_file = [".json"], + doc = "Optional CycloneDX JSON from cdxgen for C++ dependency enrichment", + ), + "auto_cdxgen": attr.bool( + default = False, + doc = "Automatically run cdxgen when no cdxgen_sbom is provided", + ), + "_npm": attr.label( + default = "//sbom:npm_wrapper", + executable = True, + cfg = "exec", + ), + "auto_crates_cache": attr.bool( + default = True, + doc = "Automatically build crates metadata cache when cargo_lockfile or module_lockfile is provided", + ), + "_crates_cache_script": attr.label( + default = "//sbom/scripts:generate_crates_metadata_cache.py", + allow_single_file = True, + ), + "_generator": attr.label( + default = "//sbom/internal/generator:sbom_generator", + executable = True, + cfg = "exec", + ), + }, + doc = "Generates SBOM for specified targets in SPDX and CycloneDX formats", +) diff --git a/sbom/npm_wrapper.sh b/sbom/npm_wrapper.sh new file mode 100755 index 0000000..0c312a6 --- /dev/null +++ b/sbom/npm_wrapper.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +# Wrapper to use system-installed npm/cdxgen +# This relies on npm/cdxgen being available in the system PATH + +# Add common Node.js installation paths to PATH +export PATH="/home/lj/.nvm/versions/node/v24.13.0/bin:$PATH" +export PATH="$HOME/.nvm/versions/node/v24.13.0/bin:$PATH" +export PATH="/usr/local/bin:/usr/bin:/bin:$PATH" + +# If called with "exec -- @cyclonedx/cdxgen", just run cdxgen directly +if [[ "$1" == "exec" && "$2" == "--" && "$3" == "@cyclonedx/cdxgen" ]]; then + shift 3 # Remove "exec -- @cyclonedx/cdxgen" + exec cdxgen "$@" +else + # Otherwise, run npm normally + exec npm "$@" +fi diff --git a/sbom/scripts/BUILD.bazel b/sbom/scripts/BUILD.bazel new file mode 100644 index 0000000..c33b3c6 --- /dev/null +++ b/sbom/scripts/BUILD.bazel @@ -0,0 +1,5 @@ +package(default_visibility = ["//sbom:__subpackages__"]) + +exports_files([ + "generate_crates_metadata_cache.py", +]) diff --git a/sbom/scripts/generate_cpp_metadata_cache.py b/sbom/scripts/generate_cpp_metadata_cache.py new file mode 100644 index 0000000..a45ec38 --- /dev/null +++ b/sbom/scripts/generate_cpp_metadata_cache.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python3 +"""Generate cpp_metadata.json cache from cdxgen CycloneDX output. + +Usage: + # Generate from cdxgen output: + npx @cyclonedx/cdxgen -t cpp --deep -r -o cdxgen_output.cdx.json + python3 generate_cpp_metadata_cache.py cdxgen_output.cdx.json ../cpp_metadata.json + + # Or pipe directly: + npx @cyclonedx/cdxgen -t cpp --deep -r | python3 generate_cpp_metadata_cache.py - ../cpp_metadata.json +""" + +import argparse +import json +import sys + + +def convert_cdxgen_to_cache(cdxgen_path: str) -> dict: + """Convert CycloneDX JSON from cdxgen to internal cache format.""" + if cdxgen_path == "-": + cdx_data = json.load(sys.stdin) + else: + with open(cdxgen_path, encoding="utf-8") as f: + cdx_data = json.load(f) + + if cdx_data.get("bomFormat") != "CycloneDX": + print("Error: Input is not a CycloneDX JSON file", file=sys.stderr) + sys.exit(1) + + cache = {} + for comp in cdx_data.get("components", []): + name = comp.get("name", "") + if not name: + continue + + entry = { + "version": comp.get("version", "unknown"), + } + + # License + licenses = comp.get("licenses", []) + if licenses: + first = licenses[0] + lic_obj = first.get("license", {}) + lic_id = lic_obj.get("id", "") or lic_obj.get("name", "") + if not lic_id: + lic_id = first.get("expression", "") + if lic_id: + entry["license"] = lic_id + + # Description + if comp.get("description"): + entry["description"] = comp["description"] + + # Supplier + supplier = comp.get("supplier", {}) + if supplier and supplier.get("name"): + entry["supplier"] = supplier["name"] + elif comp.get("publisher"): + entry["supplier"] = comp["publisher"] + + # PURL + if comp.get("purl"): + entry["purl"] = comp["purl"] + + # URL from externalReferences + for ref in comp.get("externalReferences", []): + if ref.get("type") in ("website", "distribution", "vcs") and ref.get("url"): + entry["url"] = ref["url"] + break + + cache[name] = entry + + return cache + + +def main(): + parser = argparse.ArgumentParser( + description="Convert cdxgen CycloneDX output to cpp_metadata.json cache" + ) + parser.add_argument("input", help="cdxgen CycloneDX JSON file (or - for stdin)") + parser.add_argument( + "output", + nargs="?", + default="cpp_metadata.json", + help="Output cache file (default: cpp_metadata.json)", + ) + parser.add_argument( + "--merge", + help="Merge with existing cache file (existing entries take precedence)", + ) + args = parser.parse_args() + + cache = convert_cdxgen_to_cache(args.input) + + if args.merge: + try: + with open(args.merge, encoding="utf-8") as f: + existing = json.load(f) + # Existing entries take precedence + for name, data in cache.items(): + if name not in existing: + existing[name] = data + cache = existing + except (OSError, json.JSONDecodeError): + pass + + with open(args.output, "w", encoding="utf-8") as f: + json.dump(cache, f, indent=2, sort_keys=True) + f.write("\n") + + print(f"Generated {args.output} with {len(cache)} C++ dependencies") + + +if __name__ == "__main__": + main() diff --git a/sbom/scripts/generate_crates_metadata_cache.py b/sbom/scripts/generate_crates_metadata_cache.py new file mode 100755 index 0000000..20eb138 --- /dev/null +++ b/sbom/scripts/generate_crates_metadata_cache.py @@ -0,0 +1,540 @@ +#!/usr/bin/env python3 +"""Generate crates metadata cache for SBOM generation. + +This script parses Cargo.lock files and/or MODULE.bazel.lock files for +crate version/checksum data, then fetches license metadata via +dash-license-scan (Eclipse Foundation + ClearlyDefined) and creates a +cache file for SBOM generation. + +Usage: + python3 generate_crates_metadata_cache.py --module-lock + python3 generate_crates_metadata_cache.py --cargo-lock + python3 generate_crates_metadata_cache.py --cargo-lock --module-lock + +Example: + python3 generate_crates_metadata_cache.py crates_metadata.json \\ + --module-lock ../../score-crates/MODULE.bazel.lock +""" + +import argparse +import json +import os +import re +import shutil +import subprocess +import sys +import tempfile +import urllib.request +from concurrent.futures import ThreadPoolExecutor, as_completed +from pathlib import Path +from typing import Any, Dict + + +def parse_cargo_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: + """Parse Cargo.lock and extract crate information. + + Args: + lockfile_path: Path to Cargo.lock file + + Returns: + Dict mapping crate name to {version, checksum, source} + """ + try: + import tomllib as tomli # Python 3.11+ + except ImportError: + try: + import tomli + except ImportError: + print( + "ERROR: tomli/tomllib library not found. Use Python 3.11+ or install tomli", + file=sys.stderr, + ) + sys.exit(1) + + with open(lockfile_path, "rb") as f: + lock_data = tomli.load(f) + + crates = {} + for package in lock_data.get("package", []): + name = package["name"] + source = package.get("source", "") + + # Only include crates from crates.io + if "registry+https://github.com/rust-lang/crates.io-index" in source: + crates[name] = { + "name": name, + "version": package["version"], + "checksum": package.get("checksum", ""), + "source": source, + } + + return crates + + +def parse_module_bazel_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: + """Parse MODULE.bazel.lock and extract crate information from cargo-bazel resolution. + + The MODULE.bazel.lock (from score_crates or similar) contains resolved crate + specs under moduleExtensions -> crate_universe -> generatedRepoSpecs. + Each crate entry has name, version, sha256, and download URL. + + Args: + lockfile_path: Path to MODULE.bazel.lock file + + Returns: + Dict mapping crate name to {version, checksum, source} + """ + with open(lockfile_path, encoding="utf-8") as f: + lock_data = json.load(f) + + crates = {} + extensions = lock_data.get("moduleExtensions", {}) + + # Find the crate_universe extension (key contains "crate_universe" or "crate") + crate_ext = None + for ext_key, ext_val in extensions.items(): + if "crate" in ext_key.lower(): + crate_ext = ext_val + break + + if not crate_ext: + print( + " WARNING: No crate extension found in MODULE.bazel.lock", file=sys.stderr + ) + return crates + + # Get generatedRepoSpecs from 'general' (or the first available key) + general = crate_ext.get("general", {}) + specs = general.get("generatedRepoSpecs", {}) + + for repo_name, spec in specs.items(): + # Skip the crate_index meta-repo itself + if repo_name == "crate_index" or not repo_name.startswith("crate_index__"): + continue + + crate_part = repo_name.replace("crate_index__", "") + + # Parse name-version (e.g., "serde-1.0.228", "iceoryx2-qnx8-0.7.0") + m = re.match(r"^(.+?)-(\d+\.\d+\.\d+.*)$", crate_part) + if not m: + continue + + name = m.group(1) + version = m.group(2) + attrs = spec.get("attributes", {}) + sha256 = attrs.get("sha256", "") + + crates[name] = { + "name": name, + "version": version, + "checksum": sha256, + "source": "module-bazel-lock", + } + + return crates + + +def build_dash_coordinates(crates: Dict[str, Dict[str, Any]]) -> list[str]: + """Build Eclipse dash-license-scan coordinate strings from crate data. + + Args: + crates: Dict mapping crate name to {name, version, checksum, ...} + + Returns: + Sorted list of coordinate strings: "crate/cratesio/-/{name}/{version}" + """ + return [ + f"crate/cratesio/-/{info['name']}/{info['version']}" + for _key, info in sorted(crates.items()) + ] + + +def generate_synthetic_cargo_lock( + crates: Dict[str, Dict[str, Any]], output_path: str +) -> None: + """Generate a minimal synthetic Cargo.lock from parsed crate data. + + The dash-license-scan parser splits on '[[package]]' blocks and extracts + name, version, and source fields. Source must contain 'crates' if present. + + Args: + crates: Dict mapping crate name to {name, version, checksum, source} + output_path: Path to write the synthetic Cargo.lock + """ + lines = ["version = 4", ""] + for _name, info in sorted(crates.items()): + lines.append("[[package]]") + lines.append(f'name = "{info["name"]}"') + lines.append(f'version = "{info["version"]}"') + lines.append( + 'source = "registry+https://github.com/rust-lang/crates.io-index"' + ) + lines.append("") + + with open(output_path, "w", encoding="utf-8") as f: + f.write("\n".join(lines)) + + +def _find_uvx() -> str: + """Locate the uvx binary, checking PATH and common install locations.""" + found = shutil.which("uvx") + if found: + return found + + # Standard uv install location (works inside Bazel sandbox where PATH is minimal) + home = os.environ.get("HOME", os.path.expanduser("~")) + candidate = os.path.join(home, ".local", "bin", "uvx") + if os.path.isfile(candidate) and os.access(candidate, os.X_OK): + return candidate + + return "uvx" # fall back, will raise FileNotFoundError in subprocess + + +def run_dash_license_scan( + cargo_lock_path: str, summary_output_path: str +) -> None: + """Invoke dash-license-scan via uvx and write summary to file. + + Args: + cargo_lock_path: Path to (real or synthetic) Cargo.lock + summary_output_path: Path to write the dash-licenses summary CSV + + Raises: + SystemExit: If uvx/dash-license-scan is not found or fatally crashes + """ + uvx = _find_uvx() + cmd = [ + uvx, + "--from", + "dash-license-scan@git+https://github.com/eclipse-score/dash-license-scan", + "dash-license-scan", + "--summary", + summary_output_path, + cargo_lock_path, + ] + print(f"Running: {' '.join(cmd)}") + + # Redirect uv's cache and tool directories to writable temp locations. + # Inside Bazel sandbox, ~/.cache and ~/.local/share are read-only. + env = os.environ.copy() + uv_tmp = os.path.join(tempfile.gettempdir(), "uv_sbom") + if "UV_CACHE_DIR" not in env: + env["UV_CACHE_DIR"] = os.path.join(uv_tmp, "cache") + if "UV_TOOL_DIR" not in env: + env["UV_TOOL_DIR"] = os.path.join(uv_tmp, "tools") + + try: + result = subprocess.run( + cmd, + capture_output=True, + text=True, + timeout=600, + env=env, + ) + except FileNotFoundError: + print( + "ERROR: 'uvx' not found on PATH or ~/.local/bin/. Install uv: https://docs.astral.sh/uv/", + file=sys.stderr, + ) + sys.exit(1) + except subprocess.TimeoutExpired: + print( + "ERROR: dash-license-scan timed out after 600 seconds", file=sys.stderr + ) + sys.exit(1) + + # dash-license-scan exits with returncode = number of restricted items. + # This is normal behavior, not an error. Only signal kills are fatal. + if result.returncode < 0: + print( + f"ERROR: dash-license-scan killed by signal {-result.returncode}", + file=sys.stderr, + ) + if result.stderr: + print(result.stderr, file=sys.stderr) + sys.exit(1) + + if result.stderr: + # Print dash-license-scan's own output (INFO lines from the JAR) + for line in result.stderr.splitlines(): + print(f" {line}") + + if not os.path.exists(summary_output_path): + print( + f"ERROR: dash-license-scan did not produce summary file: {summary_output_path}", + file=sys.stderr, + ) + sys.exit(1) + + if result.returncode > 0: + print( + f" NOTE: {result.returncode} crate(s) have 'restricted' license status" + ) + + +def parse_dash_summary(summary_path: str) -> Dict[str, str]: + """Parse the dash-licenses summary CSV file into a license lookup dict. + + Each line has format: + crate/cratesio/-//, , , + + Args: + summary_path: Path to the dash-licenses summary file + + Returns: + Dict mapping crate name to SPDX license expression string + """ + licenses: Dict[str, str] = {} + with open(summary_path, encoding="utf-8") as f: + for line in f: + line = line.strip() + if not line: + continue + parts = [p.strip() for p in line.split(",")] + if len(parts) < 4: + continue + + content_id = parts[0] + license_expr = parts[1].strip() + + # Extract crate name from content_id: "crate/cratesio/-//" + id_parts = content_id.split("/") + if len(id_parts) >= 5 and id_parts[0] == "crate": + crate_name = id_parts[3] + if license_expr: + licenses[crate_name] = license_expr + + return licenses + + +def _extract_supplier(repository_url: str) -> str: + """Extract supplier (GitHub org/user) from a repository URL. + + Examples: + https://github.com/serde-rs/serde -> serde-rs + https://github.com/eclipse-iceoryx/iceoryx2 -> eclipse-iceoryx + """ + if not repository_url: + return "" + m = re.match(r"https?://github\.com/([^/]+)/", repository_url) + return m.group(1) if m else "" + + +def _fetch_one_crate_meta(name: str) -> tuple[str, Dict[str, str]]: + """Fetch metadata for a single crate from crates.io API. + + Returns (name, {description, supplier}) dict. + If the crate isn't found, retries with platform suffixes stripped + (e.g. "-qnx8") to find the upstream crate. + """ + candidates = [name] + # Platform-specific forks (e.g. iceoryx2-bb-lock-free-qnx8 -> iceoryx2-bb-lock-free) + for suffix in ("-qnx8",): + if name.endswith(suffix): + candidates.append(name[: -len(suffix)]) + + for candidate in candidates: + url = f"https://crates.io/api/v1/crates/{candidate}" + req = urllib.request.Request( + url, + headers={"User-Agent": "score-sbom-tool (https://eclipse.dev/score)"}, + ) + try: + with urllib.request.urlopen(req, timeout=10) as resp: + data = json.loads(resp.read()) + crate = data.get("crate", {}) + desc = (crate.get("description") or "").strip() + supplier = _extract_supplier(crate.get("repository", "")) + if desc or supplier: + return name, {"description": desc, "supplier": supplier} + except Exception: + continue + return name, {} + + +def fetch_crate_metadata_from_cratesio( + crate_names: list[str], +) -> Dict[str, Dict[str, str]]: + """Fetch metadata (description, supplier) from crates.io API (parallel). + + Args: + crate_names: List of crate names to look up + + Returns: + Dict mapping crate name to {description, supplier} + """ + total = len(crate_names) + print(f"Fetching metadata from crates.io for {total} crates...") + + metadata: Dict[str, Dict[str, str]] = {} + done = 0 + with ThreadPoolExecutor(max_workers=10) as pool: + futures = {pool.submit(_fetch_one_crate_meta, n): n for n in crate_names} + for future in as_completed(futures): + name, meta = future.result() + if meta: + metadata[name] = meta + done += 1 + if done % 50 == 0: + print(f" ... {done}/{total} crates queried") + + with_desc = sum(1 for m in metadata.values() if m.get("description")) + with_supplier = sum(1 for m in metadata.values() if m.get("supplier")) + print( + f"Retrieved from crates.io: {with_desc} descriptions, {with_supplier} suppliers" + ) + return metadata + + +def generate_cache( + cargo_lock_path: str | None = None, + module_lock_paths: list[str] | None = None, +) -> Dict[str, Dict[str, Any]]: + """Generate metadata cache from lockfiles + dash-license-scan. + + 1. Parse Cargo.lock and/or MODULE.bazel.lock files for crate names, versions, checksums + 2. Generate a synthetic Cargo.lock combining all crates + 3. Run dash-license-scan for license data + 4. Fetch descriptions from crates.io (parallel) + 5. Combine version/checksum from lockfile with license and description + + Args: + cargo_lock_path: Optional path to Cargo.lock file + module_lock_paths: Optional list of paths to MODULE.bazel.lock files + + Returns: + Dict mapping crate name to metadata + """ + crates: Dict[str, Dict[str, Any]] = {} + + if cargo_lock_path: + print(f"Parsing {cargo_lock_path}...") + crates = parse_cargo_lock(cargo_lock_path) + print(f"Found {len(crates)} crates from Cargo.lock") + + # Merge crates from MODULE.bazel.lock files + for module_lock_path in (module_lock_paths or []): + print(f"Parsing {module_lock_path}...") + module_crates = parse_module_bazel_lock(module_lock_path) + added = 0 + for name, info in module_crates.items(): + if name not in crates: + crates[name] = info + added += 1 + print(f"Found {len(module_crates)} crates in {module_lock_path} ({added} new)") + + if not crates: + print("No crates found in lockfiles.") + return {} + + # Generate synthetic Cargo.lock containing only crates.io crates. + # This avoids dash-license-scan's ValueError on non-crates.io sources + # (git dependencies, path dependencies) that may be in a real Cargo.lock. + temp_dir = tempfile.mkdtemp(prefix="sbom_dash_") + synthetic_path = os.path.join(temp_dir, "Cargo.lock") + generate_synthetic_cargo_lock(crates, synthetic_path) + print(f"Generated synthetic Cargo.lock with {len(crates)} crates") + + summary_path = os.path.join(temp_dir, "dash_summary.txt") + + try: + print("Fetching license data via dash-license-scan...") + run_dash_license_scan(synthetic_path, summary_path) + license_map = parse_dash_summary(summary_path) + print(f"Retrieved licenses for {len(license_map)} crates") + finally: + shutil.rmtree(temp_dir, ignore_errors=True) + + # Fetch descriptions + suppliers from crates.io (parallel, ~10 concurrent requests) + cratesio_meta = fetch_crate_metadata_from_cratesio(list(crates.keys())) + + # Build final cache + cache: Dict[str, Dict[str, Any]] = {} + for name, info in crates.items(): + meta = cratesio_meta.get(name, {}) + cache[name] = { + "version": info["version"], + "checksum": info["checksum"], + "purl": f"pkg:cargo/{name}@{info['version']}", + "license": license_map.get(name, ""), + "description": meta.get("description", ""), + "supplier": meta.get("supplier", ""), + } + + return cache + + +def main(): + parser = argparse.ArgumentParser( + description="Generate crates metadata cache for SBOM generation (via dash-license-scan)" + ) + parser.add_argument( + "output", + nargs="?", + default="crates_metadata.json", + help="Output JSON file (default: crates_metadata.json)", + ) + parser.add_argument("--cargo-lock", help="Path to Cargo.lock file") + parser.add_argument( + "--module-lock", + action="append", + default=[], + help="Path to MODULE.bazel.lock for additional crates (can be repeated)", + ) + parser.add_argument( + "--merge", help="Merge with existing cache file instead of overwriting" + ) + + args = parser.parse_args() + + if not args.cargo_lock and not args.module_lock: + parser.error("At least one of --cargo-lock or --module-lock is required") + + # Generate new cache + cache = generate_cache( + cargo_lock_path=args.cargo_lock, + module_lock_paths=args.module_lock, + ) + + # Merge with existing cache if requested + if args.merge and Path(args.merge).exists(): + print(f"\nMerging with existing cache: {args.merge}") + with open(args.merge) as f: + existing = json.load(f) + + # Prefer new data, but keep entries not in current lockfiles + merged = existing.copy() + merged.update(cache) + cache = merged + print(f"Merged cache now contains {len(cache)} entries") + + if not cache: + print("\nNo crates to write.") + with open(args.output, "w") as f: + json.dump({}, f) + return 0 + + # Write cache + print(f"\nWriting cache to {args.output}...") + with open(args.output, "w") as f: + json.dump(cache, f, indent=2, sort_keys=True) + + # Print statistics + total = len(cache) + with_license = sum(1 for c in cache.values() if c.get("license")) + with_checksum = sum(1 for c in cache.values() if c.get("checksum")) + with_desc = sum(1 for c in cache.values() if c.get("description")) + with_supplier = sum(1 for c in cache.values() if c.get("supplier")) + + print(f"\n✓ Cache generated successfully!") + print(f" Total crates: {total}") + print(f" With licenses: {with_license} ({with_license / total * 100:.1f}%)") + print(f" With checksums: {with_checksum} ({with_checksum / total * 100:.1f}%)") + print(f" With descriptions: {with_desc} ({with_desc / total * 100:.1f}%)") + print(f" With suppliers: {with_supplier} ({with_supplier / total * 100:.1f}%)") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/sbom/scripts/spdx_to_github_snapshot.py b/sbom/scripts/spdx_to_github_snapshot.py new file mode 100644 index 0000000..c62e13c --- /dev/null +++ b/sbom/scripts/spdx_to_github_snapshot.py @@ -0,0 +1,230 @@ +#!/usr/bin/env python3 +"""Convert SPDX 2.3 JSON to GitHub Dependency Submission API snapshot format. + +This script converts an SPDX 2.3 SBOM JSON file into the snapshot format +expected by the GitHub Dependency Submission API, enabling Dependabot +vulnerability alerts on dependencies declared in the SBOM. + +GitHub Dependency Submission API: + https://docs.github.com/en/rest/dependency-graph/dependency-submission + +Usage: + python3 spdx_to_github_snapshot.py \\ + --input my_sbom.spdx.json \\ + --output snapshot.json \\ + --sha \\ + --ref refs/heads/main \\ + --job-correlator my-workflow_sbom \\ + --job-id +""" + +import argparse +import json +import sys +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + + +DETECTOR_NAME = "score-sbom-generator" +DETECTOR_VERSION = "0.1.0" +DETECTOR_URL = "https://github.com/eclipse-score/tooling/tree/main/sbom" + + +def _extract_purl(package: dict[str, Any]) -> str | None: + """Extract PURL from SPDX package externalRefs.""" + for ref in package.get("externalRefs", []): + if ref.get("referenceType") == "purl": + return ref.get("referenceLocator", "") + return None + + +def _package_key(package: dict[str, Any]) -> str: + """Return a stable key for a package (name@version or SPDXID).""" + name = package.get("name", "") + version = package.get("versionInfo", "") + if name and version: + return f"{name}@{version}" + return package.get("SPDXID", name or "unknown") + + +def convert_spdx_to_snapshot( + spdx: dict[str, Any], + sha: str, + ref: str, + job_correlator: str, + job_id: str, +) -> dict[str, Any]: + """Convert SPDX 2.3 document to GitHub Dependency Submission snapshot. + + Args: + spdx: Parsed SPDX 2.3 JSON document + sha: Git commit SHA (40 hex chars) + ref: Git ref (e.g. refs/heads/main) + job_correlator: Unique string identifying the workflow + SBOM target + job_id: GitHub Actions run ID (or any unique job identifier) + + Returns: + GitHub Dependency Submission snapshot dict + """ + packages_by_id: dict[str, dict[str, Any]] = {} + for pkg in spdx.get("packages", []): + spdx_id = pkg.get("SPDXID", "") + if spdx_id: + packages_by_id[spdx_id] = pkg + + # Find the root document package (DESCRIBES relationship target) + relationships = spdx.get("relationships", []) + root_ids: set[str] = set() + direct_ids: set[str] = set() + + for rel in relationships: + rel_type = rel.get("relationshipType", "") + element = rel.get("spdxElementId", "") + related = rel.get("relatedSpdxElement", "") + + if rel_type == "DESCRIBES": + root_ids.add(related) + elif rel_type in ("DEPENDS_ON", "DYNAMIC_LINK", "STATIC_LINK", "CONTAINS"): + if element in root_ids: + direct_ids.add(related) + + # Build dependency map: which packages depend on which + depends_on: dict[str, list[str]] = {} + for rel in relationships: + rel_type = rel.get("relationshipType", "") + element = rel.get("spdxElementId", "") + related = rel.get("relatedSpdxElement", "") + if rel_type in ("DEPENDS_ON", "DYNAMIC_LINK", "STATIC_LINK", "CONTAINS"): + depends_on.setdefault(element, []).append(related) + + # Manifest name from SBOM document name or file name + doc_name = spdx.get("name", "sbom") + manifest_name = doc_name.replace(" ", "_").replace("/", "_") + + # Build resolved packages dict (exclude root/document descriptor packages) + resolved: dict[str, dict[str, Any]] = {} + + for spdx_id, pkg in packages_by_id.items(): + # Skip the SBOM document itself (SPDXRef-DOCUMENT) and root component + if spdx_id in root_ids or spdx_id == "SPDXRef-DOCUMENT": + continue + + purl = _extract_purl(pkg) + if not purl: + # Skip packages without a PURL — Dependabot can't use them + continue + + key = _package_key(pkg) + + # Relationship: direct if root explicitly depends on it, else indirect + relationship = "direct" if spdx_id in direct_ids else "indirect" + + # Dependencies of this package + dep_purls = [] + for dep_id in depends_on.get(spdx_id, []): + dep_pkg = packages_by_id.get(dep_id) + if dep_pkg: + dep_purl = _extract_purl(dep_pkg) + if dep_purl: + dep_purls.append(dep_purl) + + resolved[key] = { + "package_url": purl, + "relationship": relationship, + "dependencies": dep_purls, + } + + scanned = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + + snapshot: dict[str, Any] = { + "version": 0, + "sha": sha, + "ref": ref, + "job": { + "correlator": job_correlator, + "id": job_id, + }, + "detector": { + "name": DETECTOR_NAME, + "version": DETECTOR_VERSION, + "url": DETECTOR_URL, + }, + "scanned": scanned, + "manifests": { + manifest_name: { + "name": manifest_name, + "resolved": resolved, + } + }, + } + + return snapshot + + +def main() -> int: + parser = argparse.ArgumentParser( + description="Convert SPDX 2.3 JSON to GitHub Dependency Submission snapshot" + ) + parser.add_argument("--input", required=True, help="Path to SPDX 2.3 JSON file") + parser.add_argument("--output", required=True, help="Output snapshot JSON path") + parser.add_argument( + "--sha", required=True, help="Git commit SHA (40 hex chars)" + ) + parser.add_argument( + "--ref", required=True, help="Git ref (e.g. refs/heads/main)" + ) + parser.add_argument( + "--job-correlator", + default="score-sbom_sbom", + help="Unique workflow+target identifier for Dependency Submission API", + ) + parser.add_argument( + "--job-id", default="0", help="GitHub Actions run ID (or unique job ID)" + ) + args = parser.parse_args() + + input_path = Path(args.input) + if not input_path.exists(): + print(f"Error: input file not found: {input_path}", file=sys.stderr) + return 1 + + with input_path.open() as f: + try: + spdx = json.load(f) + except json.JSONDecodeError as e: + print(f"Error: invalid JSON in {input_path}: {e}", file=sys.stderr) + return 1 + + spdx_version = spdx.get("spdxVersion", "") + if not spdx_version.startswith("SPDX-"): + print( + f"Warning: unexpected spdxVersion '{spdx_version}', expected SPDX-2.x", + file=sys.stderr, + ) + + snapshot = convert_spdx_to_snapshot( + spdx=spdx, + sha=args.sha, + ref=args.ref, + job_correlator=args.job_correlator, + job_id=args.job_id, + ) + + output_path = Path(args.output) + with output_path.open("w") as f: + json.dump(snapshot, f, indent=2) + + total_packages = sum( + len(m["resolved"]) for m in snapshot["manifests"].values() + ) + print( + f"Converted {len(spdx.get('packages', []))} SPDX packages → " + f"{total_packages} Dependency Submission packages" + ) + print(f"Output: {output_path}") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/sbom/tests/BUILD b/sbom/tests/BUILD new file mode 100644 index 0000000..1f90375 --- /dev/null +++ b/sbom/tests/BUILD @@ -0,0 +1,32 @@ +# SBOM Tests Package +# +# This package contains tests for the SBOM generation system. + +load("@rules_python//python:defs.bzl", "py_test") + +package(default_visibility = ["//visibility:private"]) + +py_test( + name = "test_spdx_formatter", + srcs = ["test_spdx_formatter.py"], + deps = ["//sbom/internal/generator:spdx_formatter"], +) + +py_test( + name = "test_cyclonedx_formatter", + srcs = ["test_cyclonedx_formatter.py"], + deps = ["//sbom/internal/generator:cyclonedx_formatter"], +) + +py_test( + name = "test_bcr_known_licenses", + srcs = ["test_bcr_known_licenses.py"], + deps = ["//sbom/internal/generator:sbom_generator_lib"], +) + +py_test( + name = "test_cpp_enrich_checksum", + srcs = ["test_cpp_enrich_checksum.py"], + data = ["//sbom:cpp_metadata.json"], + deps = ["//sbom/internal/generator:sbom_generator_lib"], +) diff --git a/sbom/tests/__init__.py b/sbom/tests/__init__.py new file mode 100644 index 0000000..b82b623 --- /dev/null +++ b/sbom/tests/__init__.py @@ -0,0 +1 @@ +"""SBOM tests package.""" diff --git a/sbom/tests/test_bcr_known_licenses.py b/sbom/tests/test_bcr_known_licenses.py new file mode 100644 index 0000000..16aafee --- /dev/null +++ b/sbom/tests/test_bcr_known_licenses.py @@ -0,0 +1,250 @@ +"""Tests for BCR known-license resolution in sbom_generator. + +These tests verify that C++ modules from the Bazel Central Registry +(e.g. boost.*) receive correct license data even when cdxgen and +lockfile parsing cannot provide it. +""" + +import unittest + +from sbom.internal.generator.sbom_generator import ( + BCR_KNOWN_LICENSES, + apply_known_licenses, + resolve_component, +) + + +class TestBcrKnownLicenses(unittest.TestCase): + """Verify the BCR_KNOWN_LICENSES table contents.""" + + def test_boost_entry_exists(self): + self.assertIn("boost", BCR_KNOWN_LICENSES) + self.assertEqual(BCR_KNOWN_LICENSES["boost"]["license"], "BSL-1.0") + + def test_all_entries_have_license(self): + for name, info in BCR_KNOWN_LICENSES.items(): + self.assertTrue( + info.get("license"), + f"BCR_KNOWN_LICENSES['{name}'] has no license", + ) + + +class TestApplyKnownLicenses(unittest.TestCase): + """Tests for apply_known_licenses().""" + + # -- BCR known-license fallback ------------------------------------------- + + def test_boost_submodule_gets_license(self): + """boost.config should inherit BSL-1.0 from the 'boost' BCR entry.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "Boost.org") + + def test_multiple_boost_submodules(self): + """All boost.* sub-modules should receive BSL-1.0.""" + names = [ + "boost.config", "boost.assert", "boost.mp11", "boost.container", + "boost.interprocess", "boost.core", "boost.predef", + ] + metadata = { + "modules": { + n: {"version": "1.87.0", "purl": f"pkg:bazel/{n}@1.87.0"} + for n in names + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + for n in names: + self.assertEqual( + metadata["modules"][n]["license"], "BSL-1.0", + f"{n} should have BSL-1.0 license", + ) + + def test_exact_bcr_match(self): + """A module matching a BCR key exactly gets the license.""" + metadata = { + "modules": { + "abseil-cpp": {"version": "20230802.0", "purl": "pkg:bazel/abseil-cpp@20230802.0"}, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["abseil-cpp"]["license"], "Apache-2.0") + + def test_unknown_module_unchanged(self): + """Modules not in BCR_KNOWN_LICENSES remain without a license.""" + metadata = { + "modules": { + "some_unknown_lib": {"version": "1.0.0", "purl": "pkg:bazel/some_unknown_lib@1.0.0"}, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["some_unknown_lib"].get("license", ""), "") + + # -- Explicit license overrides (sbom_ext.license) ------------------------ + + def test_explicit_license_override(self): + """User-declared license in metadata['licenses'] takes priority.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": { + "boost.config": {"license": "MIT", "supplier": "Custom"}, + }, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "MIT") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "Custom") + + def test_parent_license_override(self): + """Parent-level license declaration covers all sub-modules.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + "boost.container": {"version": "1.87.0", "purl": "pkg:bazel/boost.container@1.87.0"}, + }, + "licenses": { + "boost": {"license": "BSL-1.0-custom", "supplier": "My Boost Fork"}, + }, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0-custom") + self.assertEqual(metadata["modules"]["boost.container"]["license"], "BSL-1.0-custom") + + def test_explicit_beats_parent(self): + """Exact-name license takes priority over parent-level declaration.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": { + "boost": {"license": "BSL-1.0", "supplier": "Boost.org"}, + "boost.config": {"license": "MIT-override", "supplier": "Override"}, + }, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "MIT-override") + + def test_explicit_beats_bcr_known(self): + """User-declared license overrides the BCR known-license database.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + "licenses": { + "boost": {"license": "Apache-2.0", "supplier": "Custom Boost"}, + }, + } + apply_known_licenses(metadata) + + # User's declaration should win over BCR_KNOWN_LICENSES["boost"] + self.assertEqual(metadata["modules"]["boost.config"]["license"], "Apache-2.0") + + # -- Preserves existing data ---------------------------------------------- + + def test_existing_license_not_overwritten(self): + """Modules that already have a license are not modified.""" + metadata = { + "modules": { + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + "license": "Already-Set", + "supplier": "Original", + }, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "Already-Set") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "Original") + + def test_supplier_not_overwritten_when_present(self): + """Existing supplier is preserved even when license is filled from BCR.""" + metadata = { + "modules": { + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + "supplier": "My Custom Supplier", + }, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0") + self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "My Custom Supplier") + + # -- Edge cases ----------------------------------------------------------- + + def test_empty_metadata(self): + """Empty metadata does not raise.""" + metadata = {} + apply_known_licenses(metadata) # Should not raise + + def test_no_licenses_key(self): + """Missing 'licenses' key does not raise.""" + metadata = { + "modules": { + "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + }, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0") + + def test_module_without_dot_not_treated_as_parent(self): + """A module name without dots only matches exact BCR entries.""" + metadata = { + "modules": { + "zlib": {"version": "1.3.1", "purl": "pkg:bazel/zlib@1.3.1"}, + }, + "licenses": {}, + } + apply_known_licenses(metadata) + + self.assertEqual(metadata["modules"]["zlib"]["license"], "Zlib") + + +class TestResolveComponentWithLicenses(unittest.TestCase): + """Verify that resolve_component returns licenses from metadata modules.""" + + def test_module_with_license_from_apply(self): + """After apply_known_licenses, resolve_component picks up the license.""" + metadata = { + "modules": { + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + "license": "BSL-1.0", + "supplier": "Boost.org", + }, + }, + "licenses": {}, + } + comp = resolve_component("boost.config+", metadata) + + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "boost.config") + self.assertEqual(comp["license"], "BSL-1.0") + + +if __name__ == "__main__": + unittest.main() diff --git a/sbom/tests/test_cpp_enrich_checksum.py b/sbom/tests/test_cpp_enrich_checksum.py new file mode 100644 index 0000000..258eba3 --- /dev/null +++ b/sbom/tests/test_cpp_enrich_checksum.py @@ -0,0 +1,156 @@ +"""Tests for enrich_components_from_cpp_cache and the no-manual-fallback rule. + +Requirement: All SBOM fields must originate from automated sources. +No manually-curated fallback values are permitted for any field — +not checksum, not license, not supplier, not version, not PURL. +""" + +import json +import pathlib +import unittest + +from sbom.internal.generator.sbom_generator import enrich_components_from_cpp_cache + +# SBOM fields that must never appear as manually-curated static values. +# If any of these appear in cpp_metadata.json they were hand-written and must +# be removed. The only valid sources are automated tooling (cdxgen, lockfiles). +_SBOM_FIELDS = {"checksum", "license", "supplier", "version", "purl", "description"} + + +class TestCppEnrichChecksumPropagation(unittest.TestCase): + """enrich_components_from_cpp_cache field propagation mechanics. + + These tests exercise the code path using synthetic cache data generated + by cdxgen (not manually written). The logic itself is valid — the + restriction is on what may appear in the on-disk cpp_metadata.json. + """ + + def _run(self, components, cpp_components): + return enrich_components_from_cpp_cache(components, cpp_components, {}) + + def test_checksum_propagated_when_component_has_none(self): + """SHA-256 from the cdxgen-generated cache is copied to a component with no checksum.""" + sha = "a22461d13119ac5c78f205d3df1db13403e58ce1bb1794edc9313677313f4a9d" + components = [{"name": "nlohmann-json", "version": "3.11.3", "checksum": ""}] + cpp_cache = [{"name": "nlohmann-json", "version": "3.11.3", "checksum": sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], sha) + + def test_checksum_not_overwritten_when_already_present(self): + """An existing checksum on a component is preserved — cache is skipped.""" + existing = "aaaa" * 16 + cache_sha = "bbbb" * 16 + components = [{"name": "flatbuffers", "version": "25.2.10", "checksum": existing}] + cpp_cache = [{"name": "flatbuffers", "checksum": cache_sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], existing) + + def test_no_checksum_in_cache_leaves_component_without_checksum(self): + """When the cache entry has no checksum the component remains without one.""" + components = [{"name": "boost", "version": "1.87.0", "checksum": ""}] + cpp_cache = [{"name": "boost", "license": "BSL-1.0"}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], "") + + def test_component_without_matching_cache_entry_unchanged(self): + """A component with no matching cache entry is not modified.""" + components = [{"name": "some-unknown-lib", "checksum": ""}] + cpp_cache = [{"name": "nlohmann-json", "checksum": "aaaa"}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], "") + + def test_checksum_propagated_via_normalised_name(self): + """nlohmann_json (underscore) component matches nlohmann-json cache entry.""" + sha = "a22461d13119ac5c78f205d3df1db13403e58ce1bb1794edc9313677313f4a9d" + components = [{"name": "nlohmann_json", "checksum": ""}] + cpp_cache = [{"name": "nlohmann-json", "checksum": sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], sha) + + def test_checksum_propagated_via_parent_match(self): + """boost.config component matches the 'boost' cache entry.""" + sha = "deadbeef" * 8 + components = [{"name": "boost.config", "checksum": ""}] + cpp_cache = [{"name": "boost", "checksum": sha}] + + result = self._run(components, cpp_cache) + + self.assertEqual(result[0]["checksum"], sha) + + +class TestNoManualFallbackInCppMetadata(unittest.TestCase): + """Enforce the no-manual-fallback requirement on the on-disk cache. + + MUST REQUIREMENT: cpp_metadata.json must never contain manually-curated + SBOM field values. The file must either be empty ({}) or contain only + entries generated automatically by generate_cpp_metadata_cache.py from + cdxgen output. + + Rationale: A manually-written value is tied to a specific version string + in the file. If the workspace resolves a different version of that library, + the value silently describes the wrong artifact — an incorrect SBOM entry + is worse than an absent one. All SBOM fields must trace back to an + automated source (cdxgen scan, MODULE.bazel.lock, http_archive sha256). + + Known violations still to be resolved: + - BCR_KNOWN_LICENSES dict in sbom_generator.py (manual license/supplier + lookup for BCR C++ modules — must be replaced by automated BCR metadata + fetching or removed). + """ + + _CACHE_PATH = pathlib.Path(__file__).parent.parent / "cpp_metadata.json" + + def setUp(self): + self._data = json.loads(self._CACHE_PATH.read_text(encoding="utf-8")) + + def test_cpp_metadata_json_is_empty(self): + """cpp_metadata.json must be empty. + + Any entry in this file was written by hand. All C++ metadata must be + produced by automated tooling at build time (cdxgen via auto_cdxgen, + or lockfile parsing). If you need to populate this file, run: + + npx @cyclonedx/cdxgen -t cpp --deep -r -o cdxgen_output.cdx.json + python3 tooling/sbom/scripts/generate_cpp_metadata_cache.py \\ + cdxgen_output.cdx.json tooling/sbom/cpp_metadata.json + """ + self.assertEqual( + self._data, + {}, + "cpp_metadata.json must be empty. Found manually-curated entries: " + + ", ".join(self._data.keys()) + + ". Remove them — use generate_cpp_metadata_cache.py to populate " + "this file from cdxgen output instead.", + ) + + def test_no_sbom_fields_in_any_entry(self): + """No entry in cpp_metadata.json may contain any SBOM metadata field. + + This is a belt-and-suspenders check: even if the file is non-empty + (which the previous test already flags), no SBOM field value may be + manually written. Automated generation via generate_cpp_metadata_cache.py + is the only permitted source. + """ + for lib, entry in self._data.items(): + manually_present = _SBOM_FIELDS & set(entry.keys()) + with self.subTest(lib=lib): + self.assertFalse( + manually_present, + f"cpp_metadata.json['{lib}'] contains manually-curated SBOM " + f"fields: {manually_present}. All SBOM fields must come from " + f"automated sources only.", + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/sbom/tests/test_cyclonedx_formatter.py b/sbom/tests/test_cyclonedx_formatter.py new file mode 100644 index 0000000..46de425 --- /dev/null +++ b/sbom/tests/test_cyclonedx_formatter.py @@ -0,0 +1,199 @@ +"""Tests for CycloneDX 1.6 formatter.""" + +import unittest +from datetime import datetime, timezone + +from sbom.internal.generator.cyclonedx_formatter import generate_cyclonedx, _normalize_spdx_license + + +class TestCycloneDXFormatter(unittest.TestCase): + """Tests for CycloneDX 1.6 generation.""" + + def setUp(self): + """Set up test fixtures.""" + self.timestamp = datetime( + 2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc + ).isoformat() + self.config = { + "component_name": "test-component", + "component_version": "1.0.0", + "producer_name": "Eclipse Foundation", + "producer_url": "https://eclipse.dev/score", + "namespace": "https://eclipse.dev/score", + } + self.components = [ + { + "name": "tokio", + "version": "1.10.0", + "purl": "pkg:cargo/tokio@1.10.0", + "type": "library", + "license": "MIT", + "source": "crates.io", + }, + { + "name": "serde", + "version": "1.0.0", + "purl": "pkg:cargo/serde@1.0.0", + "type": "library", + "license": "MIT OR Apache-2.0", + "source": "crates.io", + }, + ] + + def test_generate_cyclonedx_structure(self): + """Test that generated CycloneDX has correct structure.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + self.assertEqual(cdx["bomFormat"], "CycloneDX") + self.assertEqual(cdx["specVersion"], "1.6") + self.assertIn("serialNumber", cdx) + self.assertTrue(cdx["serialNumber"].startswith("urn:uuid:")) + self.assertEqual(cdx["version"], 1) + + def test_generate_cyclonedx_metadata(self): + """Test that CycloneDX metadata is correct.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + metadata = cdx["metadata"] + self.assertEqual(metadata["timestamp"], self.timestamp) + self.assertIn("tools", metadata) + self.assertIn("component", metadata) + + root_component = metadata["component"] + self.assertEqual(root_component["name"], "test-component") + self.assertEqual(root_component["version"], "1.0.0") + self.assertEqual(root_component["type"], "application") + + def test_generate_cyclonedx_components(self): + """Test that components are properly added.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + components = cdx["components"] + self.assertEqual(len(components), 2) + + component_names = {c["name"] for c in components} + self.assertEqual(component_names, {"tokio", "serde"}) + + def test_generate_cyclonedx_component_details(self): + """Test that component details are correct.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + tokio = next(c for c in cdx["components"] if c["name"] == "tokio") + + self.assertEqual(tokio["version"], "1.10.0") + self.assertEqual(tokio["type"], "library") + self.assertEqual(tokio["purl"], "pkg:cargo/tokio@1.10.0") + self.assertIn("bom-ref", tokio) + + def test_generate_cyclonedx_licenses(self): + """Test that licenses are properly set.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + tokio = next(c for c in cdx["components"] if c["name"] == "tokio") + + self.assertIn("licenses", tokio) + self.assertEqual(len(tokio["licenses"]), 1) + self.assertEqual(tokio["licenses"][0]["license"]["id"], "MIT") + + def test_generate_cyclonedx_dependencies(self): + """Test that dependencies are created.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + dependencies = cdx["dependencies"] + + # Should have root + 2 component dependency entries + self.assertEqual(len(dependencies), 3) + + # Find root dependency + root_dep = next(d for d in dependencies if d["ref"] == "test-component@1.0.0") + self.assertEqual(len(root_dep["dependsOn"]), 2) + + def test_generate_cyclonedx_external_references(self): + """Test that external references are added for crates.io sources.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + tokio = next(c for c in cdx["components"] if c["name"] == "tokio") + + self.assertIn("externalReferences", tokio) + ext_refs = tokio["externalReferences"] + + distribution_ref = next( + (r for r in ext_refs if r["type"] == "distribution"), None + ) + self.assertIsNotNone(distribution_ref) + self.assertIn("crates.io", distribution_ref["url"]) + + def test_generate_cyclonedx_cratesio_external_ref_from_source_field(self): + """Crates with source=crates.io get a distribution externalReference URL.""" + components = [ + { + "name": "serde", + "version": "1.0.228", + "purl": "pkg:cargo/serde@1.0.228", + "type": "library", + "license": "MIT OR Apache-2.0", + "source": "crates.io", + } + ] + cdx = generate_cyclonedx(components, self.config, self.timestamp) + serde = next(c for c in cdx["components"] if c["name"] == "serde") + ext_refs = serde.get("externalReferences", []) + dist_ref = next((r for r in ext_refs if r["type"] == "distribution"), None) + self.assertIsNotNone(dist_ref, "Expected distribution externalReference for crates.io crate") + self.assertIn("crates.io/crates/serde/1.0.228", dist_ref["url"]) + + def test_generate_cyclonedx_schema_url_uses_https(self): + """Test that $schema URL uses https:// not http://.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + self.assertTrue( + cdx["$schema"].startswith("https://"), + f"$schema should use https://, got: {cdx['$schema']}", + ) + + def test_generate_cyclonedx_with_empty_components(self): + """Test generating CycloneDX with no components.""" + cdx = generate_cyclonedx([], self.config, self.timestamp) + + self.assertEqual(len(cdx["components"]), 0) + self.assertEqual(len(cdx["dependencies"]), 1) # Just root + + def test_generate_cyclonedx_bom_refs_unique(self): + """Test that bom-refs are unique across components.""" + cdx = generate_cyclonedx(self.components, self.config, self.timestamp) + + bom_refs = [c["bom-ref"] for c in cdx["components"]] + self.assertEqual(len(bom_refs), len(set(bom_refs))) + + +class TestNormalizeSpdxLicenseCdx(unittest.TestCase): + """Verify lowercase operator normalization for CycloneDX formatter.""" + + def test_lowercase_or_normalized(self): + self.assertEqual(_normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT") + + def test_gpl_or_later_not_mangled(self): + self.assertEqual(_normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later") + + def test_lowercase_or_routes_to_expression_field(self): + """'Apache-2.0 or MIT' from dash-license-scan must use expression field, not license.id.""" + config = { + "component_name": "test", + "component_version": "1.0", + "producer_name": "Test", + "namespace": "https://example.com", + } + timestamp = "2024-01-01T00:00:00+00:00" + components = [{"name": "serde", "version": "1.0.228", "purl": "pkg:cargo/serde@1.0.228", + "type": "library", "license": "Apache-2.0 or MIT"}] + cdx = generate_cyclonedx(components, config, timestamp) + serde = next(c for c in cdx["components"] if c["name"] == "serde") + licenses = serde.get("licenses", []) + self.assertEqual(len(licenses), 1) + # Must use 'expression' field with uppercase OR, not 'license.id' + self.assertIn("expression", licenses[0], "compound license must use 'expression' field") + self.assertEqual(licenses[0]["expression"], "Apache-2.0 OR MIT") + self.assertNotIn("license", licenses[0]) + + +if __name__ == "__main__": + unittest.main() diff --git a/sbom/tests/test_generate_crates_metadata_cache.py b/sbom/tests/test_generate_crates_metadata_cache.py new file mode 100644 index 0000000..30bdc3e --- /dev/null +++ b/sbom/tests/test_generate_crates_metadata_cache.py @@ -0,0 +1,398 @@ +"""Tests for generate_crates_metadata_cache.py. + +These tests verify the core parsing and data transformation functions +used to extract Rust crate license metadata via dash-license-scan. +""" + +import json +import os +import tempfile +import unittest + +# The script lives under sbom/scripts/ and is not a regular Python package. +# Import functions by adding the scripts directory to sys.path. +import sys + +sys.path.insert( + 0, + os.path.join(os.path.dirname(__file__), "..", "scripts"), +) + +from generate_crates_metadata_cache import ( + build_dash_coordinates, + generate_synthetic_cargo_lock, + parse_dash_summary, + parse_module_bazel_lock, +) + + +class TestParseDashSummary(unittest.TestCase): + """Tests for parse_dash_summary — the JAR summary CSV parser.""" + + def _write_summary(self, content: str) -> str: + """Helper: write content to a temp file and return its path.""" + fd, path = tempfile.mkstemp(suffix=".txt") + with os.fdopen(fd, "w") as f: + f.write(content) + self.addCleanup(os.unlink, path) + return path + + def test_basic_parsing(self): + """Standard summary lines produce correct crate→license mapping.""" + summary = ( + "crate/cratesio/-/serde/1.0.228, Apache-2.0 OR MIT, approved, clearlydefined\n" + "crate/cratesio/-/tokio/1.10.0, MIT, approved, clearlydefined\n" + ) + path = self._write_summary(summary) + result = parse_dash_summary(path) + + self.assertEqual(result["serde"], "Apache-2.0 OR MIT") + self.assertEqual(result["tokio"], "MIT") + + def test_empty_license_skipped(self): + """Entries with empty license expressions are not included.""" + summary = ( + "crate/cratesio/-/serde/1.0.228, Apache-2.0 OR MIT, approved, clearlydefined\n" + "crate/cratesio/-/unknown-crate/0.1.0, , restricted, clearlydefined\n" + ) + path = self._write_summary(summary) + result = parse_dash_summary(path) + + self.assertIn("serde", result) + self.assertNotIn("unknown-crate", result) + + def test_compound_spdx_expression(self): + """Compound SPDX expressions (AND/OR) are preserved.""" + summary = ( + "crate/cratesio/-/ring/0.17.14, " + "Apache-2.0 AND LicenseRef-scancode-iso-8879 AND (GPL-2.0-only AND MIT), " + "restricted, #25641\n" + ) + path = self._write_summary(summary) + result = parse_dash_summary(path) + + self.assertIn("ring", result) + self.assertIn("Apache-2.0", result["ring"]) + + def test_malformed_lines_skipped(self): + """Lines with fewer than 4 comma-separated fields are ignored.""" + summary = ( + "crate/cratesio/-/serde/1.0.228, MIT, approved, clearlydefined\n" + "this is not a valid line\n" + "only, two, parts\n" + "\n" + ) + path = self._write_summary(summary) + result = parse_dash_summary(path) + + self.assertEqual(len(result), 1) + self.assertEqual(result["serde"], "MIT") + + def test_non_crate_entries_skipped(self): + """Non-crate entries (pypi, npm, etc.) are ignored.""" + summary = ( + "crate/cratesio/-/serde/1.0.228, MIT, approved, clearlydefined\n" + "pypi/pypi/-/requests/2.31.0, Apache-2.0, approved, clearlydefined\n" + "npm/npmjs/-/express/4.18.2, MIT, approved, clearlydefined\n" + ) + path = self._write_summary(summary) + result = parse_dash_summary(path) + + self.assertEqual(len(result), 1) + self.assertIn("serde", result) + + def test_empty_file(self): + """An empty summary file produces an empty dict.""" + path = self._write_summary("") + result = parse_dash_summary(path) + self.assertEqual(result, {}) + + def test_restricted_crate_still_gets_license(self): + """Restricted crates still have their license extracted.""" + summary = ( + "crate/cratesio/-/openssl-sys/0.9.104, OpenSSL, restricted, clearlydefined\n" + ) + path = self._write_summary(summary) + result = parse_dash_summary(path) + + self.assertEqual(result["openssl-sys"], "OpenSSL") + + def test_licenseref_expression(self): + """LicenseRef-* expressions are preserved.""" + summary = ( + "crate/cratesio/-/ring/0.17.14, LicenseRef-ring, restricted, clearlydefined\n" + ) + path = self._write_summary(summary) + result = parse_dash_summary(path) + + self.assertEqual(result["ring"], "LicenseRef-ring") + + +class TestBuildDashCoordinates(unittest.TestCase): + """Tests for build_dash_coordinates — coordinate string construction.""" + + def test_basic_coordinate_building(self): + """Crate data produces correct coordinate strings.""" + crates = { + "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc123"}, + "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def456"}, + } + coords = build_dash_coordinates(crates) + + self.assertEqual(len(coords), 2) + self.assertIn("crate/cratesio/-/serde/1.0.228", coords) + self.assertIn("crate/cratesio/-/tokio/1.10.0", coords) + + def test_empty_crates(self): + """Empty crates dict produces empty coordinates list.""" + coords = build_dash_coordinates({}) + self.assertEqual(coords, []) + + def test_coordinates_are_sorted(self): + """Coordinates are sorted by crate name.""" + crates = { + "z-crate": {"name": "z-crate", "version": "1.0.0", "checksum": ""}, + "a-crate": {"name": "a-crate", "version": "2.0.0", "checksum": ""}, + } + coords = build_dash_coordinates(crates) + + self.assertEqual(coords[0], "crate/cratesio/-/a-crate/2.0.0") + self.assertEqual(coords[1], "crate/cratesio/-/z-crate/1.0.0") + + def test_hyphenated_crate_name(self): + """Crate names with hyphens are preserved in coordinates.""" + crates = { + "iceoryx2-bb-lock-free": { + "name": "iceoryx2-bb-lock-free", + "version": "0.7.0", + "checksum": "", + }, + } + coords = build_dash_coordinates(crates) + + self.assertEqual( + coords[0], "crate/cratesio/-/iceoryx2-bb-lock-free/0.7.0" + ) + + +class TestParseModuleBazelLock(unittest.TestCase): + """Tests for parse_module_bazel_lock — MODULE.bazel.lock crate extraction.""" + + def _write_lockfile(self, data: dict) -> str: + """Helper: write JSON data to a temp file and return its path.""" + fd, path = tempfile.mkstemp(suffix=".json") + with os.fdopen(fd, "w") as f: + json.dump(data, f) + self.addCleanup(os.unlink, path) + return path + + def test_basic_crate_extraction(self): + """Crates are correctly extracted from generatedRepoSpecs.""" + lockfile = { + "moduleExtensions": { + "@@rules_rust+//crate_universe:extensions.bzl%crate": { + "general": { + "generatedRepoSpecs": { + "crate_index__serde-1.0.228": { + "attributes": {"sha256": "abc123def456"} + }, + "crate_index__tokio-1.10.0": { + "attributes": {"sha256": "789xyz"} + }, + } + } + } + } + } + path = self._write_lockfile(lockfile) + result = parse_module_bazel_lock(path) + + self.assertEqual(len(result), 2) + self.assertEqual(result["serde"]["version"], "1.0.228") + self.assertEqual(result["serde"]["checksum"], "abc123def456") + self.assertEqual(result["tokio"]["version"], "1.10.0") + + def test_crate_index_meta_repo_skipped(self): + """The crate_index meta-repo entry is not treated as a crate.""" + lockfile = { + "moduleExtensions": { + "crate_universe": { + "general": { + "generatedRepoSpecs": { + "crate_index": {"attributes": {}}, + "crate_index__serde-1.0.228": { + "attributes": {"sha256": "abc"} + }, + } + } + } + } + } + path = self._write_lockfile(lockfile) + result = parse_module_bazel_lock(path) + + self.assertEqual(len(result), 1) + self.assertIn("serde", result) + + def test_complex_crate_name(self): + """Crate names with multiple hyphens (e.g. iceoryx2-qnx8) are parsed correctly.""" + lockfile = { + "moduleExtensions": { + "crate": { + "general": { + "generatedRepoSpecs": { + "crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0": { + "attributes": {"sha256": "xyz"} + }, + } + } + } + } + } + path = self._write_lockfile(lockfile) + result = parse_module_bazel_lock(path) + + self.assertEqual(len(result), 1) + self.assertIn("iceoryx2-bb-lock-free-qnx8", result) + self.assertEqual(result["iceoryx2-bb-lock-free-qnx8"]["version"], "0.7.0") + + def test_no_crate_extension(self): + """Lockfile without crate extension returns empty dict.""" + lockfile = { + "moduleExtensions": { + "some_other_extension": {"general": {}} + } + } + path = self._write_lockfile(lockfile) + result = parse_module_bazel_lock(path) + + self.assertEqual(result, {}) + + def test_empty_lockfile(self): + """Lockfile with no moduleExtensions returns empty dict.""" + path = self._write_lockfile({}) + result = parse_module_bazel_lock(path) + self.assertEqual(result, {}) + + +class TestGenerateSyntheticCargoLock(unittest.TestCase): + """Tests for generate_synthetic_cargo_lock.""" + + def test_generates_valid_toml(self): + """Generated Cargo.lock has correct TOML structure.""" + crates = { + "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc"}, + "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def"}, + } + fd, path = tempfile.mkstemp(suffix=".lock") + os.close(fd) + self.addCleanup(os.unlink, path) + + generate_synthetic_cargo_lock(crates, path) + + with open(path) as f: + content = f.read() + + self.assertIn("version = 4", content) + self.assertIn('name = "serde"', content) + self.assertIn('version = "1.0.228"', content) + self.assertIn('name = "tokio"', content) + self.assertIn("[[package]]", content) + self.assertIn("crates.io-index", content) + + def test_entries_are_sorted(self): + """Cargo.lock entries are sorted by crate name.""" + crates = { + "z-crate": {"name": "z-crate", "version": "1.0.0", "checksum": ""}, + "a-crate": {"name": "a-crate", "version": "2.0.0", "checksum": ""}, + } + fd, path = tempfile.mkstemp(suffix=".lock") + os.close(fd) + self.addCleanup(os.unlink, path) + + generate_synthetic_cargo_lock(crates, path) + + with open(path) as f: + content = f.read() + + a_pos = content.index('name = "a-crate"') + z_pos = content.index('name = "z-crate"') + self.assertLess(a_pos, z_pos) + + +class TestEndToEndLicenseExtraction(unittest.TestCase): + """Integration tests verifying the full license extraction pipeline. + + These tests verify that the parse_dash_summary function correctly + handles the output format of the Eclipse dash-licenses JAR, which + is the format that build_dash_coordinates + JAR invocation produces. + """ + + def _write_summary(self, content: str) -> str: + fd, path = tempfile.mkstemp(suffix=".txt") + with os.fdopen(fd, "w") as f: + f.write(content) + self.addCleanup(os.unlink, path) + return path + + def test_coordinates_match_summary_format(self): + """Coordinates built by build_dash_coordinates match the format + that parse_dash_summary expects in the JAR output.""" + crates = { + "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc"}, + "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def"}, + } + + # Build coordinates (what we send to the JAR) + coords = build_dash_coordinates(crates) + self.assertEqual(coords[0], "crate/cratesio/-/serde/1.0.228") + self.assertEqual(coords[1], "crate/cratesio/-/tokio/1.10.0") + + # Simulate JAR summary output (what the JAR would produce) + summary = ( + "crate/cratesio/-/serde/1.0.228, Apache-2.0 OR MIT, approved, clearlydefined\n" + "crate/cratesio/-/tokio/1.10.0, MIT, approved, clearlydefined\n" + ) + path = self._write_summary(summary) + license_map = parse_dash_summary(path) + + # Verify licenses are correctly mapped back to crate names + self.assertEqual(license_map["serde"], "Apache-2.0 OR MIT") + self.assertEqual(license_map["tokio"], "MIT") + + # Verify all crates got licenses + for name in crates: + self.assertIn(name, license_map, f"Missing license for crate: {name}") + + def test_kyron_style_crates(self): + """Verify license extraction works for crates typical in the score_kyron module.""" + crates = { + "proc-macro2": {"name": "proc-macro2", "version": "1.0.92", "checksum": ""}, + "quote": {"name": "quote", "version": "1.0.37", "checksum": ""}, + "syn": {"name": "syn", "version": "2.0.96", "checksum": ""}, + "iceoryx2": {"name": "iceoryx2", "version": "0.7.0", "checksum": ""}, + } + + coords = build_dash_coordinates(crates) + self.assertEqual(len(coords), 4) + + # Simulate JAR output + summary = ( + "crate/cratesio/-/proc-macro2/1.0.92, Apache-2.0 OR MIT, approved, clearlydefined\n" + "crate/cratesio/-/quote/1.0.37, Apache-2.0 OR MIT, approved, clearlydefined\n" + "crate/cratesio/-/syn/2.0.96, Apache-2.0 OR MIT, approved, clearlydefined\n" + "crate/cratesio/-/iceoryx2/0.7.0, Apache-2.0 OR MIT, approved, clearlydefined\n" + ) + path = self._write_summary(summary) + license_map = parse_dash_summary(path) + + # All crates should have licenses + for name in crates: + self.assertIn(name, license_map, f"Missing license for {name}") + self.assertTrue( + license_map[name], f"Empty license for {name}" + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/sbom/tests/test_spdx_formatter.py b/sbom/tests/test_spdx_formatter.py new file mode 100644 index 0000000..7074e97 --- /dev/null +++ b/sbom/tests/test_spdx_formatter.py @@ -0,0 +1,191 @@ +"""Tests for SPDX 2.3 formatter.""" + +import unittest +from datetime import datetime, timezone + +from sbom.internal.generator.spdx_formatter import generate_spdx, _normalize_spdx_license + + +class TestSpdxFormatter(unittest.TestCase): + """Tests for SPDX 2.3 generation.""" + + def setUp(self): + """Set up test fixtures.""" + self.timestamp = datetime( + 2024, 1, 15, 12, 0, 0, tzinfo=timezone.utc + ).isoformat() + self.config = { + "component_name": "test-component", + "component_version": "1.0.0", + "producer_name": "Eclipse Foundation", + "producer_url": "https://eclipse.dev/score", + "namespace": "https://eclipse.dev/score", + } + self.components = [ + { + "name": "tokio", + "version": "1.10.0", + "purl": "pkg:cargo/tokio@1.10.0", + "type": "library", + "license": "MIT", + }, + { + "name": "serde", + "version": "1.0.0", + "purl": "pkg:cargo/serde@1.0.0", + "type": "library", + "license": "MIT OR Apache-2.0", + }, + ] + + def test_generate_spdx_structure(self): + """Test that generated SPDX has correct structure.""" + spdx = generate_spdx(self.components, self.config, self.timestamp) + + self.assertEqual(spdx["spdxVersion"], "SPDX-2.3") + self.assertEqual(spdx["dataLicense"], "CC0-1.0") + self.assertEqual(spdx["SPDXID"], "SPDXRef-DOCUMENT") + self.assertIn("documentNamespace", spdx) + self.assertIn("packages", spdx) + self.assertIn("relationships", spdx) + + def test_generate_spdx_document_info(self): + """Test that SPDX document has correct metadata.""" + spdx = generate_spdx(self.components, self.config, self.timestamp) + + self.assertEqual(spdx["name"], "SBOM for test-component") + creation_info = spdx["creationInfo"] + self.assertEqual(creation_info["created"], self.timestamp) + creators = creation_info["creators"] + self.assertIn("Organization: Eclipse Foundation", creators) + self.assertIn("Tool: score-sbom-generator", creators) + + def test_generate_spdx_components(self): + """Test that components are properly added to SPDX.""" + spdx = generate_spdx(self.components, self.config, self.timestamp) + + packages = spdx["packages"] + # root package + 2 components + self.assertEqual(len(packages), 3) + + def test_generate_spdx_relationships(self): + """Test that dependency relationships are created.""" + spdx = generate_spdx(self.components, self.config, self.timestamp) + + relationships = spdx["relationships"] + # DESCRIBES + 2 DEPENDS_ON + describes = [r for r in relationships if r["relationshipType"] == "DESCRIBES"] + depends_on = [r for r in relationships if r["relationshipType"] == "DEPENDS_ON"] + + self.assertEqual(len(describes), 1) + self.assertEqual(len(depends_on), 2) + + def test_generate_spdx_with_empty_components(self): + """Test generating SPDX with no components.""" + spdx = generate_spdx([], self.config, self.timestamp) + + packages = spdx["packages"] + # Only root package + self.assertEqual(len(packages), 1) + + def test_generate_spdx_component_purl(self): + """Test that component PURLs are properly set.""" + spdx = generate_spdx(self.components, self.config, self.timestamp) + + packages = spdx["packages"] + tokio_pkg = next((p for p in packages if p["name"] == "tokio"), None) + + self.assertIsNotNone(tokio_pkg) + ext_refs = tokio_pkg.get("externalRefs", []) + purl_ref = next( + (r for r in ext_refs if r.get("referenceType") == "purl"), + None, + ) + self.assertIsNotNone(purl_ref) + self.assertEqual(purl_ref["referenceLocator"], "pkg:cargo/tokio@1.10.0") + + + def test_generate_spdx_component_checksum(self): + """Test that SHA-256 checksums are emitted when available.""" + components_with_hash = [ + { + "name": "serde", + "version": "1.0.0", + "purl": "pkg:cargo/serde@1.0.0", + "type": "library", + "license": "MIT OR Apache-2.0", + "checksum": "abc123def456abc123def456abc123def456abc123def456abc123def456abcd", + } + ] + spdx = generate_spdx(components_with_hash, self.config, self.timestamp) + + packages = spdx["packages"] + serde_pkg = next((p for p in packages if p["name"] == "serde"), None) + self.assertIsNotNone(serde_pkg) + self.assertIn("checksums", serde_pkg) + self.assertEqual(len(serde_pkg["checksums"]), 1) + self.assertEqual(serde_pkg["checksums"][0]["algorithm"], "SHA256") + self.assertEqual( + serde_pkg["checksums"][0]["checksumValue"], + "abc123def456abc123def456abc123def456abc123def456abc123def456abcd", + ) + + def test_generate_spdx_no_checksum_when_absent(self): + """Test that checksums field is absent when no checksum available.""" + spdx = generate_spdx(self.components, self.config, self.timestamp) + + packages = spdx["packages"] + tokio_pkg = next((p for p in packages if p["name"] == "tokio"), None) + self.assertIsNotNone(tokio_pkg) + self.assertNotIn("checksums", tokio_pkg) + + +class TestNormalizeSpdxLicense(unittest.TestCase): + """Tests for SPDX boolean operator normalization.""" + + def test_lowercase_or_uppercased(self): + self.assertEqual(_normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT") + + def test_lowercase_and_uppercased(self): + self.assertEqual(_normalize_spdx_license("MIT and Apache-2.0"), "MIT AND Apache-2.0") + + def test_lowercase_with_uppercased(self): + self.assertEqual(_normalize_spdx_license("GPL-2.0 with Classpath-exception-2.0"), "GPL-2.0 WITH Classpath-exception-2.0") + + def test_already_uppercase_unchanged(self): + self.assertEqual(_normalize_spdx_license("Apache-2.0 OR MIT"), "Apache-2.0 OR MIT") + + def test_gpl_or_later_identifier_not_mangled(self): + """GPL-2.0-or-later has '-or-' (hyphen-delimited) — must not be uppercased.""" + self.assertEqual(_normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later") + + def test_mixed_compound_expression(self): + self.assertEqual( + _normalize_spdx_license("(Apache-2.0 or MIT) and Unicode-DFS-2016"), + "(Apache-2.0 OR MIT) AND Unicode-DFS-2016", + ) + + def test_empty_string(self): + self.assertEqual(_normalize_spdx_license(""), "") + + def test_single_license_unchanged(self): + self.assertEqual(_normalize_spdx_license("MIT"), "MIT") + + def test_lowercase_operator_in_spdx_output_end_to_end(self): + """Verify that lowercase 'or' from dash-license-scan is normalized in SPDX output.""" + config = { + "component_name": "test", + "component_version": "1.0", + "producer_name": "Test", + "namespace": "https://example.com", + } + timestamp = "2024-01-01T00:00:00+00:00" + components = [{"name": "serde", "version": "1.0.228", "license": "Apache-2.0 or MIT"}] + spdx = generate_spdx(components, config, timestamp) + serde_pkg = next(p for p in spdx["packages"] if p["name"] == "serde") + self.assertEqual(serde_pkg["licenseConcluded"], "Apache-2.0 OR MIT") + self.assertEqual(serde_pkg["licenseDeclared"], "Apache-2.0 OR MIT") + + +if __name__ == "__main__": + unittest.main() diff --git a/sbom/tests/test_spdx_to_github_snapshot.py b/sbom/tests/test_spdx_to_github_snapshot.py new file mode 100644 index 0000000..de93f6a --- /dev/null +++ b/sbom/tests/test_spdx_to_github_snapshot.py @@ -0,0 +1,189 @@ +"""Tests for SPDX 2.3 → GitHub Dependency Submission snapshot conversion.""" + +import unittest + +from sbom.scripts.spdx_to_github_snapshot import convert_spdx_to_snapshot + + +def _make_spdx( + packages: list[dict], + relationships: list[dict] | None = None, + doc_name: str = "test-sbom", +) -> dict: + return { + "spdxVersion": "SPDX-2.3", + "name": doc_name, + "SPDXID": "SPDXRef-DOCUMENT", + "packages": packages, + "relationships": relationships or [], + } + + +def _cargo_pkg( + spdx_id: str, name: str, version: str, purl: str | None = None +) -> dict: + pkg: dict = { + "SPDXID": spdx_id, + "name": name, + "versionInfo": version, + "downloadLocation": "https://crates.io", + } + if purl: + pkg["externalRefs"] = [ + {"referenceCategory": "PACKAGE-MANAGER", "referenceType": "purl", "referenceLocator": purl} + ] + return pkg + + +class TestConvertSpdxToSnapshot(unittest.TestCase): + + def _base_snapshot(self, spdx: dict, **kwargs) -> dict: + return convert_spdx_to_snapshot( + spdx=spdx, + sha="abc123" * 6 + "ab", # 38 chars, close enough for test + ref="refs/heads/main", + job_correlator="test-workflow_sbom", + job_id="42", + **kwargs, + ) + + def test_snapshot_top_level_fields(self): + spdx = _make_spdx(packages=[]) + snapshot = self._base_snapshot(spdx) + self.assertEqual(snapshot["version"], 0) + self.assertIn("sha", snapshot) + self.assertIn("ref", snapshot) + self.assertIn("job", snapshot) + self.assertIn("detector", snapshot) + self.assertIn("scanned", snapshot) + self.assertIn("manifests", snapshot) + + def test_detector_fields(self): + spdx = _make_spdx(packages=[]) + snapshot = self._base_snapshot(spdx) + detector = snapshot["detector"] + self.assertEqual(detector["name"], "score-sbom-generator") + self.assertIn("version", detector) + self.assertIn("url", detector) + + def test_job_correlator(self): + spdx = _make_spdx(packages=[]) + snapshot = self._base_snapshot(spdx) + self.assertEqual(snapshot["job"]["correlator"], "test-workflow_sbom") + self.assertEqual(snapshot["job"]["id"], "42") + + def test_packages_without_purl_are_excluded(self): + root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") + no_purl_pkg = _cargo_pkg("SPDXRef-nopurl", "internal-tool", "0.1.0") + spdx = _make_spdx( + packages=[root_pkg, no_purl_pkg], + relationships=[ + {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, + ], + ) + snapshot = self._base_snapshot(spdx) + manifest = next(iter(snapshot["manifests"].values())) + resolved = manifest["resolved"] + # no_purl_pkg has no PURL → excluded + self.assertFalse(any("internal-tool" in k for k in resolved)) + + def test_root_package_excluded_from_resolved(self): + root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") + dep_pkg = _cargo_pkg("SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228") + spdx = _make_spdx( + packages=[root_pkg, dep_pkg], + relationships=[ + {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, + {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-serde"}, + ], + ) + snapshot = self._base_snapshot(spdx) + manifest = next(iter(snapshot["manifests"].values())) + resolved = manifest["resolved"] + # Root package (myapp) should not appear in resolved + self.assertFalse(any("myapp" in k for k in resolved)) + # Dep package should appear + self.assertTrue(any("serde" in k for k in resolved)) + + def test_direct_vs_indirect_relationship(self): + root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") + direct_pkg = _cargo_pkg("SPDXRef-tokio", "tokio", "1.0.0", purl="pkg:cargo/tokio@1.0.0") + indirect_pkg = _cargo_pkg("SPDXRef-mio", "mio", "0.8.0", purl="pkg:cargo/mio@0.8.0") + spdx = _make_spdx( + packages=[root_pkg, direct_pkg, indirect_pkg], + relationships=[ + {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, + {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-tokio"}, + {"spdxElementId": "SPDXRef-tokio", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-mio"}, + ], + ) + snapshot = self._base_snapshot(spdx) + manifest = next(iter(snapshot["manifests"].values())) + resolved = manifest["resolved"] + + tokio_entry = next(v for k, v in resolved.items() if "tokio" in k) + mio_entry = next(v for k, v in resolved.items() if "mio" in k) + + self.assertEqual(tokio_entry["relationship"], "direct") + self.assertEqual(mio_entry["relationship"], "indirect") + + def test_package_url_preserved(self): + root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") + dep_pkg = _cargo_pkg("SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228") + spdx = _make_spdx( + packages=[root_pkg, dep_pkg], + relationships=[ + {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, + {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-serde"}, + ], + ) + snapshot = self._base_snapshot(spdx) + manifest = next(iter(snapshot["manifests"].values())) + resolved = manifest["resolved"] + serde_entry = next(v for k, v in resolved.items() if "serde" in k) + self.assertEqual(serde_entry["package_url"], "pkg:cargo/serde@1.0.228") + + def test_manifest_name_from_spdx_document_name(self): + spdx = _make_spdx(packages=[], doc_name="my-sbom-component") + snapshot = self._base_snapshot(spdx) + self.assertIn("my-sbom-component", snapshot["manifests"]) + + def test_empty_spdx_produces_empty_manifest(self): + spdx = _make_spdx(packages=[]) + snapshot = self._base_snapshot(spdx) + manifest = next(iter(snapshot["manifests"].values())) + self.assertEqual(manifest["resolved"], {}) + + def test_sha_and_ref_set_correctly(self): + spdx = _make_spdx(packages=[]) + snapshot = convert_spdx_to_snapshot( + spdx=spdx, + sha="deadbeef" * 5, + ref="refs/tags/v1.0.0", + job_correlator="ci_sbom", + job_id="99", + ) + self.assertEqual(snapshot["sha"], "deadbeef" * 5) + self.assertEqual(snapshot["ref"], "refs/tags/v1.0.0") + + def test_generic_purl_included(self): + """pkg:generic/ PURLs (BCR modules) are accepted by GitHub Dependency Graph.""" + root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") + boost_pkg = _cargo_pkg("SPDXRef-boost", "boost.filesystem", "1.83.0", purl="pkg:generic/boost.filesystem@1.83.0") + spdx = _make_spdx( + packages=[root_pkg, boost_pkg], + relationships=[ + {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, + {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-boost"}, + ], + ) + snapshot = self._base_snapshot(spdx) + manifest = next(iter(snapshot["manifests"].values())) + resolved = manifest["resolved"] + boost_entry = next((v for k, v in resolved.items() if "boost" in k), None) + self.assertIsNotNone(boost_entry) + self.assertEqual(boost_entry["package_url"], "pkg:generic/boost.filesystem@1.83.0") + + +if __name__ == "__main__": + unittest.main() From 50aa73298ecb53b8dcf56de5bed1c252acce08e8 Mon Sep 17 00:00:00 2001 From: Lukasz Juranek Date: Mon, 2 Mar 2026 09:51:39 +0100 Subject: [PATCH 2/4] Fix formatting: apply ruff, buildifier, and yamlfmt fixes Co-Authored-By: Claude Sonnet 4.6 --- .../workflows/sbom_dependency_submission.yml | 11 +- sbom/internal/generator/BUILD | 1 - sbom/internal/generator/sbom_generator.py | 10 +- .../scripts/generate_crates_metadata_cache.py | 18 +-- sbom/scripts/spdx_to_github_snapshot.py | 12 +- sbom/tests/test_bcr_known_licenses.py | 72 +++++++--- sbom/tests/test_cpp_enrich_checksum.py | 4 +- sbom/tests/test_cyclonedx_formatter.py | 32 ++++- .../test_generate_crates_metadata_cache.py | 22 +-- sbom/tests/test_spdx_formatter.py | 31 ++-- sbom/tests/test_spdx_to_github_snapshot.py | 133 ++++++++++++++---- 11 files changed, 231 insertions(+), 115 deletions(-) diff --git a/.github/workflows/sbom_dependency_submission.yml b/.github/workflows/sbom_dependency_submission.yml index 0330998..277aa07 100644 --- a/.github/workflows/sbom_dependency_submission.yml +++ b/.github/workflows/sbom_dependency_submission.yml @@ -22,42 +22,35 @@ on: required: false type: string default: 'dev' - jobs: sbom-dependency-submission: name: Build SBOM and submit to Dependency Graph runs-on: ubuntu-24.04 permissions: - contents: write # Required for dependency-graph/snapshots API - + contents: write # Required for dependency-graph/snapshots API steps: - name: Checkout uses: actions/checkout@v4.2.2 - - name: Setup Bazel uses: bazel-contrib/setup-bazel@0.15.0 with: disk-cache: true repository-cache: true bazelisk-cache: true - - name: Build SBOM run: | bazel build ${{ inputs.sbom_target }} \ --define=component_version=${{ inputs.release_tag }} - - name: Collect SPDX outputs run: | mkdir -p sbom_output find bazel-bin -name "*.spdx.json" -exec cp {} sbom_output/ \; echo "SBOM files collected:" ls -lh sbom_output/ || echo "(none)" - - name: Set up Python uses: actions/setup-python@v5 with: python-version: '3.11' - - name: Convert SPDX → GitHub Dependency Snapshot run: | mkdir -p snapshots @@ -74,7 +67,6 @@ jobs: --job-correlator "$correlator" \ --job-id "${{ github.run_id }}" done - - name: Submit snapshots to GitHub Dependency Graph env: GH_TOKEN: ${{ github.token }} @@ -101,7 +93,6 @@ jobs: echo "---" echo "Submitted: $submitted, Failed: $failed" [ "$failed" -eq 0 ] || exit 1 - - name: Upload snapshot artifacts if: always() uses: actions/upload-artifact@v4 diff --git a/sbom/internal/generator/BUILD b/sbom/internal/generator/BUILD index d3b96fc..c5947c9 100644 --- a/sbom/internal/generator/BUILD +++ b/sbom/internal/generator/BUILD @@ -35,4 +35,3 @@ py_library( name = "cyclonedx_formatter", srcs = ["cyclonedx_formatter.py"], ) - diff --git a/sbom/internal/generator/sbom_generator.py b/sbom/internal/generator/sbom_generator.py index 8ae3da6..58c1728 100644 --- a/sbom/internal/generator/sbom_generator.py +++ b/sbom/internal/generator/sbom_generator.py @@ -151,8 +151,6 @@ def load_crates_cache(cache_path: str | None = None) -> dict[str, Any]: return {} - - # Known licenses for Bazel Central Registry (BCR) C++ modules. # Used as a fallback when cdxgen and lockfile parsing cannot provide license data. # Keys are BCR module names (exact or prefix for sub-modules like boost.*). @@ -373,7 +371,9 @@ def load_cdxgen_sbom(cdxgen_path: str) -> list[dict[str, Any]]: return components -def mark_missing_cpp_descriptions(components: list[dict[str, Any]]) -> list[dict[str, Any]]: +def mark_missing_cpp_descriptions( + components: list[dict[str, Any]], +) -> list[dict[str, Any]]: """Mark missing descriptions for non-Rust libraries as 'Missing'.""" for comp in components: if comp.get("description"): @@ -495,9 +495,7 @@ def main() -> int: # Filter out the main component from the dependency list to avoid self-dependency # (e.g., sbom for score_kyron should not list score_kyron as its own dependency) if component_name: - components = [ - c for c in components if c.get("name") != component_name - ] + components = [c for c in components if c.get("name") != component_name] # Generate outputs if args.spdx_output: diff --git a/sbom/scripts/generate_crates_metadata_cache.py b/sbom/scripts/generate_crates_metadata_cache.py index 20eb138..0c2a16f 100755 --- a/sbom/scripts/generate_crates_metadata_cache.py +++ b/sbom/scripts/generate_crates_metadata_cache.py @@ -166,9 +166,7 @@ def generate_synthetic_cargo_lock( lines.append("[[package]]") lines.append(f'name = "{info["name"]}"') lines.append(f'version = "{info["version"]}"') - lines.append( - 'source = "registry+https://github.com/rust-lang/crates.io-index"' - ) + lines.append('source = "registry+https://github.com/rust-lang/crates.io-index"') lines.append("") with open(output_path, "w", encoding="utf-8") as f: @@ -190,9 +188,7 @@ def _find_uvx() -> str: return "uvx" # fall back, will raise FileNotFoundError in subprocess -def run_dash_license_scan( - cargo_lock_path: str, summary_output_path: str -) -> None: +def run_dash_license_scan(cargo_lock_path: str, summary_output_path: str) -> None: """Invoke dash-license-scan via uvx and write summary to file. Args: @@ -238,9 +234,7 @@ def run_dash_license_scan( ) sys.exit(1) except subprocess.TimeoutExpired: - print( - "ERROR: dash-license-scan timed out after 600 seconds", file=sys.stderr - ) + print("ERROR: dash-license-scan timed out after 600 seconds", file=sys.stderr) sys.exit(1) # dash-license-scan exits with returncode = number of restricted items. @@ -267,9 +261,7 @@ def run_dash_license_scan( sys.exit(1) if result.returncode > 0: - print( - f" NOTE: {result.returncode} crate(s) have 'restricted' license status" - ) + print(f" NOTE: {result.returncode} crate(s) have 'restricted' license status") def parse_dash_summary(summary_path: str) -> Dict[str, str]: @@ -413,7 +405,7 @@ def generate_cache( print(f"Found {len(crates)} crates from Cargo.lock") # Merge crates from MODULE.bazel.lock files - for module_lock_path in (module_lock_paths or []): + for module_lock_path in module_lock_paths or []: print(f"Parsing {module_lock_path}...") module_crates = parse_module_bazel_lock(module_lock_path) added = 0 diff --git a/sbom/scripts/spdx_to_github_snapshot.py b/sbom/scripts/spdx_to_github_snapshot.py index c62e13c..f791471 100644 --- a/sbom/scripts/spdx_to_github_snapshot.py +++ b/sbom/scripts/spdx_to_github_snapshot.py @@ -168,12 +168,8 @@ def main() -> int: ) parser.add_argument("--input", required=True, help="Path to SPDX 2.3 JSON file") parser.add_argument("--output", required=True, help="Output snapshot JSON path") - parser.add_argument( - "--sha", required=True, help="Git commit SHA (40 hex chars)" - ) - parser.add_argument( - "--ref", required=True, help="Git ref (e.g. refs/heads/main)" - ) + parser.add_argument("--sha", required=True, help="Git commit SHA (40 hex chars)") + parser.add_argument("--ref", required=True, help="Git ref (e.g. refs/heads/main)") parser.add_argument( "--job-correlator", default="score-sbom_sbom", @@ -215,9 +211,7 @@ def main() -> int: with output_path.open("w") as f: json.dump(snapshot, f, indent=2) - total_packages = sum( - len(m["resolved"]) for m in snapshot["manifests"].values() - ) + total_packages = sum(len(m["resolved"]) for m in snapshot["manifests"].values()) print( f"Converted {len(spdx.get('packages', []))} SPDX packages → " f"{total_packages} Dependency Submission packages" diff --git a/sbom/tests/test_bcr_known_licenses.py b/sbom/tests/test_bcr_known_licenses.py index 16aafee..8adbc15 100644 --- a/sbom/tests/test_bcr_known_licenses.py +++ b/sbom/tests/test_bcr_known_licenses.py @@ -38,7 +38,10 @@ def test_boost_submodule_gets_license(self): """boost.config should inherit BSL-1.0 from the 'boost' BCR entry.""" metadata = { "modules": { - "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + }, }, "licenses": {}, } @@ -50,13 +53,17 @@ def test_boost_submodule_gets_license(self): def test_multiple_boost_submodules(self): """All boost.* sub-modules should receive BSL-1.0.""" names = [ - "boost.config", "boost.assert", "boost.mp11", "boost.container", - "boost.interprocess", "boost.core", "boost.predef", + "boost.config", + "boost.assert", + "boost.mp11", + "boost.container", + "boost.interprocess", + "boost.core", + "boost.predef", ] metadata = { "modules": { - n: {"version": "1.87.0", "purl": f"pkg:bazel/{n}@1.87.0"} - for n in names + n: {"version": "1.87.0", "purl": f"pkg:bazel/{n}@1.87.0"} for n in names }, "licenses": {}, } @@ -64,7 +71,8 @@ def test_multiple_boost_submodules(self): for n in names: self.assertEqual( - metadata["modules"][n]["license"], "BSL-1.0", + metadata["modules"][n]["license"], + "BSL-1.0", f"{n} should have BSL-1.0 license", ) @@ -72,7 +80,10 @@ def test_exact_bcr_match(self): """A module matching a BCR key exactly gets the license.""" metadata = { "modules": { - "abseil-cpp": {"version": "20230802.0", "purl": "pkg:bazel/abseil-cpp@20230802.0"}, + "abseil-cpp": { + "version": "20230802.0", + "purl": "pkg:bazel/abseil-cpp@20230802.0", + }, }, "licenses": {}, } @@ -84,7 +95,10 @@ def test_unknown_module_unchanged(self): """Modules not in BCR_KNOWN_LICENSES remain without a license.""" metadata = { "modules": { - "some_unknown_lib": {"version": "1.0.0", "purl": "pkg:bazel/some_unknown_lib@1.0.0"}, + "some_unknown_lib": { + "version": "1.0.0", + "purl": "pkg:bazel/some_unknown_lib@1.0.0", + }, }, "licenses": {}, } @@ -98,7 +112,10 @@ def test_explicit_license_override(self): """User-declared license in metadata['licenses'] takes priority.""" metadata = { "modules": { - "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + }, }, "licenses": { "boost.config": {"license": "MIT", "supplier": "Custom"}, @@ -113,8 +130,14 @@ def test_parent_license_override(self): """Parent-level license declaration covers all sub-modules.""" metadata = { "modules": { - "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, - "boost.container": {"version": "1.87.0", "purl": "pkg:bazel/boost.container@1.87.0"}, + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + }, + "boost.container": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.container@1.87.0", + }, }, "licenses": { "boost": {"license": "BSL-1.0-custom", "supplier": "My Boost Fork"}, @@ -122,14 +145,21 @@ def test_parent_license_override(self): } apply_known_licenses(metadata) - self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0-custom") - self.assertEqual(metadata["modules"]["boost.container"]["license"], "BSL-1.0-custom") + self.assertEqual( + metadata["modules"]["boost.config"]["license"], "BSL-1.0-custom" + ) + self.assertEqual( + metadata["modules"]["boost.container"]["license"], "BSL-1.0-custom" + ) def test_explicit_beats_parent(self): """Exact-name license takes priority over parent-level declaration.""" metadata = { "modules": { - "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + }, }, "licenses": { "boost": {"license": "BSL-1.0", "supplier": "Boost.org"}, @@ -144,7 +174,10 @@ def test_explicit_beats_bcr_known(self): """User-declared license overrides the BCR known-license database.""" metadata = { "modules": { - "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + }, }, "licenses": { "boost": {"license": "Apache-2.0", "supplier": "Custom Boost"}, @@ -190,7 +223,9 @@ def test_supplier_not_overwritten_when_present(self): apply_known_licenses(metadata) self.assertEqual(metadata["modules"]["boost.config"]["license"], "BSL-1.0") - self.assertEqual(metadata["modules"]["boost.config"]["supplier"], "My Custom Supplier") + self.assertEqual( + metadata["modules"]["boost.config"]["supplier"], "My Custom Supplier" + ) # -- Edge cases ----------------------------------------------------------- @@ -203,7 +238,10 @@ def test_no_licenses_key(self): """Missing 'licenses' key does not raise.""" metadata = { "modules": { - "boost.config": {"version": "1.87.0", "purl": "pkg:bazel/boost.config@1.87.0"}, + "boost.config": { + "version": "1.87.0", + "purl": "pkg:bazel/boost.config@1.87.0", + }, }, } apply_known_licenses(metadata) diff --git a/sbom/tests/test_cpp_enrich_checksum.py b/sbom/tests/test_cpp_enrich_checksum.py index 258eba3..d9e7a59 100644 --- a/sbom/tests/test_cpp_enrich_checksum.py +++ b/sbom/tests/test_cpp_enrich_checksum.py @@ -42,7 +42,9 @@ def test_checksum_not_overwritten_when_already_present(self): """An existing checksum on a component is preserved — cache is skipped.""" existing = "aaaa" * 16 cache_sha = "bbbb" * 16 - components = [{"name": "flatbuffers", "version": "25.2.10", "checksum": existing}] + components = [ + {"name": "flatbuffers", "version": "25.2.10", "checksum": existing} + ] cpp_cache = [{"name": "flatbuffers", "checksum": cache_sha}] result = self._run(components, cpp_cache) diff --git a/sbom/tests/test_cyclonedx_formatter.py b/sbom/tests/test_cyclonedx_formatter.py index 46de425..0a20345 100644 --- a/sbom/tests/test_cyclonedx_formatter.py +++ b/sbom/tests/test_cyclonedx_formatter.py @@ -3,7 +3,10 @@ import unittest from datetime import datetime, timezone -from sbom.internal.generator.cyclonedx_formatter import generate_cyclonedx, _normalize_spdx_license +from sbom.internal.generator.cyclonedx_formatter import ( + generate_cyclonedx, + _normalize_spdx_license, +) class TestCycloneDXFormatter(unittest.TestCase): @@ -139,7 +142,9 @@ def test_generate_cyclonedx_cratesio_external_ref_from_source_field(self): serde = next(c for c in cdx["components"] if c["name"] == "serde") ext_refs = serde.get("externalReferences", []) dist_ref = next((r for r in ext_refs if r["type"] == "distribution"), None) - self.assertIsNotNone(dist_ref, "Expected distribution externalReference for crates.io crate") + self.assertIsNotNone( + dist_ref, "Expected distribution externalReference for crates.io crate" + ) self.assertIn("crates.io/crates/serde/1.0.228", dist_ref["url"]) def test_generate_cyclonedx_schema_url_uses_https(self): @@ -169,10 +174,14 @@ class TestNormalizeSpdxLicenseCdx(unittest.TestCase): """Verify lowercase operator normalization for CycloneDX formatter.""" def test_lowercase_or_normalized(self): - self.assertEqual(_normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT") + self.assertEqual( + _normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT" + ) def test_gpl_or_later_not_mangled(self): - self.assertEqual(_normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later") + self.assertEqual( + _normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later" + ) def test_lowercase_or_routes_to_expression_field(self): """'Apache-2.0 or MIT' from dash-license-scan must use expression field, not license.id.""" @@ -183,14 +192,23 @@ def test_lowercase_or_routes_to_expression_field(self): "namespace": "https://example.com", } timestamp = "2024-01-01T00:00:00+00:00" - components = [{"name": "serde", "version": "1.0.228", "purl": "pkg:cargo/serde@1.0.228", - "type": "library", "license": "Apache-2.0 or MIT"}] + components = [ + { + "name": "serde", + "version": "1.0.228", + "purl": "pkg:cargo/serde@1.0.228", + "type": "library", + "license": "Apache-2.0 or MIT", + } + ] cdx = generate_cyclonedx(components, config, timestamp) serde = next(c for c in cdx["components"] if c["name"] == "serde") licenses = serde.get("licenses", []) self.assertEqual(len(licenses), 1) # Must use 'expression' field with uppercase OR, not 'license.id' - self.assertIn("expression", licenses[0], "compound license must use 'expression' field") + self.assertIn( + "expression", licenses[0], "compound license must use 'expression' field" + ) self.assertEqual(licenses[0]["expression"], "Apache-2.0 OR MIT") self.assertNotIn("license", licenses[0]) diff --git a/sbom/tests/test_generate_crates_metadata_cache.py b/sbom/tests/test_generate_crates_metadata_cache.py index 30bdc3e..9c70f66 100644 --- a/sbom/tests/test_generate_crates_metadata_cache.py +++ b/sbom/tests/test_generate_crates_metadata_cache.py @@ -109,9 +109,7 @@ def test_empty_file(self): def test_restricted_crate_still_gets_license(self): """Restricted crates still have their license extracted.""" - summary = ( - "crate/cratesio/-/openssl-sys/0.9.104, OpenSSL, restricted, clearlydefined\n" - ) + summary = "crate/cratesio/-/openssl-sys/0.9.104, OpenSSL, restricted, clearlydefined\n" path = self._write_summary(summary) result = parse_dash_summary(path) @@ -119,9 +117,7 @@ def test_restricted_crate_still_gets_license(self): def test_licenseref_expression(self): """LicenseRef-* expressions are preserved.""" - summary = ( - "crate/cratesio/-/ring/0.17.14, LicenseRef-ring, restricted, clearlydefined\n" - ) + summary = "crate/cratesio/-/ring/0.17.14, LicenseRef-ring, restricted, clearlydefined\n" path = self._write_summary(summary) result = parse_dash_summary(path) @@ -170,9 +166,7 @@ def test_hyphenated_crate_name(self): } coords = build_dash_coordinates(crates) - self.assertEqual( - coords[0], "crate/cratesio/-/iceoryx2-bb-lock-free/0.7.0" - ) + self.assertEqual(coords[0], "crate/cratesio/-/iceoryx2-bb-lock-free/0.7.0") class TestParseModuleBazelLock(unittest.TestCase): @@ -258,11 +252,7 @@ def test_complex_crate_name(self): def test_no_crate_extension(self): """Lockfile without crate extension returns empty dict.""" - lockfile = { - "moduleExtensions": { - "some_other_extension": {"general": {}} - } - } + lockfile = {"moduleExtensions": {"some_other_extension": {"general": {}}}} path = self._write_lockfile(lockfile) result = parse_module_bazel_lock(path) @@ -389,9 +379,7 @@ def test_kyron_style_crates(self): # All crates should have licenses for name in crates: self.assertIn(name, license_map, f"Missing license for {name}") - self.assertTrue( - license_map[name], f"Empty license for {name}" - ) + self.assertTrue(license_map[name], f"Empty license for {name}") if __name__ == "__main__": diff --git a/sbom/tests/test_spdx_formatter.py b/sbom/tests/test_spdx_formatter.py index 7074e97..d0c0350 100644 --- a/sbom/tests/test_spdx_formatter.py +++ b/sbom/tests/test_spdx_formatter.py @@ -3,7 +3,10 @@ import unittest from datetime import datetime, timezone -from sbom.internal.generator.spdx_formatter import generate_spdx, _normalize_spdx_license +from sbom.internal.generator.spdx_formatter import ( + generate_spdx, + _normalize_spdx_license, +) class TestSpdxFormatter(unittest.TestCase): @@ -104,7 +107,6 @@ def test_generate_spdx_component_purl(self): self.assertIsNotNone(purl_ref) self.assertEqual(purl_ref["referenceLocator"], "pkg:cargo/tokio@1.10.0") - def test_generate_spdx_component_checksum(self): """Test that SHA-256 checksums are emitted when available.""" components_with_hash = [ @@ -144,20 +146,31 @@ class TestNormalizeSpdxLicense(unittest.TestCase): """Tests for SPDX boolean operator normalization.""" def test_lowercase_or_uppercased(self): - self.assertEqual(_normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT") + self.assertEqual( + _normalize_spdx_license("Apache-2.0 or MIT"), "Apache-2.0 OR MIT" + ) def test_lowercase_and_uppercased(self): - self.assertEqual(_normalize_spdx_license("MIT and Apache-2.0"), "MIT AND Apache-2.0") + self.assertEqual( + _normalize_spdx_license("MIT and Apache-2.0"), "MIT AND Apache-2.0" + ) def test_lowercase_with_uppercased(self): - self.assertEqual(_normalize_spdx_license("GPL-2.0 with Classpath-exception-2.0"), "GPL-2.0 WITH Classpath-exception-2.0") + self.assertEqual( + _normalize_spdx_license("GPL-2.0 with Classpath-exception-2.0"), + "GPL-2.0 WITH Classpath-exception-2.0", + ) def test_already_uppercase_unchanged(self): - self.assertEqual(_normalize_spdx_license("Apache-2.0 OR MIT"), "Apache-2.0 OR MIT") + self.assertEqual( + _normalize_spdx_license("Apache-2.0 OR MIT"), "Apache-2.0 OR MIT" + ) def test_gpl_or_later_identifier_not_mangled(self): """GPL-2.0-or-later has '-or-' (hyphen-delimited) — must not be uppercased.""" - self.assertEqual(_normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later") + self.assertEqual( + _normalize_spdx_license("GPL-2.0-or-later"), "GPL-2.0-or-later" + ) def test_mixed_compound_expression(self): self.assertEqual( @@ -180,7 +193,9 @@ def test_lowercase_operator_in_spdx_output_end_to_end(self): "namespace": "https://example.com", } timestamp = "2024-01-01T00:00:00+00:00" - components = [{"name": "serde", "version": "1.0.228", "license": "Apache-2.0 or MIT"}] + components = [ + {"name": "serde", "version": "1.0.228", "license": "Apache-2.0 or MIT"} + ] spdx = generate_spdx(components, config, timestamp) serde_pkg = next(p for p in spdx["packages"] if p["name"] == "serde") self.assertEqual(serde_pkg["licenseConcluded"], "Apache-2.0 OR MIT") diff --git a/sbom/tests/test_spdx_to_github_snapshot.py b/sbom/tests/test_spdx_to_github_snapshot.py index de93f6a..5fe77a9 100644 --- a/sbom/tests/test_spdx_to_github_snapshot.py +++ b/sbom/tests/test_spdx_to_github_snapshot.py @@ -19,9 +19,7 @@ def _make_spdx( } -def _cargo_pkg( - spdx_id: str, name: str, version: str, purl: str | None = None -) -> dict: +def _cargo_pkg(spdx_id: str, name: str, version: str, purl: str | None = None) -> dict: pkg: dict = { "SPDXID": spdx_id, "name": name, @@ -30,13 +28,16 @@ def _cargo_pkg( } if purl: pkg["externalRefs"] = [ - {"referenceCategory": "PACKAGE-MANAGER", "referenceType": "purl", "referenceLocator": purl} + { + "referenceCategory": "PACKAGE-MANAGER", + "referenceType": "purl", + "referenceLocator": purl, + } ] return pkg class TestConvertSpdxToSnapshot(unittest.TestCase): - def _base_snapshot(self, spdx: dict, **kwargs) -> dict: return convert_spdx_to_snapshot( spdx=spdx, @@ -73,12 +74,21 @@ def test_job_correlator(self): self.assertEqual(snapshot["job"]["id"], "42") def test_packages_without_purl_are_excluded(self): - root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") + root_pkg = _cargo_pkg( + "SPDXRef-root", + "myapp", + "1.0.0", + purl="pkg:github/eclipse-score/myapp@1.0.0", + ) no_purl_pkg = _cargo_pkg("SPDXRef-nopurl", "internal-tool", "0.1.0") spdx = _make_spdx( packages=[root_pkg, no_purl_pkg], relationships=[ - {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, + { + "spdxElementId": "SPDXRef-DOCUMENT", + "relationshipType": "DESCRIBES", + "relatedSpdxElement": "SPDXRef-root", + }, ], ) snapshot = self._base_snapshot(spdx) @@ -88,13 +98,28 @@ def test_packages_without_purl_are_excluded(self): self.assertFalse(any("internal-tool" in k for k in resolved)) def test_root_package_excluded_from_resolved(self): - root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") - dep_pkg = _cargo_pkg("SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228") + root_pkg = _cargo_pkg( + "SPDXRef-root", + "myapp", + "1.0.0", + purl="pkg:github/eclipse-score/myapp@1.0.0", + ) + dep_pkg = _cargo_pkg( + "SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228" + ) spdx = _make_spdx( packages=[root_pkg, dep_pkg], relationships=[ - {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, - {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-serde"}, + { + "spdxElementId": "SPDXRef-DOCUMENT", + "relationshipType": "DESCRIBES", + "relatedSpdxElement": "SPDXRef-root", + }, + { + "spdxElementId": "SPDXRef-root", + "relationshipType": "DEPENDS_ON", + "relatedSpdxElement": "SPDXRef-serde", + }, ], ) snapshot = self._base_snapshot(spdx) @@ -106,15 +131,36 @@ def test_root_package_excluded_from_resolved(self): self.assertTrue(any("serde" in k for k in resolved)) def test_direct_vs_indirect_relationship(self): - root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") - direct_pkg = _cargo_pkg("SPDXRef-tokio", "tokio", "1.0.0", purl="pkg:cargo/tokio@1.0.0") - indirect_pkg = _cargo_pkg("SPDXRef-mio", "mio", "0.8.0", purl="pkg:cargo/mio@0.8.0") + root_pkg = _cargo_pkg( + "SPDXRef-root", + "myapp", + "1.0.0", + purl="pkg:github/eclipse-score/myapp@1.0.0", + ) + direct_pkg = _cargo_pkg( + "SPDXRef-tokio", "tokio", "1.0.0", purl="pkg:cargo/tokio@1.0.0" + ) + indirect_pkg = _cargo_pkg( + "SPDXRef-mio", "mio", "0.8.0", purl="pkg:cargo/mio@0.8.0" + ) spdx = _make_spdx( packages=[root_pkg, direct_pkg, indirect_pkg], relationships=[ - {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, - {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-tokio"}, - {"spdxElementId": "SPDXRef-tokio", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-mio"}, + { + "spdxElementId": "SPDXRef-DOCUMENT", + "relationshipType": "DESCRIBES", + "relatedSpdxElement": "SPDXRef-root", + }, + { + "spdxElementId": "SPDXRef-root", + "relationshipType": "DEPENDS_ON", + "relatedSpdxElement": "SPDXRef-tokio", + }, + { + "spdxElementId": "SPDXRef-tokio", + "relationshipType": "DEPENDS_ON", + "relatedSpdxElement": "SPDXRef-mio", + }, ], ) snapshot = self._base_snapshot(spdx) @@ -128,13 +174,28 @@ def test_direct_vs_indirect_relationship(self): self.assertEqual(mio_entry["relationship"], "indirect") def test_package_url_preserved(self): - root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") - dep_pkg = _cargo_pkg("SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228") + root_pkg = _cargo_pkg( + "SPDXRef-root", + "myapp", + "1.0.0", + purl="pkg:github/eclipse-score/myapp@1.0.0", + ) + dep_pkg = _cargo_pkg( + "SPDXRef-serde", "serde", "1.0.228", purl="pkg:cargo/serde@1.0.228" + ) spdx = _make_spdx( packages=[root_pkg, dep_pkg], relationships=[ - {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, - {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-serde"}, + { + "spdxElementId": "SPDXRef-DOCUMENT", + "relationshipType": "DESCRIBES", + "relatedSpdxElement": "SPDXRef-root", + }, + { + "spdxElementId": "SPDXRef-root", + "relationshipType": "DEPENDS_ON", + "relatedSpdxElement": "SPDXRef-serde", + }, ], ) snapshot = self._base_snapshot(spdx) @@ -168,13 +229,31 @@ def test_sha_and_ref_set_correctly(self): def test_generic_purl_included(self): """pkg:generic/ PURLs (BCR modules) are accepted by GitHub Dependency Graph.""" - root_pkg = _cargo_pkg("SPDXRef-root", "myapp", "1.0.0", purl="pkg:github/eclipse-score/myapp@1.0.0") - boost_pkg = _cargo_pkg("SPDXRef-boost", "boost.filesystem", "1.83.0", purl="pkg:generic/boost.filesystem@1.83.0") + root_pkg = _cargo_pkg( + "SPDXRef-root", + "myapp", + "1.0.0", + purl="pkg:github/eclipse-score/myapp@1.0.0", + ) + boost_pkg = _cargo_pkg( + "SPDXRef-boost", + "boost.filesystem", + "1.83.0", + purl="pkg:generic/boost.filesystem@1.83.0", + ) spdx = _make_spdx( packages=[root_pkg, boost_pkg], relationships=[ - {"spdxElementId": "SPDXRef-DOCUMENT", "relationshipType": "DESCRIBES", "relatedSpdxElement": "SPDXRef-root"}, - {"spdxElementId": "SPDXRef-root", "relationshipType": "DEPENDS_ON", "relatedSpdxElement": "SPDXRef-boost"}, + { + "spdxElementId": "SPDXRef-DOCUMENT", + "relationshipType": "DESCRIBES", + "relatedSpdxElement": "SPDXRef-root", + }, + { + "spdxElementId": "SPDXRef-root", + "relationshipType": "DEPENDS_ON", + "relatedSpdxElement": "SPDXRef-boost", + }, ], ) snapshot = self._base_snapshot(spdx) @@ -182,7 +261,9 @@ def test_generic_purl_included(self): resolved = manifest["resolved"] boost_entry = next((v for k, v in resolved.items() if "boost" in k), None) self.assertIsNotNone(boost_entry) - self.assertEqual(boost_entry["package_url"], "pkg:generic/boost.filesystem@1.83.0") + self.assertEqual( + boost_entry["package_url"], "pkg:generic/boost.filesystem@1.83.0" + ) if __name__ == "__main__": From cf97c27e1117b5327dc3afe09c8ecd8370f21969 Mon Sep 17 00:00:00 2001 From: Lukasz Juranek Date: Mon, 2 Mar 2026 10:41:05 +0100 Subject: [PATCH 3/4] sbom: switch tests to pytest via score_py_pytest Replace py_test with score_py_pytest in sbom/tests/BUILD to align with the pytest-based test framework used across the rest of the tooling module. Remove the unittest.main() entry points that are no longer needed when pytest is the test runner (pytest discovers and runs unittest.TestCase subclasses natively). Co-Authored-By: Claude Sonnet 4.6 --- sbom/tests/BUILD | 23 +++++++++++++------ sbom/tests/test_bcr_known_licenses.py | 4 ---- sbom/tests/test_cpp_enrich_checksum.py | 4 ---- sbom/tests/test_cyclonedx_formatter.py | 4 ---- .../test_generate_crates_metadata_cache.py | 4 ---- sbom/tests/test_spdx_formatter.py | 4 ---- sbom/tests/test_spdx_to_github_snapshot.py | 4 ---- 7 files changed, 16 insertions(+), 31 deletions(-) diff --git a/sbom/tests/BUILD b/sbom/tests/BUILD index 1f90375..233b6e0 100644 --- a/sbom/tests/BUILD +++ b/sbom/tests/BUILD @@ -1,30 +1,39 @@ -# SBOM Tests Package +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation # -# This package contains tests for the SBOM generation system. +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* -load("@rules_python//python:defs.bzl", "py_test") +load("@score_tooling//python_basics:defs.bzl", "score_py_pytest") package(default_visibility = ["//visibility:private"]) -py_test( +score_py_pytest( name = "test_spdx_formatter", srcs = ["test_spdx_formatter.py"], deps = ["//sbom/internal/generator:spdx_formatter"], ) -py_test( +score_py_pytest( name = "test_cyclonedx_formatter", srcs = ["test_cyclonedx_formatter.py"], deps = ["//sbom/internal/generator:cyclonedx_formatter"], ) -py_test( +score_py_pytest( name = "test_bcr_known_licenses", srcs = ["test_bcr_known_licenses.py"], deps = ["//sbom/internal/generator:sbom_generator_lib"], ) -py_test( +score_py_pytest( name = "test_cpp_enrich_checksum", srcs = ["test_cpp_enrich_checksum.py"], data = ["//sbom:cpp_metadata.json"], diff --git a/sbom/tests/test_bcr_known_licenses.py b/sbom/tests/test_bcr_known_licenses.py index 8adbc15..c5baba6 100644 --- a/sbom/tests/test_bcr_known_licenses.py +++ b/sbom/tests/test_bcr_known_licenses.py @@ -282,7 +282,3 @@ def test_module_with_license_from_apply(self): self.assertIsNotNone(comp) self.assertEqual(comp["name"], "boost.config") self.assertEqual(comp["license"], "BSL-1.0") - - -if __name__ == "__main__": - unittest.main() diff --git a/sbom/tests/test_cpp_enrich_checksum.py b/sbom/tests/test_cpp_enrich_checksum.py index d9e7a59..37187ee 100644 --- a/sbom/tests/test_cpp_enrich_checksum.py +++ b/sbom/tests/test_cpp_enrich_checksum.py @@ -152,7 +152,3 @@ def test_no_sbom_fields_in_any_entry(self): f"fields: {manually_present}. All SBOM fields must come from " f"automated sources only.", ) - - -if __name__ == "__main__": - unittest.main() diff --git a/sbom/tests/test_cyclonedx_formatter.py b/sbom/tests/test_cyclonedx_formatter.py index 0a20345..b7eaa80 100644 --- a/sbom/tests/test_cyclonedx_formatter.py +++ b/sbom/tests/test_cyclonedx_formatter.py @@ -211,7 +211,3 @@ def test_lowercase_or_routes_to_expression_field(self): ) self.assertEqual(licenses[0]["expression"], "Apache-2.0 OR MIT") self.assertNotIn("license", licenses[0]) - - -if __name__ == "__main__": - unittest.main() diff --git a/sbom/tests/test_generate_crates_metadata_cache.py b/sbom/tests/test_generate_crates_metadata_cache.py index 9c70f66..c84f3d9 100644 --- a/sbom/tests/test_generate_crates_metadata_cache.py +++ b/sbom/tests/test_generate_crates_metadata_cache.py @@ -380,7 +380,3 @@ def test_kyron_style_crates(self): for name in crates: self.assertIn(name, license_map, f"Missing license for {name}") self.assertTrue(license_map[name], f"Empty license for {name}") - - -if __name__ == "__main__": - unittest.main() diff --git a/sbom/tests/test_spdx_formatter.py b/sbom/tests/test_spdx_formatter.py index d0c0350..1df7dba 100644 --- a/sbom/tests/test_spdx_formatter.py +++ b/sbom/tests/test_spdx_formatter.py @@ -200,7 +200,3 @@ def test_lowercase_operator_in_spdx_output_end_to_end(self): serde_pkg = next(p for p in spdx["packages"] if p["name"] == "serde") self.assertEqual(serde_pkg["licenseConcluded"], "Apache-2.0 OR MIT") self.assertEqual(serde_pkg["licenseDeclared"], "Apache-2.0 OR MIT") - - -if __name__ == "__main__": - unittest.main() diff --git a/sbom/tests/test_spdx_to_github_snapshot.py b/sbom/tests/test_spdx_to_github_snapshot.py index 5fe77a9..512e2dc 100644 --- a/sbom/tests/test_spdx_to_github_snapshot.py +++ b/sbom/tests/test_spdx_to_github_snapshot.py @@ -264,7 +264,3 @@ def test_generic_purl_included(self): self.assertEqual( boost_entry["package_url"], "pkg:generic/boost.filesystem@1.83.0" ) - - -if __name__ == "__main__": - unittest.main() From 426bc927f15aae198e5ab8e57674eedb00187096 Mon Sep 17 00:00:00 2001 From: Lukasz Juranek Date: Tue, 3 Mar 2026 18:06:27 +0100 Subject: [PATCH 4/4] Add tests cleanup readme (#2232) --- sbom/BUILD.bazel | 2 + sbom/SBOM_Readme.md | 315 +-- .../requirements/component_requirements.rst | 88 - .../requirements/feature_requirements.rst | 92 - sbom/internal/generator/BUILD | 7 + .../internal/generator/cyclonedx_formatter.py | 15 +- sbom/internal/generator/sbom_generator.py | 102 +- sbom/internal/generator/spdx_formatter.py | 14 +- sbom/internal/generator/utils.py | 17 + sbom/scripts/BUILD.bazel | 30 + .../scripts/generate_crates_metadata_cache.py | 39 +- sbom/tests/BUILD | 31 + sbom/tests/fixtures/baselibs_input.json | 271 ++ sbom/tests/fixtures/crates_metadata.json | 2306 +++++++++++++++++ sbom/tests/fixtures/kyron_cdxgen.cdx.json | 1 + sbom/tests/fixtures/kyron_input.json | 570 ++++ .../fixtures/orchestrator_cdxgen.cdx.json | 1 + sbom/tests/fixtures/orchestrator_input.json | 576 ++++ .../reference_integration.MODULE.bazel.lock | 47 + sbom/tests/fixtures/sbom_metadata.json | 1 + sbom/tests/test_bcr_known_licenses.py | 27 +- sbom/tests/test_cpp_enrich_checksum.py | 28 +- sbom/tests/test_cyclonedx_formatter.py | 39 +- .../tests/test_generate_cpp_metadata_cache.py | 393 +++ .../test_generate_crates_metadata_cache.py | 129 +- sbom/tests/test_real_sbom_integration.py | 593 +++++ sbom/tests/test_sbom_generator.py | 1184 +++++++++ sbom/tests/test_spdx_formatter.py | 39 +- sbom/tests/test_spdx_to_github_snapshot.py | 34 +- 29 files changed, 6372 insertions(+), 619 deletions(-) delete mode 100644 sbom/docs/requirements/component_requirements.rst delete mode 100644 sbom/docs/requirements/feature_requirements.rst create mode 100644 sbom/internal/generator/utils.py create mode 100644 sbom/tests/fixtures/baselibs_input.json create mode 100755 sbom/tests/fixtures/crates_metadata.json create mode 100755 sbom/tests/fixtures/kyron_cdxgen.cdx.json create mode 100644 sbom/tests/fixtures/kyron_input.json create mode 100755 sbom/tests/fixtures/orchestrator_cdxgen.cdx.json create mode 100644 sbom/tests/fixtures/orchestrator_input.json create mode 100644 sbom/tests/fixtures/reference_integration.MODULE.bazel.lock create mode 100755 sbom/tests/fixtures/sbom_metadata.json create mode 100644 sbom/tests/test_generate_cpp_metadata_cache.py create mode 100644 sbom/tests/test_real_sbom_integration.py create mode 100644 sbom/tests/test_sbom_generator.py diff --git a/sbom/BUILD.bazel b/sbom/BUILD.bazel index ec94784..9934f9a 100644 --- a/sbom/BUILD.bazel +++ b/sbom/BUILD.bazel @@ -14,6 +14,8 @@ package(default_visibility = ["//visibility:public"]) exports_files([ "defs.bzl", "extensions.bzl", + "cpp_metadata.json", + "crates_metadata.json", ]) # Filegroup for all SBOM-related bzl files diff --git a/sbom/SBOM_Readme.md b/sbom/SBOM_Readme.md index b242902..c02233d 100644 --- a/sbom/SBOM_Readme.md +++ b/sbom/SBOM_Readme.md @@ -1,27 +1,28 @@ -# SBOM Setup Guide +# About + +SBOM tooling gives a set of bazel rules that generates a Software Bill of Materials +in SPDX 2.3 and CycloneDX 1.6 format for a given Bazel target. + +# Setup ## 1. Configure MODULE.bazel -Add the SBOM metadata extension in your **root** MODULE.bazel (e.g. `reference_integration/MODULE.bazel`): +Add the SBOM metadata extension in your **root** MODULE.bazel: ```starlark -# Enable SBOM metadata collection from all modules in the dependency graph sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") use_repo(sbom_ext, "sbom_metadata") ``` -**For modules using `local_path_override` or `git_override`**, also add a `track_module` tag for each such module. Without this, their versions cannot be auto-detected and will appear as `unknown` in the SBOM: +**For modules using `local_path_override` or `git_override`**, also add a `track_module` tag for each such module. Without this, their versions cannot be auto-detected and will appear as `unknown` in the SBOM. The extension reads the version directly from the module's own `MODULE.bazel` file: ```starlark -# Required for modules with local_path_override or git_override (no registry version) +sbom_ext = use_extension("@score_tooling//sbom:extensions.bzl", "sbom_metadata") sbom_ext.track_module(name = "score_baselibs") -sbom_ext.track_module(name = "score_communication") -sbom_ext.track_module(name = "score_orchestrator") -# ... one entry per overridden module +sbom_ext.track_module(name = "score_kyron") +use_repo(sbom_ext, "sbom_metadata") ``` -No manual license entries are needed — all license metadata is collected automatically. - ## 2. Add SBOM Target in BUILD ```starlark @@ -32,79 +33,56 @@ sbom( targets = ["//my/app:binary"], component_name = "my_application", component_version = "1.0.0", - # Rust crate metadata from multiple MODULE.bazel.lock files module_lockfiles = [ "@score_crates//:MODULE.bazel.lock", - ":MODULE.bazel.lock", # workspace's own lockfile for additional crates + ":MODULE.bazel.lock", ], auto_crates_cache = True, - auto_cdxgen = True, # Requires system-installed npm/cdxgen (see below) + auto_cdxgen = True, ) ``` ### Parameters | Parameter | Default | Description | -| :--- | :--- | :--- | -| `targets` | _(required)_ | Bazel targets to include in SBOM | -| `component_name` | rule name | Main component name | -| `component_version` | `""` | Version string | -| `output_formats` | `["spdx", "cyclonedx"]` | Output formats: `"spdx"` and/or `"cyclonedx"` | -| `module_lockfiles` | `[]` | List of MODULE.bazel.lock files for Rust crate metadata. Pass `@score_crates//:MODULE.bazel.lock` (centralized crate specs) and `:MODULE.bazel.lock` (workspace-local crates). Each lockfile is parsed for crate name, version, and sha256. | -| `cargo_lockfile` | `None` | Optional Cargo.lock for additional crates. Usually not needed when `module_lockfiles` covers all crates. | -| `auto_crates_cache` | `True` | Auto-generate crates cache when `module_lockfiles` or `cargo_lockfile` is set | -| `auto_cdxgen` | `False` | Auto-run cdxgen when no `cdxgen_sbom` is provided | -| `cdxgen_sbom` | `None` | Label to a pre-generated CycloneDX JSON from cdxgen for C++ enrichment | -| `producer_name` | `"Eclipse Foundation"` | SBOM producer organization name (appears in `metadata.supplier`) | -| `producer_url` | `"https://projects.eclipse.org/projects/automotive.score"` | SBOM producer URL | -| `sbom_authors` | `[]` | Author strings for `metadata.authors` (e.g. `["Eclipse SCORE Team"]`) | -| `generation_context` | `""` | Lifecycle phase: `"pre-build"`, `"build"`, or `"post-build"` | -| `sbom_tools` | `[]` | Additional tool names added to `metadata.tools` | -| `namespace` | `"https://eclipse.dev/score"` | Base URI for the SPDX document namespace | -| `exclude_patterns` | _(build tools)_ | List of repo name substrings to exclude (e.g. `rules_rust`, `bazel_tools`). Defaults exclude common Bazel build-tool repos. | -| `dep_module_files` | `[]` | Additional MODULE.bazel files from dependency modules for version extraction | +|---|---|---| +| `name` | *(required)* | Rule name; also used as the output filename prefix (e.g. `my_sbom` → `my_sbom.spdx.json`). | +| `targets` | *(required)* | Bazel targets whose transitive dependencies are included in the SBOM. | +| `component_name` | rule `name` | Name of the root component written into the SBOM; defaults to the rule name if omitted. | +| `component_version` | `None` | Version string for the root component; auto-detected from the module graph when omitted. | +| `module_lockfiles` | `[]` | One or more `MODULE.bazel.lock` files used to extract dependency versions and SHA-256 checksums; C++ projects need only the workspace lockfile (`:MODULE.bazel.lock`), Rust projects should also pass `@score_crates//:MODULE.bazel.lock` to cover crate versions and checksums. | +| `auto_crates_cache` | `True` | Runs `generate_crates_metadata_cache` at build time (requires network) to fetch Rust crate license and supplier data from dash-license-scan and crates.io; set to `False` only as a workaround for air-gapped or offline build environments — doing so produces a non-compliant SBOM where all Rust crates show `NOASSERTION` for license, supplier, and description. Has no effect when no lockfiles are provided (pure C++ projects). | +| `cargo_lockfile` | `None` | Path to a `Cargo.lock` file for crate enumeration; not needed when `module_lockfiles` is provided, as a synthetic `Cargo.lock` is generated from it automatically. **Deprecated — will be removed in a future release.** | +| `cdxgen_sbom` | `None` | Label to a pre-generated cdxgen CycloneDX JSON file; alternative to `auto_cdxgen` for C++ projects where cdxgen cannot run inside the Bazel build (e.g. CI environment without npm). Run cdxgen manually and pass its output here. Ignored for pure Rust projects. | +| `auto_cdxgen` | `False` | Runs cdxgen automatically inside the Bazel build (requires npm + `@cyclonedx/cdxgen` installed on the build machine); alternative to `cdxgen_sbom` for C++ projects. Uses `no-sandbox` execution to scan the source tree. Ignored for pure Rust projects. | +| `output_formats` | `["spdx", "cyclonedx"]` | List of output formats to generate; valid values are `"spdx"` and `"cyclonedx"`. | +| `producer_name` | `"Eclipse Foundation"` | Organisation name recorded as the SBOM producer. | +| `producer_url` | Eclipse S-CORE URL | URL of the SBOM producer organisation. | +| `sbom_authors` | `None` | List of author strings written into SBOM metadata; defaults to `producer_name` when omitted. | +| `namespace` | `https://eclipse.dev/score` | URI used as the SPDX document namespace and CycloneDX serial number base. | +| `generation_context` | `None` | CycloneDX lifecycle phase label (e.g. `"build"`, `"release"`). | +| `sbom_tools` | `None` | List of tool name strings recorded in SBOM metadata alongside the generator itself. | +| `exclude_patterns` | `None` | List of repo name substrings to exclude from the dependency graph (e.g. build tools, test frameworks). | +| `dep_module_files` | `None` | `MODULE.bazel` files from dependency modules used for additional automatic version extraction. | +| `metadata_json` | `@sbom_metadata//:metadata.json` | Label to the metadata JSON produced by the `sbom_metadata` Bazel extension; rarely needs changing. | ## 3. Install Prerequisites -### For `auto_crates_cache` (Rust crate metadata) - -License data for Rust crates is fetched via [dash-license-scan](https://github.com/eclipse-score/dash-license-scan). Description and supplier metadata is fetched from the crates.io API (parallel, ~10 concurrent requests). Requires: +**Rust crate metadata** (`auto_crates_cache = True`): ```bash -# Install uv (Python package runner) curl -LsSf https://astral.sh/uv/install.sh | sh - -# Install Java >= 11 (required by Eclipse dash-licenses JAR) -# Option 1: Ubuntu/Debian -sudo apt install openjdk-11-jre-headless - -# Option 2: Fedora/RHEL -sudo dnf install java-11-openjdk-headless - -# Verify installation -uvx dash-license-scan --help -java -version +sudo apt install openjdk-11-jre-headless # or equivalent for your distro ``` -### For `auto_cdxgen` (C++ dependency scanning) - -If using `auto_cdxgen = True` to automatically scan C++ dependencies: +**C++ dependency scanning** (`auto_cdxgen = True`): ```bash -# Install Node.js and cdxgen globally -# Option 1: Using nvm (recommended) -curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash -source ~/.bashrc nvm install 20 npm install -g @cyclonedx/cdxgen - -# Verify installation -which cdxgen -cdxgen --version ``` -**Note:** If you don't have npm/cdxgen installed, set `auto_cdxgen = False` in your SBOM configuration. -When `auto_cdxgen` is enabled, the SBOM rule runs cdxgen against the repository path of the selected Bazel targets (for example `external/score_baselibs+` for `@score_baselibs//...` targets). +Set `auto_cdxgen = False` if cdxgen is not available. ## 4. Build @@ -114,31 +92,16 @@ bazel build //:my_sbom ## 5. Output -Generated files in `bazel-bin/`: +Generated in `bazel-bin/`: -- `my_sbom.spdx.json` — SPDX 2.3 format -- `my_sbom.cdx.json` — CycloneDX 1.6 format -- `my_sbom_crates_metadata.json` — Auto-generated Rust crate cache (if `auto_crates_cache = True`) -- `my_sbom_cdxgen.cdx.json` — C++ dependencies from cdxgen (if `auto_cdxgen = True`) +- `my_sbom.spdx.json` — SPDX 2.3 +- `my_sbom.cdx.json` — CycloneDX 1.6 +- `my_sbom_crates_metadata.json` — Rust crate cache (if `auto_crates_cache = True`) +- `my_sbom_cdxgen.cdx.json` — C++ scan output (if `auto_cdxgen = True`) --- -## Toolchain Components - -### Core Tools - -| Tool | Role | Required For | -|------|------|--------------| -| [Bazel](https://bazel.build) | Build system — rules, aspects, and module extensions drive dependency discovery and SBOM generation | All SBOM generation | -| [Python 3](https://www.python.org) | Runtime for the SBOM generator, formatters, and metadata extraction scripts | All SBOM generation | -| [dash-license-scan](https://github.com/eclipse-score/dash-license-scan) | Rust crate license metadata via Eclipse Foundation + ClearlyDefined | Rust metadata extraction when `auto_crates_cache = True` | -| [uv / uvx](https://docs.astral.sh/uv/) | Python package runner for dash-license-scan | Rust metadata extraction when `auto_crates_cache = True` | -| [Java >= 11](https://openjdk.org) | Runtime for Eclipse dash-licenses JAR (used by dash-license-scan) | Rust metadata extraction when `auto_crates_cache = True` | -| [crates.io API](https://crates.io) | Description and supplier metadata for Rust crates (parallel fetching) | Rust metadata extraction when `auto_crates_cache = True` | -| [@cyclonedx/cdxgen](https://github.com/CycloneDX/cdxgen) | C++ dependency scanner and license discovery tool | C++ metadata extraction when `auto_cdxgen = True` | -| [Node.js / npm](https://nodejs.org) | Runtime for cdxgen | C++ metadata extraction when `auto_cdxgen = True` | - -### Architecture +## Architecture ``` ┌──────────────────┐ @@ -171,170 +134,80 @@ Generated files in `bazel-bin/`: **Data sources:** - **Bazel module graph** — version, PURL, and registry info for `bazel_dep` modules - **Bazel aspect** — transitive dependency graph and external repo dependency edges -- **dash-license-scan** — Rust crate licenses via Eclipse Foundation + ClearlyDefined (from MODULE.bazel.lock or Cargo.lock) -- **crates.io API** — description and supplier for Rust crates (supplier extracted from GitHub repository URL) -- **cdxgen** — C++ dependency licenses, descriptions, and suppliers (from source tree scan) - -### Automatically Populated Fields - -The following SBOM fields are populated automatically without manual configuration: - -| Field | Rust Crates | C++ Dependencies | Bazel Modules | -|-------|-------------|------------------|---------------| -| License | dash-license-scan | cdxgen | — | -| Description | crates.io API | cdxgen (falls back to `"Missing"` when unavailable) | — | -| Supplier | crates.io API (GitHub org from repository URL) | cdxgen | — | -| Version | MODULE.bazel.lock / Cargo.lock | cdxgen (with MODULE.bazel.lock fallback for Bazel modules) | Bazel module graph | -| Checksum (SHA-256) | MODULE.bazel.lock / Cargo.lock | BCR `source.json` `sha256` + cdxgen `hashes` (when present) | http_archive `sha256` + MODULE.bazel.lock BCR `source.json` | -| PURL | Auto-generated (`pkg:cargo/...`) | cdxgen | Auto-generated | - -### Platform-Specific Crate Handling - -Crates with platform-specific suffixes (e.g. `iceoryx2-bb-lock-free-qnx8`) that don't exist on crates.io are handled by stripping the suffix and falling back to the base crate name for description and supplier lookup. - -### What Is Excluded from SBOM - -- Dependencies not in the transitive dep graph of your `targets` -- Build toolchain repos matching `exclude_patterns` (e.g. `rules_rust`, `rules_cc`, `bazel_tools`, `platforms`) - -## Example - -See [reference_integration/BUILD](../../reference_integration/BUILD) for working SBOM targets and [reference_integration/MODULE.bazel](../../reference_integration/MODULE.bazel) for the metadata extension setup. +- **dash-license-scan** — licenses data +- **crates.io API** — description and supplier for Rust crates +- **cdxgen** — C++ dependency licenses, descriptions, and suppliers -Each SBOM target uses `module_lockfiles` to provide crate version/checksum data from multiple lockfiles and `auto_crates_cache = True` to automatically fetch license, description, and supplier data. +### Automated Metadata Sources -### score_crates Integration +All license, hash, supplier, and description values are derived from automated sources: `MODULE.bazel.lock`, `http_archive` rules, dash-license-scan (Rust), crates.io API (Rust), and cdxgen (C++). Cache files such as `cpp_metadata.json` must never be hand-edited. -The `score_crates` module provides centralized Rust crate management for the SCORE project. Its `MODULE.bazel.lock` file contains the majority of resolved crate specs (name, version, sha256) generated by `cargo-bazel`. The workspace's own `MODULE.bazel.lock` may contain additional crates not in `score_crates`. Both lockfiles should be passed via `module_lockfiles` to ensure complete coverage. +CPE, aliases, and pedigree are the only fields that may be set manually via `sbom_ext.license()`, as they represent identity and provenance annotations that cannot be auto-deduced. -## CISA 2025 Element Coverage (CycloneDX) +### Required SBOM Fields (CISA 2025) -The table below maps the CISA 2025 draft elements to CycloneDX fields and notes current support in this SBOM generator. +Every component entry in the generated SBOM must include the following fields, as mandated by CISA 2025 minimum elements: -| CISA 2025 Element | CycloneDX Field (JSON) | Support | Notes | -|---|---|---|---| -| Software Producer | `components[].supplier.name` | **Supported** | Root producer is set in `metadata.component.supplier`. For components, supplier is auto-extracted from crates.io repository URL (Rust) or from cdxgen (C++); in the current baselibs example, Boost BCR modules have no supplier because cdxgen does not provide one. | -| Component Name | `components[].name` | **Supported** | Single name; aliases are stored as `properties` with `cdx:alias`. | -| Component Version | `components[].version` | **Supported** | If unknown and source is git repo with `commit_date`, version can fall back to that date. | -| Software Identifiers | `components[].purl`, `components[].cpe` | **Supported (PURL)** / **Optional (CPE)** | PURL is generated for all components. CPE is optional if provided in metadata. | -| Component Hash | `components[].hashes` | **Supported** | SHA-256 is populated for Rust crates (from lockfiles) and for BCR / http_archive / some cdxgen-backed C++ components. In the current examples, Rust crates and Boost BCR modules have hashes; some QNX-specific crates and other C++ deps may not. | -| License | `components[].licenses` | **Supported (Rust) / Best-effort (C++)** | Rust licenses are auto-fetched via dash-license-scan and are present for most crates (e.g. Kyron SBOM); some crates like `iceoryx2-*` may still lack licenses. For C++ components, licenses are only present when cdxgen (or an upstream SBOM) provides them; in the current baselibs example, Boost BCR modules have empty `licenses`. Compound SPDX expressions (AND/OR) use the `expression` field per CycloneDX spec. | -| Component Description | `components[].description` | **Supported** | Auto-fetched from crates.io API (Rust) and cdxgen (C++), with C++ falling back to `"Missing"` when no description is available (as seen for Boost in the baselibs SBOM). | -| Dependency Relationship | `dependencies` | **Supported** | Uses external repo dependency edges from Bazel aspect; both Kyron and baselibs SBOMs include a dependency graph for the root component. | -| Pedigree / Derivation | `components[].pedigree` | **Supported (manual)** | Must be provided via `sbom_ext.license()` with `pedigree_*` fields. Not auto-deduced. | -| SBOM Author | `metadata.authors` | **Supported** | Set via `sbom_authors` in `sbom()` rule (e.g. `"Eclipse SCORE Team"` in the examples). | -| Tool Name | `metadata.tools` | **Supported** | Always includes `score-sbom-generator`; extra tools can be added via `sbom_tools`. | -| Timestamp | `metadata.timestamp` | **Supported** | ISO 8601 UTC timestamp generated at build time. | -| Generation Context | `metadata.lifecycles` | **Supported** | Set via `generation_context` in `sbom()` rule (`pre-build`, `build`, `post-build`). | +| Field | SPDX 2.3 | CycloneDX 1.6 | Source | Description | +|---|---|---|---|---| +| Component name | `name` | `components[].name` | Extracted | Human-readable name of the dependency (e.g. `serde`, `boost.mp11`). | +| Component version | `versionInfo` | `components[].version` | Extracted | Exact released version string used in the build. | +| Component hash (SHA-256) | `checksums[SHA256]` | `components[].hashes` | Extracted | SHA-256 digest of the downloaded archive, sourced from `MODULE.bazel.lock` or the `http_archive` `sha256` field. | +| Software identifier (PURL) | `externalRefs[purl]` | `components[].purl` | Extracted | Package URL uniquely identifying the component by ecosystem, name, and version (e.g. `pkg:cargo/serde@1.0.228`). | +| License expression | `licenseConcluded` | `components[].licenses` | Extracted | SPDX license expression concluded for this component (e.g. `Apache-2.0 OR MIT`). | +| Dependency relationships | `relationships[DEPENDS_ON]` | `dependencies` | Extracted | Graph edges recording which component depends on which, enabling consumers to reason about transitive exposure. | +| Supplier | `supplier` | `components[].supplier.name` | Extracted | Organisation or individual that distributes the component (e.g. the crates.io publisher name). | +| Component description | `description` | `components[].description` | Extracted | Short human-readable summary of what the component does; set to `"Missing"` when no source can provide it. | +| SBOM author | `creationInfo.creators` | `metadata.authors` | Configured | Entity responsible for producing this SBOM document; set via `producer_name` in the `sbom()` rule (default: Eclipse Foundation). | +| Tool name | `creationInfo.creators` | `metadata.tools` | Auto-generated | Name and version of the tool that generated the SBOM. | +| Timestamp | `creationInfo.created` | `metadata.timestamp` | Auto-generated | ISO-8601 UTC timestamp recording when the SBOM was generated. | +| Generation context (lifecycle) | — | `metadata.lifecycles` | Auto-generated | CycloneDX lifecycle phase at which the SBOM was produced (e.g. `build`). | -### SPDX-Specific Notes +Legend: **Extracted** — derived automatically from the Bazel dependency graph, lockfiles, or external registries (crates.io, cdxgen). **Configured** — comes from an `sbom()` rule parameter with a sensible default. **Auto-generated** — computed at build time with no user input required. -- **LicenseRef-* declarations**: Any `LicenseRef-*` identifiers used in license fields are automatically declared in `hasExtractedLicensingInfos` as required by SPDX 2.3. -- **Supplier**: Emitted as `Organization: ` in the SPDX `supplier` field. +Fields are populated automatically from the sources described in [Automated Metadata Sources](#automated-metadata-sources) and [License Data by Language](#license-data-by-language). If a source cannot provide a value (e.g. cdxgen cannot resolve a C++ component), the field is omitted rather than filled with incorrect data — except for description, which is set to `"Missing"` to make the gap visible. -### Notes on Missing Data -If a field is absent in output, it usually means the source metadata was not provided: -- Licenses and suppliers are auto-populated from dash-license-scan (Rust) or cdxgen (C++). For C++ dependencies, licenses and suppliers are available only when cdxgen can resolve the component; Bazel Central Registry modules like `boost.*` may have empty licenses if cdxgen cannot infer them. -- CPE, aliases, and pedigree are optional and must be explicitly set via `sbom_ext.license()`. -- Rust crate licenses require a crates metadata cache; this is generated automatically when `module_lockfiles` (or `cargo_lockfile`) is provided to `sbom()`. License data is fetched via `dash-license-scan` (Eclipse Foundation + ClearlyDefined). The `score_crates` MODULE.bazel.lock combined with the workspace's MODULE.bazel.lock provides complete coverage. -- If cdxgen cannot resolve C++ package metadata for a Bazel-only dependency graph, SBOM generation sets C++ dependency descriptions to `"Missing"`. +### Component Scope -Examples (add to `MODULE.bazel`): +Only transitive dependencies of the declared build targets are included. Build-time tools (compilers, build systems, test frameworks) are excluded via `exclude_patterns`. -```starlark -# Optional metadata (CPE, aliases, pedigree) -# Note: sbom_ext.license() should only be used for pedigree, CPE, and aliases. -# Licenses and suppliers are auto-populated from dash-license-scan (Rust) or cdxgen (C++). -sbom_ext.license( - name = "linux-kernel", - cpe = "cpe:2.3:o:linux:linux_kernel:*:*:*:*:*:*:*:*", - aliases = ["linux", "kernel"], - pedigree_ancestors = ["pkg:generic/linux-kernel@5.10.130"], - pedigree_notes = "Backported CVE-2025-12345 fix from 5.10.130", -) -``` - -### C++ license data and dash-license-scan - -- **Rust crates** - Rust licenses are obtained via `generate_crates_metadata_cache.py`, which reads `MODULE.bazel.lock` / `Cargo.lock`, builds a synthetic `Cargo.lock`, runs `uvx dash-license-scan` (backed by Eclipse dash-licenses), and writes a `crates_metadata.json` cache that `sbom_generator.py` consumes. - -- **C++ dependencies** - C++ licenses and suppliers are resolved through two mechanisms: +### Component Hash Source - 1. **cdxgen scan** — when `auto_cdxgen = True` (or a `cdxgen_sbom` label is provided), cdxgen scans the source tree for C++ package metadata. This is the primary automated source for C++ license, supplier, version, and PURL. +SHA-256 checksums come exclusively from `MODULE.bazel.lock` `registryFileHashes` (BCR modules) or the `sha256` field of `http_archive` rules. If neither provides a checksum, the hash field is omitted rather than emitting an incorrect value. - 2. **`cpp_metadata.json` cache** — populated by running `generate_cpp_metadata_cache.py` against cdxgen output. **This file must always be generated by the script, never edited by hand.** See the no-manual-fallback requirement below. +### License Data by Language - There is currently **no dash-license-scan integration for C++ SBOMs**. `dash-license-scan` understands purls like `pkg:cargo/...`, `pkg:pypi/...`, `pkg:npm/...`, and `pkg:maven/...`, but not `pkg:generic/...` (used for BCR modules), so running it on the C++ CycloneDX SBOM does not improve C++ license coverage. +- **Rust**: Licenses via dash-license-scan (Eclipse Foundation + ClearlyDefined); descriptions and suppliers from crates.io API. Crates with platform-specific suffixes (e.g. `iceoryx2-bb-lock-free-qnx8`) fall back to the base crate name for lookup. +- **C++**: Licenses, descriptions, and suppliers via cdxgen source tree scan. There is no dash-license-scan integration for C++ — it does not support `pkg:generic/...` PURLs used by BCR modules. If cdxgen cannot resolve a component, its description is set to `"Missing"` and its license field is empty. -### No-manual-fallback requirement (MUST) +### Output Format Versions -**All SBOM fields must originate from automated sources. No manually-curated fallback values are permitted for any field — not checksum, not license, not supplier, not version, not PURL, not description.** +- **SPDX 2.3**: Migration to SPDX 3.0 is deferred until supported in production by at least one major consumer (Trivy, GitHub Dependabot, or Grype). As of early 2026, none support it and the reference Python library marks its own 3.0 support as experimental. `LicenseRef-*` identifiers are declared in `hasExtractedLicensingInfos` as required by SPDX 2.3; supplier is emitted as `Organization: `. +- **CycloneDX 1.6**: Emitted with `"specVersion": "1.6"` and `"$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json"`. -This applies to every data source in the pipeline: -| Source | Status | What it provides | -|---|---|---| -| `MODULE.bazel.lock` `source.json` sha256 | ✅ Automated | Checksum for BCR C++ modules | -| `http_archive sha256 =` field | ✅ Automated | Checksum for non-BCR deps | -| cdxgen source-tree scan | ✅ Automated | License, supplier, version, PURL for C++ | -| `generate_cpp_metadata_cache.py` output | ✅ Automated (generated from cdxgen) | Persistent C++ metadata cache | -| dash-license-scan | ✅ Automated | License for Rust crates | -| `cpp_metadata.json` with hand-written entries | ❌ **Forbidden** | — | -| `BCR_KNOWN_LICENSES` dict in `sbom_generator.py` | ⚠️ Known violation — must be removed | License/supplier for BCR C++ modules | - -**Why:** A manually-written value is version-pinned to whatever version string happens to be in the file at the time of writing. If the workspace resolves a different version of that component, the value silently describes the wrong artifact. An absent field is honest and correct; a manually-guessed field is a compliance violation and a traceability lie. - -**Correct behaviour for missing data:** If an automated source cannot determine a field, the field is absent in the SBOM output. This is expected and acceptable. - -**Enforcement:** `test_cpp_enrich_checksum.py::TestNoManualFallbackInCppMetadata` asserts that `cpp_metadata.json` is empty and contains no SBOM fields. If entries are needed, regenerate the file: +## How design is tested +To run tests ```bash -npx @cyclonedx/cdxgen -t cpp --deep -r -o cdxgen_output.cdx.json -python3 tooling/sbom/scripts/generate_cpp_metadata_cache.py \ - cdxgen_output.cdx.json tooling/sbom/cpp_metadata.json +# From tooling/ — run all SBOM tests +bazel test //sbom/tests/... ``` -**Known violation — `BCR_KNOWN_LICENSES`:** The `BCR_KNOWN_LICENSES` dict hardcoded in `sbom_generator.py` is a manually-maintained license/supplier table for Bazel Central Registry C++ modules. It violates this requirement and must be replaced with automated BCR metadata fetching (e.g. querying the BCR `MODULE.bazel` or `metadata.json` at build time). Until that is implemented, BCR C++ modules that cdxgen cannot resolve will have missing license fields in the SBOM — which is the correct, honest output. - ---- - -## SPDX Version Decision (stay on 2.3) - -This generator emits **SPDX 2.3** and will not migrate to SPDX 3.0 until tooling support matures. - -### Why not SPDX 3.0? - -SPDX 3.0 is a **breaking rewrite**, not an additive update: - -| Aspect | SPDX 2.3 | SPDX 3.0 | -|---|---|---| -| Serialization | Flat JSON | JSON-LD (`@context` + `@graph`) | -| Top-level key | `spdxVersion: "SPDX-2.3"` | `@context: "https://spdx.org/rdf/3.0.1/spdx-context.jsonld"` | -| Package fields | `versionInfo`, `licenseConcluded`, `SPDXID` | `software_packageVersion`, licensing profile objects, `spdxId` | -| Relationships | Array in document | Standalone elements in `@graph` | -| Profiles | None | Mandatory `profileConformance` declaration | +Sbom was also tested by external tool +https://sbomgenerator.com/tools/validator -**Downstream consumer support as of Feb 2026 — tools that read/process our SBOM output, none support SPDX 3.0:** +#### Tests description -| Tool | SPDX 2.3 | SPDX 3.0 | +| Test file | Bazel target | What it covers | |---|---|---| -| GitHub Dependabot / Dependency Submission API | ✅ SPDX 2.3 (export) / action works with 2.3 in practice | ❌ | -| Trivy | ✅ generates 2.3 | ❌ | -| Grype | ✅ consumes 2.x | ❌ | -| Syft | ✅ generates 2.3 | ❌ | -| spdx-tools (Python) | ✅ full support | ⚠️ "experimental, unstable" | - -The `spdx-tools` Python library (latest: v0.8.4, Jan 2025) still describes its SPDX 3.0 support as "neither complete nor stable" and explicitly warns against production use. v0.8.4 added Python 3.14 support but made no SPDX 3.0 improvements. - -For SCORE's use case (license data, PURL, checksums, dependency graph), SPDX 2.3 covers all requirements with zero compatibility issues. - -### Revisit trigger - -Reconsider migration when **Trivy or GitHub Dependabot** announces production SPDX 3.0 support. At that point the required changes are: - -- `tooling/sbom/internal/generator/spdx_formatter.py` — full rewrite (flat JSON → JSON-LD `@graph`, new field names) -- `tooling/sbom/tests/test_spdx_formatter.py` — all 17 tests need rewriting -- `tooling/sbom/scripts/spdx_to_github_snapshot.py` — relationship and `externalRefs` parsing +| `test_bcr_known_licenses.py` | `test_bcr_known_licenses` | `BCR_KNOWN_LICENSES` table; `apply_known_licenses()` priority chain (5 levels); `resolve_component()` integration after license resolution | +| `test_cpp_enrich_checksum.py` | `test_cpp_enrich_checksum` | `enrich_components_from_cpp_cache()` field propagation (checksum, normalised names, parent match); no-manual-curation rule on `cpp_metadata.json` | +| `test_cyclonedx_formatter.py` | `test_cyclonedx_formatter` | CycloneDX 1.6 document structure; license encoding (single id vs compound expression); `or`/`and` normalisation; dependency graph; `_normalize_spdx_license()` | +| `test_spdx_formatter.py` | `test_spdx_formatter` | SPDX 2.3 document structure; PURL as externalRef; SHA-256 checksums; DESCRIBES/DEPENDS_ON relationships; `hasExtractedLicensingInfos` for `LicenseRef-*`; `_normalize_spdx_license()` | +| `test_sbom_generator.py` | `test_sbom_generator` | `filter_repos()`; `resolve_component()` (all 8 repo-type branches); `deduplicate_components()`; `parse_module_bazel_files()`; `parse_module_lockfiles()`; `mark_missing_cpp_descriptions()`; `main()` end-to-end (15 scenarios: SPDX/CycloneDX output, BCR licenses, crate_universe, exclude patterns, version auto-detect, dep_module_files, module_lockfiles, --crates-cache, --cdxgen-sbom, output file selection) | +| `test_generate_crates_metadata_cache.py` | `test_generate_crates_metadata_cache` | `parse_dash_summary()`; `parse_module_bazel_lock()`; `generate_synthetic_cargo_lock()`; end-to-end summary CSV round-trip | +| `test_generate_cpp_metadata_cache.py` | `test_generate_cpp_metadata_cache` | `convert_cdxgen_to_cache()`: version, license (id/name/expression/AND), supplier (name/publisher fallback), PURL, URL from externalReferences, description | +| `test_spdx_to_github_snapshot.py` | `test_spdx_to_github_snapshot` | `convert_spdx_to_snapshot()`: top-level fields; direct vs. indirect classification; package filtering; manifest naming; `pkg:generic/` PURL support | +--- diff --git a/sbom/docs/requirements/component_requirements.rst b/sbom/docs/requirements/component_requirements.rst deleted file mode 100644 index 4a8af86..0000000 --- a/sbom/docs/requirements/component_requirements.rst +++ /dev/null @@ -1,88 +0,0 @@ -.. - # ******************************************************************************* - # Copyright (c) 2025 Contributors to the Eclipse Foundation - # - # See the NOTICE file(s) distributed with this work for additional - # information regarding copyright ownership. - # - # This program and the accompanying materials are made available under the - # terms of the Apache License Version 2.0 which is available at - # https://www.apache.org/licenses/LICENSE-2.0 - # - # SPDX-License-Identifier: Apache-2.0 - # ******************************************************************************* - -.. _sbom_component_requirements: - -Component Requirements -###################### - -.. document:: SBOM Generator Component Requirements - :id: doc__sbom_component_requirements - :status: valid - :safety: QM - :security: NO - :realizes: wp__requirements_comp - - -Metadata Provenance -=================== - -.. comp_req:: Component Checksum Automated Source - :id: comp_req__sbom__checksum_automated_source - :reqtype: Functional - :security: NO - :safety: QM - :satisfies: feat_req__sbom__cisa_2025_minimum_elements - :status: valid - - The generator shall source component SHA-256 checksums exclusively from - the following automated inputs: - - - ``MODULE.bazel.lock`` ``registryFileHashes`` entries pointing to - ``source.json`` files (for Bazel Central Registry modules), and - - the ``sha256`` field of ``http_archive`` rules (for non-BCR - dependencies). - - If neither source provides a checksum for a component, the hash field - shall be omitted from that component's SBOM entry. Omitting the field is - the correct output; emitting an incorrect or stale value is not permitted. - - -Output Format -============= - -.. comp_req:: SPDX Output Version - :id: comp_req__sbom__spdx_version - :reqtype: Functional - :security: NO - :safety: QM - :satisfies: feat_req__sbom__dual_format_output - :status: valid - - The generator shall emit SPDX 2.3 compliant JSON. Migration to SPDX 3.0 - shall not be performed until SPDX 3.0 output is supported in production - by at least one of the following downstream consumers: Trivy, GitHub - Dependabot Dependency Submission API, or Grype. - - :rationale: SPDX 3.0 is a breaking JSON-LD rewrite of the format. As of - February 2026 none of the major consumers support it, and the - reference Python library (spdx-tools v0.8.4) describes its own - 3.0 support as experimental and not recommended for production. - - -.. comp_req:: CycloneDX Output Version - :id: comp_req__sbom__cyclonedx_version - :reqtype: Functional - :security: NO - :safety: QM - :satisfies: feat_req__sbom__dual_format_output - :status: valid - - The generator shall emit CycloneDX 1.6 compliant JSON with - ``"$schema": "http://cyclonedx.org/schema/bom-1.6.schema.json"`` and - ``"specVersion": "1.6"``. - - -.. needextend:: docname is not None and "sbom" in id - :+tags: sbom diff --git a/sbom/docs/requirements/feature_requirements.rst b/sbom/docs/requirements/feature_requirements.rst deleted file mode 100644 index 80b121d..0000000 --- a/sbom/docs/requirements/feature_requirements.rst +++ /dev/null @@ -1,92 +0,0 @@ -.. - # ******************************************************************************* - # Copyright (c) 2025 Contributors to the Eclipse Foundation - # - # See the NOTICE file(s) distributed with this work for additional - # information regarding copyright ownership. - # - # This program and the accompanying materials are made available under the - # terms of the Apache License Version 2.0 which is available at - # https://www.apache.org/licenses/LICENSE-2.0 - # - # SPDX-License-Identifier: Apache-2.0 - # ******************************************************************************* - -.. _sbom_feature_requirements: - -Feature Requirements -#################### - -.. document:: SBOM Generator Feature Requirements - :id: doc__sbom_feature_requirements - :status: valid - :safety: QM - :security: NO - :realizes: wp__requirements_feat - - -CISA 2025 Minimum Elements -=========================== - -.. feat_req:: CISA 2025 Mandatory SBOM Elements - :id: feat_req__sbom__cisa_2025_minimum_elements - :reqtype: Functional - :security: NO - :safety: QM - :status: valid - - The SBOM generator shall produce output that contains all minimum elements - mandated by CISA 2025 for every component entry: component name, component - version, component hash (SHA-256), software identifier (PURL), license - expression, dependency relationships, SBOM author, timestamp, tool name, - and generation context (lifecycle phase). - - -Metadata Provenance -=================== - -.. feat_req:: Automated Metadata Sources - :id: feat_req__sbom__automated_metadata_sources - :reqtype: Process - :security: NO - :safety: QM - :status: valid - - All field values written into generated SBOM output shall be derived - exclusively from automated sources. No manually-curated static data, - hardcoded lookup tables, or hand-edited cache files shall be used to - supply values for any SBOM field. - -Component Scope -=============== - -.. feat_req:: Build Target Dependency Scope - :id: feat_req__sbom__build_target_scope - :reqtype: Functional - :security: NO - :safety: QM - :status: valid - - The SBOM shall include only components that are part of the transitive - dependency closure of the declared build targets. Build-time tools that - are not part of the delivered software (compilers, build systems, test - frameworks, and code generation utilities) shall be excluded from the - SBOM output. - - -Output Formats -============== - -.. feat_req:: Dual Format SBOM Output - :id: feat_req__sbom__dual_format_output - :reqtype: Interface - :security: NO - :safety: QM - :status: valid - - The SBOM generator shall produce output simultaneously in both SPDX 2.3 - JSON format and CycloneDX 1.6 JSON format from a single invocation. - - -.. needextend:: docname is not None and "sbom" in id - :+tags: sbom diff --git a/sbom/internal/generator/BUILD b/sbom/internal/generator/BUILD index c5947c9..035d1ee 100644 --- a/sbom/internal/generator/BUILD +++ b/sbom/internal/generator/BUILD @@ -26,12 +26,19 @@ py_library( ], ) +py_library( + name = "utils", + srcs = ["utils.py"], +) + py_library( name = "spdx_formatter", srcs = ["spdx_formatter.py"], + deps = [":utils"], ) py_library( name = "cyclonedx_formatter", srcs = ["cyclonedx_formatter.py"], + deps = [":utils"], ) diff --git a/sbom/internal/generator/cyclonedx_formatter.py b/sbom/internal/generator/cyclonedx_formatter.py index 9c31778..ef9ec70 100644 --- a/sbom/internal/generator/cyclonedx_formatter.py +++ b/sbom/internal/generator/cyclonedx_formatter.py @@ -6,23 +6,10 @@ CycloneDX 1.6 Specification: https://cyclonedx.org/docs/1.6/json/ """ -import re import uuid from typing import Any - -def _normalize_spdx_license(expr: str) -> str: - """Normalize SPDX boolean operators to uppercase as required by the spec. - - dash-license-scan returns lowercase operators (e.g. 'Apache-2.0 or MIT'). - SPDX 2.3 Appendix IV and CycloneDX 1.6 both require uppercase OR/AND/WITH. - Uses space-delimited substitution to avoid modifying license identifiers - that contain 'or'/'and' as substrings (e.g. GPL-2.0-or-later). - """ - expr = re.sub(r" or ", " OR ", expr, flags=re.IGNORECASE) - expr = re.sub(r" and ", " AND ", expr, flags=re.IGNORECASE) - expr = re.sub(r" with ", " WITH ", expr, flags=re.IGNORECASE) - return expr +from sbom.internal.generator.utils import _normalize_spdx_license def generate_cyclonedx( diff --git a/sbom/internal/generator/sbom_generator.py b/sbom/internal/generator/sbom_generator.py index 58c1728..138b8ca 100644 --- a/sbom/internal/generator/sbom_generator.py +++ b/sbom/internal/generator/sbom_generator.py @@ -546,6 +546,44 @@ def filter_repos(repos: list[str], exclude_patterns: list[str]) -> list[str]: return filtered +def _build_crate_result( + crate_name: str, + version: str, + crate_meta: dict[str, Any], +) -> dict[str, Any]: + """Build a crate component dict from parsed name/version and cache metadata.""" + result: dict[str, Any] = { + "name": crate_name, + "version": version, + "purl": f"pkg:cargo/{crate_name}@{version}", + "type": "library", + "source": "crates.io", + } + if crate_meta.get("license"): + result["license"] = crate_meta["license"] + if crate_meta.get("description"): + result["description"] = crate_meta["description"] + if crate_meta.get("supplier"): + result["supplier"] = crate_meta["supplier"] + if crate_meta.get("cpe"): + result["cpe"] = crate_meta["cpe"] + if crate_meta.get("aliases"): + result["aliases"] = crate_meta["aliases"] + if crate_meta.get("pedigree_ancestors"): + result["pedigree_ancestors"] = crate_meta["pedigree_ancestors"] + if crate_meta.get("pedigree_descendants"): + result["pedigree_descendants"] = crate_meta["pedigree_descendants"] + if crate_meta.get("pedigree_variants"): + result["pedigree_variants"] = crate_meta["pedigree_variants"] + if crate_meta.get("pedigree_notes"): + result["pedigree_notes"] = crate_meta["pedigree_notes"] + if crate_meta.get("repository"): + result["url"] = crate_meta["repository"] + if crate_meta.get("checksum"): + result["checksum"] = crate_meta["checksum"] + return result + + def resolve_component( repo_name: str, metadata: dict[str, Any] ) -> dict[str, Any] | None: @@ -711,37 +749,7 @@ def resolve_component( crate_meta = cached_crates.get(crate_name) or cached_crates.get( crate_name.replace("-", "_"), {} ) - - result = { - "name": crate_name, - "version": version, - "purl": f"pkg:cargo/{crate_name}@{version}", - "type": "library", - "source": "crates.io", - } - if crate_meta.get("license"): - result["license"] = crate_meta["license"] - if crate_meta.get("description"): - result["description"] = crate_meta["description"] - if crate_meta.get("supplier"): - result["supplier"] = crate_meta["supplier"] - if crate_meta.get("cpe"): - result["cpe"] = crate_meta["cpe"] - if crate_meta.get("aliases"): - result["aliases"] = crate_meta["aliases"] - if crate_meta.get("pedigree_ancestors"): - result["pedigree_ancestors"] = crate_meta["pedigree_ancestors"] - if crate_meta.get("pedigree_descendants"): - result["pedigree_descendants"] = crate_meta["pedigree_descendants"] - if crate_meta.get("pedigree_variants"): - result["pedigree_variants"] = crate_meta["pedigree_variants"] - if crate_meta.get("pedigree_notes"): - result["pedigree_notes"] = crate_meta["pedigree_notes"] - if crate_meta.get("repository"): - result["url"] = crate_meta["repository"] - if crate_meta.get("checksum"): - result["checksum"] = crate_meta["checksum"] - return result + return _build_crate_result(crate_name, version, crate_meta) # Handle legacy crate universe format (e.g., crates_io__tokio-1.10.0) if repo_name.startswith("crates_io__") or "_crates__" in repo_name: @@ -759,37 +767,7 @@ def resolve_component( crate_meta = cached_crates.get(crate_name) or cached_crates.get( crate_name.replace("-", "_"), {} ) - - result = { - "name": crate_name, - "version": version, - "purl": f"pkg:cargo/{crate_name}@{version}", - "type": "library", - "source": "crates.io", - } - if crate_meta.get("license"): - result["license"] = crate_meta["license"] - if crate_meta.get("description"): - result["description"] = crate_meta["description"] - if crate_meta.get("supplier"): - result["supplier"] = crate_meta["supplier"] - if crate_meta.get("cpe"): - result["cpe"] = crate_meta["cpe"] - if crate_meta.get("aliases"): - result["aliases"] = crate_meta["aliases"] - if crate_meta.get("pedigree_ancestors"): - result["pedigree_ancestors"] = crate_meta["pedigree_ancestors"] - if crate_meta.get("pedigree_descendants"): - result["pedigree_descendants"] = crate_meta["pedigree_descendants"] - if crate_meta.get("pedigree_variants"): - result["pedigree_variants"] = crate_meta["pedigree_variants"] - if crate_meta.get("pedigree_notes"): - result["pedigree_notes"] = crate_meta["pedigree_notes"] - if crate_meta.get("repository"): - result["url"] = crate_meta["repository"] - if crate_meta.get("checksum"): - result["checksum"] = crate_meta["checksum"] - return result + return _build_crate_result(crate_name, version, crate_meta) # Check if repo is a sub-library of a known parent (e.g., boost.config+ -> boost) # rules_boost splits Boost into individual repos like boost.config+, boost.assert+, etc. diff --git a/sbom/internal/generator/spdx_formatter.py b/sbom/internal/generator/spdx_formatter.py index 6ad8af4..e0dbf69 100644 --- a/sbom/internal/generator/spdx_formatter.py +++ b/sbom/internal/generator/spdx_formatter.py @@ -10,19 +10,7 @@ import uuid from typing import Any - -def _normalize_spdx_license(expr: str) -> str: - """Normalize SPDX boolean operators to uppercase as required by the spec. - - dash-license-scan returns lowercase operators (e.g. 'Apache-2.0 or MIT'). - SPDX 2.3 requires uppercase OR/AND/WITH (Appendix IV). - Uses space-delimited substitution to avoid modifying license identifiers - that contain 'or'/'and' as substrings (e.g. GPL-2.0-or-later). - """ - expr = re.sub(r" or ", " OR ", expr, flags=re.IGNORECASE) - expr = re.sub(r" and ", " AND ", expr, flags=re.IGNORECASE) - expr = re.sub(r" with ", " WITH ", expr, flags=re.IGNORECASE) - return expr +from sbom.internal.generator.utils import _normalize_spdx_license def generate_spdx( diff --git a/sbom/internal/generator/utils.py b/sbom/internal/generator/utils.py new file mode 100644 index 0000000..3c72b62 --- /dev/null +++ b/sbom/internal/generator/utils.py @@ -0,0 +1,17 @@ +"""Shared utilities for SBOM formatters.""" + +import re + + +def _normalize_spdx_license(expr: str) -> str: + """Normalize SPDX boolean operators to uppercase as required by the spec. + + dash-license-scan returns lowercase operators (e.g. 'Apache-2.0 or MIT'). + SPDX 2.3 requires uppercase OR/AND/WITH (Appendix IV). + Uses space-delimited substitution to avoid modifying license identifiers + that contain 'or'/'and' as substrings (e.g. GPL-2.0-or-later). + """ + expr = re.sub(r" or ", " OR ", expr, flags=re.IGNORECASE) + expr = re.sub(r" and ", " AND ", expr, flags=re.IGNORECASE) + expr = re.sub(r" with ", " WITH ", expr, flags=re.IGNORECASE) + return expr diff --git a/sbom/scripts/BUILD.bazel b/sbom/scripts/BUILD.bazel index c33b3c6..1ef84ee 100644 --- a/sbom/scripts/BUILD.bazel +++ b/sbom/scripts/BUILD.bazel @@ -1,5 +1,35 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + +load("@rules_python//python:defs.bzl", "py_library") + package(default_visibility = ["//sbom:__subpackages__"]) exports_files([ "generate_crates_metadata_cache.py", ]) + +py_library( + name = "generate_crates_metadata_cache", + srcs = ["generate_crates_metadata_cache.py"], +) + +py_library( + name = "generate_cpp_metadata_cache", + srcs = ["generate_cpp_metadata_cache.py"], +) + +py_library( + name = "spdx_to_github_snapshot", + srcs = ["spdx_to_github_snapshot.py"], +) diff --git a/sbom/scripts/generate_crates_metadata_cache.py b/sbom/scripts/generate_crates_metadata_cache.py index 0c2a16f..d7ab829 100755 --- a/sbom/scripts/generate_crates_metadata_cache.py +++ b/sbom/scripts/generate_crates_metadata_cache.py @@ -27,10 +27,10 @@ import urllib.request from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path -from typing import Any, Dict +from typing import Any -def parse_cargo_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: +def parse_cargo_lock(lockfile_path: str) -> dict[str, dict[str, Any]]: """Parse Cargo.lock and extract crate information. Args: @@ -71,7 +71,7 @@ def parse_cargo_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: return crates -def parse_module_bazel_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: +def parse_module_bazel_lock(lockfile_path: str) -> dict[str, dict[str, Any]]: """Parse MODULE.bazel.lock and extract crate information from cargo-bazel resolution. The MODULE.bazel.lock (from score_crates or similar) contains resolved crate @@ -134,23 +134,8 @@ def parse_module_bazel_lock(lockfile_path: str) -> Dict[str, Dict[str, Any]]: return crates -def build_dash_coordinates(crates: Dict[str, Dict[str, Any]]) -> list[str]: - """Build Eclipse dash-license-scan coordinate strings from crate data. - - Args: - crates: Dict mapping crate name to {name, version, checksum, ...} - - Returns: - Sorted list of coordinate strings: "crate/cratesio/-/{name}/{version}" - """ - return [ - f"crate/cratesio/-/{info['name']}/{info['version']}" - for _key, info in sorted(crates.items()) - ] - - def generate_synthetic_cargo_lock( - crates: Dict[str, Dict[str, Any]], output_path: str + crates: dict[str, dict[str, Any]], output_path: str ) -> None: """Generate a minimal synthetic Cargo.lock from parsed crate data. @@ -264,7 +249,7 @@ def run_dash_license_scan(cargo_lock_path: str, summary_output_path: str) -> Non print(f" NOTE: {result.returncode} crate(s) have 'restricted' license status") -def parse_dash_summary(summary_path: str) -> Dict[str, str]: +def parse_dash_summary(summary_path: str) -> dict[str, str]: """Parse the dash-licenses summary CSV file into a license lookup dict. Each line has format: @@ -276,7 +261,7 @@ def parse_dash_summary(summary_path: str) -> Dict[str, str]: Returns: Dict mapping crate name to SPDX license expression string """ - licenses: Dict[str, str] = {} + licenses: dict[str, str] = {} with open(summary_path, encoding="utf-8") as f: for line in f: line = line.strip() @@ -312,7 +297,7 @@ def _extract_supplier(repository_url: str) -> str: return m.group(1) if m else "" -def _fetch_one_crate_meta(name: str) -> tuple[str, Dict[str, str]]: +def _fetch_one_crate_meta(name: str) -> tuple[str, dict[str, str]]: """Fetch metadata for a single crate from crates.io API. Returns (name, {description, supplier}) dict. @@ -346,7 +331,7 @@ def _fetch_one_crate_meta(name: str) -> tuple[str, Dict[str, str]]: def fetch_crate_metadata_from_cratesio( crate_names: list[str], -) -> Dict[str, Dict[str, str]]: +) -> dict[str, dict[str, str]]: """Fetch metadata (description, supplier) from crates.io API (parallel). Args: @@ -358,7 +343,7 @@ def fetch_crate_metadata_from_cratesio( total = len(crate_names) print(f"Fetching metadata from crates.io for {total} crates...") - metadata: Dict[str, Dict[str, str]] = {} + metadata: dict[str, dict[str, str]] = {} done = 0 with ThreadPoolExecutor(max_workers=10) as pool: futures = {pool.submit(_fetch_one_crate_meta, n): n for n in crate_names} @@ -381,7 +366,7 @@ def fetch_crate_metadata_from_cratesio( def generate_cache( cargo_lock_path: str | None = None, module_lock_paths: list[str] | None = None, -) -> Dict[str, Dict[str, Any]]: +) -> dict[str, dict[str, Any]]: """Generate metadata cache from lockfiles + dash-license-scan. 1. Parse Cargo.lock and/or MODULE.bazel.lock files for crate names, versions, checksums @@ -397,7 +382,7 @@ def generate_cache( Returns: Dict mapping crate name to metadata """ - crates: Dict[str, Dict[str, Any]] = {} + crates: dict[str, dict[str, Any]] = {} if cargo_lock_path: print(f"Parsing {cargo_lock_path}...") @@ -441,7 +426,7 @@ def generate_cache( cratesio_meta = fetch_crate_metadata_from_cratesio(list(crates.keys())) # Build final cache - cache: Dict[str, Dict[str, Any]] = {} + cache: dict[str, dict[str, Any]] = {} for name, info in crates.items(): meta = cratesio_meta.get(name, {}) cache[name] = { diff --git a/sbom/tests/BUILD b/sbom/tests/BUILD index 233b6e0..e1d9a8b 100644 --- a/sbom/tests/BUILD +++ b/sbom/tests/BUILD @@ -39,3 +39,34 @@ score_py_pytest( data = ["//sbom:cpp_metadata.json"], deps = ["//sbom/internal/generator:sbom_generator_lib"], ) + +score_py_pytest( + name = "test_sbom_generator", + srcs = ["test_sbom_generator.py"], + deps = ["//sbom/internal/generator:sbom_generator_lib"], +) + +score_py_pytest( + name = "test_generate_crates_metadata_cache", + srcs = ["test_generate_crates_metadata_cache.py"], + deps = ["//sbom/scripts:generate_crates_metadata_cache"], +) + +score_py_pytest( + name = "test_generate_cpp_metadata_cache", + srcs = ["test_generate_cpp_metadata_cache.py"], + deps = ["//sbom/scripts:generate_cpp_metadata_cache"], +) + +score_py_pytest( + name = "test_spdx_to_github_snapshot", + srcs = ["test_spdx_to_github_snapshot.py"], + deps = ["//sbom/scripts:spdx_to_github_snapshot"], +) + +score_py_pytest( + name = "test_real_sbom_integration", + srcs = ["test_real_sbom_integration.py"], + data = glob(["fixtures/**"]), + deps = ["//sbom/internal/generator:sbom_generator_lib"], +) diff --git a/sbom/tests/fixtures/baselibs_input.json b/sbom/tests/fixtures/baselibs_input.json new file mode 100644 index 0000000..5aa6360 --- /dev/null +++ b/sbom/tests/fixtures/baselibs_input.json @@ -0,0 +1,271 @@ +{ + "config": { + "component_name": "score_baselibs", + "component_version": "", + "generation_context": "build", + "namespace": "https://eclipse.dev/score", + "producer_name": "Eclipse Foundation", + "producer_url": "https://projects.eclipse.org/projects/automotive.score", + "sbom_authors": [ + "Eclipse SCORE Team" + ], + "sbom_tools": [] + }, + "dep_module_files": [], + "exclude_patterns": [ + "rules_rust", + "rules_cc", + "bazel_tools", + "platforms", + "bazel_skylib", + "rules_python", + "rules_proto", + "protobuf", + "local_config_", + "remote_" + ], + "external_dep_edges": [ + "score_baselibs+::score_baselibs+", + "boost.config+::boost.config+", + "boost.assert+::boost.config+", + "boost.assert+::boost.assert+", + "boost.mp11+::boost.mp11+", + "boost.describe+::boost.mp11+", + "boost.describe+::boost.describe+", + "boost.static_assert+::boost.config+", + "boost.static_assert+::boost.static_assert+", + "boost.type_traits+::boost.config+", + "boost.type_traits+::boost.static_assert+", + "boost.type_traits+::boost.type_traits+", + "boost.container_hash+::boost.config+", + "boost.container_hash+::boost.describe+", + "boost.container_hash+::boost.mp11+", + "boost.container_hash+::boost.type_traits+", + "boost.container_hash+::boost.container_hash+", + "boost.move+::boost.config+", + "boost.move+::boost.move+", + "boost.intrusive+::boost.assert+", + "boost.intrusive+::boost.config+", + "boost.intrusive+::boost.container_hash+", + "boost.intrusive+::boost.move+", + "boost.intrusive+::boost.static_assert+", + "boost.intrusive+::boost.intrusive+", + "boost.container+::boost.assert+", + "boost.container+::boost.config+", + "boost.container+::boost.intrusive+", + "boost.container+::boost.move+", + "boost.container+::boost.static_assert+", + "boost.container+::boost.container+", + "boost.throw_exception+::boost.assert+", + "boost.throw_exception+::boost.config+", + "boost.throw_exception+::boost.throw_exception+", + "boost.core+::boost.assert+", + "boost.core+::boost.config+", + "boost.core+::boost.static_assert+", + "boost.core+::boost.throw_exception+", + "boost.core+::boost.core+", + "boost.predef+::boost.predef+", + "boost.preprocessor+::boost.preprocessor+", + "boost.tuple+::boost.config+", + "boost.tuple+::boost.core+", + "boost.tuple+::boost.static_assert+", + "boost.tuple+::boost.type_traits+", + "boost.tuple+::boost.tuple+", + "boost.unordered+::boost.assert+", + "boost.unordered+::boost.config+", + "boost.unordered+::boost.container_hash+", + "boost.unordered+::boost.core+", + "boost.unordered+::boost.move+", + "boost.unordered+::boost.mp11+", + "boost.unordered+::boost.predef+", + "boost.unordered+::boost.preprocessor+", + "boost.unordered+::boost.static_assert+", + "boost.unordered+::boost.throw_exception+", + "boost.unordered+::boost.tuple+", + "boost.unordered+::boost.type_traits+", + "boost.unordered+::boost.unordered+", + "boost.winapi+::boost.config+", + "boost.winapi+::boost.predef+", + "boost.winapi+::boost.winapi+", + "boost.interprocess+::boost.assert+", + "boost.interprocess+::boost.config+", + "boost.interprocess+::boost.container+", + "boost.interprocess+::boost.core+", + "boost.interprocess+::boost.intrusive+", + "boost.interprocess+::boost.move+", + "boost.interprocess+::boost.static_assert+", + "boost.interprocess+::boost.type_traits+", + "boost.interprocess+::boost.unordered+", + "boost.interprocess+::boost.winapi+", + "boost.interprocess+::boost.interprocess+", + "score_baselibs+::boost.container+", + "score_baselibs+::boost.interprocess+", + "score_baselibs++_repo_rules+acl-deb::score_baselibs++_repo_rules+acl-deb", + "score_baselibs+::score_baselibs++_repo_rules+acl-deb" + ], + "external_repos": [ + "score_baselibs+", + "boost.config+", + "boost.assert+", + "boost.mp11+", + "boost.describe+", + "boost.static_assert+", + "boost.type_traits+", + "boost.container_hash+", + "boost.move+", + "boost.intrusive+", + "boost.container+", + "boost.throw_exception+", + "boost.core+", + "boost.predef+", + "boost.preprocessor+", + "boost.tuple+", + "boost.unordered+", + "boost.winapi+", + "boost.interprocess+", + "score_baselibs++_repo_rules+acl-deb" + ], + "module_lockfiles": [ + "__FIXTURE_REFINT_LOCK__" + ], + "target_labels": [ + "@@score_baselibs+//score/concurrency:concurrency", + "@@score_baselibs+//score/memory/shared:shared" + ], + "transitive_deps": [ + "@@score_baselibs+//score/concurrency:destruction_guard", + "@@score_baselibs+//score/language/futurecpp:math", + "@@score_baselibs+//score/language/futurecpp:futurecpp", + "@@score_baselibs+//score/concurrency:condition_variable", + "@@score_baselibs+//score/concurrency:interruptible_wait", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:allocator_wrapper", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:scope_state", + "@@score_baselibs+//score/language/safecpp/scoped_function:scope", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:type_erasure_pointer", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:allocator_aware_erased_type", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:allocator_aware_type_erasure_pointer", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:invoker", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:function_wrapper", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:is_callable_from", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:modify_return_type", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:modify_signature", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:call_and_return_modified", + "@@score_baselibs+//score/language/safecpp/scoped_function/details:scoped_function_invoker", + "@@score_baselibs+//score/language/safecpp/scoped_function:copyable_scoped_function", + "@@score_baselibs+//score/language/safecpp/scoped_function:move_only_scoped_function", + "@@score_baselibs+//score/result:error", + "@@score_baselibs+//score/result/details/expected:expected", + "@@score_baselibs+//score/result/details/expected:extensions", + "@@score_baselibs+//score/result:result", + "@@score_baselibs+//score/concurrency/future:future", + "@@score_baselibs+//score/concurrency:task_result_base", + "@@score_baselibs+//score/concurrency:shared_task_result", + "@@score_baselibs+//score/concurrency:task_result", + "@@score_baselibs+//score/concurrency:executor", + "@@score_baselibs+//score/concurrency:delayed_task", + "@@score_baselibs+//score/os/utils/interprocess:interprocess_mutex", + "@@score_baselibs+//score/os/utils/interprocess:interprocess_conditional_variable", + "@@score_baselibs+//score/concurrency:interruptible_interprocess_condition_variable", + "@@score_baselibs+//score/os:errno", + "@@score_baselibs+//score/os:object_seam", + "@@score_baselibs+//score/os:pthread_default", + "@@score_baselibs+//score/concurrency:long_running_threads_container", + "@@score_baselibs+//score/concurrency:notification", + "@@score_baselibs+//score/concurrency:periodic_task", + "@@score_baselibs+//score/concurrency:synchronized_queue", + "@@score_baselibs+//score/concurrency:thread_pool", + "@@score_baselibs+//score/concurrency/timed_executor:timed_task", + "@@score_baselibs+//score/concurrency/timed_executor:delayed_task", + "@@score_baselibs+//score/concurrency/timed_executor:periodic_task", + "@@score_baselibs+//score/concurrency/timed_executor:timed_executor", + "@@score_baselibs+//score/concurrency/timed_executor:concurrent_timed_executor", + "@@score_baselibs+//score/concurrency:concurrency", + "@@score_baselibs+//score/memory/shared:managed_memory_resource", + "@@score_baselibs+//score/memory/shared:memory_region_bounds", + "@@score_baselibs+//score/memory/shared:atomic_interface", + "@@score_baselibs+//score/memory/shared:atomic_indirector", + "@@score_baselibs+//score/mw/log:shared_types", + "@@score_baselibs+//score/mw/log:recorder", + "@@score_baselibs+//score/static_reflection_with_serialization/visitor:visitor", + "@@score_baselibs+//score/mw/log/detail:logging_identifier", + "@@score_baselibs+//score/mw/log/detail:thread_local_guard", + "@@score_baselibs+//score/mw/log:log_stream", + "@@score_baselibs+//score/mw/log/detail/wait_free_stack:wait_free_stack", + "@@score_baselibs+//score/mw/log:recorder_interface", + "@@score_baselibs+//score/mw/log/detail:circular_allocator", + "@@score_baselibs+//score/mw/log/detail:helper_functions", + "@@score_baselibs+//score/mw/log/detail:log_entry", + "@@score_baselibs+//score/mw/log/detail:log_data_types", + "@@score_baselibs+//score/mw/log/detail:types_and_errors", + "@@score_baselibs+//score/mw/log/detail:empty_recorder", + "@@score_baselibs+//score/mw/log:frontend", + "@@score_baselibs+//score/memory/shared:memory_region_map", + "@@score_baselibs+//score/language/safecpp/safe_math:error", + "@@score_baselibs+//score/language/safecpp/safe_math/details/type_traits:type_traits", + "@@score_baselibs+//score/language/safecpp/safe_math/details/comparison:comparison", + "@@score_baselibs+//score/language/safecpp/safe_math/details/absolute:absolute", + "@@score_baselibs+//score/language/safecpp/safe_math/details:floating_point_environment", + "@@score_baselibs+//score/language/safecpp/safe_math/details/cast:cast", + "@@score_baselibs+//score/language/safecpp/safe_math/details/negate:negate", + "@@score_baselibs+//score/language/safecpp/safe_math/details/addition_subtraction:addition_subtraction", + "@@score_baselibs+//score/language/safecpp/safe_math/details/division:division", + "@@score_baselibs+//score/language/safecpp/safe_math/details/multiplication:multiplication", + "@@score_baselibs+//score/language/safecpp/safe_math:safe_math", + "@@score_baselibs+//score/memory:data_type_size_info", + "@@score_baselibs+//score/memory/shared:pointer_arithmetic_util", + "@@score_baselibs+//score/memory/shared:shared_memory_error", + "@@score_baselibs+//score/memory/shared:memory_resource_registry", + "@@score_baselibs+//score/memory/shared:memory_resource_proxy", + "@@score_baselibs+//score/memory/shared:offset_ptr_bounds_check", + "@@score_baselibs+//score/quality/compiler_warnings:compiler_warnings", + "@@score_baselibs+//score/memory/shared:offset_ptr", + "@@score_baselibs+//score/memory/shared:polymorphic_offset_ptr_allocator", + "@@boost.config+//:boost.config", + "@@boost.assert+//:boost.assert", + "@@boost.mp11+//:boost.mp11", + "@@boost.describe+//:boost.describe", + "@@boost.static_assert+//:boost.static_assert", + "@@boost.type_traits+//:boost.type_traits", + "@@boost.container_hash+//:boost.container_hash", + "@@boost.move+//:boost.move", + "@@boost.intrusive+//:boost.intrusive", + "@@boost.container+//:boost.container", + "@@boost.throw_exception+//:boost.throw_exception", + "@@boost.core+//:boost.core", + "@@boost.predef+//:boost.predef", + "@@boost.preprocessor+//:boost.preprocessor", + "@@boost.tuple+//:boost.tuple", + "@@boost.unordered+//:boost.unordered", + "@@boost.winapi+//:boost.winapi", + "@@boost.interprocess+//:boost.interprocess", + "@@score_baselibs+//score/memory/shared:map", + "@@score_baselibs+//score/memory/shared:string", + "@@score_baselibs+//score/memory/shared:vector", + "@@score_baselibs+//score/memory/shared:types", + "@@score_baselibs++_repo_rules+acl-deb//:acl", + "@@score_baselibs+//score/os:acl", + "@@score_baselibs+//score/os/utils/acl:acl", + "@@score_baselibs+//score/memory/shared:user_permission", + "@@score_baselibs+//score/memory/shared:i_shared_memory_resource", + "@@score_baselibs+//score/os:errno_logging", + "@@score_baselibs+//score/memory/shared/typedshm/typedshm_wrapper:typedmemory", + "@@score_baselibs+//score/memory/shared:i_shared_memory_factory", + "@@score_baselibs+//score/bitmanipulation:bitmask_operators", + "@@score_baselibs+//score/os:stat", + "@@score_baselibs+//score/os:fcntl", + "@@score_baselibs+//score/os:static_destruction_guard", + "@@score_baselibs+//score/os:unistd", + "@@score_baselibs+//score/memory/shared:lock_file", + "@@score_baselibs+//score/memory/shared/sealedshm/sealedshm_wrapper:sealedshm", + "@@score_baselibs+//score/os:rt_default", + "@@score_baselibs+//score/os:mman", + "@@score_baselibs+//score/language/safecpp/string_view:null_termination_violation_policies", + "@@score_baselibs+//score/language/safecpp/string_view/details:zspan", + "@@score_baselibs+//score/language/safecpp/string_view:zstring_view", + "@@score_baselibs+//score/language/safecpp/string_view:null_termination_check", + "@@score_baselibs+//score/memory/shared:shared_memory_resource", + "@@score_baselibs+//score/memory/shared:shared_memory_factory_impl", + "@@score_baselibs+//score/memory/shared:shared_memory_factory", + "@@score_baselibs+//score/memory/shared:shared" + ] +} \ No newline at end of file diff --git a/sbom/tests/fixtures/crates_metadata.json b/sbom/tests/fixtures/crates_metadata.json new file mode 100755 index 0000000..8fbd99b --- /dev/null +++ b/sbom/tests/fixtures/crates_metadata.json @@ -0,0 +1,2306 @@ +{ + "adler32": { + "checksum": "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234", + "description": "Minimal Adler32 implementation for Rust.", + "license": "Zlib", + "purl": "pkg:cargo/adler32@1.2.0", + "supplier": "remram44", + "version": "1.2.0" + }, + "aho-corasick": { + "checksum": "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301", + "description": "Fast multiple substring searching.", + "license": "MIT OR Unlicense", + "purl": "pkg:cargo/aho-corasick@1.1.4", + "supplier": "BurntSushi", + "version": "1.1.4" + }, + "aliasable": { + "checksum": "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd", + "description": "Basic aliasable (non unique pointer) types", + "license": "MIT", + "purl": "pkg:cargo/aliasable@0.1.3", + "supplier": "avitex", + "version": "0.1.3" + }, + "anstream": { + "checksum": "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a", + "description": "IO stream adapters for writing colored text that will gracefully degrade according to your terminal's capabilities.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/anstream@0.6.21", + "supplier": "rust-cli", + "version": "0.6.21" + }, + "anstyle": { + "checksum": "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78", + "description": "ANSI text styling", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/anstyle@1.0.13", + "supplier": "rust-cli", + "version": "1.0.13" + }, + "anstyle-parse": { + "checksum": "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2", + "description": "Parse ANSI Style Escapes", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/anstyle-parse@0.2.7", + "supplier": "rust-cli", + "version": "0.2.7" + }, + "anstyle-query": { + "checksum": "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc", + "description": "Look up colored console capabilities", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/anstyle-query@1.1.5", + "supplier": "rust-cli", + "version": "1.1.5" + }, + "anstyle-wincon": { + "checksum": "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d", + "description": "Styling legacy Windows terminals", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/anstyle-wincon@3.0.11", + "supplier": "rust-cli", + "version": "3.0.11" + }, + "anyhow": { + "checksum": "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61", + "description": "Flexible concrete Error type built on std::error::Error", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/anyhow@1.0.100", + "supplier": "dtolnay", + "version": "1.0.100" + }, + "argh": { + "checksum": "34ff18325c8a36b82f992e533ece1ec9f9a9db446bd1c14d4f936bac88fcd240", + "description": "Derive-based argument parsing optimized for code size", + "license": "BSD-3-Clause", + "purl": "pkg:cargo/argh@0.1.13", + "supplier": "google", + "version": "0.1.13" + }, + "argh_derive": { + "checksum": "adb7b2b83a50d329d5d8ccc620f5c7064028828538bdf5646acd60dc1f767803", + "description": "Derive-based argument parsing optimized for code size", + "license": "BSD-3-Clause", + "purl": "pkg:cargo/argh_derive@0.1.13", + "supplier": "google", + "version": "0.1.13" + }, + "argh_shared": { + "checksum": "a464143cc82dedcdc3928737445362466b7674b5db4e2eb8e869846d6d84f4f6", + "description": "Derive-based argument parsing optimized for code size", + "license": "BSD-3-Clause", + "purl": "pkg:cargo/argh_shared@0.1.13", + "supplier": "google", + "version": "0.1.13" + }, + "arrayvec": { + "checksum": "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50", + "description": "A vector with fixed capacity, backed by an array (it can be stored on the stack too). Implements fixed capacity ArrayVec and ArrayString.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/arrayvec@0.7.6", + "supplier": "bluss", + "version": "0.7.6" + }, + "async-stream": { + "checksum": "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476", + "description": "Asynchronous streams using async & await notation", + "license": "MIT", + "purl": "pkg:cargo/async-stream@0.3.6", + "supplier": "tokio-rs", + "version": "0.3.6" + }, + "async-stream-impl": { + "checksum": "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d", + "description": "proc macros for async-stream crate", + "license": "MIT", + "purl": "pkg:cargo/async-stream-impl@0.3.6", + "supplier": "tokio-rs", + "version": "0.3.6" + }, + "async-trait": { + "checksum": "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb", + "description": "Type erasure for async trait methods", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/async-trait@0.1.89", + "supplier": "dtolnay", + "version": "0.1.89" + }, + "atomic-polyfill": { + "checksum": "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4", + "description": "Atomic polyfills, for targets where they're not available.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/atomic-polyfill@1.0.3", + "supplier": "embassy-rs", + "version": "1.0.3" + }, + "atomic-waker": { + "checksum": "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0", + "description": "A synchronization primitive for task wakeup", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/atomic-waker@1.1.2", + "supplier": "smol-rs", + "version": "1.1.2" + }, + "axum": { + "checksum": "5b098575ebe77cb6d14fc7f32749631a6e44edbef6b796f89b020e99ba20d425", + "description": "Web framework that focuses on ergonomics and modularity", + "license": "MIT", + "purl": "pkg:cargo/axum@0.8.7", + "supplier": "tokio-rs", + "version": "0.8.7" + }, + "axum-core": { + "checksum": "59446ce19cd142f8833f856eb31f3eb097812d1479ab224f54d72428ca21ea22", + "description": "Core types and traits for axum", + "license": "MIT", + "purl": "pkg:cargo/axum-core@0.5.5", + "supplier": "tokio-rs", + "version": "0.5.5" + }, + "base64": { + "checksum": "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6", + "description": "encodes and decodes base64 as bytes or utf8", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/base64@0.22.1", + "supplier": "marshallpierce", + "version": "0.22.1" + }, + "bincode": { + "checksum": "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad", + "description": "A binary serialization / deserialization strategy for transforming structs into bytes and vice versa!", + "license": "MIT", + "purl": "pkg:cargo/bincode@1.3.3", + "supplier": "bincode-org", + "version": "1.3.3" + }, + "bindgen": { + "checksum": "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895", + "description": "Automatically generates Rust FFI bindings to C and C++ libraries.", + "license": "BSD-3-Clause", + "purl": "pkg:cargo/bindgen@0.72.1", + "supplier": "rust-lang", + "version": "0.72.1" + }, + "bitflags": { + "checksum": "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3", + "description": "A macro to generate structures which behave like bitflags.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/bitflags@2.10.0", + "supplier": "bitflags", + "version": "2.10.0" + }, + "block2": { + "checksum": "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5", + "description": "Apple's C language extension of blocks", + "license": "MIT", + "purl": "pkg:cargo/block2@0.6.2", + "supplier": "madsmtm", + "version": "0.6.2" + }, + "bumpalo": { + "checksum": "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43", + "description": "A fast bump allocation arena for Rust.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/bumpalo@3.19.0", + "supplier": "fitzgen", + "version": "3.19.0" + }, + "byteorder": { + "checksum": "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b", + "description": "Library for reading/writing numbers in big-endian and little-endian.", + "license": "Unlicense OR MIT", + "purl": "pkg:cargo/byteorder@1.5.0", + "supplier": "BurntSushi", + "version": "1.5.0" + }, + "bytes": { + "checksum": "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3", + "description": "Types and traits for working with bytes", + "license": "MIT", + "purl": "pkg:cargo/bytes@1.11.0", + "supplier": "tokio-rs", + "version": "1.11.0" + }, + "cc": { + "checksum": "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215", + "description": "A build-time dependency for Cargo build scripts to assist in invoking the native\nC compiler to compile native C code into a static archive to be linked into Rust\ncode.", + "license": "", + "purl": "pkg:cargo/cc@1.2.49", + "supplier": "rust-lang", + "version": "1.2.49" + }, + "cdr": { + "checksum": "9617422bf43fde9280707a7e90f8f7494389c182f5c70b0f67592d0f06d41dfa", + "description": "A serialization/deserialization implementation of Common Data Representation", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/cdr@0.2.4", + "supplier": "hrektts", + "version": "0.2.4" + }, + "cexpr": { + "checksum": "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766", + "description": "A C expression parser and evaluator", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/cexpr@0.6.0", + "supplier": "jethrogb", + "version": "0.6.0" + }, + "cfg-if": { + "checksum": "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801", + "description": "A macro to ergonomically define an item depending on a large number of #[cfg]\nparameters. Structured like an if-else chain, the first matching branch is the\nitem that gets emitted.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/cfg-if@1.0.4", + "supplier": "rust-lang", + "version": "1.0.4" + }, + "cfg_aliases": { + "checksum": "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724", + "description": "A tiny utility to help save you a lot of effort with long winded `#[cfg()]` checks.", + "license": "MIT", + "purl": "pkg:cargo/cfg_aliases@0.2.1", + "supplier": "katharostech", + "version": "0.2.1" + }, + "clang-sys": { + "checksum": "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4", + "description": "Rust bindings for libclang.", + "license": "Apache-2.0", + "purl": "pkg:cargo/clang-sys@1.8.1", + "supplier": "KyleMayes", + "version": "1.8.1" + }, + "clap": { + "checksum": "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8", + "description": "A simple to use, efficient, and full-featured Command Line Argument Parser", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/clap@4.5.53", + "supplier": "clap-rs", + "version": "4.5.53" + }, + "clap_builder": { + "checksum": "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00", + "description": "A simple to use, efficient, and full-featured Command Line Argument Parser", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/clap_builder@4.5.53", + "supplier": "clap-rs", + "version": "4.5.53" + }, + "clap_derive": { + "checksum": "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671", + "description": "Parse command line argument by defining a struct, derive crate.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT AND (Apache-2.0 AND MIT AND MIT-0)", + "purl": "pkg:cargo/clap_derive@4.5.49", + "supplier": "clap-rs", + "version": "4.5.49" + }, + "clap_lex": { + "checksum": "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d", + "description": "Minimal, flexible command line parser", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/clap_lex@0.7.6", + "supplier": "clap-rs", + "version": "0.7.6" + }, + "cobs": { + "checksum": "0fa961b519f0b462e3a3b4a34b64d119eeaca1d59af726fe450bbba07a9fc0a1", + "description": "This is an implementation of the Consistent Overhead Byte Stuffing (COBS) algorithm.\n COBS is an algorithm for transforming a message into an encoding where a specific value\n (the \"sentinel\" value) is not used. This value can then be used to mark frame boundaries\n in a serial communication channel.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/cobs@0.3.0", + "supplier": "jamesmunns", + "version": "0.3.0" + }, + "colorchoice": { + "checksum": "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75", + "description": "Global override of color control", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/colorchoice@1.0.4", + "supplier": "rust-cli", + "version": "1.0.4" + }, + "console": { + "checksum": "b430743a6eb14e9764d4260d4c0d8123087d504eeb9c48f2b2a5e810dd369df4", + "description": "A terminal and console abstraction for Rust", + "license": "MIT", + "purl": "pkg:cargo/console@0.16.1", + "supplier": "console-rs", + "version": "0.16.1" + }, + "critical-section": { + "checksum": "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b", + "description": "Cross-platform critical section", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/critical-section@1.2.0", + "supplier": "rust-embedded", + "version": "1.2.0" + }, + "crossbeam-channel": { + "checksum": "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2", + "description": "Multi-producer multi-consumer channels for message passing", + "license": "(Apache-2.0 or MIT) AND BSD-3-Clause AND CC-BY-3.0", + "purl": "pkg:cargo/crossbeam-channel@0.5.15", + "supplier": "crossbeam-rs", + "version": "0.5.15" + }, + "crossbeam-utils": { + "checksum": "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28", + "description": "Utilities for concurrent programming", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/crossbeam-utils@0.8.21", + "supplier": "crossbeam-rs", + "version": "0.8.21" + }, + "ctrlc": { + "checksum": "73736a89c4aff73035ba2ed2e565061954da00d4970fc9ac25dcc85a2a20d790", + "description": "Easy Ctrl-C handler for Rust projects", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/ctrlc@3.5.1", + "supplier": "Detegr", + "version": "3.5.1" + }, + "deranged": { + "checksum": "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587", + "description": "Ranged integers", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/deranged@0.5.5", + "supplier": "jhpratt", + "version": "0.5.5" + }, + "dispatch2": { + "checksum": "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec", + "description": "Bindings and wrappers for Apple's Grand Central Dispatch (GCD)", + "license": "Zlib OR Apache-2.0 OR MIT", + "purl": "pkg:cargo/dispatch2@0.3.0", + "supplier": "madsmtm", + "version": "0.3.0" + }, + "either": { + "checksum": "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719", + "description": "The enum `Either` with variants `Left` and `Right` is a general purpose sum type with two cases.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/either@1.15.0", + "supplier": "rayon-rs", + "version": "1.15.0" + }, + "embedded-io": { + "checksum": "edd0f118536f44f5ccd48bcb8b111bdc3de888b58c74639dfb034a357d0f206d", + "description": "Embedded IO traits", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/embedded-io@0.6.1", + "supplier": "rust-embedded", + "version": "0.6.1" + }, + "encode_unicode": { + "checksum": "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0", + "description": "UTF-8 and UTF-16 character types, iterators and related methods for char, u8 and u16.", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/encode_unicode@1.0.0", + "supplier": "tormol", + "version": "1.0.0" + }, + "enum-iterator": { + "checksum": "a4549325971814bda7a44061bf3fe7e487d447cba01e4220a4b454d630d7a016", + "description": "Tools to iterate over all values of a type (e.g. all variants of an enumeration)", + "license": "0BSD", + "purl": "pkg:cargo/enum-iterator@2.3.0", + "supplier": "stephaneyfx", + "version": "2.3.0" + }, + "enum-iterator-derive": { + "checksum": "685adfa4d6f3d765a26bc5dbc936577de9abf756c1feeb3089b01dd395034842", + "description": "Procedural macro to derive Sequence", + "license": "0BSD", + "purl": "pkg:cargo/enum-iterator-derive@1.5.0", + "supplier": "stephaneyfx", + "version": "1.5.0" + }, + "env_filter": { + "checksum": "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2", + "description": "Filter log events using environment variables", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/env_filter@0.1.4", + "supplier": "rust-cli", + "version": "0.1.4" + }, + "env_logger": { + "checksum": "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f", + "description": "A logging implementation for `log` which is configured via an environment\nvariable.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/env_logger@0.11.8", + "supplier": "rust-cli", + "version": "0.11.8" + }, + "equivalent": { + "checksum": "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f", + "description": "Traits for key comparison in maps.", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/equivalent@1.0.2", + "supplier": "indexmap-rs", + "version": "1.0.2" + }, + "errno": { + "checksum": "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb", + "description": "Cross-platform interface to the `errno` variable.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/errno@0.3.14", + "supplier": "lambda-fairy", + "version": "0.3.14" + }, + "fastrand": { + "checksum": "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be", + "description": "A simple and fast random number generator", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/fastrand@2.3.0", + "supplier": "smol-rs", + "version": "2.3.0" + }, + "filedesc": { + "checksum": "c4960c866d9cf4c48f64f11ef0020c98900d0cb32e9b019c000ed838470daa1d", + "description": "thin wrapper around raw file descriptors", + "license": "BSD-2-Clause", + "purl": "pkg:cargo/filedesc@0.6.3", + "supplier": "de-vri-es", + "version": "0.6.3" + }, + "find-msvc-tools": { + "checksum": "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844", + "description": "Find windows-specific tools, read MSVC versions from the registry and from COM interfaces", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/find-msvc-tools@0.1.5", + "supplier": "rust-lang", + "version": "0.1.5" + }, + "fixedbitset": { + "checksum": "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99", + "description": "FixedBitSet is a simple bitset collection", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/fixedbitset@0.5.7", + "supplier": "petgraph", + "version": "0.5.7" + }, + "fnv": { + "checksum": "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1", + "description": "Fowler\u2013Noll\u2013Vo hash function", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/fnv@1.0.7", + "supplier": "servo", + "version": "1.0.7" + }, + "foldhash": { + "checksum": "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2", + "description": "A fast, non-cryptographic, minimally DoS-resistant hashing algorithm.", + "license": "Zlib", + "purl": "pkg:cargo/foldhash@0.1.5", + "supplier": "orlp", + "version": "0.1.5" + }, + "futures": { + "checksum": "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876", + "description": "An implementation of futures and streams featuring zero allocations,\ncomposability, and iterator-like interfaces.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-channel": { + "checksum": "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10", + "description": "Channels for asynchronous communication using futures-rs.", + "license": "(Apache-2.0 or MIT) AND BSD-2-Clause-Views", + "purl": "pkg:cargo/futures-channel@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-core": { + "checksum": "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e", + "description": "The core traits and types in for the `futures` library.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures-core@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-executor": { + "checksum": "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f", + "description": "Executors for asynchronous tasks based on the futures-rs library.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures-executor@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-io": { + "checksum": "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6", + "description": "The `AsyncRead`, `AsyncWrite`, `AsyncSeek`, and `AsyncBufRead` traits for the futures-rs library.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures-io@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-macro": { + "checksum": "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650", + "description": "The futures-rs procedural macro implementations.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures-macro@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-sink": { + "checksum": "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7", + "description": "The asynchronous `Sink` trait for the futures-rs library.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures-sink@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-task": { + "checksum": "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988", + "description": "Tools for working with tasks.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures-task@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "futures-util": { + "checksum": "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81", + "description": "Common utilities and extension traits for the futures-rs library.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/futures-util@0.3.31", + "supplier": "rust-lang", + "version": "0.3.31" + }, + "getrandom": { + "checksum": "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd", + "description": "A small cross-platform library for retrieving random data from system source", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/getrandom@0.3.4", + "supplier": "rust-random", + "version": "0.3.4" + }, + "glob": { + "checksum": "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280", + "description": "Support for matching file paths against Unix shell style patterns.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/glob@0.3.3", + "supplier": "rust-lang", + "version": "0.3.3" + }, + "h2": { + "checksum": "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386", + "description": "An HTTP/2 client and server", + "license": "MIT", + "purl": "pkg:cargo/h2@0.4.12", + "supplier": "hyperium", + "version": "0.4.12" + }, + "hash32": { + "checksum": "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67", + "description": "32-bit hashing algorithms", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/hash32@0.2.1", + "supplier": "rust-embedded-community", + "version": "0.2.1" + }, + "hashbrown": { + "checksum": "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100", + "description": "A Rust port of Google's SwissTable hash map", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/hashbrown@0.16.1", + "supplier": "rust-lang", + "version": "0.16.1" + }, + "heapless": { + "checksum": "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f", + "description": "`static` friendly data structures that don't require dynamic memory allocation", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/heapless@0.7.17", + "supplier": "rust-embedded", + "version": "0.7.17" + }, + "heck": { + "checksum": "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea", + "description": "heck is a case conversion library.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/heck@0.5.0", + "supplier": "withoutboats", + "version": "0.5.0" + }, + "home": { + "checksum": "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d", + "description": "Shared definitions of home directories.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/home@0.5.12", + "supplier": "rust-lang", + "version": "0.5.12" + }, + "http": { + "checksum": "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a", + "description": "A set of types for representing HTTP requests and responses.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/http@1.4.0", + "supplier": "hyperium", + "version": "1.4.0" + }, + "http-body": { + "checksum": "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184", + "description": "Trait representing an asynchronous, streaming, HTTP request or response body.", + "license": "MIT", + "purl": "pkg:cargo/http-body@1.0.1", + "supplier": "hyperium", + "version": "1.0.1" + }, + "http-body-util": { + "checksum": "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a", + "description": "Combinators and adapters for HTTP request or response bodies.", + "license": "MIT", + "purl": "pkg:cargo/http-body-util@0.1.3", + "supplier": "hyperium", + "version": "0.1.3" + }, + "httparse": { + "checksum": "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87", + "description": "A tiny, safe, speedy, zero-copy HTTP/1.x parser.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/httparse@1.10.1", + "supplier": "seanmonstar", + "version": "1.10.1" + }, + "httpdate": { + "checksum": "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9", + "description": "HTTP date parsing and formatting", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/httpdate@1.0.3", + "supplier": "pyfisch", + "version": "1.0.3" + }, + "human_bytes": { + "checksum": "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e", + "description": "Crate to convert bytes into human-readable values", + "license": "BSD-2-Clause", + "purl": "pkg:cargo/human_bytes@0.4.3", + "supplier": "", + "version": "0.4.3" + }, + "hyper": { + "checksum": "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11", + "description": "A protective and efficient HTTP library for all.", + "license": "MIT", + "purl": "pkg:cargo/hyper@1.8.1", + "supplier": "hyperium", + "version": "1.8.1" + }, + "hyper-timeout": { + "checksum": "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0", + "description": "A connect, read and write timeout aware connector to be used with hyper Client.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/hyper-timeout@0.5.2", + "supplier": "hjr3", + "version": "0.5.2" + }, + "hyper-util": { + "checksum": "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f", + "description": "hyper utilities", + "license": "MIT", + "purl": "pkg:cargo/hyper-util@0.1.19", + "supplier": "hyperium", + "version": "0.1.19" + }, + "iceoryx2": { + "checksum": "12c091b5786a230240f8f2691b9d29508ed0a02c025c254abaed8bc788a66bb3", + "description": "iceoryx2: Lock-Free Zero-Copy Interprocess Communication", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT) AND Apache-2.0", + "purl": "pkg:cargo/iceoryx2@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-container": { + "checksum": "fff9d75921f8ccd3e19261d72082b52ccc4704dcdd6930efe6c17b15693ab519", + "description": "iceoryx2: IPC shared memory compatible containers", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-bb-container@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-container-qnx8": { + "checksum": "", + "description": "iceoryx2: IPC shared memory compatible containers", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-container-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-derive-macros": { + "checksum": "9257482fb822946bb3028225b6807350e3f815068961ff5bf683735ce01d59a4", + "description": "iceoryx2: [internal] helper derive proc-macros", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT) AND Apache-2.0", + "purl": "pkg:cargo/iceoryx2-bb-derive-macros@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-derive-macros-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] helper derive proc-macros", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-derive-macros-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-elementary": { + "checksum": "38aae0237ff1575a7d9672c0202e5d313e7f674a635f6aaf619d2090fb7a12c2", + "description": "iceoryx2: [internal] elementary constructs that have (almost) no dependencies at all", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-bb-elementary@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-elementary-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] elementary constructs that have (almost) no dependencies at all", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-elementary-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-elementary-traits-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] elementary traits that have (almost) no dependencies at all", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-elementary-traits-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-linux-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] high level safe abstractions for low level unsafe linux constructs", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-linux-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-lock-free": { + "checksum": "3ec3227a0c8b1d9e1ea4c61fa6a773e7c3b721fa3e2fd4363324f302b35ac85b", + "description": "iceoryx2: [internal] lock-free containers and constructs", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-bb-lock-free@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-lock-free-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] lock-free containers and constructs", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-lock-free-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-log": { + "checksum": "07df5e6ff06cc2ffb0a86b67c7cd4be86b11264d5e83c02a52b384e2d5e6363a", + "description": "iceoryx2: [internal] the logger", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-bb-log@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-log-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] the logger", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-log-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-memory": { + "checksum": "55a4ae8856404b6e7eca567004673ca39109fc70fcf387b96ca6d6d27c61f31b", + "description": "iceoryx2: [internal] allocators and tools for memory management", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-bb-memory@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-memory-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] allocators and tools for memory management", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-memory-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-posix": { + "checksum": "122ff88c452a3045a9de6db73d0429da736578f76eab75779dd0d3681de75d57", + "description": "iceoryx2: [internal] high level safe abstractions for low level unsafe posix constructs", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-bb-posix@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-posix-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] high level safe abstractions for low level unsafe posix constructs", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-posix-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-system-types": { + "checksum": "ac2fb11a94823c3b9117a5e14edf16fd467794cf5391738eb2d8b7fc8e9b04fd", + "description": "iceoryx2: [internal] strong types that represents low level constructs like file, path, ip address, etc...", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-bb-system-types@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-bb-system-types-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] strong types that represents low level constructs like file, path, ip address, etc...", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-system-types-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-testing-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] simple testing primitives", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-testing-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-bb-threadsafe-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] threadsafe non-lock-free containers and constructs", + "license": "", + "purl": "pkg:cargo/iceoryx2-bb-threadsafe-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-cal": { + "checksum": "6c64b8eee1d57c4336d7df7d521671d8b76c72cac16af0db651e40f8c45f0946", + "description": "iceoryx2: [internal] high-level traits and implementations that represents OS primitives in an exchangeable fashion", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-cal@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-cal-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] high-level traits and implementations that represents OS primitives in an exchangeable fashion", + "license": "", + "purl": "pkg:cargo/iceoryx2-cal-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-pal-concurrency-sync": { + "checksum": "92dc4ec7c023819c41b5a14c9ca58e8dbb86476053321decda73ac440996f35f", + "description": "iceoryx2: [internal] low-level basic building blocks to implement posix mutex, condition variables etc.", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-pal-concurrency-sync@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-pal-concurrency-sync-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] low-level basic building blocks to implement posix mutex, condition variables etc.", + "license": "", + "purl": "pkg:cargo/iceoryx2-pal-concurrency-sync-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-pal-configuration": { + "checksum": "d900aa8f9c5b661a9c5ddfbdc28eb87ac8c32be5375ca9b055b087d1c89c984d", + "description": "iceoryx2: [internal] platform specific settings", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-pal-configuration@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-pal-configuration-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] platform specific settings", + "license": "", + "purl": "pkg:cargo/iceoryx2-pal-configuration-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-pal-os-api-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] OS api layer for OS specific features", + "license": "", + "purl": "pkg:cargo/iceoryx2-pal-os-api-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-pal-posix": { + "checksum": "5789169791a6274b492561b95543e1c635285cb454d547f78e53cbc1acd9b3f5", + "description": "iceoryx2: [internal] A posix layer that ensures uniform posix behavior on all platforms according to https://posix.opengroup.org", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/iceoryx2-pal-posix@0.5.0", + "supplier": "eclipse-iceoryx", + "version": "0.5.0" + }, + "iceoryx2-pal-posix-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] A posix layer that ensures uniform posix behavior on all platforms according to https://posix.opengroup.org", + "license": "", + "purl": "pkg:cargo/iceoryx2-pal-posix-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-pal-testing-qnx8": { + "checksum": "", + "description": "iceoryx2: [internal] simple testing primitives", + "license": "", + "purl": "pkg:cargo/iceoryx2-pal-testing-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "iceoryx2-qnx8": { + "checksum": "", + "description": "iceoryx2: Lock-Free Zero-Copy Interprocess Communication", + "license": "", + "purl": "pkg:cargo/iceoryx2-qnx8@0.7.0", + "supplier": "eclipse-iceoryx", + "version": "0.7.0" + }, + "indexmap": { + "checksum": "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2", + "description": "A hash table with consistent order and fast iteration.", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/indexmap@2.12.1", + "supplier": "indexmap-rs", + "version": "2.12.1" + }, + "indicatif": { + "checksum": "9375e112e4b463ec1b1c6c011953545c65a30164fbab5b581df32b3abf0dcb88", + "description": "A progress bar and cli reporting library for Rust", + "license": "MIT", + "purl": "pkg:cargo/indicatif@0.18.3", + "supplier": "console-rs", + "version": "0.18.3" + }, + "indicatif-log-bridge": { + "checksum": "63703cf9069b85dbe6fe26e1c5230d013dee99d3559cd3d02ba39e099ef7ab02", + "description": "Bridge the log crate and indicatif to stop log lines from mixing up with progress bars", + "license": "", + "purl": "pkg:cargo/indicatif-log-bridge@0.2.3", + "supplier": "djugei", + "version": "0.2.3" + }, + "ipc-channel": { + "checksum": "f93600b5616c2d075f8af8dbd23c1d69278c5d24e4913d220cbc60b14c95c180", + "description": "A multiprocess drop-in replacement for Rust channels", + "license": "", + "purl": "pkg:cargo/ipc-channel@0.20.2", + "supplier": "servo", + "version": "0.20.2" + }, + "is_terminal_polyfill": { + "checksum": "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695", + "description": "Polyfill for `is_terminal` stdlib feature for use with older MSRVs", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/is_terminal_polyfill@1.70.2", + "supplier": "polyfill-rs", + "version": "1.70.2" + }, + "itertools": { + "checksum": "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285", + "description": "Extra iterator adaptors, iterator methods, free functions, and macros.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/itertools@0.14.0", + "supplier": "rust-itertools", + "version": "0.14.0" + }, + "itoa": { + "checksum": "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c", + "description": "Fast integer primitive to string conversion", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/itoa@1.0.15", + "supplier": "dtolnay", + "version": "1.0.15" + }, + "jiff": { + "checksum": "49cce2b81f2098e7e3efc35bc2e0a6b7abec9d34128283d7a26fa8f32a6dbb35", + "description": "A date-time library that encourages you to jump into the pit of success.\n\nThis library is heavily inspired by the Temporal project.", + "license": "MIT AND Unlicense AND MIT AND Unlicense", + "purl": "pkg:cargo/jiff@0.2.16", + "supplier": "BurntSushi", + "version": "0.2.16" + }, + "jiff-static": { + "checksum": "980af8b43c3ad5d8d349ace167ec8170839f753a42d233ba19e08afe1850fa69", + "description": "Create static TimeZone values for Jiff (useful in core-only environments).", + "license": "MIT AND Unlicense AND MIT AND Unlicense", + "purl": "pkg:cargo/jiff-static@0.2.16", + "supplier": "BurntSushi", + "version": "0.2.16" + }, + "js-sys": { + "checksum": "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8", + "description": "Bindings for all JS global objects and functions in all JS environments like\nNode.js and browsers, built on `#[wasm_bindgen]` using the `wasm-bindgen` crate.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/js-sys@0.3.83", + "supplier": "wasm-bindgen", + "version": "0.3.83" + }, + "lazy_static": { + "checksum": "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe", + "description": "A macro for declaring lazily evaluated statics in Rust.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/lazy_static@1.5.0", + "supplier": "rust-lang-nursery", + "version": "1.5.0" + }, + "lazycell": { + "checksum": "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55", + "description": "A library providing a lazily filled Cell struct", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/lazycell@1.3.0", + "supplier": "indiv0", + "version": "1.3.0" + }, + "libc": { + "checksum": "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091", + "description": "Raw FFI bindings to platform libraries like libc.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/libc@0.2.178", + "supplier": "rust-lang", + "version": "0.2.178" + }, + "libloading": { + "checksum": "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55", + "description": "Bindings around the platform's dynamic library loading primitives with greatly improved memory safety.", + "license": "ISC", + "purl": "pkg:cargo/libloading@0.8.9", + "supplier": "nagisa", + "version": "0.8.9" + }, + "linux-raw-sys": { + "checksum": "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039", + "description": "Generated bindings for Linux's userspace API", + "license": "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT", + "purl": "pkg:cargo/linux-raw-sys@0.11.0", + "supplier": "sunfishcode", + "version": "0.11.0" + }, + "lock_api": { + "checksum": "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965", + "description": "Wrappers to create fully-featured Mutex and RwLock types. Compatible with no_std.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/lock_api@0.4.14", + "supplier": "Amanieu", + "version": "0.4.14" + }, + "log": { + "checksum": "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897", + "description": "A lightweight logging facade for Rust", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/log@0.4.29", + "supplier": "rust-lang", + "version": "0.4.29" + }, + "matchit": { + "checksum": "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3", + "description": "A high performance, zero-copy URL router.", + "license": "MIT AND BSD-3-Clause", + "purl": "pkg:cargo/matchit@0.8.4", + "supplier": "ibraheemdev", + "version": "0.8.4" + }, + "memchr": { + "checksum": "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273", + "description": "Provides extremely fast (uses SIMD on x86_64, aarch64 and wasm32) routines for\n1, 2 or 3 byte search and single substring search.", + "license": "Unlicense AND (MIT AND Unlicense) AND MIT", + "purl": "pkg:cargo/memchr@2.7.6", + "supplier": "BurntSushi", + "version": "2.7.6" + }, + "mime": { + "checksum": "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a", + "description": "Strongly Typed Mimes", + "license": "Apache-2.0 AND MIT", + "purl": "pkg:cargo/mime@0.3.17", + "supplier": "hyperium", + "version": "0.3.17" + }, + "minimal-lexical": { + "checksum": "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a", + "description": "Fast float parsing conversion routines.", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/minimal-lexical@0.2.1", + "supplier": "Alexhuszagh", + "version": "0.2.1" + }, + "mio": { + "checksum": "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc", + "description": "Lightweight non-blocking I/O.", + "license": "MIT", + "purl": "pkg:cargo/mio@1.1.1", + "supplier": "tokio-rs", + "version": "1.1.1" + }, + "multimap": { + "checksum": "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084", + "description": "A multimap implementation.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/multimap@0.10.1", + "supplier": "havarnov", + "version": "0.10.1" + }, + "nix": { + "checksum": "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6", + "description": "Rust friendly bindings to *nix APIs", + "license": "MIT", + "purl": "pkg:cargo/nix@0.30.1", + "supplier": "nix-rust", + "version": "0.30.1" + }, + "nom": { + "checksum": "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a", + "description": "A byte-oriented, zero-copy, parser combinators library", + "license": "MIT AND CC0-1.0", + "purl": "pkg:cargo/nom@7.1.3", + "supplier": "rust-bakery", + "version": "7.1.3" + }, + "nu-ansi-term": { + "checksum": "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5", + "description": "Library for ANSI terminal colors and styles (bold, underline)", + "license": "MIT", + "purl": "pkg:cargo/nu-ansi-term@0.50.3", + "supplier": "nushell", + "version": "0.50.3" + }, + "num-conv": { + "checksum": "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9", + "description": "`num_conv` is a crate to convert between integer types without using `as` casts. This provides\nbetter certainty when refactoring, makes the exact behavior of code more explicit, and allows using\nturbofish syntax.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/num-conv@0.1.0", + "supplier": "jhpratt", + "version": "0.1.0" + }, + "objc2": { + "checksum": "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05", + "description": "Objective-C interface and runtime bindings", + "license": "MIT", + "purl": "pkg:cargo/objc2@0.6.3", + "supplier": "madsmtm", + "version": "0.6.3" + }, + "objc2-encode": { + "checksum": "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33", + "description": "Objective-C type-encoding representation and parsing", + "license": "MIT", + "purl": "pkg:cargo/objc2-encode@4.1.0", + "supplier": "madsmtm", + "version": "4.1.0" + }, + "once_cell": { + "checksum": "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d", + "description": "Single assignment cells and lazy values.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/once_cell@1.21.3", + "supplier": "matklad", + "version": "1.21.3" + }, + "once_cell_polyfill": { + "checksum": "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe", + "description": "Polyfill for `OnceCell` stdlib feature for use with older MSRVs", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/once_cell_polyfill@1.70.2", + "supplier": "polyfill-rs", + "version": "1.70.2" + }, + "ouroboros": { + "checksum": "1e0f050db9c44b97a94723127e6be766ac5c340c48f2c4bb3ffa11713744be59", + "description": "Easy, safe self-referential struct generation.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/ouroboros@0.18.5", + "supplier": "someguynamedjosh", + "version": "0.18.5" + }, + "ouroboros_macro": { + "checksum": "3c7028bdd3d43083f6d8d4d5187680d0d3560d54df4cc9d752005268b41e64d0", + "description": "Proc macro for ouroboros crate.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/ouroboros_macro@0.18.5", + "supplier": "someguynamedjosh", + "version": "0.18.5" + }, + "paste": { + "checksum": "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a", + "description": "Macros for all your token pasting needs", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/paste@1.0.15", + "supplier": "dtolnay", + "version": "1.0.15" + }, + "percent-encoding": { + "checksum": "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220", + "description": "Percent encoding and decoding", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/percent-encoding@2.3.2", + "supplier": "servo", + "version": "2.3.2" + }, + "petgraph": { + "checksum": "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772", + "description": "Graph data structure library. Provides graph types and graph algorithms.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/petgraph@0.7.1", + "supplier": "petgraph", + "version": "0.7.1" + }, + "pico-args": { + "checksum": "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315", + "description": "An ultra simple CLI arguments parser.", + "license": "MIT", + "purl": "pkg:cargo/pico-args@0.5.0", + "supplier": "RazrFalcon", + "version": "0.5.0" + }, + "pin-project": { + "checksum": "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a", + "description": "A crate for safe and ergonomic pin-projection.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT AND (Apache-2.0 OR MIT)", + "purl": "pkg:cargo/pin-project@1.1.10", + "supplier": "taiki-e", + "version": "1.1.10" + }, + "pin-project-internal": { + "checksum": "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861", + "description": "Implementation detail of the `pin-project` crate.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT AND (Apache-2.0 OR MIT)", + "purl": "pkg:cargo/pin-project-internal@1.1.10", + "supplier": "taiki-e", + "version": "1.1.10" + }, + "pin-project-lite": { + "checksum": "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b", + "description": "A lightweight version of pin-project written with declarative macros.", + "license": "Apache-2.0 or MIT", + "purl": "pkg:cargo/pin-project-lite@0.2.16", + "supplier": "taiki-e", + "version": "0.2.16" + }, + "pin-utils": { + "checksum": "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184", + "description": "Utilities for pinning", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/pin-utils@0.1.0", + "supplier": "rust-lang-nursery", + "version": "0.1.0" + }, + "portable-atomic": { + "checksum": "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483", + "description": "Portable atomic types including support for 128-bit atomics, atomic float, etc.", + "license": "Apache-2.0 OR MIT AND (Apache-2.0 AND MIT) AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/portable-atomic@1.11.1", + "supplier": "taiki-e", + "version": "1.11.1" + }, + "portable-atomic-util": { + "checksum": "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507", + "description": "Synchronization primitives built with portable-atomic.", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/portable-atomic-util@0.2.4", + "supplier": "taiki-e", + "version": "0.2.4" + }, + "postcard": { + "checksum": "6764c3b5dd454e283a30e6dfe78e9b31096d9e32036b5d1eaac7a6119ccb9a24", + "description": "A no_std + serde compatible message library for Rust", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/postcard@1.1.3", + "supplier": "jamesmunns", + "version": "1.1.3" + }, + "postcard-derive": { + "checksum": "e0232bd009a197ceec9cc881ba46f727fcd8060a2d8d6a9dde7a69030a6fe2bb", + "description": "A no_std + serde compatible message library for Rust - Derive Crate", + "license": "", + "purl": "pkg:cargo/postcard-derive@0.2.2", + "supplier": "jamesmunns", + "version": "0.2.2" + }, + "powerfmt": { + "checksum": "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391", + "description": "`powerfmt` is a library that provides utilities for formatting values. This crate makes it\n significantly easier to support filling to a minimum width with alignment, avoid heap\n allocation, and avoid repetitive calculations.", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/powerfmt@0.2.0", + "supplier": "jhpratt", + "version": "0.2.0" + }, + "ppv-lite86": { + "checksum": "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9", + "description": "Cross-platform cryptography-oriented low-level SIMD library.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/ppv-lite86@0.2.21", + "supplier": "cryptocorrosion", + "version": "0.2.21" + }, + "prettyplease": { + "checksum": "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b", + "description": "A minimal `syn` syntax tree pretty-printer", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/prettyplease@0.2.37", + "supplier": "dtolnay", + "version": "0.2.37" + }, + "proc-macro2": { + "checksum": "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8", + "description": "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/proc-macro2@1.0.103", + "supplier": "dtolnay", + "version": "1.0.103" + }, + "proc-macro2-diagnostics": { + "checksum": "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8", + "description": "Diagnostics for proc-macro2.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/proc-macro2-diagnostics@0.10.1", + "supplier": "SergioBenitez", + "version": "0.10.1" + }, + "prost": { + "checksum": "7231bd9b3d3d33c86b58adbac74b5ec0ad9f496b19d22801d773636feaa95f3d", + "description": "A Protocol Buffers implementation for the Rust Language.", + "license": "Apache-2.0", + "purl": "pkg:cargo/prost@0.14.1", + "supplier": "tokio-rs", + "version": "0.14.1" + }, + "prost-build": { + "checksum": "ac6c3320f9abac597dcbc668774ef006702672474aad53c6d596b62e487b40b1", + "description": "Generate Prost annotated Rust types from Protocol Buffers files.", + "license": "Apache-2.0", + "purl": "pkg:cargo/prost-build@0.14.1", + "supplier": "tokio-rs", + "version": "0.14.1" + }, + "prost-derive": { + "checksum": "9120690fafc389a67ba3803df527d0ec9cbbc9cc45e4cc20b332996dfb672425", + "description": "Generate encoding and decoding implementations for Prost annotated types.", + "license": "Apache-2.0", + "purl": "pkg:cargo/prost-derive@0.14.1", + "supplier": "tokio-rs", + "version": "0.14.1" + }, + "prost-types": { + "checksum": "b9b4db3d6da204ed77bb26ba83b6122a73aeb2e87e25fbf7ad2e84c4ccbf8f72", + "description": "Prost definitions of Protocol Buffers well known types.", + "license": "Apache-2.0", + "purl": "pkg:cargo/prost-types@0.14.1", + "supplier": "tokio-rs", + "version": "0.14.1" + }, + "protoc-gen-prost": { + "checksum": "77eb17a7657a703f30cb9b7ba4d981e4037b8af2d819ab0077514b0bef537406", + "description": "Protocol Buffers compiler plugin powered by Prost!", + "license": "", + "purl": "pkg:cargo/protoc-gen-prost@0.4.0", + "supplier": "neoeinstein", + "version": "0.4.0" + }, + "protoc-gen-tonic": { + "checksum": "6ab6a0d73a0914752ed8fd7cc51afe169e28da87be3efef292de5676cc527634", + "description": "Protocol Buffers compiler plugin for gRPC services powered by Prost! and Tonic", + "license": "", + "purl": "pkg:cargo/protoc-gen-tonic@0.4.1", + "supplier": "neoeinstein", + "version": "0.4.1" + }, + "quote": { + "checksum": "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f", + "description": "Quasi-quoting macro quote!(...)", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/quote@1.0.42", + "supplier": "dtolnay", + "version": "1.0.42" + }, + "r-efi": { + "checksum": "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f", + "description": "UEFI Reference Specification Protocol Constants and Definitions", + "license": "Apache-2.0 AND LGPL-2.1-or-later AND MIT AND (Apache-2.0 AND MIT) AND MIT", + "purl": "pkg:cargo/r-efi@5.3.0", + "supplier": "r-efi", + "version": "5.3.0" + }, + "rand": { + "checksum": "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1", + "description": "Random number generators and other randomness functionality.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/rand@0.9.2", + "supplier": "rust-random", + "version": "0.9.2" + }, + "rand_chacha": { + "checksum": "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb", + "description": "ChaCha random number generator", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/rand_chacha@0.9.0", + "supplier": "rust-random", + "version": "0.9.0" + }, + "rand_core": { + "checksum": "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38", + "description": "Core random number generation traits and tools for implementation.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/rand_core@0.9.3", + "supplier": "rust-random", + "version": "0.9.3" + }, + "regex": { + "checksum": "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4", + "description": "An implementation of regular expressions for Rust. This implementation uses\nfinite automata and guarantees linear time matching on all inputs.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/regex@1.12.2", + "supplier": "rust-lang", + "version": "1.12.2" + }, + "regex-automata": { + "checksum": "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c", + "description": "Automata construction and matching using regular expressions.", + "license": "Apache-2.0 or MIT", + "purl": "pkg:cargo/regex-automata@0.4.13", + "supplier": "rust-lang", + "version": "0.4.13" + }, + "regex-syntax": { + "checksum": "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58", + "description": "A regular expression parser.", + "license": "(Apache-2.0 or MIT) AND Unicode-DFS-2016", + "purl": "pkg:cargo/regex-syntax@0.8.8", + "supplier": "rust-lang", + "version": "0.8.8" + }, + "rust-fuzzy-search": { + "checksum": "a157657054ffe556d8858504af8a672a054a6e0bd9e8ee531059100c0fa11bb2", + "description": "Fuzzy Search with trigrams implemented in Rust", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/rust-fuzzy-search@0.1.1", + "supplier": "", + "version": "0.1.1" + }, + "rustc-hash": { + "checksum": "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d", + "description": "A speedy, non-cryptographic hashing algorithm used by rustc", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/rustc-hash@2.1.1", + "supplier": "rust-lang", + "version": "2.1.1" + }, + "rustc_version": { + "checksum": "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92", + "description": "A library for querying the version of a installed rustc compiler", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/rustc_version@0.4.1", + "supplier": "djc", + "version": "0.4.1" + }, + "rustix": { + "checksum": "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e", + "description": "Safe Rust bindings to POSIX/Unix/Linux/Winsock-like syscalls", + "license": "(MIT OR Apache-2.0 OR (Apache-2.0 WITH LLVM-exception)) AND (Apache-2.0 OR MIT)", + "purl": "pkg:cargo/rustix@1.1.2", + "supplier": "bytecodealliance", + "version": "1.1.2" + }, + "rustversion": { + "checksum": "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d", + "description": "Conditional compilation according to rustc compiler version", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/rustversion@1.0.22", + "supplier": "dtolnay", + "version": "1.0.22" + }, + "ryu": { + "checksum": "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f", + "description": "Fast floating point to string conversion", + "license": "Apache-2.0 AND BSL-1.0 AND CC-BY-SA-3.0", + "purl": "pkg:cargo/ryu@1.0.20", + "supplier": "dtolnay", + "version": "1.0.20" + }, + "scopeguard": { + "checksum": "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49", + "description": "A RAII scope guard that will run a given closure when it goes out of scope,\neven if the code between panics (assuming unwinding panic).\n\nDefines the macros `defer!`, `defer_on_unwind!`, `defer_on_success!` as\nshorthands for guards with one of the implemented strategies.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/scopeguard@1.2.0", + "supplier": "bluss", + "version": "1.2.0" + }, + "semver": { + "checksum": "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2", + "description": "Parser and evaluator for Cargo's flavor of Semantic Versioning", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/semver@1.0.27", + "supplier": "dtolnay", + "version": "1.0.27" + }, + "serde": { + "checksum": "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e", + "description": "A generic serialization/deserialization framework", + "license": "Apache-2.0 or MIT", + "purl": "pkg:cargo/serde@1.0.228", + "supplier": "serde-rs", + "version": "1.0.228" + }, + "serde_core": { + "checksum": "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad", + "description": "Serde traits only, with no support for derive -- use the `serde` crate instead", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/serde_core@1.0.228", + "supplier": "serde-rs", + "version": "1.0.228" + }, + "serde_derive": { + "checksum": "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79", + "description": "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]", + "license": "Apache-2.0 or MIT", + "purl": "pkg:cargo/serde_derive@1.0.228", + "supplier": "serde-rs", + "version": "1.0.228" + }, + "serde_json": { + "checksum": "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c", + "description": "A JSON serialization file format", + "license": "Apache-2.0 AND MIT", + "purl": "pkg:cargo/serde_json@1.0.145", + "supplier": "serde-rs", + "version": "1.0.145" + }, + "serde_spanned": { + "checksum": "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3", + "description": "Serde-compatible spanned Value", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/serde_spanned@0.6.9", + "supplier": "toml-rs", + "version": "0.6.9" + }, + "sha1_smol": { + "checksum": "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d", + "description": "Minimal dependency-free implementation of SHA1 for Rust.", + "license": "BSD-3-Clause AND MIT", + "purl": "pkg:cargo/sha1_smol@1.0.1", + "supplier": "mitsuhiko", + "version": "1.0.1" + }, + "sharded-slab": { + "checksum": "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6", + "description": "A lock-free concurrent slab.", + "license": "MIT", + "purl": "pkg:cargo/sharded-slab@0.1.7", + "supplier": "hawkw", + "version": "0.1.7" + }, + "shlex": { + "checksum": "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64", + "description": "Split a string into shell words, like Python's shlex.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/shlex@1.3.0", + "supplier": "comex", + "version": "1.3.0" + }, + "signal-hook": { + "checksum": "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2", + "description": "Unix signal handling", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/signal-hook@0.3.18", + "supplier": "vorner", + "version": "0.3.18" + }, + "signal-hook-registry": { + "checksum": "7664a098b8e616bdfcc2dc0e9ac44eb231eedf41db4e9fe95d8d32ec728dedad", + "description": "Backend crate for signal-hook", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/signal-hook-registry@1.4.7", + "supplier": "vorner", + "version": "1.4.7" + }, + "slab": { + "checksum": "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589", + "description": "Pre-allocated storage for a uniform data type", + "license": "MIT", + "purl": "pkg:cargo/slab@0.4.11", + "supplier": "tokio-rs", + "version": "0.4.11" + }, + "smallvec": { + "checksum": "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03", + "description": "'Small vector' optimization: store up to a small number of items on the stack", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/smallvec@1.15.1", + "supplier": "servo", + "version": "1.15.1" + }, + "socket2": { + "checksum": "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881", + "description": "Utilities for handling networking sockets with a maximal amount of configuration\npossible intended.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/socket2@0.6.1", + "supplier": "rust-lang", + "version": "0.6.1" + }, + "spin": { + "checksum": "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67", + "description": "Spin-based synchronization primitives", + "license": "MIT AND (Apache-2.0 AND MIT)", + "purl": "pkg:cargo/spin@0.9.8", + "supplier": "mvdnes", + "version": "0.9.8" + }, + "stable_deref_trait": { + "checksum": "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596", + "description": "An unsafe marker trait for types like Box and Rc that dereference to a stable address even when moved, and hence can be used with libraries such as owning_ref and rental.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/stable_deref_trait@1.2.1", + "supplier": "storyyeller", + "version": "1.2.1" + }, + "static_assertions": { + "checksum": "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f", + "description": "Compile-time assertions to ensure that invariants are met.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/static_assertions@1.1.0", + "supplier": "nvzqz", + "version": "1.1.0" + }, + "strsim": { + "checksum": "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f", + "description": "Implementations of string similarity metrics. Includes Hamming, Levenshtein,\nOSA, Damerau-Levenshtein, Jaro, Jaro-Winkler, and S\u00f8rensen-Dice.", + "license": "MIT", + "purl": "pkg:cargo/strsim@0.11.1", + "supplier": "rapidfuzz", + "version": "0.11.1" + }, + "syn": { + "checksum": "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87", + "description": "Parser for Rust source code", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/syn@2.0.111", + "supplier": "dtolnay", + "version": "2.0.111" + }, + "sync_wrapper": { + "checksum": "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263", + "description": "A tool for enlisting the compiler's help in proving the absence of concurrency", + "license": "Apache-2.0", + "purl": "pkg:cargo/sync_wrapper@1.0.2", + "supplier": "Actyx", + "version": "1.0.2" + }, + "tempfile": { + "checksum": "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16", + "description": "A library for managing temporary files and directories.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/tempfile@3.23.0", + "supplier": "Stebalien", + "version": "3.23.0" + }, + "termsize": { + "checksum": "6f11ff5c25c172608d5b85e2fb43ee9a6d683a7f4ab7f96ae07b3d8b590368fd", + "description": "Retrieves terminal size", + "license": "MIT", + "purl": "pkg:cargo/termsize@0.1.9", + "supplier": "softprops", + "version": "0.1.9" + }, + "thiserror": { + "checksum": "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8", + "description": "derive(Error)", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/thiserror@2.0.17", + "supplier": "dtolnay", + "version": "2.0.17" + }, + "thiserror-impl": { + "checksum": "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913", + "description": "Implementation detail of the `thiserror` crate", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/thiserror-impl@2.0.17", + "supplier": "dtolnay", + "version": "2.0.17" + }, + "thread_local": { + "checksum": "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185", + "description": "Per-object thread-local storage", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/thread_local@1.1.9", + "supplier": "Amanieu", + "version": "1.1.9" + }, + "time": { + "checksum": "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d", + "description": "Date and time library. Fully interoperable with the standard library. Mostly compatible with #![no_std].", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/time@0.3.44", + "supplier": "time-rs", + "version": "0.3.44" + }, + "time-core": { + "checksum": "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b", + "description": "This crate is an implementation detail and should not be relied upon directly.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/time-core@0.1.6", + "supplier": "time-rs", + "version": "0.1.6" + }, + "time-macros": { + "checksum": "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3", + "description": "Procedural macros for the time crate.\n This crate is an implementation detail and should not be relied upon directly.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/time-macros@0.2.24", + "supplier": "time-rs", + "version": "0.2.24" + }, + "tiny-fn": { + "checksum": "9659b108631d1e1cf3e8e489f894bee40bc9d68fd6cc67ec4d4ce9b72d565228", + "description": "Type erased closures on stack", + "license": "", + "purl": "pkg:cargo/tiny-fn@0.1.9", + "supplier": "zakarumych", + "version": "0.1.9" + }, + "tinyjson": { + "checksum": "9ab95735ea2c8fd51154d01e39cf13912a78071c2d89abc49a7ef102a7dd725a", + "description": "Tiny simple JSON parser/generator", + "license": "MIT", + "purl": "pkg:cargo/tinyjson@2.5.1", + "supplier": "rhysd", + "version": "2.5.1" + }, + "tokio": { + "checksum": "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408", + "description": "An event-driven, non-blocking I/O platform for writing asynchronous I/O\nbacked applications.", + "license": "MIT", + "purl": "pkg:cargo/tokio@1.48.0", + "supplier": "tokio-rs", + "version": "1.48.0" + }, + "tokio-macros": { + "checksum": "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5", + "description": "Tokio's proc macros.", + "license": "MIT", + "purl": "pkg:cargo/tokio-macros@2.6.0", + "supplier": "tokio-rs", + "version": "2.6.0" + }, + "tokio-seqpacket": { + "checksum": "ab144b76e4ffb1d1a4e8b404073c922a243baebcc580cd75f415ae3ae9e42add", + "description": "unix seqpacket sockets for tokio", + "license": "", + "purl": "pkg:cargo/tokio-seqpacket@0.8.1", + "supplier": "de-vri-es", + "version": "0.8.1" + }, + "tokio-stream": { + "checksum": "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047", + "description": "Utilities to work with `Stream` and `tokio`.", + "license": "MIT", + "purl": "pkg:cargo/tokio-stream@0.1.17", + "supplier": "tokio-rs", + "version": "0.1.17" + }, + "tokio-util": { + "checksum": "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594", + "description": "Additional utilities for working with Tokio.", + "license": "MIT", + "purl": "pkg:cargo/tokio-util@0.7.17", + "supplier": "tokio-rs", + "version": "0.7.17" + }, + "toml": { + "checksum": "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362", + "description": "A native Rust encoder and decoder of TOML-formatted files and streams. Provides\nimplementations of the standard Serialize/Deserialize traits for TOML data to\nfacilitate deserializing and serializing Rust structures.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/toml@0.8.23", + "supplier": "toml-rs", + "version": "0.8.23" + }, + "toml_datetime": { + "checksum": "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c", + "description": "A TOML-compatible datetime type", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/toml_datetime@0.6.11", + "supplier": "toml-rs", + "version": "0.6.11" + }, + "toml_edit": { + "checksum": "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a", + "description": "Yet another format-preserving TOML parser.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/toml_edit@0.22.27", + "supplier": "toml-rs", + "version": "0.22.27" + }, + "toml_write": { + "checksum": "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801", + "description": "A low-level interface for writing out TOML", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/toml_write@0.1.2", + "supplier": "toml-rs", + "version": "0.1.2" + }, + "tonic": { + "checksum": "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203", + "description": "A gRPC over HTTP/2 implementation focused on high performance, interoperability, and flexibility.", + "license": "MIT", + "purl": "pkg:cargo/tonic@0.14.2", + "supplier": "hyperium", + "version": "0.14.2" + }, + "tonic-build": { + "checksum": "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11", + "description": "Codegen module of `tonic` gRPC implementation.", + "license": "MIT", + "purl": "pkg:cargo/tonic-build@0.12.3", + "supplier": "hyperium", + "version": "0.12.3" + }, + "tower": { + "checksum": "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9", + "description": "Tower is a library of modular and reusable components for building robust\nclients and servers.", + "license": "MIT AND Apache-2.0", + "purl": "pkg:cargo/tower@0.5.2", + "supplier": "tower-rs", + "version": "0.5.2" + }, + "tower-layer": { + "checksum": "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e", + "description": "Decorates a `Service` to allow easy composition between `Service`s.", + "license": "MIT", + "purl": "pkg:cargo/tower-layer@0.3.3", + "supplier": "tower-rs", + "version": "0.3.3" + }, + "tower-service": { + "checksum": "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3", + "description": "Trait representing an asynchronous, request / response based, client or server.", + "license": "MIT", + "purl": "pkg:cargo/tower-service@0.3.3", + "supplier": "tower-rs", + "version": "0.3.3" + }, + "tracing": { + "checksum": "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647", + "description": "Application-level tracing for Rust.", + "license": "MIT", + "purl": "pkg:cargo/tracing@0.1.43", + "supplier": "tokio-rs", + "version": "0.1.43" + }, + "tracing-attributes": { + "checksum": "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da", + "description": "Procedural macro attributes for automatically instrumenting functions.", + "license": "MIT", + "purl": "pkg:cargo/tracing-attributes@0.1.31", + "supplier": "tokio-rs", + "version": "0.1.31" + }, + "tracing-core": { + "checksum": "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c", + "description": "Core primitives for application-level tracing.", + "license": "MIT", + "purl": "pkg:cargo/tracing-core@0.1.35", + "supplier": "tokio-rs", + "version": "0.1.35" + }, + "tracing-log": { + "checksum": "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3", + "description": "Provides compatibility between `tracing` and the `log` crate.", + "license": "MIT", + "purl": "pkg:cargo/tracing-log@0.2.0", + "supplier": "tokio-rs", + "version": "0.2.0" + }, + "tracing-serde": { + "checksum": "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1", + "description": "A compatibility layer for serializing trace data with `serde`", + "license": "MIT", + "purl": "pkg:cargo/tracing-serde@0.2.0", + "supplier": "tokio-rs", + "version": "0.2.0" + }, + "tracing-subscriber": { + "checksum": "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e", + "description": "Utilities for implementing and composing `tracing` subscribers.", + "license": "MIT AND BSD-3-Clause AND BSD-2-Clause AND LicenseRef-Public-Domain", + "purl": "pkg:cargo/tracing-subscriber@0.3.22", + "supplier": "tokio-rs", + "version": "0.3.22" + }, + "try-lock": { + "checksum": "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b", + "description": "A lightweight atomic lock.", + "license": "MIT", + "purl": "pkg:cargo/try-lock@0.2.5", + "supplier": "seanmonstar", + "version": "0.2.5" + }, + "unicode-ident": { + "checksum": "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5", + "description": "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31", + "license": "Apache-2.0 AND MIT AND LicenseRef-unicode", + "purl": "pkg:cargo/unicode-ident@1.0.22", + "supplier": "dtolnay", + "version": "1.0.22" + }, + "unicode-width": { + "checksum": "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254", + "description": "Determine displayed width of `char` and `str` types\naccording to Unicode Standard Annex #11 rules.", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/unicode-width@0.2.2", + "supplier": "unicode-rs", + "version": "0.2.2" + }, + "unit-prefix": { + "checksum": "81e544489bf3d8ef66c953931f56617f423cd4b5494be343d9b9d3dda037b9a3", + "description": "Format numbers with metric and binary unit prefixes", + "license": "MIT", + "purl": "pkg:cargo/unit-prefix@0.5.2", + "supplier": "", + "version": "0.5.2" + }, + "utf8parse": { + "checksum": "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821", + "description": "Table-driven UTF-8 parser", + "license": "Apache-2.0 OR MIT", + "purl": "pkg:cargo/utf8parse@0.2.2", + "supplier": "alacritty", + "version": "0.2.2" + }, + "uuid": { + "checksum": "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a", + "description": "A library to generate and parse UUIDs.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/uuid@1.19.0", + "supplier": "uuid-rs", + "version": "1.19.0" + }, + "valuable": { + "checksum": "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65", + "description": "Object-safe value inspection, used to pass un-typed structured data across trait-object boundaries.", + "license": "MIT", + "purl": "pkg:cargo/valuable@0.1.1", + "supplier": "tokio-rs", + "version": "0.1.1" + }, + "version_check": { + "checksum": "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a", + "description": "Tiny crate to check the version of the installed/running rustc.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/version_check@0.9.5", + "supplier": "SergioBenitez", + "version": "0.9.5" + }, + "want": { + "checksum": "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e", + "description": "Detect when another Future wants a result.", + "license": "MIT", + "purl": "pkg:cargo/want@0.3.1", + "supplier": "seanmonstar", + "version": "0.3.1" + }, + "wasi": { + "checksum": "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b", + "description": "WASI API bindings for Rust", + "license": "(Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT)", + "purl": "pkg:cargo/wasi@0.11.1-wasi-snapshot-preview1", + "supplier": "bytecodealliance", + "version": "0.11.1-wasi-snapshot-preview1" + }, + "wasip2": { + "checksum": "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7", + "description": "WASIp2 API bindings for Rust", + "license": "Apache-2.0 WITH LLVM-exception AND MIT", + "purl": "pkg:cargo/wasip2@1.0.1-wasi-0.2.4", + "supplier": "bytecodealliance", + "version": "1.0.1-wasi-0.2.4" + }, + "wasm-bindgen": { + "checksum": "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd", + "description": "Easy support for interacting between JS and Rust.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/wasm-bindgen@0.2.106", + "supplier": "wasm-bindgen", + "version": "0.2.106" + }, + "wasm-bindgen-macro": { + "checksum": "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3", + "description": "Definition of the `#[wasm_bindgen]` attribute, an internal dependency", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/wasm-bindgen-macro@0.2.106", + "supplier": "wasm-bindgen", + "version": "0.2.106" + }, + "wasm-bindgen-macro-support": { + "checksum": "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40", + "description": "Implementation APIs for the `#[wasm_bindgen]` attribute", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/wasm-bindgen-macro-support@0.2.106", + "supplier": "wasm-bindgen", + "version": "0.2.106" + }, + "wasm-bindgen-shared": { + "checksum": "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4", + "description": "Shared support between wasm-bindgen and wasm-bindgen cli, an internal\ndependency.", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/wasm-bindgen-shared@0.2.106", + "supplier": "wasm-bindgen", + "version": "0.2.106" + }, + "web-time": { + "checksum": "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb", + "description": "Drop-in replacement for std::time for Wasm in browsers", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/web-time@1.1.0", + "supplier": "daxpedda", + "version": "1.1.0" + }, + "which": { + "checksum": "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7", + "description": "A Rust equivalent of Unix command \"which\". Locate installed executable in cross platforms.", + "license": "MIT", + "purl": "pkg:cargo/which@4.4.2", + "supplier": "harryfei", + "version": "4.4.2" + }, + "winapi": { + "checksum": "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419", + "description": "Raw FFI bindings for all of Windows API.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/winapi@0.3.9", + "supplier": "retep998", + "version": "0.3.9" + }, + "winapi-i686-pc-windows-gnu": { + "checksum": "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6", + "description": "Import libraries for the i686-pc-windows-gnu target. Please don't use this crate directly, depend on winapi instead.", + "license": "Apache-2.0 AND MIT", + "purl": "pkg:cargo/winapi-i686-pc-windows-gnu@0.4.0", + "supplier": "retep998", + "version": "0.4.0" + }, + "winapi-x86_64-pc-windows-gnu": { + "checksum": "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f", + "description": "Import libraries for the x86_64-pc-windows-gnu target. Please don't use this crate directly, depend on winapi instead.", + "license": "Apache-2.0 AND MIT", + "purl": "pkg:cargo/winapi-x86_64-pc-windows-gnu@0.4.0", + "supplier": "retep998", + "version": "0.4.0" + }, + "windows": { + "checksum": "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893", + "description": "Rust for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows@0.61.3", + "supplier": "microsoft", + "version": "0.61.3" + }, + "windows-collections": { + "checksum": "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8", + "description": "Windows collection types", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-collections@0.2.0", + "supplier": "microsoft", + "version": "0.2.0" + }, + "windows-core": { + "checksum": "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3", + "description": "Core type support for COM and Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-core@0.61.2", + "supplier": "microsoft", + "version": "0.61.2" + }, + "windows-future": { + "checksum": "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e", + "description": "Windows async types", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/windows-future@0.2.1", + "supplier": "microsoft", + "version": "0.2.1" + }, + "windows-implement": { + "checksum": "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf", + "description": "The implement macro for the Windows crates", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-implement@0.60.2", + "supplier": "microsoft", + "version": "0.60.2" + }, + "windows-interface": { + "checksum": "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358", + "description": "The interface macro for the Windows crates", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-interface@0.59.3", + "supplier": "microsoft", + "version": "0.59.3" + }, + "windows-link": { + "checksum": "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5", + "description": "Linking for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-link@0.2.1", + "supplier": "microsoft", + "version": "0.2.1" + }, + "windows-numerics": { + "checksum": "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1", + "description": "Windows numeric types", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-numerics@0.2.0", + "supplier": "microsoft", + "version": "0.2.0" + }, + "windows-result": { + "checksum": "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6", + "description": "Windows error handling", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-result@0.3.4", + "supplier": "microsoft", + "version": "0.3.4" + }, + "windows-strings": { + "checksum": "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57", + "description": "Windows string types", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-strings@0.4.2", + "supplier": "microsoft", + "version": "0.4.2" + }, + "windows-sys": { + "checksum": "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc", + "description": "Rust for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-sys@0.61.2", + "supplier": "microsoft", + "version": "0.61.2" + }, + "windows-targets": { + "checksum": "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3", + "description": "Import libs for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows-targets@0.53.5", + "supplier": "microsoft", + "version": "0.53.5" + }, + "windows-threading": { + "checksum": "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6", + "description": "Windows threading", + "license": "Apache-2.0 AND MIT AND Apache-2.0 AND MIT", + "purl": "pkg:cargo/windows-threading@0.1.0", + "supplier": "microsoft", + "version": "0.1.0" + }, + "windows_aarch64_gnullvm": { + "checksum": "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_aarch64_gnullvm@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "windows_aarch64_msvc": { + "checksum": "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_aarch64_msvc@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "windows_i686_gnu": { + "checksum": "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_i686_gnu@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "windows_i686_gnullvm": { + "checksum": "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_i686_gnullvm@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "windows_i686_msvc": { + "checksum": "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_i686_msvc@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "windows_x86_64_gnu": { + "checksum": "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_x86_64_gnu@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "windows_x86_64_gnullvm": { + "checksum": "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_x86_64_gnullvm@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "windows_x86_64_msvc": { + "checksum": "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650", + "description": "Import lib for Windows", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/windows_x86_64_msvc@0.53.1", + "supplier": "microsoft", + "version": "0.53.1" + }, + "winnow": { + "checksum": "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829", + "description": "A byte-oriented, zero-copy, parser combinators library", + "license": "MIT", + "purl": "pkg:cargo/winnow@0.7.14", + "supplier": "winnow-rs", + "version": "0.7.14" + }, + "wit-bindgen": { + "checksum": "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59", + "description": "Rust bindings generator and runtime support for WIT and the component model.\nUsed when compiling Rust programs to the component model.", + "license": "(Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT)", + "purl": "pkg:cargo/wit-bindgen@0.46.0", + "supplier": "bytecodealliance", + "version": "0.46.0" + }, + "yansi": { + "checksum": "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049", + "description": "A dead simple ANSI terminal color painting library.", + "license": "MIT OR Apache-2.0", + "purl": "pkg:cargo/yansi@1.0.1", + "supplier": "SergioBenitez", + "version": "1.0.1" + }, + "zerocopy": { + "checksum": "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3", + "description": "Zerocopy makes zero-cost memory manipulation effortless. We write \"unsafe\" so you don't have to.", + "license": "Apache-2.0 AND BSD-2-Clause AND BSD-3-Clause AND MIT AND (Apache-2.0 AND BSD-3-Clause AND MIT) AND (Apache-2.0 AND BSD-2-Clause AND MIT) AND Apache-2.0 AND BSD-2-Clause AND MIT", + "purl": "pkg:cargo/zerocopy@0.8.31", + "supplier": "google", + "version": "0.8.31" + }, + "zerocopy-derive": { + "checksum": "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a", + "description": "Custom derive for traits from the zerocopy crate", + "license": "Apache-2.0 AND BSD-2-Clause AND MIT AND (Apache-2.0 AND BSD-2-Clause AND BSD-3-Clause AND MIT) AND Apache-2.0 AND BSD-2-Clause AND MIT AND (Apache-2.0 AND BSD-3-Clause AND MIT)", + "purl": "pkg:cargo/zerocopy-derive@0.8.31", + "supplier": "google", + "version": "0.8.31" + }, + "zmij": { + "checksum": "3ff05f8caa9038894637571ae6b9e29466c1f4f829d26c9b28f869a29cbe3445", + "description": "A double-to-string conversion algorithm based on Schubfach and yy", + "license": "MIT", + "purl": "pkg:cargo/zmij@1.0.19", + "supplier": "dtolnay", + "version": "1.0.19" + } +} \ No newline at end of file diff --git a/sbom/tests/fixtures/kyron_cdxgen.cdx.json b/sbom/tests/fixtures/kyron_cdxgen.cdx.json new file mode 100755 index 0000000..deacfcf --- /dev/null +++ b/sbom/tests/fixtures/kyron_cdxgen.cdx.json @@ -0,0 +1 @@ +{"bomFormat":"CycloneDX","specVersion":"1.6","serialNumber":"urn:uuid:46cd3c9c-6dfc-4253-82f0-27ebfb92e2c9","version":1,"metadata":{"timestamp":"2026-03-03T17:44:35Z","tools":{"components":[{"group":"@cyclonedx","name":"cdxgen","version":"12.0.0","purl":"pkg:npm/%40cyclonedx/cdxgen@12.0.0","type":"application","bom-ref":"pkg:npm/@cyclonedx/cdxgen@12.0.0","publisher":"OWASP Foundation","authors":[{"name":"OWASP Foundation"}]}]},"authors":[{"name":"OWASP Foundation"}],"lifecycles":[{"phase":"build"}],"component":{"group":"","name":"external","version":"latest","type":"application","bom-ref":"pkg:generic/external@latest","purl":"pkg:generic/external@latest"},"properties":[]},"components":[],"dependencies":[],"annotations":[{"bom-ref":"metadata-annotations","subjects":["pkg:generic/external@latest"],"annotator":{"component":{"group":"@cyclonedx","name":"cdxgen","version":"12.0.0","purl":"pkg:npm/%40cyclonedx/cdxgen@12.0.0","type":"application","bom-ref":"pkg:npm/@cyclonedx/cdxgen@12.0.0","publisher":"OWASP Foundation","authors":[{"name":"OWASP Foundation"}]}},"timestamp":"2026-03-03T17:44:35Z","text":"This Software Bill-of-Materials (SBOM) document was created on Tuesday, March 3, 2026 with cdxgen. The data was captured during the build lifecycle phase. The document describes an application named 'external'. BOM file is empty without components."}]} \ No newline at end of file diff --git a/sbom/tests/fixtures/kyron_input.json b/sbom/tests/fixtures/kyron_input.json new file mode 100644 index 0000000..73a459c --- /dev/null +++ b/sbom/tests/fixtures/kyron_input.json @@ -0,0 +1,570 @@ +{ + "config": { + "component_name": "score_kyron", + "component_version": "", + "generation_context": "build", + "namespace": "https://eclipse.dev/score", + "producer_name": "Eclipse Foundation", + "producer_url": "https://projects.eclipse.org/projects/automotive.score", + "sbom_authors": [ + "Eclipse SCORE Team" + ], + "sbom_tools": [] + }, + "dep_module_files": [], + "exclude_patterns": [ + "rules_rust", + "rules_cc", + "bazel_tools", + "platforms", + "bazel_skylib", + "rules_python", + "rules_proto", + "protobuf", + "local_config_", + "remote_" + ], + "external_dep_edges": [ + "rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__serde_core-1.0.228::rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__unicode-ident-1.0.22::rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__proc-macro2-1.0.106::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__proc-macro2-1.0.106::rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__quote-1.0.44::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__quote-1.0.44::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__serde_derive-1.0.228", + "rules_rust++crate+crate_index__serde-1.0.228::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__serde-1.0.228::rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__serde-1.0.228::rules_rust++crate+crate_index__serde_derive-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__enum-iterator-derive-1.5.0", + "rules_rust++crate+crate_index__enum-iterator-2.3.0::rules_rust++crate+crate_index__enum-iterator-2.3.0", + "rules_rust++crate+crate_index__enum-iterator-2.3.0::rules_rust++crate+crate_index__enum-iterator-derive-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__bitflags-2.10.0::rules_rust++crate+crate_index__bitflags-2.10.0", + "rules_rust++crate+crate_index__memchr-2.8.0::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__minimal-lexical-0.2.1::rules_rust++crate+crate_index__minimal-lexical-0.2.1", + "rules_rust++crate+crate_index__nom-7.1.3::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__nom-7.1.3::rules_rust++crate+crate_index__minimal-lexical-0.2.1", + "rules_rust++crate+crate_index__nom-7.1.3::rules_rust++crate+crate_index__nom-7.1.3", + "rules_rust++crate+crate_index__cexpr-0.6.0::rules_rust++crate+crate_index__nom-7.1.3", + "rules_rust++crate+crate_index__cexpr-0.6.0::rules_rust++crate+crate_index__cexpr-0.6.0", + "rules_rust++crate+crate_index__glob-0.3.3::rules_rust++crate+crate_index__glob-0.3.3", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__glob-0.3.3", + "rules_rust++crate+crate_index__libc-0.2.180::rules_rust++crate+crate_index__libc-0.2.180", + "rules_rust++crate+crate_index__cfg-if-1.0.4::rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__libloading-0.8.9::rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__libloading-0.8.9::rules_rust++crate+crate_index__libloading-0.8.9", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__clang-sys-1.8.1", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__libc-0.2.180", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__libloading-0.8.9", + "rules_rust++crate+crate_index__either-1.15.0::rules_rust++crate+crate_index__either-1.15.0", + "rules_rust++crate+crate_index__itertools-0.13.0::rules_rust++crate+crate_index__either-1.15.0", + "rules_rust++crate+crate_index__itertools-0.13.0::rules_rust++crate+crate_index__itertools-0.13.0", + "rules_rust++crate+crate_index__log-0.4.29::rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__prettyplease-0.2.37::rules_rust++crate+crate_index__prettyplease-0.2.37", + "rules_rust++crate+crate_index__prettyplease-0.2.37::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__prettyplease-0.2.37::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__aho-corasick-1.1.4::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__aho-corasick-1.1.4::rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-syntax-0.8.9::rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__regex-automata-0.4.14", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__regex-automata-0.4.14", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__regex-1.12.3", + "rules_rust++crate+crate_index__rustc-hash-2.1.1::rules_rust++crate+crate_index__rustc-hash-2.1.1", + "rules_rust++crate+crate_index__shlex-1.3.0::rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__bitflags-2.10.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__cexpr-0.6.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__clang-sys-1.8.1", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__itertools-0.13.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__prettyplease-0.2.37", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__regex-1.12.3", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__rustc-hash-2.1.1", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__find-msvc-tools-0.1.9::rules_rust++crate+crate_index__find-msvc-tools-0.1.9", + "rules_rust++crate+crate_index__cc-1.2.55::rules_rust++crate+crate_index__find-msvc-tools-0.1.9", + "rules_rust++crate+crate_index__cc-1.2.55::rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__cc-1.2.55::rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__lazy_static-1.5.0::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__tiny-fn-0.1.9::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__enum-iterator-2.3.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0", + "rules_rust++crate+crate_index__pin-project-lite-0.2.16::rules_rust++crate+crate_index__pin-project-lite-0.2.16", + "rules_rust++crate+crate_index__once_cell-1.21.3::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-core-0.1.36::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-core-0.1.36::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__tracing-attributes-0.1.31", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__pin-project-lite-0.2.16", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__tracing-0.1.44", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__tracing-attributes-0.1.31", + "rules_rust++crate+crate_index__nu-ansi-term-0.50.3::rules_rust++crate+crate_index__nu-ansi-term-0.50.3", + "rules_rust++crate+crate_index__itoa-1.0.17::rules_rust++crate+crate_index__itoa-1.0.17", + "rules_rust++crate+crate_index__zmij-1.0.19::rules_rust++crate+crate_index__zmij-1.0.19", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__itoa-1.0.17", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__serde_json-1.0.149", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__zmij-1.0.19", + "rules_rust++crate+crate_index__sharded-slab-0.1.7::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__sharded-slab-0.1.7::rules_rust++crate+crate_index__sharded-slab-0.1.7", + "rules_rust++crate+crate_index__smallvec-1.15.1::rules_rust++crate+crate_index__smallvec-1.15.1", + "rules_rust++crate+crate_index__thread_local-1.1.9::rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__thread_local-1.1.9::rules_rust++crate+crate_index__thread_local-1.1.9", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__tracing-log-0.2.0", + "rules_rust++crate+crate_index__tracing-serde-0.2.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__tracing-serde-0.2.0::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-serde-0.2.0::rules_rust++crate+crate_index__tracing-serde-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__nu-ansi-term-0.50.3", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__serde_json-1.0.149", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__sharded-slab-0.1.7", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__smallvec-1.15.1", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__thread_local-1.1.9", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-log-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-serde-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-subscriber-0.3.22", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__tracing-0.1.44", + "score_kyron+::rules_rust++crate+crate_index__tracing-subscriber-0.3.22", + "score_kyron+::score_kyron+", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__byteorder-1.5.0::rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__cdr-0.2.4::rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__cdr-0.2.4::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__cdr-0.2.4::rules_rust++crate+crate_index__cdr-0.2.4", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__thiserror-impl-2.0.18", + "rules_rust++crate+crate_index__thiserror-2.0.18::rules_rust++crate+crate_index__thiserror-2.0.18", + "rules_rust++crate+crate_index__thiserror-2.0.18::rules_rust++crate+crate_index__thiserror-impl-2.0.18", + "rules_rust++crate+crate_index__cobs-0.3.0::rules_rust++crate+crate_index__thiserror-2.0.18", + "rules_rust++crate+crate_index__cobs-0.3.0::rules_rust++crate+crate_index__cobs-0.3.0", + "rules_rust++crate+crate_index__hash32-0.2.1::rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__hash32-0.2.1::rules_rust++crate+crate_index__hash32-0.2.1", + "rules_rust++crate+crate_index__semver-1.0.27::rules_rust++crate+crate_index__semver-1.0.27", + "rules_rust++crate+crate_index__rustc_version-0.4.1::rules_rust++crate+crate_index__semver-1.0.27", + "rules_rust++crate+crate_index__rustc_version-0.4.1::rules_rust++crate+crate_index__rustc_version-0.4.1", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__rustc_version-0.4.1", + "rules_rust++crate+crate_index__stable_deref_trait-1.2.1::rules_rust++crate+crate_index__stable_deref_trait-1.2.1", + "rules_rust++crate+crate_index__scopeguard-1.2.0::rules_rust++crate+crate_index__scopeguard-1.2.0", + "rules_rust++crate+crate_index__lock_api-0.4.14::rules_rust++crate+crate_index__scopeguard-1.2.0", + "rules_rust++crate+crate_index__lock_api-0.4.14::rules_rust++crate+crate_index__lock_api-0.4.14", + "rules_rust++crate+crate_index__spin-0.9.8::rules_rust++crate+crate_index__lock_api-0.4.14", + "rules_rust++crate+crate_index__spin-0.9.8::rules_rust++crate+crate_index__spin-0.9.8", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__hash32-0.2.1", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__heapless-0.7.17", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__stable_deref_trait-1.2.1", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__spin-0.9.8", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__postcard-derive-0.2.2", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__cobs-0.3.0", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__heapless-0.7.17", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__postcard-1.1.3", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__postcard-derive-0.2.2", + "rules_rust++crate+crate_index__sha1_smol-1.0.1::rules_rust++crate+crate_index__sha1_smol-1.0.1", + "rules_rust++crate+crate_index__serde_spanned-0.6.9::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__serde_spanned-0.6.9::rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml_datetime-0.6.11::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__toml_datetime-0.6.11::rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__equivalent-1.0.2::rules_rust++crate+crate_index__equivalent-1.0.2", + "rules_rust++crate+crate_index__hashbrown-0.16.1::rules_rust++crate+crate_index__hashbrown-0.16.1", + "rules_rust++crate+crate_index__indexmap-2.13.0::rules_rust++crate+crate_index__equivalent-1.0.2", + "rules_rust++crate+crate_index__indexmap-2.13.0::rules_rust++crate+crate_index__hashbrown-0.16.1", + "rules_rust++crate+crate_index__indexmap-2.13.0::rules_rust++crate+crate_index__indexmap-2.13.0", + "rules_rust++crate+crate_index__toml_write-0.1.2::rules_rust++crate+crate_index__toml_write-0.1.2", + "rules_rust++crate+crate_index__winnow-0.7.14::rules_rust++crate+crate_index__winnow-0.7.14", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__indexmap-2.13.0", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__toml_write-0.1.2", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__winnow-0.7.14", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__toml_edit-0.22.27", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__toml_edit-0.22.27", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__cdr-0.2.4", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__postcard-1.1.3", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__sha1_smol-1.0.1", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "score_kyron+::rules_rust++crate+crate_index__quote-1.0.44", + "score_kyron+::rules_rust++crate+crate_index__syn-2.0.114", + "score_kyron+::rules_rust++crate+crate_index__libc-0.2.180", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0" + ], + "external_repos": [ + "rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__serde_derive-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0", + "rules_rust++crate+crate_index__enum-iterator-2.3.0", + "rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__bitflags-2.10.0", + "rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__minimal-lexical-0.2.1", + "rules_rust++crate+crate_index__nom-7.1.3", + "rules_rust++crate+crate_index__cexpr-0.6.0", + "rules_rust++crate+crate_index__glob-0.3.3", + "rules_rust++crate+crate_index__clang-sys-1.8.1", + "rules_rust++crate+crate_index__libc-0.2.180", + "rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__libloading-0.8.9", + "rules_rust++crate+crate_index__either-1.15.0", + "rules_rust++crate+crate_index__itertools-0.13.0", + "rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__prettyplease-0.2.37", + "rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-automata-0.4.14", + "rules_rust++crate+crate_index__regex-1.12.3", + "rules_rust++crate+crate_index__rustc-hash-2.1.1", + "rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__find-msvc-tools-0.1.9", + "rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0", + "rules_rust++crate+crate_index__pin-project-lite-0.2.16", + "rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31", + "rules_rust++crate+crate_index__tracing-0.1.44", + "rules_rust++crate+crate_index__nu-ansi-term-0.50.3", + "rules_rust++crate+crate_index__itoa-1.0.17", + "rules_rust++crate+crate_index__serde_json-1.0.149", + "rules_rust++crate+crate_index__zmij-1.0.19", + "rules_rust++crate+crate_index__sharded-slab-0.1.7", + "rules_rust++crate+crate_index__smallvec-1.15.1", + "rules_rust++crate+crate_index__thread_local-1.1.9", + "rules_rust++crate+crate_index__tracing-log-0.2.0", + "rules_rust++crate+crate_index__tracing-serde-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22", + "score_kyron+", + "rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__cdr-0.2.4", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0", + "rules_rust++crate+crate_index__thiserror-2.0.18", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18", + "rules_rust++crate+crate_index__cobs-0.3.0", + "rules_rust++crate+crate_index__hash32-0.2.1", + "rules_rust++crate+crate_index__semver-1.0.27", + "rules_rust++crate+crate_index__rustc_version-0.4.1", + "rules_rust++crate+crate_index__heapless-0.7.17", + "rules_rust++crate+crate_index__stable_deref_trait-1.2.1", + "rules_rust++crate+crate_index__scopeguard-1.2.0", + "rules_rust++crate+crate_index__lock_api-0.4.14", + "rules_rust++crate+crate_index__spin-0.9.8", + "rules_rust++crate+crate_index__postcard-derive-0.2.2", + "rules_rust++crate+crate_index__postcard-1.1.3", + "rules_rust++crate+crate_index__sha1_smol-1.0.1", + "rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__equivalent-1.0.2", + "rules_rust++crate+crate_index__hashbrown-0.16.1", + "rules_rust++crate+crate_index__indexmap-2.13.0", + "rules_rust++crate+crate_index__toml_write-0.1.2", + "rules_rust++crate+crate_index__winnow-0.7.14", + "rules_rust++crate+crate_index__toml_edit-0.22.27", + "rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0" + ], + "module_lockfiles": [ + "__FIXTURE_REFINT_LOCK__" + ], + "target_labels": [ + "@@score_kyron+//src/kyron:libkyron", + "@@score_kyron+//src/kyron-foundation:libkyron_foundation" + ], + "transitive_deps": [ + "@@rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0//:iceoryx2_pal_concurrency_sync_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0//:iceoryx2_bb_elementary_traits_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0//:iceoryx2_bb_elementary_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0//:iceoryx2_bb_log_qnx8", + "@@rules_rust++crate+crate_index__serde-1.0.228//:_bs", + "@@rules_rust++crate+crate_index__serde_core-1.0.228//:_bs", + "@@rules_rust++crate+crate_index__serde_core-1.0.228//:serde_core", + "@@rules_rust++crate+crate_index__proc-macro2-1.0.106//:_bs", + "@@rules_rust++crate+crate_index__unicode-ident-1.0.22//:unicode_ident", + "@@rules_rust++crate+crate_index__proc-macro2-1.0.106//:proc_macro2", + "@@rules_rust++crate+crate_index__quote-1.0.44//:_bs", + "@@rules_rust++crate+crate_index__quote-1.0.44//:quote", + "@@rules_rust++crate+crate_index__syn-2.0.114//:syn", + "@@rules_rust++crate+crate_index__serde_derive-1.0.228//:serde_derive", + "@@rules_rust++crate+crate_index__serde-1.0.228//:serde", + "@@rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0//:iceoryx2_bb_derive_macros_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0//:iceoryx2_bb_container_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0//:iceoryx2_bb_lock_free_qnx8", + "@@rules_rust++crate+crate_index__enum-iterator-derive-1.5.0//:enum_iterator_derive", + "@@rules_rust++crate+crate_index__enum-iterator-2.3.0//:enum_iterator", + "@@rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0//:iceoryx2_pal_configuration_qnx8", + "@@rules_rust++crate+crate_index__bindgen-0.72.1//:_bs", + "@@rules_rust++crate+crate_index__bitflags-2.10.0//:bitflags", + "@@rules_rust++crate+crate_index__memchr-2.8.0//:memchr", + "@@rules_rust++crate+crate_index__minimal-lexical-0.2.1//:minimal_lexical", + "@@rules_rust++crate+crate_index__nom-7.1.3//:nom", + "@@rules_rust++crate+crate_index__cexpr-0.6.0//:cexpr", + "@@rules_rust++crate+crate_index__glob-0.3.3//:glob", + "@@rules_rust++crate+crate_index__clang-sys-1.8.1//:_bs", + "@@rules_rust++crate+crate_index__libc-0.2.180//:_bs", + "@@rules_rust++crate+crate_index__libc-0.2.180//:libc", + "@@rules_rust++crate+crate_index__cfg-if-1.0.4//:cfg_if", + "@@rules_rust++crate+crate_index__libloading-0.8.9//:libloading", + "@@rules_rust++crate+crate_index__clang-sys-1.8.1//:clang_sys", + "@@rules_rust++crate+crate_index__either-1.15.0//:either", + "@@rules_rust++crate+crate_index__itertools-0.13.0//:itertools", + "@@rules_rust++crate+crate_index__log-0.4.29//:log", + "@@rules_rust++crate+crate_index__prettyplease-0.2.37//:_bs", + "@@rules_rust++crate+crate_index__prettyplease-0.2.37//:prettyplease", + "@@rules_rust++crate+crate_index__aho-corasick-1.1.4//:aho_corasick", + "@@rules_rust++crate+crate_index__regex-syntax-0.8.9//:regex_syntax", + "@@rules_rust++crate+crate_index__regex-automata-0.4.14//:regex_automata", + "@@rules_rust++crate+crate_index__regex-1.12.3//:regex", + "@@rules_rust++crate+crate_index__rustc-hash-2.1.1//:rustc_hash", + "@@rules_rust++crate+crate_index__shlex-1.3.0//:shlex", + "@@rules_rust++crate+crate_index__bindgen-0.72.1//:bindgen", + "@@rules_rust++crate+crate_index__find-msvc-tools-0.1.9//:find_msvc_tools", + "@@rules_rust++crate+crate_index__cc-1.2.55//:cc", + "@@rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0//:_bs", + "@@rules_rust++crate+crate_index__lazy_static-1.5.0//:lazy_static", + "@@rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0//:iceoryx2_pal_posix_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0//:iceoryx2_bb_system_types_qnx8", + "@@rules_rust++crate+crate_index__tiny-fn-0.1.9//:tiny_fn", + "@@rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0//:iceoryx2_bb_posix_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0//:iceoryx2_bb_memory_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0//:iceoryx2_pal_testing_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0//:iceoryx2_bb_testing_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0//:iceoryx2_bb_threadsafe_qnx8", + "@@rules_rust++crate+crate_index__pin-project-lite-0.2.16//:pin_project_lite", + "@@rules_rust++crate+crate_index__once_cell-1.21.3//:once_cell", + "@@rules_rust++crate+crate_index__tracing-core-0.1.36//:tracing_core", + "@@rules_rust++crate+crate_index__tracing-attributes-0.1.31//:tracing_attributes", + "@@rules_rust++crate+crate_index__tracing-0.1.44//:tracing", + "@@rules_rust++crate+crate_index__nu-ansi-term-0.50.3//:nu_ansi_term", + "@@rules_rust++crate+crate_index__itoa-1.0.17//:itoa", + "@@rules_rust++crate+crate_index__serde_json-1.0.149//:_bs", + "@@rules_rust++crate+crate_index__zmij-1.0.19//:_bs", + "@@rules_rust++crate+crate_index__zmij-1.0.19//:zmij", + "@@rules_rust++crate+crate_index__serde_json-1.0.149//:serde_json", + "@@rules_rust++crate+crate_index__sharded-slab-0.1.7//:sharded_slab", + "@@rules_rust++crate+crate_index__smallvec-1.15.1//:smallvec", + "@@rules_rust++crate+crate_index__thread_local-1.1.9//:thread_local", + "@@rules_rust++crate+crate_index__tracing-log-0.2.0//:tracing_log", + "@@rules_rust++crate+crate_index__tracing-serde-0.2.0//:tracing_serde", + "@@rules_rust++crate+crate_index__tracing-subscriber-0.3.22//:tracing_subscriber", + "@@score_kyron+//src/kyron-foundation:libkyron_foundation", + "@@rules_rust++crate+crate_index__byteorder-1.5.0//:byteorder", + "@@rules_rust++crate+crate_index__cdr-0.2.4//:cdr", + "@@rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0//:_bs", + "@@rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0//:iceoryx2_pal_os_api_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0//:iceoryx2_bb_linux_qnx8", + "@@rules_rust++crate+crate_index__thiserror-2.0.18//:_bs", + "@@rules_rust++crate+crate_index__thiserror-impl-2.0.18//:thiserror_impl", + "@@rules_rust++crate+crate_index__thiserror-2.0.18//:thiserror", + "@@rules_rust++crate+crate_index__cobs-0.3.0//:cobs", + "@@rules_rust++crate+crate_index__hash32-0.2.1//:hash32", + "@@rules_rust++crate+crate_index__semver-1.0.27//:semver", + "@@rules_rust++crate+crate_index__rustc_version-0.4.1//:rustc_version", + "@@rules_rust++crate+crate_index__heapless-0.7.17//:_bs", + "@@rules_rust++crate+crate_index__stable_deref_trait-1.2.1//:stable_deref_trait", + "@@rules_rust++crate+crate_index__scopeguard-1.2.0//:scopeguard", + "@@rules_rust++crate+crate_index__lock_api-0.4.14//:lock_api", + "@@rules_rust++crate+crate_index__spin-0.9.8//:spin", + "@@rules_rust++crate+crate_index__heapless-0.7.17//:heapless", + "@@rules_rust++crate+crate_index__postcard-derive-0.2.2//:postcard_derive", + "@@rules_rust++crate+crate_index__postcard-1.1.3//:postcard", + "@@rules_rust++crate+crate_index__sha1_smol-1.0.1//:sha1_smol", + "@@rules_rust++crate+crate_index__serde_spanned-0.6.9//:serde_spanned", + "@@rules_rust++crate+crate_index__toml_datetime-0.6.11//:toml_datetime", + "@@rules_rust++crate+crate_index__equivalent-1.0.2//:equivalent", + "@@rules_rust++crate+crate_index__hashbrown-0.16.1//:hashbrown", + "@@rules_rust++crate+crate_index__indexmap-2.13.0//:indexmap", + "@@rules_rust++crate+crate_index__toml_write-0.1.2//:toml_write", + "@@rules_rust++crate+crate_index__winnow-0.7.14//:winnow", + "@@rules_rust++crate+crate_index__toml_edit-0.22.27//:toml_edit", + "@@rules_rust++crate+crate_index__toml-0.8.23//:toml", + "@@rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0//:iceoryx2_cal_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0//:iceoryx2_qnx8", + "@@score_kyron+//src/kyron-macros:runtime_macros", + "@@score_kyron+//src/kyron:libkyron" + ] +} \ No newline at end of file diff --git a/sbom/tests/fixtures/orchestrator_cdxgen.cdx.json b/sbom/tests/fixtures/orchestrator_cdxgen.cdx.json new file mode 100755 index 0000000..4b5b9df --- /dev/null +++ b/sbom/tests/fixtures/orchestrator_cdxgen.cdx.json @@ -0,0 +1 @@ +{"bomFormat":"CycloneDX","specVersion":"1.6","serialNumber":"urn:uuid:ce91ca80-c29c-4c55-a233-3253be18cc6f","version":1,"metadata":{"timestamp":"2026-03-03T18:05:18Z","tools":{"components":[{"group":"@cyclonedx","name":"cdxgen","version":"12.0.0","purl":"pkg:npm/%40cyclonedx/cdxgen@12.0.0","type":"application","bom-ref":"pkg:npm/@cyclonedx/cdxgen@12.0.0","publisher":"OWASP Foundation","authors":[{"name":"OWASP Foundation"}]}]},"authors":[{"name":"OWASP Foundation"}],"lifecycles":[{"phase":"build"}],"component":{"group":"","name":"external","version":"latest","type":"application","bom-ref":"pkg:generic/external@latest","purl":"pkg:generic/external@latest"},"properties":[]},"components":[],"dependencies":[],"annotations":[{"bom-ref":"metadata-annotations","subjects":["pkg:generic/external@latest"],"annotator":{"component":{"group":"@cyclonedx","name":"cdxgen","version":"12.0.0","purl":"pkg:npm/%40cyclonedx/cdxgen@12.0.0","type":"application","bom-ref":"pkg:npm/@cyclonedx/cdxgen@12.0.0","publisher":"OWASP Foundation","authors":[{"name":"OWASP Foundation"}]}},"timestamp":"2026-03-03T18:05:18Z","text":"This Software Bill-of-Materials (SBOM) document was created on Tuesday, March 3, 2026 with cdxgen. The data was captured during the build lifecycle phase. The document describes an application named 'external'. BOM file is empty without components."}]} \ No newline at end of file diff --git a/sbom/tests/fixtures/orchestrator_input.json b/sbom/tests/fixtures/orchestrator_input.json new file mode 100644 index 0000000..8417dd7 --- /dev/null +++ b/sbom/tests/fixtures/orchestrator_input.json @@ -0,0 +1,576 @@ +{ + "config": { + "component_name": "score_orchestrator", + "component_version": "", + "generation_context": "build", + "namespace": "https://eclipse.dev/score", + "producer_name": "Eclipse Foundation", + "producer_url": "https://projects.eclipse.org/projects/automotive.score", + "sbom_authors": [ + "Eclipse SCORE Team" + ], + "sbom_tools": [] + }, + "dep_module_files": [], + "exclude_patterns": [ + "rules_rust", + "rules_cc", + "bazel_tools", + "platforms", + "bazel_skylib", + "rules_python", + "rules_proto", + "protobuf", + "local_config_", + "remote_" + ], + "external_dep_edges": [ + "rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__serde_core-1.0.228::rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__unicode-ident-1.0.22::rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__proc-macro2-1.0.106::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__proc-macro2-1.0.106::rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__quote-1.0.44::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__quote-1.0.44::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__syn-2.0.114::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__serde_derive-1.0.228::rules_rust++crate+crate_index__serde_derive-1.0.228", + "rules_rust++crate+crate_index__serde-1.0.228::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__serde-1.0.228::rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__serde-1.0.228::rules_rust++crate+crate_index__serde_derive-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0::rules_rust++crate+crate_index__enum-iterator-derive-1.5.0", + "rules_rust++crate+crate_index__enum-iterator-2.3.0::rules_rust++crate+crate_index__enum-iterator-2.3.0", + "rules_rust++crate+crate_index__enum-iterator-2.3.0::rules_rust++crate+crate_index__enum-iterator-derive-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__bitflags-2.10.0::rules_rust++crate+crate_index__bitflags-2.10.0", + "rules_rust++crate+crate_index__memchr-2.8.0::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__minimal-lexical-0.2.1::rules_rust++crate+crate_index__minimal-lexical-0.2.1", + "rules_rust++crate+crate_index__nom-7.1.3::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__nom-7.1.3::rules_rust++crate+crate_index__minimal-lexical-0.2.1", + "rules_rust++crate+crate_index__nom-7.1.3::rules_rust++crate+crate_index__nom-7.1.3", + "rules_rust++crate+crate_index__cexpr-0.6.0::rules_rust++crate+crate_index__nom-7.1.3", + "rules_rust++crate+crate_index__cexpr-0.6.0::rules_rust++crate+crate_index__cexpr-0.6.0", + "rules_rust++crate+crate_index__glob-0.3.3::rules_rust++crate+crate_index__glob-0.3.3", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__glob-0.3.3", + "rules_rust++crate+crate_index__libc-0.2.180::rules_rust++crate+crate_index__libc-0.2.180", + "rules_rust++crate+crate_index__cfg-if-1.0.4::rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__libloading-0.8.9::rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__libloading-0.8.9::rules_rust++crate+crate_index__libloading-0.8.9", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__clang-sys-1.8.1", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__libc-0.2.180", + "rules_rust++crate+crate_index__clang-sys-1.8.1::rules_rust++crate+crate_index__libloading-0.8.9", + "rules_rust++crate+crate_index__either-1.15.0::rules_rust++crate+crate_index__either-1.15.0", + "rules_rust++crate+crate_index__itertools-0.13.0::rules_rust++crate+crate_index__either-1.15.0", + "rules_rust++crate+crate_index__itertools-0.13.0::rules_rust++crate+crate_index__itertools-0.13.0", + "rules_rust++crate+crate_index__log-0.4.29::rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__prettyplease-0.2.37::rules_rust++crate+crate_index__prettyplease-0.2.37", + "rules_rust++crate+crate_index__prettyplease-0.2.37::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__prettyplease-0.2.37::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__aho-corasick-1.1.4::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__aho-corasick-1.1.4::rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-syntax-0.8.9::rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-automata-0.4.14::rules_rust++crate+crate_index__regex-automata-0.4.14", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__regex-automata-0.4.14", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-1.12.3::rules_rust++crate+crate_index__regex-1.12.3", + "rules_rust++crate+crate_index__rustc-hash-2.1.1::rules_rust++crate+crate_index__rustc-hash-2.1.1", + "rules_rust++crate+crate_index__shlex-1.3.0::rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__bitflags-2.10.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__cexpr-0.6.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__clang-sys-1.8.1", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__itertools-0.13.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__prettyplease-0.2.37", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__regex-1.12.3", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__rustc-hash-2.1.1", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__bindgen-0.72.1::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__find-msvc-tools-0.1.9::rules_rust++crate+crate_index__find-msvc-tools-0.1.9", + "rules_rust++crate+crate_index__cc-1.2.55::rules_rust++crate+crate_index__find-msvc-tools-0.1.9", + "rules_rust++crate+crate_index__cc-1.2.55::rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__cc-1.2.55::rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__lazy_static-1.5.0::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__tiny-fn-0.1.9::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__enum-iterator-2.3.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0", + "rules_rust++crate+crate_index__pin-project-lite-0.2.16::rules_rust++crate+crate_index__pin-project-lite-0.2.16", + "rules_rust++crate+crate_index__once_cell-1.21.3::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-core-0.1.36::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-core-0.1.36::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31::rules_rust++crate+crate_index__tracing-attributes-0.1.31", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__pin-project-lite-0.2.16", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__tracing-0.1.44", + "rules_rust++crate+crate_index__tracing-0.1.44::rules_rust++crate+crate_index__tracing-attributes-0.1.31", + "rules_rust++crate+crate_index__nu-ansi-term-0.50.3::rules_rust++crate+crate_index__nu-ansi-term-0.50.3", + "rules_rust++crate+crate_index__itoa-1.0.17::rules_rust++crate+crate_index__itoa-1.0.17", + "rules_rust++crate+crate_index__zmij-1.0.19::rules_rust++crate+crate_index__zmij-1.0.19", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__itoa-1.0.17", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__serde_json-1.0.149", + "rules_rust++crate+crate_index__serde_json-1.0.149::rules_rust++crate+crate_index__zmij-1.0.19", + "rules_rust++crate+crate_index__sharded-slab-0.1.7::rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__sharded-slab-0.1.7::rules_rust++crate+crate_index__sharded-slab-0.1.7", + "rules_rust++crate+crate_index__smallvec-1.15.1::rules_rust++crate+crate_index__smallvec-1.15.1", + "rules_rust++crate+crate_index__thread_local-1.1.9::rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__thread_local-1.1.9::rules_rust++crate+crate_index__thread_local-1.1.9", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-log-0.2.0::rules_rust++crate+crate_index__tracing-log-0.2.0", + "rules_rust++crate+crate_index__tracing-serde-0.2.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__tracing-serde-0.2.0::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-serde-0.2.0::rules_rust++crate+crate_index__tracing-serde-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__nu-ansi-term-0.50.3", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__serde_json-1.0.149", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__sharded-slab-0.1.7", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__smallvec-1.15.1", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__thread_local-1.1.9", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-log-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-serde-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22::rules_rust++crate+crate_index__tracing-subscriber-0.3.22", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__tracing-0.1.44", + "score_kyron+::rules_rust++crate+crate_index__tracing-subscriber-0.3.22", + "score_kyron+::score_kyron+", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__byteorder-1.5.0::rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__cdr-0.2.4::rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__cdr-0.2.4::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__cdr-0.2.4::rules_rust++crate+crate_index__cdr-0.2.4", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18::rules_rust++crate+crate_index__thiserror-impl-2.0.18", + "rules_rust++crate+crate_index__thiserror-2.0.18::rules_rust++crate+crate_index__thiserror-2.0.18", + "rules_rust++crate+crate_index__thiserror-2.0.18::rules_rust++crate+crate_index__thiserror-impl-2.0.18", + "rules_rust++crate+crate_index__cobs-0.3.0::rules_rust++crate+crate_index__thiserror-2.0.18", + "rules_rust++crate+crate_index__cobs-0.3.0::rules_rust++crate+crate_index__cobs-0.3.0", + "rules_rust++crate+crate_index__hash32-0.2.1::rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__hash32-0.2.1::rules_rust++crate+crate_index__hash32-0.2.1", + "rules_rust++crate+crate_index__semver-1.0.27::rules_rust++crate+crate_index__semver-1.0.27", + "rules_rust++crate+crate_index__rustc_version-0.4.1::rules_rust++crate+crate_index__semver-1.0.27", + "rules_rust++crate+crate_index__rustc_version-0.4.1::rules_rust++crate+crate_index__rustc_version-0.4.1", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__rustc_version-0.4.1", + "rules_rust++crate+crate_index__stable_deref_trait-1.2.1::rules_rust++crate+crate_index__stable_deref_trait-1.2.1", + "rules_rust++crate+crate_index__scopeguard-1.2.0::rules_rust++crate+crate_index__scopeguard-1.2.0", + "rules_rust++crate+crate_index__lock_api-0.4.14::rules_rust++crate+crate_index__scopeguard-1.2.0", + "rules_rust++crate+crate_index__lock_api-0.4.14::rules_rust++crate+crate_index__lock_api-0.4.14", + "rules_rust++crate+crate_index__spin-0.9.8::rules_rust++crate+crate_index__lock_api-0.4.14", + "rules_rust++crate+crate_index__spin-0.9.8::rules_rust++crate+crate_index__spin-0.9.8", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__hash32-0.2.1", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__heapless-0.7.17", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__stable_deref_trait-1.2.1", + "rules_rust++crate+crate_index__heapless-0.7.17::rules_rust++crate+crate_index__spin-0.9.8", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__postcard-derive-0.2.2::rules_rust++crate+crate_index__postcard-derive-0.2.2", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__cobs-0.3.0", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__heapless-0.7.17", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__postcard-1.1.3", + "rules_rust++crate+crate_index__postcard-1.1.3::rules_rust++crate+crate_index__postcard-derive-0.2.2", + "rules_rust++crate+crate_index__sha1_smol-1.0.1::rules_rust++crate+crate_index__sha1_smol-1.0.1", + "rules_rust++crate+crate_index__serde_spanned-0.6.9::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__serde_spanned-0.6.9::rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml_datetime-0.6.11::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__toml_datetime-0.6.11::rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__equivalent-1.0.2::rules_rust++crate+crate_index__equivalent-1.0.2", + "rules_rust++crate+crate_index__hashbrown-0.16.1::rules_rust++crate+crate_index__hashbrown-0.16.1", + "rules_rust++crate+crate_index__indexmap-2.13.0::rules_rust++crate+crate_index__equivalent-1.0.2", + "rules_rust++crate+crate_index__indexmap-2.13.0::rules_rust++crate+crate_index__hashbrown-0.16.1", + "rules_rust++crate+crate_index__indexmap-2.13.0::rules_rust++crate+crate_index__indexmap-2.13.0", + "rules_rust++crate+crate_index__toml_write-0.1.2::rules_rust++crate+crate_index__toml_write-0.1.2", + "rules_rust++crate+crate_index__winnow-0.7.14::rules_rust++crate+crate_index__winnow-0.7.14", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__indexmap-2.13.0", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__toml_write-0.1.2", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__winnow-0.7.14", + "rules_rust++crate+crate_index__toml_edit-0.22.27::rules_rust++crate+crate_index__toml_edit-0.22.27", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__toml_edit-0.22.27", + "rules_rust++crate+crate_index__toml-0.8.23::rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__cdr-0.2.4", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__postcard-1.1.3", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__sha1_smol-1.0.1", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0::rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__proc-macro2-1.0.106", + "score_kyron+::rules_rust++crate+crate_index__quote-1.0.44", + "score_kyron+::rules_rust++crate+crate_index__syn-2.0.114", + "score_kyron+::rules_rust++crate+crate_index__libc-0.2.180", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0", + "score_kyron+::rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0", + "score_orchestrator+::score_kyron+", + "score_orchestrator+::rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0", + "score_orchestrator+::rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "score_orchestrator+::rules_rust++crate+crate_index__libc-0.2.180", + "score_orchestrator+::score_orchestrator+" + ], + "external_repos": [ + "rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0", + "rules_rust++crate+crate_index__serde-1.0.228", + "rules_rust++crate+crate_index__serde_core-1.0.228", + "rules_rust++crate+crate_index__proc-macro2-1.0.106", + "rules_rust++crate+crate_index__unicode-ident-1.0.22", + "rules_rust++crate+crate_index__quote-1.0.44", + "rules_rust++crate+crate_index__syn-2.0.114", + "rules_rust++crate+crate_index__serde_derive-1.0.228", + "rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", + "rules_rust++crate+crate_index__enum-iterator-derive-1.5.0", + "rules_rust++crate+crate_index__enum-iterator-2.3.0", + "rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0", + "rules_rust++crate+crate_index__bindgen-0.72.1", + "rules_rust++crate+crate_index__bitflags-2.10.0", + "rules_rust++crate+crate_index__memchr-2.8.0", + "rules_rust++crate+crate_index__minimal-lexical-0.2.1", + "rules_rust++crate+crate_index__nom-7.1.3", + "rules_rust++crate+crate_index__cexpr-0.6.0", + "rules_rust++crate+crate_index__glob-0.3.3", + "rules_rust++crate+crate_index__clang-sys-1.8.1", + "rules_rust++crate+crate_index__libc-0.2.180", + "rules_rust++crate+crate_index__cfg-if-1.0.4", + "rules_rust++crate+crate_index__libloading-0.8.9", + "rules_rust++crate+crate_index__either-1.15.0", + "rules_rust++crate+crate_index__itertools-0.13.0", + "rules_rust++crate+crate_index__log-0.4.29", + "rules_rust++crate+crate_index__prettyplease-0.2.37", + "rules_rust++crate+crate_index__aho-corasick-1.1.4", + "rules_rust++crate+crate_index__regex-syntax-0.8.9", + "rules_rust++crate+crate_index__regex-automata-0.4.14", + "rules_rust++crate+crate_index__regex-1.12.3", + "rules_rust++crate+crate_index__rustc-hash-2.1.1", + "rules_rust++crate+crate_index__shlex-1.3.0", + "rules_rust++crate+crate_index__find-msvc-tools-0.1.9", + "rules_rust++crate+crate_index__cc-1.2.55", + "rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__lazy_static-1.5.0", + "rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0", + "rules_rust++crate+crate_index__tiny-fn-0.1.9", + "rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0", + "rules_rust++crate+crate_index__pin-project-lite-0.2.16", + "rules_rust++crate+crate_index__once_cell-1.21.3", + "rules_rust++crate+crate_index__tracing-core-0.1.36", + "rules_rust++crate+crate_index__tracing-attributes-0.1.31", + "rules_rust++crate+crate_index__tracing-0.1.44", + "rules_rust++crate+crate_index__nu-ansi-term-0.50.3", + "rules_rust++crate+crate_index__itoa-1.0.17", + "rules_rust++crate+crate_index__serde_json-1.0.149", + "rules_rust++crate+crate_index__zmij-1.0.19", + "rules_rust++crate+crate_index__sharded-slab-0.1.7", + "rules_rust++crate+crate_index__smallvec-1.15.1", + "rules_rust++crate+crate_index__thread_local-1.1.9", + "rules_rust++crate+crate_index__tracing-log-0.2.0", + "rules_rust++crate+crate_index__tracing-serde-0.2.0", + "rules_rust++crate+crate_index__tracing-subscriber-0.3.22", + "score_kyron+", + "rules_rust++crate+crate_index__byteorder-1.5.0", + "rules_rust++crate+crate_index__cdr-0.2.4", + "rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0", + "rules_rust++crate+crate_index__thiserror-2.0.18", + "rules_rust++crate+crate_index__thiserror-impl-2.0.18", + "rules_rust++crate+crate_index__cobs-0.3.0", + "rules_rust++crate+crate_index__hash32-0.2.1", + "rules_rust++crate+crate_index__semver-1.0.27", + "rules_rust++crate+crate_index__rustc_version-0.4.1", + "rules_rust++crate+crate_index__heapless-0.7.17", + "rules_rust++crate+crate_index__stable_deref_trait-1.2.1", + "rules_rust++crate+crate_index__scopeguard-1.2.0", + "rules_rust++crate+crate_index__lock_api-0.4.14", + "rules_rust++crate+crate_index__spin-0.9.8", + "rules_rust++crate+crate_index__postcard-derive-0.2.2", + "rules_rust++crate+crate_index__postcard-1.1.3", + "rules_rust++crate+crate_index__sha1_smol-1.0.1", + "rules_rust++crate+crate_index__serde_spanned-0.6.9", + "rules_rust++crate+crate_index__toml_datetime-0.6.11", + "rules_rust++crate+crate_index__equivalent-1.0.2", + "rules_rust++crate+crate_index__hashbrown-0.16.1", + "rules_rust++crate+crate_index__indexmap-2.13.0", + "rules_rust++crate+crate_index__toml_write-0.1.2", + "rules_rust++crate+crate_index__winnow-0.7.14", + "rules_rust++crate+crate_index__toml_edit-0.22.27", + "rules_rust++crate+crate_index__toml-0.8.23", + "rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0", + "rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0", + "score_orchestrator+" + ], + "module_lockfiles": [ + "__FIXTURE_REFINT_LOCK__" + ], + "target_labels": [ + "@@score_orchestrator+//src/orchestration:liborchestration" + ], + "transitive_deps": [ + "@@rules_rust++crate+crate_index__iceoryx2-pal-concurrency-sync-qnx8-0.7.0//:iceoryx2_pal_concurrency_sync_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-elementary-traits-qnx8-0.7.0//:iceoryx2_bb_elementary_traits_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-elementary-qnx8-0.7.0//:iceoryx2_bb_elementary_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-log-qnx8-0.7.0//:iceoryx2_bb_log_qnx8", + "@@rules_rust++crate+crate_index__serde-1.0.228//:_bs", + "@@rules_rust++crate+crate_index__serde_core-1.0.228//:_bs", + "@@rules_rust++crate+crate_index__serde_core-1.0.228//:serde_core", + "@@rules_rust++crate+crate_index__proc-macro2-1.0.106//:_bs", + "@@rules_rust++crate+crate_index__unicode-ident-1.0.22//:unicode_ident", + "@@rules_rust++crate+crate_index__proc-macro2-1.0.106//:proc_macro2", + "@@rules_rust++crate+crate_index__quote-1.0.44//:_bs", + "@@rules_rust++crate+crate_index__quote-1.0.44//:quote", + "@@rules_rust++crate+crate_index__syn-2.0.114//:syn", + "@@rules_rust++crate+crate_index__serde_derive-1.0.228//:serde_derive", + "@@rules_rust++crate+crate_index__serde-1.0.228//:serde", + "@@rules_rust++crate+crate_index__iceoryx2-bb-derive-macros-qnx8-0.7.0//:iceoryx2_bb_derive_macros_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-container-qnx8-0.7.0//:iceoryx2_bb_container_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0//:iceoryx2_bb_lock_free_qnx8", + "@@rules_rust++crate+crate_index__enum-iterator-derive-1.5.0//:enum_iterator_derive", + "@@rules_rust++crate+crate_index__enum-iterator-2.3.0//:enum_iterator", + "@@rules_rust++crate+crate_index__iceoryx2-pal-configuration-qnx8-0.7.0//:iceoryx2_pal_configuration_qnx8", + "@@rules_rust++crate+crate_index__bindgen-0.72.1//:_bs", + "@@rules_rust++crate+crate_index__bitflags-2.10.0//:bitflags", + "@@rules_rust++crate+crate_index__memchr-2.8.0//:memchr", + "@@rules_rust++crate+crate_index__minimal-lexical-0.2.1//:minimal_lexical", + "@@rules_rust++crate+crate_index__nom-7.1.3//:nom", + "@@rules_rust++crate+crate_index__cexpr-0.6.0//:cexpr", + "@@rules_rust++crate+crate_index__glob-0.3.3//:glob", + "@@rules_rust++crate+crate_index__clang-sys-1.8.1//:_bs", + "@@rules_rust++crate+crate_index__libc-0.2.180//:_bs", + "@@rules_rust++crate+crate_index__libc-0.2.180//:libc", + "@@rules_rust++crate+crate_index__cfg-if-1.0.4//:cfg_if", + "@@rules_rust++crate+crate_index__libloading-0.8.9//:libloading", + "@@rules_rust++crate+crate_index__clang-sys-1.8.1//:clang_sys", + "@@rules_rust++crate+crate_index__either-1.15.0//:either", + "@@rules_rust++crate+crate_index__itertools-0.13.0//:itertools", + "@@rules_rust++crate+crate_index__log-0.4.29//:log", + "@@rules_rust++crate+crate_index__prettyplease-0.2.37//:_bs", + "@@rules_rust++crate+crate_index__prettyplease-0.2.37//:prettyplease", + "@@rules_rust++crate+crate_index__aho-corasick-1.1.4//:aho_corasick", + "@@rules_rust++crate+crate_index__regex-syntax-0.8.9//:regex_syntax", + "@@rules_rust++crate+crate_index__regex-automata-0.4.14//:regex_automata", + "@@rules_rust++crate+crate_index__regex-1.12.3//:regex", + "@@rules_rust++crate+crate_index__rustc-hash-2.1.1//:rustc_hash", + "@@rules_rust++crate+crate_index__shlex-1.3.0//:shlex", + "@@rules_rust++crate+crate_index__bindgen-0.72.1//:bindgen", + "@@rules_rust++crate+crate_index__find-msvc-tools-0.1.9//:find_msvc_tools", + "@@rules_rust++crate+crate_index__cc-1.2.55//:cc", + "@@rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0//:_bs", + "@@rules_rust++crate+crate_index__lazy_static-1.5.0//:lazy_static", + "@@rules_rust++crate+crate_index__iceoryx2-pal-posix-qnx8-0.7.0//:iceoryx2_pal_posix_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-system-types-qnx8-0.7.0//:iceoryx2_bb_system_types_qnx8", + "@@rules_rust++crate+crate_index__tiny-fn-0.1.9//:tiny_fn", + "@@rules_rust++crate+crate_index__iceoryx2-bb-posix-qnx8-0.7.0//:iceoryx2_bb_posix_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-memory-qnx8-0.7.0//:iceoryx2_bb_memory_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-pal-testing-qnx8-0.7.0//:iceoryx2_pal_testing_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-testing-qnx8-0.7.0//:iceoryx2_bb_testing_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-threadsafe-qnx8-0.7.0//:iceoryx2_bb_threadsafe_qnx8", + "@@rules_rust++crate+crate_index__pin-project-lite-0.2.16//:pin_project_lite", + "@@rules_rust++crate+crate_index__once_cell-1.21.3//:once_cell", + "@@rules_rust++crate+crate_index__tracing-core-0.1.36//:tracing_core", + "@@rules_rust++crate+crate_index__tracing-attributes-0.1.31//:tracing_attributes", + "@@rules_rust++crate+crate_index__tracing-0.1.44//:tracing", + "@@rules_rust++crate+crate_index__nu-ansi-term-0.50.3//:nu_ansi_term", + "@@rules_rust++crate+crate_index__itoa-1.0.17//:itoa", + "@@rules_rust++crate+crate_index__serde_json-1.0.149//:_bs", + "@@rules_rust++crate+crate_index__zmij-1.0.19//:_bs", + "@@rules_rust++crate+crate_index__zmij-1.0.19//:zmij", + "@@rules_rust++crate+crate_index__serde_json-1.0.149//:serde_json", + "@@rules_rust++crate+crate_index__sharded-slab-0.1.7//:sharded_slab", + "@@rules_rust++crate+crate_index__smallvec-1.15.1//:smallvec", + "@@rules_rust++crate+crate_index__thread_local-1.1.9//:thread_local", + "@@rules_rust++crate+crate_index__tracing-log-0.2.0//:tracing_log", + "@@rules_rust++crate+crate_index__tracing-serde-0.2.0//:tracing_serde", + "@@rules_rust++crate+crate_index__tracing-subscriber-0.3.22//:tracing_subscriber", + "@@score_kyron+//src/kyron-foundation:libkyron_foundation", + "@@rules_rust++crate+crate_index__byteorder-1.5.0//:byteorder", + "@@rules_rust++crate+crate_index__cdr-0.2.4//:cdr", + "@@rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0//:_bs", + "@@rules_rust++crate+crate_index__iceoryx2-pal-os-api-qnx8-0.7.0//:iceoryx2_pal_os_api_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-bb-linux-qnx8-0.7.0//:iceoryx2_bb_linux_qnx8", + "@@rules_rust++crate+crate_index__thiserror-2.0.18//:_bs", + "@@rules_rust++crate+crate_index__thiserror-impl-2.0.18//:thiserror_impl", + "@@rules_rust++crate+crate_index__thiserror-2.0.18//:thiserror", + "@@rules_rust++crate+crate_index__cobs-0.3.0//:cobs", + "@@rules_rust++crate+crate_index__hash32-0.2.1//:hash32", + "@@rules_rust++crate+crate_index__semver-1.0.27//:semver", + "@@rules_rust++crate+crate_index__rustc_version-0.4.1//:rustc_version", + "@@rules_rust++crate+crate_index__heapless-0.7.17//:_bs", + "@@rules_rust++crate+crate_index__stable_deref_trait-1.2.1//:stable_deref_trait", + "@@rules_rust++crate+crate_index__scopeguard-1.2.0//:scopeguard", + "@@rules_rust++crate+crate_index__lock_api-0.4.14//:lock_api", + "@@rules_rust++crate+crate_index__spin-0.9.8//:spin", + "@@rules_rust++crate+crate_index__heapless-0.7.17//:heapless", + "@@rules_rust++crate+crate_index__postcard-derive-0.2.2//:postcard_derive", + "@@rules_rust++crate+crate_index__postcard-1.1.3//:postcard", + "@@rules_rust++crate+crate_index__sha1_smol-1.0.1//:sha1_smol", + "@@rules_rust++crate+crate_index__serde_spanned-0.6.9//:serde_spanned", + "@@rules_rust++crate+crate_index__toml_datetime-0.6.11//:toml_datetime", + "@@rules_rust++crate+crate_index__equivalent-1.0.2//:equivalent", + "@@rules_rust++crate+crate_index__hashbrown-0.16.1//:hashbrown", + "@@rules_rust++crate+crate_index__indexmap-2.13.0//:indexmap", + "@@rules_rust++crate+crate_index__toml_write-0.1.2//:toml_write", + "@@rules_rust++crate+crate_index__winnow-0.7.14//:winnow", + "@@rules_rust++crate+crate_index__toml_edit-0.22.27//:toml_edit", + "@@rules_rust++crate+crate_index__toml-0.8.23//:toml", + "@@rules_rust++crate+crate_index__iceoryx2-cal-qnx8-0.7.0//:iceoryx2_cal_qnx8", + "@@rules_rust++crate+crate_index__iceoryx2-qnx8-0.7.0//:iceoryx2_qnx8", + "@@score_kyron+//src/kyron-macros:runtime_macros", + "@@score_kyron+//src/kyron:libkyron", + "@@score_orchestrator+//src/orchestration:liborchestration" + ] +} \ No newline at end of file diff --git a/sbom/tests/fixtures/reference_integration.MODULE.bazel.lock b/sbom/tests/fixtures/reference_integration.MODULE.bazel.lock new file mode 100644 index 0000000..b3931fd --- /dev/null +++ b/sbom/tests/fixtures/reference_integration.MODULE.bazel.lock @@ -0,0 +1,47 @@ +{ + "lockFileVersion": 18, + "registryFileHashes": { + "https://bcr.bazel.build/modules/boost.assert/1.87.0/MODULE.bazel": "8a950da6e19dd6d6427b95b1cfe1d2fc86eb598f6fb753345d925eb92d74a821", + "https://bcr.bazel.build/modules/boost.assert/1.87.0/source.json": "085a7c247d90bb4f8f5c3cc7eec1994f3d46d2a26af1947c85ff6f8ddd86ce59", + "https://bcr.bazel.build/modules/boost.config/1.87.0/MODULE.bazel": "01da6517cb341d5abea9be54337bf85ba50132b3690a621f09be5890ecd12796", + "https://bcr.bazel.build/modules/boost.config/1.87.0/source.json": "9a00cd7c9afd6f58b349a806791c7aab040b1cd1a35464e59b79be724261a5ae", + "https://bcr.bazel.build/modules/boost.core/1.87.0/MODULE.bazel": "33517eb46bb16f4b4f4a1bde61fe8b2475f45b5574bcd9f04c85f4bf3afe30d2", + "https://bcr.bazel.build/modules/boost.core/1.87.0/source.json": "9e2920b45c833a6a2cd42b16e17a5b97201bb73698a3902936cc90c1aa3de667", + "https://bcr.bazel.build/modules/boost.describe/1.87.0/MODULE.bazel": "638752de4ad46348a7e3ac72910b699fde5a3c71d42fc69047d2aa8825411646", + "https://bcr.bazel.build/modules/boost.describe/1.87.0/source.json": "c260a5c38806ea88ce50b2d070484ae634941d2be7a1ddb0f959923ca8ef10d4", + "https://bcr.bazel.build/modules/boost.mp11/1.87.0/MODULE.bazel": "af9644d2b668f3e014ac335a8a84ac74d9cb263454cd07cd5b84ce206f5dd81f", + "https://bcr.bazel.build/modules/boost.mp11/1.87.0/source.json": "fb17f9453d8e62a6425efccc3a827f29ddb4577aaffee68393c86bd21e517187", + "https://bcr.bazel.build/modules/boost.preprocessor/1.87.0/MODULE.bazel": "fdbcce15c585de47e4a5e9f6e2b9aa87f690a87e205eded400c5590f7e64535a", + "https://bcr.bazel.build/modules/boost.preprocessor/1.87.0/source.json": "ef9b9006890126f5880bb51ccbe8a97a95baf21606e2f30acdb1c30c0dd79758", + "https://bcr.bazel.build/modules/boost.static_assert/1.87.0/MODULE.bazel": "06e7170d6e4ec08d6a4a83d1f0bce3f7fdacd89e4dcaa93d508f971e4e363d4f", + "https://bcr.bazel.build/modules/boost.static_assert/1.87.0/source.json": "d5b3f81fba6382b83885ff2dfaef07a5788323ed82d472d2fd629fcbba04ec7a", + "https://bcr.bazel.build/modules/boost.type_traits/1.87.0/MODULE.bazel": "8d2d44e992e85a59b6bd13b145ae27736d932a29e5aec743a0cfd014af5aee27", + "https://bcr.bazel.build/modules/boost.type_traits/1.87.0/source.json": "fd7434b8e36d19a1c8e9349e041ceaf19d0b98e90b6d4c7b86249735907cea34", + "https://bcr.bazel.build/modules/nlohmann_json/3.11.3/MODULE.bazel": "87023db2f55fc3a9949c7b08dc711fae4d4be339a80a99d04453c4bb3998eefc", + "https://bcr.bazel.build/modules/nlohmann_json/3.12.0/MODULE.bazel": "21f19a4479e994c1546cf6f10c65d2fa464cd95f49eebad98dc5bac49c801dab", + "https://bcr.bazel.build/modules/nlohmann_json/3.12.0/source.json": "6bf17b358c467effad70c02ab43e2d65939d740f667157397f583435909cfae1", + "https://bcr.bazel.build/modules/nlohmann_json/3.6.1/MODULE.bazel": "6f7b417dcc794d9add9e556673ad25cb3ba835224290f4f848f8e2db1e1fca74", + "https://bcr.bazel.build/modules/rules_rust/0.45.1/MODULE.bazel": "a69d0db3a958fab2c6520961e1b2287afcc8b36690fd31bbc4f6f7391397150d", + "https://bcr.bazel.build/modules/rules_rust/0.51.0/MODULE.bazel": "2b6d1617ac8503bfdcc0e4520c20539d4bba3a691100bee01afe193ceb0310f9", + "https://bcr.bazel.build/modules/rules_rust/0.61.0/MODULE.bazel": "0318a95777b9114c8740f34b60d6d68f9cfef61e2f4b52424ca626213d33787b", + "https://bcr.bazel.build/modules/rules_rust/0.67.0/MODULE.bazel": "87c3816c4321352dcfd9e9e26b58e84efc5b21351ae3ef8fb5d0d57bde7237f5", + "https://bcr.bazel.build/modules/rules_rust/0.67.0/source.json": "a8ef4d3be30eb98e060cad9e5875a55b603195487f76e01b619b51a1df4641cc", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.assert/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.config/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.core/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.describe/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.mp11/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.preprocessor/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.static_assert/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/boost.type_traits/1.87.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/nlohmann_json/3.11.3/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/nlohmann_json/3.12.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/nlohmann_json/3.6.1/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/rules_rust/0.45.1/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/rules_rust/0.51.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/rules_rust/0.61.0/MODULE.bazel": "not found", + "https://raw.githubusercontent.com/eclipse-score/bazel_registry/main/modules/rules_rust/0.67.0/MODULE.bazel": "not found" + }, + "selectedYankedVersions": {}, + "moduleExtensions": {} +} \ No newline at end of file diff --git a/sbom/tests/fixtures/sbom_metadata.json b/sbom/tests/fixtures/sbom_metadata.json new file mode 100755 index 0000000..530131c --- /dev/null +++ b/sbom/tests/fixtures/sbom_metadata.json @@ -0,0 +1 @@ +{"crates":{},"git_repositories":{},"http_archives":{},"licenses":{},"modules":{"score_baselibs":{"purl":"pkg:generic/score_baselibs@0.2.3","version":"0.2.3"},"score_communication":{"purl":"pkg:generic/score_communication@0.1.2","version":"0.1.2"},"score_feo":{"purl":"pkg:generic/score_feo@1.0.2","version":"1.0.2"},"score_kyron":{"purl":"pkg:generic/score_kyron@0.0.4","version":"0.0.4"},"score_logging":{"purl":"pkg:generic/score_logging@0.0.3","version":"0.0.3"},"score_orchestrator":{"purl":"pkg:generic/score_orchestrator@0.0.4","version":"0.0.4"},"score_persistency":{"purl":"pkg:generic/score_persistency@0.2.2","version":"0.2.2"},"score_reference_integration":{"purl":"pkg:generic/score_reference_integration@0.5.0-beta","version":"0.5.0-beta"},"score_test_scenarios":{"purl":"pkg:generic/score_test_scenarios@0.3.2","version":"0.3.2"},"score_tooling":{"purl":"pkg:generic/score_tooling@0.0.0","version":"0.0.0"}}} \ No newline at end of file diff --git a/sbom/tests/test_bcr_known_licenses.py b/sbom/tests/test_bcr_known_licenses.py index c5baba6..a7e72e4 100644 --- a/sbom/tests/test_bcr_known_licenses.py +++ b/sbom/tests/test_bcr_known_licenses.py @@ -1,8 +1,29 @@ """Tests for BCR known-license resolution in sbom_generator. -These tests verify that C++ modules from the Bazel Central Registry -(e.g. boost.*) receive correct license data even when cdxgen and -lockfile parsing cannot provide it. +What this file tests +--------------------- +BCR_KNOWN_LICENSES table + - Every entry carries a non-empty license field. + - Spot-check: boost entry has BSL-1.0. + +apply_known_licenses() — priority chain (highest to lowest) + - Priority 1: module already has a license → nothing is overwritten. + - Priority 2: exact-name entry in metadata["licenses"] (user override) → wins. + - Priority 3: parent-level entry in metadata["licenses"] + (e.g. "boost" covers "boost.config" and "boost.container"). + - Priority 4: exact match in BCR_KNOWN_LICENSES (e.g. "abseil-cpp", "zlib"). + - Priority 5: parent match in BCR_KNOWN_LICENSES + (e.g. "boost" entry covers all "boost.*" sub-modules). + - Existing supplier field is preserved even when a new license is filled in. + - Empty metadata and missing "licenses" key do not raise. + +resolve_component() integration + - After apply_known_licenses() the license field flows through to the + component dict returned by resolve_component(). + +Bazel target : //sbom/tests:test_bcr_known_licenses +Run : bazel test //sbom/tests:test_bcr_known_licenses + pytest sbom/tests/test_bcr_known_licenses.py -v """ import unittest diff --git a/sbom/tests/test_cpp_enrich_checksum.py b/sbom/tests/test_cpp_enrich_checksum.py index 37187ee..ed54253 100644 --- a/sbom/tests/test_cpp_enrich_checksum.py +++ b/sbom/tests/test_cpp_enrich_checksum.py @@ -1,8 +1,26 @@ -"""Tests for enrich_components_from_cpp_cache and the no-manual-fallback rule. - -Requirement: All SBOM fields must originate from automated sources. -No manually-curated fallback values are permitted for any field — -not checksum, not license, not supplier, not version, not PURL. +"""Tests for enrich_components_from_cpp_cache() and the no-manual-curation rule. + +What this file tests +--------------------- +enrich_components_from_cpp_cache() — field propagation + - SHA-256 checksum is copied from cache to a component that has none. + - An existing checksum on the component is never overwritten. + - A cache entry with no checksum leaves the component's checksum empty. + - Components with no matching cache entry are left unchanged. + - Normalised-name matching: nlohmann_json (underscore) matches + nlohmann-json (hyphen) cache entry. + - Parent-name matching: boost.config component matches a "boost" cache entry. + +No-manual-curation rule (on-disk cpp_metadata.json) + - cpp_metadata.json must be empty ({}); any entry signals a policy violation. + All C++ metadata must be produced by generate_cpp_metadata_cache.py from + cdxgen output, never written by hand. + - Belt-and-suspenders: even if the file is non-empty, no SBOM field + (checksum, license, supplier, version, purl, description) may appear. + +Bazel target : //sbom/tests:test_cpp_enrich_checksum +Run : bazel test //sbom/tests:test_cpp_enrich_checksum + pytest sbom/tests/test_cpp_enrich_checksum.py -v """ import json diff --git a/sbom/tests/test_cyclonedx_formatter.py b/sbom/tests/test_cyclonedx_formatter.py index b7eaa80..35403d5 100644 --- a/sbom/tests/test_cyclonedx_formatter.py +++ b/sbom/tests/test_cyclonedx_formatter.py @@ -1,4 +1,41 @@ -"""Tests for CycloneDX 1.6 formatter.""" +"""Tests for the CycloneDX 1.6 JSON formatter. + +What this file tests +--------------------- +Document structure + - bomFormat = "CycloneDX", specVersion = "1.6". + - $schema URL uses https://. + - serialNumber starts with "urn:uuid:". + - metadata: timestamp, component (name/version/type), tools present. + +Component fields + - name, version, type ("library"), purl, bom-ref all set correctly. + - bom-refs are unique across all components. + +License encoding (CycloneDX spec requirement) + - Single SPDX identifiers → {"license": {"id": …}}. + - Compound expressions (OR / AND) → {"expression": …}. + - Lowercase operators ("or", "and") from dash-license-scan are normalised + to uppercase before the expression-vs-id routing decision. + - GPL-2.0-or-later is not mangled (hyphen-delimited "or" untouched). + +Dependency graph + - Root component depends on every listed component. + - Empty component list → one root dependency entry with empty dependsOn. + +External references + - Crates with source = "crates.io" get a distribution externalReference. + +_normalize_spdx_license() unit tests + - or → OR, and → AND, with → WITH. + - Already-uppercase expressions unchanged. + - GPL-2.0-or-later unchanged. + - End-to-end: lowercase "or" in component input → "expression" field in output. + +Bazel target : //sbom/tests:test_cyclonedx_formatter +Run : bazel test //sbom/tests:test_cyclonedx_formatter + pytest sbom/tests/test_cyclonedx_formatter.py -v +""" import unittest from datetime import datetime, timezone diff --git a/sbom/tests/test_generate_cpp_metadata_cache.py b/sbom/tests/test_generate_cpp_metadata_cache.py new file mode 100644 index 0000000..7221258 --- /dev/null +++ b/sbom/tests/test_generate_cpp_metadata_cache.py @@ -0,0 +1,393 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Tests for generate_cpp_metadata_cache.py — convert_cdxgen_to_cache(). + +What this file tests +--------------------- +Basic extraction + - name and version are extracted; version defaults to "unknown" when absent. + - Components with no name are silently skipped. + - Multiple components all appear in the cache. + - Empty components list returns an empty dict. + +License field extraction + - SPDX ID from license.id. + - Fallback to license.name when id is absent. + - Top-level expression field for compound SPDX expressions. + - license.id takes priority over license.name in the same entry. + - Compound AND expressions are preserved verbatim. + - Components with no license produce no "license" key in the cache entry. + +Supplier extraction + - From supplier.name. + - Fallback to publisher field when supplier is absent. + - Components with neither produce no "supplier" key. + +PURL and URL + - purl is copied directly from the component. + - No purl field → no "purl" key in the cache entry. + - URL extracted from externalReferences type = website, vcs, or distribution + (first matching entry wins). + - No externalReferences → no "url" key. + +Description + - description is extracted when present. + - No description → no "description" key. + +Bazel target : //sbom/tests:test_generate_cpp_metadata_cache +Run : bazel test //sbom/tests:test_generate_cpp_metadata_cache + pytest sbom/tests/test_generate_cpp_metadata_cache.py -v +""" + +import json +import os +import tempfile +import unittest + +from sbom.scripts.generate_cpp_metadata_cache import convert_cdxgen_to_cache + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_cdx_doc(components: list) -> dict: + """Build a minimal valid CycloneDX document wrapping the given components.""" + return { + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "components": components, + } + + +def _write_cdx(components: list) -> tuple[str, str]: + """Write a CycloneDX document to a temp file; return (fd_path, cleanup_path).""" + data = _make_cdx_doc(components) + fd, path = tempfile.mkstemp(suffix=".cdx.json") + with os.fdopen(fd, "w") as f: + json.dump(data, f) + return path + + +# --------------------------------------------------------------------------- +# Tests +# --------------------------------------------------------------------------- + + +class TestConvertCdxgenToCacheBasic(unittest.TestCase): + """Basic field extraction from a cdxgen CycloneDX document.""" + + def setUp(self): + self._cleanup: list[str] = [] + + def tearDown(self): + for path in self._cleanup: + try: + os.unlink(path) + except FileNotFoundError: + pass + + def _convert(self, components: list) -> dict: + path = _write_cdx(components) + self._cleanup.append(path) + return convert_cdxgen_to_cache(path) + + def test_basic_name_and_version(self): + result = self._convert([{"name": "nlohmann-json", "version": "3.11.3"}]) + self.assertIn("nlohmann-json", result) + self.assertEqual(result["nlohmann-json"]["version"], "3.11.3") + + def test_version_defaults_to_unknown(self): + result = self._convert([{"name": "some-lib"}]) + self.assertEqual(result["some-lib"]["version"], "unknown") + + def test_multiple_components(self): + result = self._convert( + [ + {"name": "lib_a", "version": "1.0"}, + {"name": "lib_b", "version": "2.0"}, + {"name": "lib_c", "version": "3.0"}, + ] + ) + self.assertEqual(len(result), 3) + self.assertIn("lib_a", result) + self.assertIn("lib_b", result) + self.assertIn("lib_c", result) + + def test_entry_with_no_name_skipped(self): + """Components without a name must not appear in the cache.""" + result = self._convert( + [{"version": "1.0", "licenses": [{"license": {"id": "MIT"}}]}] + ) + self.assertEqual(result, {}) + + def test_empty_components_list(self): + result = self._convert([]) + self.assertEqual(result, {}) + + +class TestConvertCdxgenToCacheLicense(unittest.TestCase): + """License field extraction — license.id, license.name, and expression.""" + + def setUp(self): + self._cleanup: list[str] = [] + + def tearDown(self): + for path in self._cleanup: + try: + os.unlink(path) + except FileNotFoundError: + pass + + def _convert(self, components: list) -> dict: + path = _write_cdx(components) + self._cleanup.append(path) + return convert_cdxgen_to_cache(path) + + def test_license_from_license_id(self): + result = self._convert( + [ + { + "name": "zlib", + "version": "1.3.1", + "licenses": [{"license": {"id": "Zlib"}}], + } + ] + ) + self.assertEqual(result["zlib"]["license"], "Zlib") + + def test_license_from_license_name_fallback(self): + """When license.id is absent, license.name is used as the identifier.""" + result = self._convert( + [ + { + "name": "curl", + "version": "8.0.0", + "licenses": [{"license": {"name": "curl/libcurl"}}], + } + ] + ) + self.assertEqual(result["curl"]["license"], "curl/libcurl") + + def test_license_from_expression(self): + result = self._convert( + [ + { + "name": "openssl", + "version": "3.0.0", + "licenses": [{"expression": "Apache-2.0 OR OpenSSL"}], + } + ] + ) + self.assertEqual(result["openssl"]["license"], "Apache-2.0 OR OpenSSL") + + def test_license_id_takes_priority_over_name(self): + """license.id is checked before license.name.""" + result = self._convert( + [ + { + "name": "mylib", + "version": "1.0", + "licenses": [{"license": {"id": "MIT", "name": "MIT License"}}], + } + ] + ) + self.assertEqual(result["mylib"]["license"], "MIT") + + def test_no_license_field_absent_from_cache(self): + result = self._convert([{"name": "no-license-lib", "version": "1.0"}]) + self.assertNotIn("license", result["no-license-lib"]) + + def test_compound_spdx_expression(self): + result = self._convert( + [ + { + "name": "dual-licensed", + "version": "1.0", + "licenses": [{"expression": "Apache-2.0 AND MIT"}], + } + ] + ) + self.assertEqual(result["dual-licensed"]["license"], "Apache-2.0 AND MIT") + + +class TestConvertCdxgenToCacheSupplier(unittest.TestCase): + """Supplier extraction from supplier.name and publisher fallback.""" + + def setUp(self): + self._cleanup: list[str] = [] + + def tearDown(self): + for path in self._cleanup: + try: + os.unlink(path) + except FileNotFoundError: + pass + + def _convert(self, components: list) -> dict: + path = _write_cdx(components) + self._cleanup.append(path) + return convert_cdxgen_to_cache(path) + + def test_supplier_from_supplier_name(self): + result = self._convert( + [ + { + "name": "abseil-cpp", + "version": "20230802.0", + "supplier": {"name": "Google LLC"}, + } + ] + ) + self.assertEqual(result["abseil-cpp"]["supplier"], "Google LLC") + + def test_supplier_from_publisher_fallback(self): + """If supplier.name is absent, publisher field is used as the supplier.""" + result = self._convert( + [ + { + "name": "flatbuffers", + "version": "25.2.10", + "publisher": "Google", + } + ] + ) + self.assertEqual(result["flatbuffers"]["supplier"], "Google") + + def test_no_supplier_field_absent_from_cache(self): + result = self._convert([{"name": "anon-lib", "version": "1.0"}]) + self.assertNotIn("supplier", result["anon-lib"]) + + +class TestConvertCdxgenToCachePurlAndUrl(unittest.TestCase): + """PURL and external URL extraction.""" + + def setUp(self): + self._cleanup: list[str] = [] + + def tearDown(self): + for path in self._cleanup: + try: + os.unlink(path) + except FileNotFoundError: + pass + + def _convert(self, components: list) -> dict: + path = _write_cdx(components) + self._cleanup.append(path) + return convert_cdxgen_to_cache(path) + + def test_purl_extracted(self): + result = self._convert( + [ + { + "name": "nlohmann-json", + "version": "3.11.3", + "purl": "pkg:generic/nlohmann-json@3.11.3", + } + ] + ) + self.assertEqual( + result["nlohmann-json"]["purl"], "pkg:generic/nlohmann-json@3.11.3" + ) + + def test_no_purl_field_absent_from_cache(self): + result = self._convert([{"name": "no-purl-lib", "version": "1.0"}]) + self.assertNotIn("purl", result["no-purl-lib"]) + + def test_url_from_website_external_reference(self): + result = self._convert( + [ + { + "name": "zlib", + "version": "1.3.1", + "externalReferences": [ + {"type": "website", "url": "https://zlib.net"}, + ], + } + ] + ) + self.assertEqual(result["zlib"]["url"], "https://zlib.net") + + def test_url_from_vcs_external_reference(self): + result = self._convert( + [ + { + "name": "my-lib", + "version": "1.0", + "externalReferences": [ + {"type": "vcs", "url": "https://github.com/example/my-lib"}, + ], + } + ] + ) + self.assertEqual(result["my-lib"]["url"], "https://github.com/example/my-lib") + + def test_url_from_distribution_external_reference(self): + result = self._convert( + [ + { + "name": "dist-lib", + "version": "1.0", + "externalReferences": [ + { + "type": "distribution", + "url": "https://releases.example.com/dist-lib", + }, + ], + } + ] + ) + self.assertEqual( + result["dist-lib"]["url"], "https://releases.example.com/dist-lib" + ) + + def test_no_url_field_absent_when_no_external_refs(self): + result = self._convert([{"name": "local-lib", "version": "1.0"}]) + self.assertNotIn("url", result["local-lib"]) + + +class TestConvertCdxgenToCacheDescription(unittest.TestCase): + """Description field extraction.""" + + def setUp(self): + self._cleanup: list[str] = [] + + def tearDown(self): + for path in self._cleanup: + try: + os.unlink(path) + except FileNotFoundError: + pass + + def _convert(self, components: list) -> dict: + path = _write_cdx(components) + self._cleanup.append(path) + return convert_cdxgen_to_cache(path) + + def test_description_extracted(self): + result = self._convert( + [ + { + "name": "boost", + "version": "1.87.0", + "description": "Boost C++ Libraries", + } + ] + ) + self.assertEqual(result["boost"]["description"], "Boost C++ Libraries") + + def test_no_description_field_absent(self): + result = self._convert([{"name": "lib-no-desc", "version": "1.0"}]) + self.assertNotIn("description", result["lib-no-desc"]) diff --git a/sbom/tests/test_generate_crates_metadata_cache.py b/sbom/tests/test_generate_crates_metadata_cache.py index c84f3d9..6942e33 100644 --- a/sbom/tests/test_generate_crates_metadata_cache.py +++ b/sbom/tests/test_generate_crates_metadata_cache.py @@ -1,7 +1,37 @@ """Tests for generate_crates_metadata_cache.py. -These tests verify the core parsing and data transformation functions -used to extract Rust crate license metadata via dash-license-scan. +What this file tests +--------------------- +parse_dash_summary() + - Standard "crate/cratesio/-/NAME/VERSION, SPDX, STATUS, SOURCE" lines + produce correct crate → license mappings. + - Lines with an empty license expression are excluded. + - Compound SPDX expressions (AND / OR / LicenseRef-*) are preserved verbatim. + - Malformed lines (fewer than 4 comma-separated fields) are silently skipped. + - Non-crate entries (pypi, npm) are ignored. + - Empty file returns an empty dict. + - Restricted crates still yield their license expression. + +parse_module_bazel_lock() + - Crate name and version are extracted from generatedRepoSpecs keys + (format: crate_index__NAME-VERSION). + - sha256 checksum is extracted from the attributes dict. + - The bare "crate_index" meta-repo entry is not treated as a real crate. + - Complex names (iceoryx2-bb-lock-free-qnx8-0.7.0) are parsed correctly. + - Lockfiles without a crate extension return an empty dict. + - Completely empty lockfiles return an empty dict. + +generate_synthetic_cargo_lock() + - Produces valid TOML with [[package]] entries and crates.io-index source. + - Entries are sorted alphabetically by crate name. + +TestEndToEndLicenseExtraction + - parse_dash_summary() correctly round-trips JAR-style CSV output. + - Full pipeline verified for a representative set of score_kyron crates. + +Bazel target : //sbom/tests:test_generate_crates_metadata_cache +Run : bazel test //sbom/tests:test_generate_crates_metadata_cache + pytest sbom/tests/test_generate_crates_metadata_cache.py -v """ import json @@ -9,17 +39,7 @@ import tempfile import unittest -# The script lives under sbom/scripts/ and is not a regular Python package. -# Import functions by adding the scripts directory to sys.path. -import sys - -sys.path.insert( - 0, - os.path.join(os.path.dirname(__file__), "..", "scripts"), -) - -from generate_crates_metadata_cache import ( - build_dash_coordinates, +from sbom.scripts.generate_crates_metadata_cache import ( generate_synthetic_cargo_lock, parse_dash_summary, parse_module_bazel_lock, @@ -124,51 +144,6 @@ def test_licenseref_expression(self): self.assertEqual(result["ring"], "LicenseRef-ring") -class TestBuildDashCoordinates(unittest.TestCase): - """Tests for build_dash_coordinates — coordinate string construction.""" - - def test_basic_coordinate_building(self): - """Crate data produces correct coordinate strings.""" - crates = { - "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc123"}, - "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def456"}, - } - coords = build_dash_coordinates(crates) - - self.assertEqual(len(coords), 2) - self.assertIn("crate/cratesio/-/serde/1.0.228", coords) - self.assertIn("crate/cratesio/-/tokio/1.10.0", coords) - - def test_empty_crates(self): - """Empty crates dict produces empty coordinates list.""" - coords = build_dash_coordinates({}) - self.assertEqual(coords, []) - - def test_coordinates_are_sorted(self): - """Coordinates are sorted by crate name.""" - crates = { - "z-crate": {"name": "z-crate", "version": "1.0.0", "checksum": ""}, - "a-crate": {"name": "a-crate", "version": "2.0.0", "checksum": ""}, - } - coords = build_dash_coordinates(crates) - - self.assertEqual(coords[0], "crate/cratesio/-/a-crate/2.0.0") - self.assertEqual(coords[1], "crate/cratesio/-/z-crate/1.0.0") - - def test_hyphenated_crate_name(self): - """Crate names with hyphens are preserved in coordinates.""" - crates = { - "iceoryx2-bb-lock-free": { - "name": "iceoryx2-bb-lock-free", - "version": "0.7.0", - "checksum": "", - }, - } - coords = build_dash_coordinates(crates) - - self.assertEqual(coords[0], "crate/cratesio/-/iceoryx2-bb-lock-free/0.7.0") - - class TestParseModuleBazelLock(unittest.TestCase): """Tests for parse_module_bazel_lock — MODULE.bazel.lock crate extraction.""" @@ -314,8 +289,7 @@ class TestEndToEndLicenseExtraction(unittest.TestCase): """Integration tests verifying the full license extraction pipeline. These tests verify that the parse_dash_summary function correctly - handles the output format of the Eclipse dash-licenses JAR, which - is the format that build_dash_coordinates + JAR invocation produces. + handles the output format of the Eclipse dash-licenses JAR. """ def _write_summary(self, content: str) -> str: @@ -325,20 +299,8 @@ def _write_summary(self, content: str) -> str: self.addCleanup(os.unlink, path) return path - def test_coordinates_match_summary_format(self): - """Coordinates built by build_dash_coordinates match the format - that parse_dash_summary expects in the JAR output.""" - crates = { - "serde": {"name": "serde", "version": "1.0.228", "checksum": "abc"}, - "tokio": {"name": "tokio", "version": "1.10.0", "checksum": "def"}, - } - - # Build coordinates (what we send to the JAR) - coords = build_dash_coordinates(crates) - self.assertEqual(coords[0], "crate/cratesio/-/serde/1.0.228") - self.assertEqual(coords[1], "crate/cratesio/-/tokio/1.10.0") - - # Simulate JAR summary output (what the JAR would produce) + def test_summary_format_round_trip(self): + """parse_dash_summary correctly maps crate names from JAR-style CSV output.""" summary = ( "crate/cratesio/-/serde/1.0.228, Apache-2.0 OR MIT, approved, clearlydefined\n" "crate/cratesio/-/tokio/1.10.0, MIT, approved, clearlydefined\n" @@ -346,27 +308,13 @@ def test_coordinates_match_summary_format(self): path = self._write_summary(summary) license_map = parse_dash_summary(path) - # Verify licenses are correctly mapped back to crate names self.assertEqual(license_map["serde"], "Apache-2.0 OR MIT") self.assertEqual(license_map["tokio"], "MIT") - # Verify all crates got licenses - for name in crates: - self.assertIn(name, license_map, f"Missing license for crate: {name}") - def test_kyron_style_crates(self): """Verify license extraction works for crates typical in the score_kyron module.""" - crates = { - "proc-macro2": {"name": "proc-macro2", "version": "1.0.92", "checksum": ""}, - "quote": {"name": "quote", "version": "1.0.37", "checksum": ""}, - "syn": {"name": "syn", "version": "2.0.96", "checksum": ""}, - "iceoryx2": {"name": "iceoryx2", "version": "0.7.0", "checksum": ""}, - } - - coords = build_dash_coordinates(crates) - self.assertEqual(len(coords), 4) + crate_names = ["proc-macro2", "quote", "syn", "iceoryx2"] - # Simulate JAR output summary = ( "crate/cratesio/-/proc-macro2/1.0.92, Apache-2.0 OR MIT, approved, clearlydefined\n" "crate/cratesio/-/quote/1.0.37, Apache-2.0 OR MIT, approved, clearlydefined\n" @@ -376,7 +324,6 @@ def test_kyron_style_crates(self): path = self._write_summary(summary) license_map = parse_dash_summary(path) - # All crates should have licenses - for name in crates: + for name in crate_names: self.assertIn(name, license_map, f"Missing license for {name}") self.assertTrue(license_map[name], f"Empty license for {name}") diff --git a/sbom/tests/test_real_sbom_integration.py b/sbom/tests/test_real_sbom_integration.py new file mode 100644 index 0000000..acebfe9 --- /dev/null +++ b/sbom/tests/test_real_sbom_integration.py @@ -0,0 +1,593 @@ +"""Integration tests that generate real SBOMs from reference_integration fixtures. + +What this file tests +--------------------- +Generates SPDX 2.3 and CycloneDX 1.6 SBOMs for three real S-CORE components +using fixture files extracted directly from the reference_integration workspace: + + score_baselibs — C++ foundational libraries (Boost, nlohmann_json, …) + score_kyron — Rust kyron framework (iceoryx2-qnx8, serde, syn, …) + score_orchestrator — Rust orchestration layer (kyron + tracing + postcard, …) + +Fixtures are stored in sbom/tests/fixtures/ and include: + *_input.json — Real Bazel aspect output (external_repos, config, …) + sbom_metadata.json — Real sbom_metadata Bazel extension output + crates_metadata.json — 288-crate dash-license-scan + crates.io cache + *_cdxgen.cdx.json — Real cdxgen C++-scan output for each component + reference_integration.MODULE.bazel.lock — Minimal MODULE.bazel.lock slice + +SPDX 2.3 structural rules validated per https://spdx.github.io/spdx-spec/v2.3/ +CycloneDX 1.6 structural rules validated per https://cyclonedx.org/docs/1.6/json/ + +Online validation against https://sbomgenerator.com/tools/validator is performed +automatically in test_online_validator_accepts_all_sboms and skipped gracefully +when the service is unreachable (e.g. offline CI environments). + +Bazel target : //sbom/tests:test_real_sbom_integration +Run : bazel test //sbom/tests:test_real_sbom_integration + pytest sbom/tests/test_real_sbom_integration.py -v +""" + +import json +import os +import re +import shutil +import tempfile +import unittest +import unittest.mock +import urllib.request +from pathlib import Path + +from sbom.internal.generator.sbom_generator import main + +FIXTURES = Path(__file__).parent / "fixtures" + + +def _load_fixture(name: str) -> dict: + with open(FIXTURES / name, encoding="utf-8") as f: + return json.load(f) + + +def _write_json(path: str, data: dict) -> None: + with open(path, "w", encoding="utf-8") as f: + json.dump(data, f) + + +class TestRealSbomGeneration(unittest.TestCase): + """SBOM generation and structural validation using real reference_integration fixtures.""" + + def setUp(self): + self.tmpdir = tempfile.mkdtemp(prefix="sbom_real_") + self._lock_path = str(FIXTURES / "reference_integration.MODULE.bazel.lock") + self._meta_path = str(FIXTURES / "sbom_metadata.json") + self._crates_path = str(FIXTURES / "crates_metadata.json") + + def tearDown(self): + shutil.rmtree(self.tmpdir, ignore_errors=True) + + # ----------------------------------------------------------------------- + # Helpers + # ----------------------------------------------------------------------- + + def _run( + self, + input_fixture: str, + cdxgen_fixture: str | None = None, + ) -> tuple[dict, dict]: + """Load fixture, run sbom_generator.main(), return (spdx_doc, cdx_doc).""" + input_data = _load_fixture(input_fixture) + + # Substitute the sentinel lockfile path with the real fixture path. + input_data["module_lockfiles"] = [self._lock_path] + + input_path = os.path.join(self.tmpdir, "input.json") + spdx_path = os.path.join(self.tmpdir, "out.spdx.json") + cdx_path = os.path.join(self.tmpdir, "out.cdx.json") + + _write_json(input_path, input_data) + + argv = [ + "sbom_generator.py", + "--input", + input_path, + "--metadata", + self._meta_path, + "--spdx-output", + spdx_path, + "--cyclonedx-output", + cdx_path, + "--crates-cache", + self._crates_path, + ] + if cdxgen_fixture: + argv += ["--cdxgen-sbom", str(FIXTURES / cdxgen_fixture)] + + with unittest.mock.patch("sys.argv", argv): + rc = main() + + self.assertEqual(rc, 0, "sbom_generator.main() must return 0") + + with open(spdx_path, encoding="utf-8") as f: + spdx = json.load(f) + with open(cdx_path, encoding="utf-8") as f: + cdx = json.load(f) + + return spdx, cdx + + # ── SPDX 2.3 structural validator ────────────────────────────────────── + + def _assert_valid_spdx( + self, + spdx: dict, + component_name: str, + expected_dep_names: list[str], + ) -> None: + """Assert SPDX 2.3 structural validity per the specification.""" + # Top-level required fields + self.assertEqual(spdx["spdxVersion"], "SPDX-2.3") + self.assertEqual(spdx["dataLicense"], "CC0-1.0") + self.assertEqual(spdx["SPDXID"], "SPDXRef-DOCUMENT") + self.assertIn("documentNamespace", spdx) + self.assertIn("creationInfo", spdx) + self.assertIn("packages", spdx) + self.assertIn("relationships", spdx) + + ns = spdx["documentNamespace"] + self.assertRegex(ns, r"^https?://", "documentNamespace must be a URI") + + ci = spdx["creationInfo"] + self.assertIn("created", ci) + self.assertIn("creators", ci) + self.assertIsInstance(ci["creators"], list) + self.assertTrue(ci["creators"], "creators must not be empty") + + pkgs = spdx["packages"] + self.assertIsInstance(pkgs, list) + self.assertGreater(len(pkgs), 1, "Must have root + at least one dep package") + + spdx_id_pattern = re.compile(r"^SPDXRef-[a-zA-Z0-9.\-]+$") + + # Root package + root = next((p for p in pkgs if p.get("SPDXID") == "SPDXRef-RootPackage"), None) + self.assertIsNotNone(root, "Root package SPDXRef-RootPackage must exist") + self.assertEqual(root["name"], component_name) + + # All packages + all_spdx_ids: set[str] = set() + for pkg in pkgs: + name = pkg.get("name", "") + self.assertTrue(name, f"Package name must not be empty: {pkg}") + + sid = pkg.get("SPDXID", "") + self.assertRegex( + sid, spdx_id_pattern, f"Invalid SPDXID on package {name!r}" + ) + self.assertNotIn(sid, all_spdx_ids, f"Duplicate SPDXID: {sid!r}") + all_spdx_ids.add(sid) + + self.assertIn("versionInfo", pkg, f"Missing versionInfo on {name!r}") + self.assertIn( + "downloadLocation", pkg, f"Missing downloadLocation on {name!r}" + ) + self.assertIn("filesAnalyzed", pkg, f"Missing filesAnalyzed on {name!r}") + self.assertFalse( + pkg["filesAnalyzed"], f"filesAnalyzed must be False on {name!r}" + ) + self.assertIn( + "licenseConcluded", pkg, f"Missing licenseConcluded on {name!r}" + ) + self.assertIn( + "licenseDeclared", pkg, f"Missing licenseDeclared on {name!r}" + ) + self.assertIn("copyrightText", pkg, f"Missing copyrightText on {name!r}") + + # checksums entries must have algorithm + value + for chk in pkg.get("checksums", []): + self.assertIn("algorithm", chk) + self.assertIn("checksumValue", chk) + if chk["algorithm"] == "SHA256": + self.assertRegex( + chk["checksumValue"], + r"^[0-9a-f]{64}$", + f"SHA256 value on {name!r} must be 64 lowercase hex digits", + ) + + # LicenseRef-* identifiers in packages must be declared + licenseref_re = re.compile(r"LicenseRef-[A-Za-z0-9\-.]+") + used_refs: set[str] = set() + for pkg in pkgs: + for field in ("licenseConcluded", "licenseDeclared"): + used_refs.update(licenseref_re.findall(pkg.get(field, ""))) + if used_refs: + declared = { + e["licenseId"] for e in spdx.get("hasExtractedLicensingInfos", []) + } + for ref in used_refs: + self.assertIn( + ref, + declared, + f"LicenseRef {ref!r} used but not declared in hasExtractedLicensingInfos", + ) + + # Relationships: at least DESCRIBES + one DEPENDS_ON + rels = spdx["relationships"] + self.assertIsInstance(rels, list) + rel_types = {r["relationshipType"] for r in rels} + self.assertIn("DESCRIBES", rel_types) + self.assertIn("DEPENDS_ON", rel_types) + + # All relationship element IDs must reference known SPDXIDs + doc_spdx_ids = all_spdx_ids | {"SPDXRef-DOCUMENT"} + for rel in rels: + for field in ("spdxElementId", "relatedSpdxElement"): + self.assertIn( + rel[field], + doc_spdx_ids, + f"Relationship references unknown SPDXID {rel[field]!r}", + ) + + # Spot-check: expected dependency names must appear + dep_names = { + p["name"] for p in pkgs if p.get("SPDXID") != "SPDXRef-RootPackage" + } + for dep in expected_dep_names: + self.assertIn( + dep, dep_names, f"Expected dep {dep!r} not found in SPDX packages" + ) + + # ── CycloneDX 1.6 structural validator ───────────────────────────────── + + def _assert_valid_cdx( + self, + cdx: dict, + component_name: str, + expected_dep_names: list[str], + ) -> None: + """Assert CycloneDX 1.6 structural validity per the specification.""" + self.assertEqual(cdx["bomFormat"], "CycloneDX") + self.assertEqual(cdx["specVersion"], "1.6") + self.assertIn("serialNumber", cdx) + self.assertRegex( + cdx["serialNumber"], + r"^urn:uuid:[0-9a-f-]{36}$", + "serialNumber must be a URN UUID", + ) + self.assertIsInstance(cdx.get("version"), int) + + # metadata + meta = cdx.get("metadata", {}) + self.assertIn("timestamp", meta) + self.assertIn("tools", meta) + self.assertIn("component", meta) + + mc = meta["component"] + self.assertEqual(mc["name"], component_name) + self.assertIn("type", mc) + self.assertIn("version", mc) + self.assertIn("bom-ref", mc) + + # components + comps = cdx.get("components", []) + self.assertIsInstance(comps, list) + self.assertGreater(len(comps), 0, "components must not be empty") + + CDX_TYPES = { + "application", + "library", + "framework", + "container", + "device", + "firmware", + "file", + "operating-system", + "device-driver", + "platform", + "machine-learning-model", + "data", + } + + bom_refs: list[str] = [] + for comp in comps: + name = comp.get("name", "") + self.assertTrue(name, f"Component name must not be empty: {comp}") + self.assertIn("type", comp, f"Missing type on {name!r}") + self.assertIn("version", comp, f"Missing version on {name!r}") + self.assertIn("bom-ref", comp, f"Missing bom-ref on {name!r}") + self.assertIn( + comp["type"], + CDX_TYPES, + f"Unknown CDX type {comp['type']!r} on {name!r}", + ) + bom_refs.append(comp["bom-ref"]) + + # hashes entries must have alg + content + for h in comp.get("hashes", []): + self.assertIn("alg", h) + self.assertIn("content", h) + + # licenses must be a list of licence or expression objects + for lic_entry in comp.get("licenses", []): + self.assertTrue( + "license" in lic_entry or "expression" in lic_entry, + f"License entry on {name!r} must have 'license' or 'expression': {lic_entry}", + ) + + # bom-refs must be unique across all components + self.assertEqual( + len(bom_refs), + len(set(bom_refs)), + f"Duplicate bom-refs found: {[r for r in bom_refs if bom_refs.count(r) > 1]}", + ) + + # dependencies: root must depend on at least one component + deps = cdx.get("dependencies", []) + self.assertIsInstance(deps, list) + root_dep = next((d for d in deps if d.get("ref") == mc["bom-ref"]), None) + self.assertIsNotNone(root_dep, "Root component must have a dependency entry") + self.assertGreater( + len(root_dep.get("dependsOn", [])), + 0, + "Root component must depend on at least one component", + ) + + # Spot-check: expected dependency names must appear + comp_names = {c["name"] for c in comps} + for dep in expected_dep_names: + self.assertIn( + dep, comp_names, f"Expected dep {dep!r} not found in CDX components" + ) + + # ── sbomgenerator.com online validator ───────────────────────────────── + + def _validate_online(self, content: str, fmt: str) -> dict | None: + """POST content to sbomgenerator.com/tools/validator/validate. + + Returns the parsed JSON response dict on success, or None when the + service is unreachable (network error, timeout, non-200 response). + Never raises — callers must handle the None case. + + Args: + content: Serialised SBOM string (JSON). + fmt: Format string accepted by the API: ``"spdx"`` or ``"cyclonedx"``. + """ + payload = json.dumps( + { + "sbom_data": content, + "format": fmt, + "options": { + "strict": True, + "bestPractices": True, + "validatePurls": True, + "checkLicenses": True, + }, + } + ).encode() + req = urllib.request.Request( + "https://sbomgenerator.com/tools/validator/validate", + data=payload, + headers={ + "Content-Type": "application/json", + "User-Agent": "Mozilla/5.0", + "Referer": "https://sbomgenerator.com/tools/validator", + "Origin": "https://sbomgenerator.com", + }, + method="POST", + ) + try: + with urllib.request.urlopen(req, timeout=30) as resp: + return json.loads(resp.read()) + except Exception: + return None + + # ----------------------------------------------------------------------- + # Test cases + # ----------------------------------------------------------------------- + + def test_baselibs_spdx_and_cdx_are_valid(self): + """score_baselibs SBOM (Boost + nlohmann_json) passes structural validation.""" + spdx, cdx = self._run("baselibs_input.json") + + self._assert_valid_spdx( + spdx, + component_name="score_baselibs", + expected_dep_names=["boost.config", "boost.assert", "boost.mp11"], + ) + self._assert_valid_cdx( + cdx, + component_name="score_baselibs", + expected_dep_names=["boost.config", "boost.assert", "boost.mp11"], + ) + + def test_kyron_spdx_and_cdx_are_valid(self): + """score_kyron SBOM (iceoryx2, serde, syn, …) passes structural validation.""" + spdx, cdx = self._run( + "kyron_input.json", cdxgen_fixture="kyron_cdxgen.cdx.json" + ) + + self._assert_valid_spdx( + spdx, + component_name="score_kyron", + expected_dep_names=["serde", "syn"], + ) + self._assert_valid_cdx( + cdx, + component_name="score_kyron", + expected_dep_names=["serde", "syn"], + ) + + def test_orchestrator_spdx_and_cdx_are_valid(self): + """score_orchestrator SBOM (kyron + tracing + postcard, …) passes structural validation.""" + spdx, cdx = self._run( + "orchestrator_input.json", + cdxgen_fixture="orchestrator_cdxgen.cdx.json", + ) + + self._assert_valid_spdx( + spdx, + component_name="score_orchestrator", + expected_dep_names=["serde", "postcard", "tracing"], + ) + self._assert_valid_cdx( + cdx, + component_name="score_orchestrator", + expected_dep_names=["serde", "postcard", "tracing"], + ) + + def test_baselibs_package_count(self): + """score_baselibs SBOM must contain the expected number of packages.""" + spdx, cdx = self._run("baselibs_input.json") + pkgs = spdx["packages"] + comps = cdx["components"] + # root + 19 deps (Boost sub-libs + nlohmann_json + acl-deb) + self.assertEqual(len(pkgs), 20, f"Expected 20 SPDX packages, got {len(pkgs)}") + self.assertEqual( + len(comps), 19, f"Expected 19 CDX components, got {len(comps)}" + ) + + def test_kyron_crate_license_enrichment(self): + """Rust crates in score_kyron SBOM must have license data from crates_metadata.json.""" + spdx, _ = self._run("kyron_input.json", cdxgen_fixture="kyron_cdxgen.cdx.json") + pkgs = spdx["packages"] + serde = next((p for p in pkgs if p["name"] == "serde"), None) + self.assertIsNotNone(serde, "serde package must exist in kyron SBOM") + self.assertNotEqual( + serde["licenseConcluded"], + "NOASSERTION", + "serde must have a resolved license from crates_metadata.json", + ) + self.assertIn("Apache-2.0", serde["licenseConcluded"]) + + def test_orchestrator_crate_checksum_present(self): + """Crates with known checksums must have checksums in the SPDX output.""" + spdx, _ = self._run( + "orchestrator_input.json", + cdxgen_fixture="orchestrator_cdxgen.cdx.json", + ) + pkgs = spdx["packages"] + crates_with_checksum = [ + p + for p in pkgs + if p.get("checksums") and p.get("SPDXID") != "SPDXRef-RootPackage" + ] + self.assertGreater( + len(crates_with_checksum), + 50, + f"Expected >50 crates with checksums, got {len(crates_with_checksum)}", + ) + + def test_lockfile_enriches_module_version(self): + """MODULE.bazel.lock fixture must enrich boost.config with version from BCR URL.""" + spdx, _ = self._run("baselibs_input.json") + pkgs = spdx["packages"] + boost_config = next((p for p in pkgs if p["name"] == "boost.config"), None) + self.assertIsNotNone(boost_config) + self.assertNotEqual( + boost_config["versionInfo"], + "unknown", + "boost.config version must be extracted from MODULE.bazel.lock", + ) + + def test_spdx_licenseref_declarations(self): + """All LicenseRef-* identifiers used in SPDX packages must be declared.""" + for fixture in ( + "baselibs_input.json", + "kyron_input.json", + "orchestrator_input.json", + ): + with self.subTest(fixture=fixture): + spdx, _ = self._run(fixture) + licenseref_re = re.compile(r"LicenseRef-[A-Za-z0-9\-.]+") + used: set[str] = set() + for pkg in spdx["packages"]: + for field in ("licenseConcluded", "licenseDeclared"): + used.update(licenseref_re.findall(pkg.get(field, ""))) + if used: + declared = { + e["licenseId"] + for e in spdx.get("hasExtractedLicensingInfos", []) + } + self.assertEqual( + used, + used & declared, + f"Undeclared LicenseRef-* in {fixture}: {used - declared}", + ) + + def test_cdx_bom_refs_are_unique(self): + """All CycloneDX bom-ref values must be unique within each document.""" + for fixture, cdxgen in [ + ("baselibs_input.json", None), + ("kyron_input.json", "kyron_cdxgen.cdx.json"), + ("orchestrator_input.json", "orchestrator_cdxgen.cdx.json"), + ]: + with self.subTest(fixture=fixture): + _, cdx = self._run(fixture, cdxgen_fixture=cdxgen) + refs = [c["bom-ref"] for c in cdx["components"]] + self.assertEqual( + len(refs), len(set(refs)), f"Duplicate bom-refs in {fixture}" + ) + + def test_all_spdx_ids_reference_valid_nodes(self): + """Relationship element IDs must reference only packages defined in the document.""" + for fixture in ( + "baselibs_input.json", + "kyron_input.json", + "orchestrator_input.json", + ): + with self.subTest(fixture=fixture): + spdx, _ = self._run(fixture) + valid_ids = {p["SPDXID"] for p in spdx["packages"]} | { + "SPDXRef-DOCUMENT" + } + for rel in spdx["relationships"]: + self.assertIn( + rel["spdxElementId"], + valid_ids, + f"Dangling spdxElementId in {fixture}: {rel['spdxElementId']!r}", + ) + self.assertIn( + rel["relatedSpdxElement"], + valid_ids, + f"Dangling relatedSpdxElement in {fixture}: {rel['relatedSpdxElement']!r}", + ) + + def test_online_validator_accepts_all_sboms(self): + """SPDX and CycloneDX outputs pass sbomgenerator.com/tools/validator. + + Posts each generated SBOM to https://sbomgenerator.com/tools/validator/validate + and asserts that it is reported as valid with zero errors. + + Skipped automatically (per subtest) when the service is unreachable so + that offline CI environments are not broken by network failures. + """ + cases = [ + ("baselibs_input.json", None, "score_baselibs"), + ("kyron_input.json", "kyron_cdxgen.cdx.json", "score_kyron"), + ( + "orchestrator_input.json", + "orchestrator_cdxgen.cdx.json", + "score_orchestrator", + ), + ] + for input_fixture, cdxgen_fixture, component_name in cases: + spdx, cdx = self._run(input_fixture, cdxgen_fixture=cdxgen_fixture) + for content, fmt in [ + (json.dumps(spdx), "spdx"), + (json.dumps(cdx), "cyclonedx"), + ]: + with self.subTest(component=component_name, format=fmt): + result = self._validate_online(content, fmt) + if result is None: + self.skipTest( + "sbomgenerator.com is unreachable — skipping online validation" + ) + self.assertTrue( + result.get("valid"), + f"{component_name} {fmt}: validator reports invalid — " + f"errors: {result.get('errors', [])}", + ) + self.assertEqual( + result.get("errors", []), + [], + f"{component_name} {fmt}: unexpected errors from validator: " + f"{result.get('errors', [])}", + ) diff --git a/sbom/tests/test_sbom_generator.py b/sbom/tests/test_sbom_generator.py new file mode 100644 index 0000000..770059e --- /dev/null +++ b/sbom/tests/test_sbom_generator.py @@ -0,0 +1,1184 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* +"""Tests for the core orchestration functions in sbom_generator.py. + +What this file tests +--------------------- +filter_repos() + - Repos matching an exclude_pattern are removed. + - crate_index__ / crates_io__ / _crates__ repos are always kept regardless + of patterns — they are real dependencies, not build tools. + - Multiple patterns are each applied independently. + +resolve_component() — all repo-type branches + - bazel_dep module : version, PURL, sha256 → checksum, pedigree fields. + - bzlmod "+" suffix : repo name "boost+" resolves to component name "boost". + - http_archive : version, URL, license, sha256 → checksum; absent sha256 + means no checksum key on the result. + - git_repository : URL, license; commit_date replaces version when + version == "unknown". + - Crate from cache : direct lookup, hyphen/underscore normalisation, checksum. + - crate_universe bzlmod format (rules_rust++crate+crate_index__NAME-VER): + simple crate name, complex platform-suffix name + (iceoryx2-bb-lock-free-qnx8-0.7.0), metadata enrichment, no-cache fallback. + - Legacy crates_io__NAME-VERSION format + metadata enrichment. + - score_-prefixed repos → Eclipse Foundation supplier. + - Dot sub-library (boost.config+) inherits version/license/checksum from the + parent entry (works for modules, http_archives, and git_repositories). + - Unknown repos → placeholder dict with version = "unknown"; never None. + - Return type is always dict, never None, for any input. + +deduplicate_components() + - No duplicates → list unchanged. + - Exact duplicate → keep first entry. + - Known version preferred over "unknown". + - Entry with license preferred over entry without license. + - Empty input returns empty list. + +parse_module_bazel_files() + - Extracts name, version, PURL from a module() call. + - Missing or unreadable files are silently skipped. + - Files without a module() block are skipped. + - Multiple files are merged into one dict. + - Multiline module() blocks with extra attributes are handled. + +parse_module_lockfiles() + - Extracts version from registryFileHashes MODULE.bazel URL keys. + - Extracts sha256 from source.json URL keys. + - Modules with conflicting (ambiguous) versions are excluded. + - Missing, malformed JSON, or empty files are silently skipped. + - Multiple lockfiles are merged. + - version appears inside the purl string. + +mark_missing_cpp_descriptions() + - "Missing" is injected for non-Rust library components with no description. + - pkg:cargo/ crates are never marked "Missing". + - Components with an existing description are not modified. + - Non-library types (application, etc.) are not marked. + - Mixed component lists are handled independently per component. + +main() — end-to-end integration + - Returns 0 on success. + - Writes a valid SPDX 2.3 JSON file when --spdx-output is given. + - Writes a valid CycloneDX 1.6 JSON file when --cyclonedx-output is given. + - A component present in metadata appears in both output files. + - The declared component_name is excluded from its own dependency list. + - BCR known licenses (e.g. boost.config → BSL-1.0) are applied before output. + - crate_universe repos resolve and appear in output. + - Exclude patterns remove repos from output. + - component_version is auto-detected from metadata["modules"] when not in config. + - dep_module_files: MODULE.bazel version flows into output. + - module_lockfiles: lockfile-derived version flows into output. + - --crates-cache: external crate metadata cache enriches crate components. + - --cdxgen-sbom: C++ enrichment data applied to matching components. + - Requesting only --spdx-output does not create a CycloneDX file. + - Requesting only --cyclonedx-output does not create an SPDX file. + +Bazel target : //sbom/tests:test_sbom_generator +Run : bazel test //sbom/tests:test_sbom_generator + PYTHONPATH=. pytest sbom/tests/test_sbom_generator.py -v +""" + +import json +import os +import shutil +import tempfile +import unittest +import unittest.mock + +from sbom.internal.generator.sbom_generator import ( + deduplicate_components, + filter_repos, + main, + mark_missing_cpp_descriptions, + parse_module_bazel_files, + parse_module_lockfiles, + resolve_component, +) + + +# --------------------------------------------------------------------------- +# filter_repos +# --------------------------------------------------------------------------- + + +class TestFilterRepos(unittest.TestCase): + """filter_repos() — build-tool exclusion logic.""" + + def test_no_patterns_keeps_all_repos(self): + repos = ["nlohmann_json", "googletest", "abseil-cpp"] + self.assertEqual(filter_repos(repos, []), repos) + + def test_matching_pattern_excludes_repo(self): + repos = ["cc_toolchain", "nlohmann_json"] + result = filter_repos(repos, ["cc_toolchain"]) + self.assertNotIn("cc_toolchain", result) + self.assertIn("nlohmann_json", result) + + def test_crate_index_repo_always_kept_even_when_pattern_matches(self): + """crate_index__ repos are real dependencies and must never be filtered out.""" + repos = ["rules_rust++crate+crate_index__serde-1.0.228"] + result = filter_repos(repos, ["rules_rust"]) + self.assertEqual(result, repos) + + def test_crates_io_prefix_always_kept(self): + repos = ["crates_io__tokio-1.10.0"] + result = filter_repos(repos, ["crates_io"]) + self.assertEqual(result, repos) + + def test_score_crates_always_kept(self): + repos = ["score_crates__serde-1.0.0"] + result = filter_repos(repos, ["score"]) + self.assertEqual(result, repos) + + def test_multiple_patterns_combined(self): + repos = ["cc_toolchain", "rust_toolchain", "nlohmann_json"] + result = filter_repos(repos, ["cc_toolchain", "rust_toolchain"]) + self.assertEqual(result, ["nlohmann_json"]) + + def test_empty_repos(self): + self.assertEqual(filter_repos([], ["pattern"]), []) + + def test_partial_pattern_match_excludes_repo(self): + repos = ["score_cc_toolchain_linux", "my_lib"] + result = filter_repos(repos, ["cc_toolchain"]) + self.assertNotIn("score_cc_toolchain_linux", result) + self.assertIn("my_lib", result) + + +# --------------------------------------------------------------------------- +# resolve_component +# --------------------------------------------------------------------------- + + +class TestResolveComponentBazelDep(unittest.TestCase): + """resolve_component() — bazel_dep module paths.""" + + def _meta(self, **kwargs) -> dict: + return {"modules": kwargs} + + def test_basic_bazel_dep_module(self): + meta = self._meta( + nlohmann_json={ + "version": "3.11.3", + "purl": "pkg:generic/nlohmann_json@3.11.3", + "license": "MIT", + "supplier": "Niels Lohmann", + } + ) + comp = resolve_component("nlohmann_json", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "nlohmann_json") + self.assertEqual(comp["version"], "3.11.3") + self.assertEqual(comp["purl"], "pkg:generic/nlohmann_json@3.11.3") + self.assertEqual(comp["license"], "MIT") + self.assertEqual(comp["supplier"], "Niels Lohmann") + + def test_bzlmod_plus_suffix_stripped(self): + """bzlmod appends '+' to repo names; the suffix must be stripped.""" + meta = self._meta( + boost={ + "version": "1.87.0", + "purl": "pkg:generic/boost@1.87.0", + "license": "BSL-1.0", + } + ) + comp = resolve_component("boost+", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "boost") + self.assertEqual(comp["version"], "1.87.0") + + def test_sha256_from_lockfile_becomes_checksum(self): + """sha256 field from parse_module_lockfiles is surfaced as checksum.""" + meta = self._meta( + boost={ + "version": "1.87.0", + "purl": "pkg:generic/boost@1.87.0", + "sha256": "abc123def456", + } + ) + comp = resolve_component("boost", meta) + self.assertEqual(comp["checksum"], "abc123def456") + + def test_pedigree_fields_propagated(self): + meta = self._meta( + linux_kernel={ + "version": "5.10.130", + "purl": "pkg:generic/linux_kernel@5.10.130", + "pedigree_ancestors": ["pkg:generic/linux-kernel@5.10.0"], + "pedigree_notes": "Backported CVE-2025-12345 fix", + } + ) + comp = resolve_component("linux_kernel", meta) + self.assertEqual( + comp["pedigree_ancestors"], ["pkg:generic/linux-kernel@5.10.0"] + ) + self.assertEqual(comp["pedigree_notes"], "Backported CVE-2025-12345 fix") + + +class TestResolveComponentHttpArchive(unittest.TestCase): + """resolve_component() — http_archive paths.""" + + def _meta(self, **archives) -> dict: + return {"modules": {}, "http_archives": archives} + + def test_http_archive_basic(self): + meta = self._meta( + linux_kernel={ + "version": "5.10.0", + "purl": "pkg:generic/linux_kernel@5.10.0", + "url": "https://example.com/linux.tar.gz", + "license": "GPL-2.0-only", + "sha256": "deadbeef1234", + } + ) + comp = resolve_component("linux_kernel", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["version"], "5.10.0") + self.assertEqual(comp["license"], "GPL-2.0-only") + self.assertEqual(comp["checksum"], "deadbeef1234") + self.assertEqual(comp["url"], "https://example.com/linux.tar.gz") + + def test_http_archive_no_sha256_has_no_checksum_key(self): + meta = self._meta( + mylib={ + "version": "1.0", + "purl": "pkg:generic/mylib@1.0", + } + ) + comp = resolve_component("mylib", meta) + self.assertNotIn("checksum", comp) + + +class TestResolveComponentGitRepository(unittest.TestCase): + """resolve_component() — git_repository paths.""" + + def _meta(self, **repos) -> dict: + return {"modules": {}, "http_archives": {}, "git_repositories": repos} + + def test_git_repository_basic(self): + meta = self._meta( + my_lib={ + "version": "abc1234", + "purl": "pkg:generic/my_lib@abc1234", + "remote": "https://github.com/example/my_lib", + "license": "Apache-2.0", + } + ) + comp = resolve_component("my_lib", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["version"], "abc1234") + self.assertEqual(comp["url"], "https://github.com/example/my_lib") + + def test_commit_date_used_when_version_unknown(self): + """If version is 'unknown', commit_date provides the version string.""" + meta = self._meta( + my_lib={ + "version": "unknown", + "purl": "pkg:generic/my_lib@unknown", + "remote": "https://github.com/example/my_lib", + "commit_date": "2024-01-15", + } + ) + comp = resolve_component("my_lib", meta) + self.assertEqual(comp["version"], "2024-01-15") + + def test_commit_date_not_used_when_version_known(self): + meta = self._meta( + my_lib={ + "version": "v2.0.0", + "purl": "pkg:generic/my_lib@v2.0.0", + "remote": "https://github.com/example/my_lib", + "commit_date": "2024-01-15", + } + ) + comp = resolve_component("my_lib", meta) + self.assertEqual(comp["version"], "v2.0.0") + + +class TestResolveComponentCrateCache(unittest.TestCase): + """resolve_component() — metadata cache crate paths.""" + + def _meta(self, **crates) -> dict: + return {"modules": {}, "crates": crates} + + def test_crate_from_cache(self): + meta = self._meta( + my_crate={ + "version": "1.0.0", + "purl": "pkg:cargo/my_crate@1.0.0", + "license": "MIT", + "description": "My crate", + "supplier": "Me", + } + ) + comp = resolve_component("my_crate", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["version"], "1.0.0") + self.assertEqual(comp["license"], "MIT") + self.assertEqual(comp["purl"], "pkg:cargo/my_crate@1.0.0") + + def test_hyphen_to_underscore_lookup(self): + """Bazel uses hyphens; Cargo.lock uses underscores — both must resolve.""" + meta = self._meta( + my_crate={ + "version": "1.0.0", + "purl": "pkg:cargo/my_crate@1.0.0", + "license": "MIT", + } + ) + comp = resolve_component("my-crate", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["version"], "1.0.0") + + def test_crate_checksum_propagated(self): + meta = self._meta( + serde={ + "version": "1.0.228", + "purl": "pkg:cargo/serde@1.0.228", + "checksum": "abc123", + } + ) + comp = resolve_component("serde", meta) + self.assertEqual(comp["checksum"], "abc123") + + +class TestResolveComponentCrateUniverse(unittest.TestCase): + """resolve_component() — crate_universe bzlmod and legacy formats.""" + + def _meta(self, **crates) -> dict: + return {"modules": {}, "crates": crates} + + def test_bzlmod_format_simple_name(self): + """rules_rust++crate+crate_index__serde-1.0.228 → serde 1.0.228.""" + meta = self._meta( + serde={ + "license": "Apache-2.0 OR MIT", + "description": "A serialization framework", + "supplier": "David Tolnay", + } + ) + comp = resolve_component("rules_rust++crate+crate_index__serde-1.0.228", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "serde") + self.assertEqual(comp["version"], "1.0.228") + self.assertEqual(comp["purl"], "pkg:cargo/serde@1.0.228") + self.assertEqual(comp["license"], "Apache-2.0 OR MIT") + + def test_bzlmod_format_complex_name_with_platform_suffix(self): + """iceoryx2-bb-lock-free-qnx8-0.7.0 → name=iceoryx2-bb-lock-free-qnx8, version=0.7.0.""" + meta = self._meta() + comp = resolve_component( + "rules_rust++crate+crate_index__iceoryx2-bb-lock-free-qnx8-0.7.0", meta + ) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "iceoryx2-bb-lock-free-qnx8") + self.assertEqual(comp["version"], "0.7.0") + self.assertEqual(comp["purl"], "pkg:cargo/iceoryx2-bb-lock-free-qnx8@0.7.0") + + def test_bzlmod_format_without_cache_entry_still_resolves(self): + """Crate repos resolve even with no metadata cache entry.""" + meta = self._meta() + comp = resolve_component("rules_rust++crate+crate_index__tokio-1.28.0", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "tokio") + self.assertEqual(comp["version"], "1.28.0") + + def test_legacy_crates_io_format(self): + """crates_io__tokio-1.10.0 → tokio 1.10.0.""" + meta = self._meta() + comp = resolve_component("crates_io__tokio-1.10.0", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "tokio") + self.assertEqual(comp["version"], "1.10.0") + self.assertEqual(comp["purl"], "pkg:cargo/tokio@1.10.0") + + def test_legacy_format_metadata_enrichment(self): + """Legacy crate repos pick up metadata from cache when available.""" + meta = self._meta( + tokio={ + "license": "MIT", + "description": "Async runtime", + "supplier": "Tokio Contributors", + } + ) + comp = resolve_component("crates_io__tokio-1.10.0", meta) + self.assertEqual(comp["license"], "MIT") + self.assertEqual(comp["description"], "Async runtime") + + +class TestResolveComponentSpecialCases(unittest.TestCase): + """resolve_component() — score_ prefix, dot sub-library, and unknown fallback.""" + + def test_score_prefixed_repo(self): + meta = {"modules": {}} + comp = resolve_component("score_communication", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "score_communication") + self.assertIn("eclipse-score", comp["purl"]) + self.assertEqual(comp["supplier"], "Eclipse Foundation") + + def test_dot_sub_library_inherits_from_parent_module(self): + """boost.config+ must inherit version and license from the boost parent.""" + meta = { + "modules": { + "boost": { + "version": "1.87.0", + "purl": "pkg:generic/boost@1.87.0", + "license": "BSL-1.0", + "supplier": "Boost.org", + "sha256": "abc123", + } + } + } + comp = resolve_component("boost.config+", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "boost.config") + self.assertEqual(comp["version"], "1.87.0") + self.assertEqual(comp["license"], "BSL-1.0") + self.assertEqual(comp["supplier"], "Boost.org") + self.assertEqual(comp["checksum"], "abc123") + + def test_dot_sub_library_inherits_from_parent_http_archive(self): + meta = { + "modules": {}, + "http_archives": { + "mylib": { + "version": "2.0.0", + "purl": "pkg:generic/mylib@2.0.0", + "license": "Apache-2.0", + } + }, + } + comp = resolve_component("mylib.component+", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["version"], "2.0.0") + self.assertEqual(comp["license"], "Apache-2.0") + + def test_unknown_repo_fallback(self): + """Repos that match no known pattern return an 'unknown' placeholder.""" + meta = {"modules": {}} + comp = resolve_component("some_unknown_lib", meta) + self.assertIsNotNone(comp) + self.assertEqual(comp["name"], "some_unknown_lib") + self.assertEqual(comp["version"], "unknown") + self.assertIn("some_unknown_lib", comp["purl"]) + + def test_returns_dict_not_none_for_all_repo_types(self): + """resolve_component always returns a dict, never None (all paths covered).""" + meta = { + "modules": { + "boost": {"version": "1.87.0", "purl": "pkg:generic/boost@1.87.0"} + }, + "http_archives": {}, + "git_repositories": {}, + "crates": {}, + } + for repo_name in [ + "boost", + "score_kyron", + "boost.config+", + "rules_rust++crate+crate_index__serde-1.0.228", + "crates_io__tokio-1.10.0", + "total_unknown_xyz", + ]: + with self.subTest(repo=repo_name): + comp = resolve_component(repo_name, meta) + self.assertIsNotNone( + comp, f"resolve_component returned None for {repo_name!r}" + ) + self.assertIsInstance(comp, dict) + + +# --------------------------------------------------------------------------- +# deduplicate_components +# --------------------------------------------------------------------------- + + +class TestDeduplicateComponents(unittest.TestCase): + """deduplicate_components() — dedup with metadata preference.""" + + def test_no_duplicates_unchanged(self): + components = [ + {"name": "serde", "version": "1.0.0"}, + {"name": "tokio", "version": "2.0.0"}, + ] + result = deduplicate_components(components) + self.assertEqual(len(result), 2) + + def test_exact_duplicate_keeps_first(self): + components = [ + {"name": "serde", "version": "1.0.0"}, + {"name": "serde", "version": "1.0.0"}, + ] + result = deduplicate_components(components) + self.assertEqual(len(result), 1) + + def test_prefers_known_version_over_unknown(self): + """When one entry has version='unknown' and the other has a real version, keep real.""" + components = [ + {"name": "serde", "version": "unknown"}, + {"name": "serde", "version": "1.0.228"}, + ] + result = deduplicate_components(components) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["version"], "1.0.228") + + def test_prefers_entry_with_license(self): + """When one entry has no license and the other does, keep the licensed one.""" + components = [ + {"name": "serde", "version": "1.0.0", "license": ""}, + {"name": "serde", "version": "1.0.0", "license": "MIT"}, + ] + result = deduplicate_components(components) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["license"], "MIT") + + def test_empty_components(self): + self.assertEqual(deduplicate_components([]), []) + + def test_three_duplicates_kept_correctly(self): + components = [ + {"name": "foo", "version": "unknown", "license": ""}, + {"name": "foo", "version": "1.0", "license": ""}, + {"name": "foo", "version": "1.0", "license": "MIT"}, + ] + result = deduplicate_components(components) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]["version"], "1.0") + + +# --------------------------------------------------------------------------- +# parse_module_bazel_files +# --------------------------------------------------------------------------- + + +class TestParseModuleBazelFiles(unittest.TestCase): + """parse_module_bazel_files() — MODULE.bazel version extraction.""" + + def _write(self, content: str) -> str: + fd, path = tempfile.mkstemp(suffix=".bazel") + with os.fdopen(fd, "w") as f: + f.write(content) + self.addCleanup(os.unlink, path) + return path + + def test_basic_extraction(self): + path = self._write( + 'module(\n name = "my_module",\n version = "1.2.3",\n)' + ) + result = parse_module_bazel_files([path]) + self.assertIn("my_module", result) + self.assertEqual(result["my_module"]["version"], "1.2.3") + self.assertEqual(result["my_module"]["purl"], "pkg:generic/my_module@1.2.3") + + def test_missing_file_gracefully_skipped(self): + result = parse_module_bazel_files(["/nonexistent/path/MODULE.bazel"]) + self.assertEqual(result, {}) + + def test_no_module_block_skipped(self): + path = self._write("# no module() call here\n") + result = parse_module_bazel_files([path]) + self.assertEqual(result, {}) + + def test_multiple_files_merged(self): + path_a = self._write('module(name = "lib_a", version = "1.0.0")') + path_b = self._write('module(name = "lib_b", version = "2.0.0")') + result = parse_module_bazel_files([path_a, path_b]) + self.assertIn("lib_a", result) + self.assertEqual(result["lib_a"]["version"], "1.0.0") + self.assertIn("lib_b", result) + self.assertEqual(result["lib_b"]["version"], "2.0.0") + + def test_multiline_module_block(self): + content = ( + "module(\n" + ' name = "score_communication",\n' + ' version = "0.3.0",\n' + " compatibility_level = 1,\n" + ")\n" + ) + path = self._write(content) + result = parse_module_bazel_files([path]) + self.assertIn("score_communication", result) + self.assertEqual(result["score_communication"]["version"], "0.3.0") + + def test_empty_list(self): + result = parse_module_bazel_files([]) + self.assertEqual(result, {}) + + +# --------------------------------------------------------------------------- +# parse_module_lockfiles +# --------------------------------------------------------------------------- + + +class TestParseModuleLockfiles(unittest.TestCase): + """parse_module_lockfiles() — MODULE.bazel.lock version + checksum extraction.""" + + def _write(self, data: dict) -> str: + fd, path = tempfile.mkstemp(suffix=".lock") + with os.fdopen(fd, "w") as f: + json.dump(data, f) + self.addCleanup(os.unlink, path) + return path + + def test_basic_version_extraction(self): + lockfile = { + "registryFileHashes": { + "https://bcr.bazel.build/modules/boost/1.87.0/MODULE.bazel": "sha256-abc", + } + } + path = self._write(lockfile) + result = parse_module_lockfiles([path]) + self.assertIn("boost", result) + self.assertEqual(result["boost"]["version"], "1.87.0") + self.assertEqual(result["boost"]["purl"], "pkg:generic/boost@1.87.0") + + def test_sha256_from_source_json(self): + """source.json hash is surfaced as sha256 for CycloneDX hashes.""" + lockfile = { + "registryFileHashes": { + "https://bcr.bazel.build/modules/nlohmann_json/3.11.3/MODULE.bazel": "sha256-abc", + "https://bcr.bazel.build/modules/nlohmann_json/3.11.3/source.json": "sha256-deadbeef", + } + } + path = self._write(lockfile) + result = parse_module_lockfiles([path]) + self.assertIn("nlohmann_json", result) + self.assertEqual(result["nlohmann_json"]["sha256"], "sha256-deadbeef") + + def test_ambiguous_version_skipped(self): + """Modules with more than one observed version are excluded to avoid guessing.""" + lockfile = { + "registryFileHashes": { + "https://bcr.bazel.build/modules/boost/1.83.0/MODULE.bazel": "sha256-a", + "https://bcr.bazel.build/modules/boost/1.87.0/MODULE.bazel": "sha256-b", + } + } + path = self._write(lockfile) + result = parse_module_lockfiles([path]) + self.assertNotIn("boost", result) + + def test_missing_file_gracefully_skipped(self): + result = parse_module_lockfiles(["/nonexistent/path/MODULE.bazel.lock"]) + self.assertEqual(result, {}) + + def test_malformed_json_skipped(self): + fd, path = tempfile.mkstemp(suffix=".lock") + with os.fdopen(fd, "w") as f: + f.write("not valid json {{{") + self.addCleanup(os.unlink, path) + result = parse_module_lockfiles([path]) + self.assertEqual(result, {}) + + def test_empty_lockfile_skipped(self): + path = self._write({}) + result = parse_module_lockfiles([path]) + self.assertEqual(result, {}) + + def test_multiple_lockfiles_merged(self): + lockfile_a = { + "registryFileHashes": { + "https://bcr.bazel.build/modules/boost/1.87.0/MODULE.bazel": "sha256-a", + } + } + lockfile_b = { + "registryFileHashes": { + "https://bcr.bazel.build/modules/abseil-cpp/20230802.0/MODULE.bazel": "sha256-b", + } + } + path_a = self._write(lockfile_a) + path_b = self._write(lockfile_b) + result = parse_module_lockfiles([path_a, path_b]) + self.assertIn("boost", result) + self.assertIn("abseil-cpp", result) + + def test_version_purl_consistent(self): + lockfile = { + "registryFileHashes": { + "https://bcr.bazel.build/modules/googletest/1.14.0/MODULE.bazel": "sha256-x", + } + } + path = self._write(lockfile) + result = parse_module_lockfiles([path]) + gt = result["googletest"] + self.assertIn(gt["version"], gt["purl"]) + + +# --------------------------------------------------------------------------- +# mark_missing_cpp_descriptions +# --------------------------------------------------------------------------- + + +class TestMarkMissingCppDescriptions(unittest.TestCase): + """mark_missing_cpp_descriptions() — 'Missing' marker for C++ libs.""" + + def test_library_without_description_marked_missing(self): + """Non-Rust libraries with no description receive 'Missing' as placeholder.""" + components = [ + { + "name": "nlohmann-json", + "type": "library", + "description": "", + "purl": "pkg:generic/nlohmann-json@3.11.3", + } + ] + result = mark_missing_cpp_descriptions(components) + self.assertEqual(result[0]["description"], "Missing") + + def test_cargo_crate_not_marked_missing(self): + """Rust crates (pkg:cargo/) must not receive 'Missing' — no cdxgen scan for them.""" + components = [ + { + "name": "serde", + "type": "library", + "description": "", + "purl": "pkg:cargo/serde@1.0.228", + } + ] + result = mark_missing_cpp_descriptions(components) + self.assertEqual(result[0]["description"], "") + + def test_existing_description_preserved(self): + components = [ + { + "name": "foo", + "type": "library", + "description": "JSON library", + "purl": "pkg:generic/foo@1.0", + } + ] + result = mark_missing_cpp_descriptions(components) + self.assertEqual(result[0]["description"], "JSON library") + + def test_non_library_type_not_marked(self): + """Applications and non-library types must not have 'Missing' injected.""" + components = [ + { + "name": "myapp", + "type": "application", + "description": "", + "purl": "pkg:generic/myapp@1.0", + } + ] + result = mark_missing_cpp_descriptions(components) + self.assertEqual(result[0]["description"], "") + + def test_mixed_components_handled_independently(self): + components = [ + { + "name": "cpp-lib", + "type": "library", + "description": "", + "purl": "pkg:generic/cpp-lib@1.0", + }, + { + "name": "rust-crate", + "type": "library", + "description": "", + "purl": "pkg:cargo/rust-crate@0.5", + }, + { + "name": "already-described", + "type": "library", + "description": "Has description", + "purl": "pkg:generic/already-described@2.0", + }, + ] + result = mark_missing_cpp_descriptions(components) + cpp = next(c for c in result if c["name"] == "cpp-lib") + rust = next(c for c in result if c["name"] == "rust-crate") + described = next(c for c in result if c["name"] == "already-described") + self.assertEqual(cpp["description"], "Missing") + self.assertEqual(rust["description"], "") + self.assertEqual(described["description"], "Has description") + + +# --------------------------------------------------------------------------- +# main() — end-to-end integration +# --------------------------------------------------------------------------- + + +class TestMain(unittest.TestCase): + """End-to-end integration tests for main(), covering the full SBOM pipeline.""" + + _DEFAULT_CONFIG = { + "component_name": "my_app", + "component_version": "1.0.0", + "producer_name": "Eclipse Foundation", + "namespace": "https://eclipse.dev/score", + } + + _DEFAULT_INPUT = { + "external_repos": ["nlohmann_json"], + "exclude_patterns": [], + "config": _DEFAULT_CONFIG, + "dep_module_files": [], + "module_lockfiles": [], + "external_dep_edges": [], + } + + _DEFAULT_METADATA = { + "modules": { + "nlohmann_json": { + "version": "3.11.3", + "purl": "pkg:generic/nlohmann_json@3.11.3", + "license": "MIT", + "supplier": "Niels Lohmann", + } + } + } + + def setUp(self): + self.tmpdir = tempfile.mkdtemp() + self._input_path = os.path.join(self.tmpdir, "input.json") + self._metadata_path = os.path.join(self.tmpdir, "metadata.json") + self._spdx_path = os.path.join(self.tmpdir, "output.spdx.json") + self._cdx_path = os.path.join(self.tmpdir, "output.cdx.json") + + def tearDown(self): + shutil.rmtree(self.tmpdir, ignore_errors=True) + + def _write_files(self, input_data=None, metadata=None): + with open(self._input_path, "w") as f: + json.dump(input_data if input_data is not None else self._DEFAULT_INPUT, f) + with open(self._metadata_path, "w") as f: + json.dump(metadata if metadata is not None else self._DEFAULT_METADATA, f) + + def _run(self, input_data=None, metadata=None, extra_args=None): + """Write fixtures and run main(), returning the exit code.""" + self._write_files(input_data=input_data, metadata=metadata) + argv = [ + "sbom_generator.py", + "--input", + self._input_path, + "--metadata", + self._metadata_path, + "--spdx-output", + self._spdx_path, + "--cyclonedx-output", + self._cdx_path, + ] + if extra_args: + argv.extend(extra_args) + with unittest.mock.patch("sys.argv", argv): + return main() + + # ----------------------------------------------------------------------- + # Basic pipeline + # ----------------------------------------------------------------------- + + def test_returns_zero(self): + self.assertEqual(self._run(), 0) + + def test_writes_valid_spdx(self): + self._run() + with open(self._spdx_path) as f: + spdx = json.load(f) + self.assertEqual(spdx["spdxVersion"], "SPDX-2.3") + self.assertIn("packages", spdx) + self.assertIn("relationships", spdx) + + def test_writes_valid_cyclonedx(self): + self._run() + with open(self._cdx_path) as f: + cdx = json.load(f) + self.assertEqual(cdx["bomFormat"], "CycloneDX") + self.assertEqual(cdx["specVersion"], "1.6") + self.assertIn("components", cdx) + + def test_component_appears_in_spdx(self): + """A registered dependency appears as a package in SPDX output.""" + self._run() + with open(self._spdx_path) as f: + spdx = json.load(f) + names = [p["name"] for p in spdx["packages"]] + self.assertIn("nlohmann_json", names) + + # ----------------------------------------------------------------------- + # Root component filtering + # ----------------------------------------------------------------------- + + def test_root_component_not_in_deps(self): + """component_name must not appear as a dependency in the SPDX output.""" + input_data = { + **self._DEFAULT_INPUT, + "external_repos": ["nlohmann_json", "my_app"], + } + self._run(input_data=input_data) + with open(self._spdx_path) as f: + spdx = json.load(f) + dep_names = [ + p["name"] + for p in spdx["packages"] + if p.get("SPDXID") != "SPDXRef-RootPackage" + ] + self.assertNotIn("my_app", dep_names) + + # ----------------------------------------------------------------------- + # BCR known licenses + # ----------------------------------------------------------------------- + + def test_bcr_known_license_applied(self): + """boost.* modules receive BSL-1.0 from BCR_KNOWN_LICENSES when no license is set.""" + input_data = {**self._DEFAULT_INPUT, "external_repos": ["boost.config+"]} + metadata = { + "modules": { + "boost.config": { + "version": "1.83.0", + "purl": "pkg:generic/boost.config@1.83.0", + } + } + } + self._run(input_data=input_data, metadata=metadata) + with open(self._spdx_path) as f: + spdx = json.load(f) + pkg = next( + (p for p in spdx["packages"] if p.get("name") == "boost.config"), None + ) + self.assertIsNotNone(pkg) + self.assertIn("BSL-1.0", pkg.get("licenseConcluded", "")) + + # ----------------------------------------------------------------------- + # crate_universe repos + # ----------------------------------------------------------------------- + + def test_crate_universe_repo_resolves(self): + """A bzlmod crate_universe repo resolves and appears as a package in SPDX output.""" + repo = "rules_rust++crate+crate_index__serde-1.0.228" + input_data = {**self._DEFAULT_INPUT, "external_repos": [repo]} + metadata = { + "crates": { + "serde": { + "version": "1.0.228", + "purl": "pkg:cargo/serde@1.0.228", + "license": "MIT OR Apache-2.0", + } + } + } + self._run(input_data=input_data, metadata=metadata) + with open(self._spdx_path) as f: + spdx = json.load(f) + names = [p["name"] for p in spdx["packages"]] + self.assertIn("serde", names) + + # ----------------------------------------------------------------------- + # Exclude patterns + # ----------------------------------------------------------------------- + + def test_exclude_patterns_remove_repos(self): + """Repos matching exclude_patterns are absent from SPDX output.""" + input_data = { + "external_repos": ["nlohmann_json", "cc_toolchain"], + "exclude_patterns": ["cc_toolchain"], + "config": self._DEFAULT_CONFIG, + "dep_module_files": [], + "module_lockfiles": [], + "external_dep_edges": [], + } + self._run(input_data=input_data) + with open(self._spdx_path) as f: + spdx = json.load(f) + names = [p["name"] for p in spdx["packages"]] + self.assertNotIn("cc_toolchain", names) + + # ----------------------------------------------------------------------- + # Auto-detected component version + # ----------------------------------------------------------------------- + + def test_auto_detect_component_version(self): + """component_version is inferred from metadata.modules when absent from config.""" + config = { + "component_name": "my_app", + "producer_name": "Eclipse Foundation", + "namespace": "https://eclipse.dev/score", + } + input_data = {**self._DEFAULT_INPUT, "config": config, "external_repos": []} + metadata = { + "modules": { + "my_app": {"version": "2.5.0", "purl": "pkg:generic/my_app@2.5.0"} + } + } + self._run(input_data=input_data, metadata=metadata) + with open(self._spdx_path) as f: + spdx = json.load(f) + root_pkg = next( + p for p in spdx["packages"] if p.get("SPDXID") == "SPDXRef-RootPackage" + ) + self.assertEqual(root_pkg["versionInfo"], "2.5.0") + + # ----------------------------------------------------------------------- + # dep_module_files + # ----------------------------------------------------------------------- + + def test_dep_module_files_version_in_output(self): + """Versions parsed from dep_module_files appear in the SPDX packages.""" + module_bazel = os.path.join(self.tmpdir, "dep_MODULE.bazel") + with open(module_bazel, "w") as f: + f.write('module(name = "zlib", version = "1.3.1")\n') + input_data = { + "external_repos": ["zlib"], + "exclude_patterns": [], + "config": self._DEFAULT_CONFIG, + "dep_module_files": [module_bazel], + "module_lockfiles": [], + "external_dep_edges": [], + } + self._run(input_data=input_data, metadata={}) + with open(self._spdx_path) as f: + spdx = json.load(f) + zlib_pkg = next((p for p in spdx["packages"] if p.get("name") == "zlib"), None) + self.assertIsNotNone(zlib_pkg) + self.assertEqual(zlib_pkg.get("versionInfo"), "1.3.1") + + # ----------------------------------------------------------------------- + # module_lockfiles + # ----------------------------------------------------------------------- + + def test_module_lockfiles_version_in_output(self): + """Versions extracted from MODULE.bazel.lock appear in SPDX packages.""" + lockfile = os.path.join(self.tmpdir, "MODULE.bazel.lock") + lock_data = { + "registryFileHashes": { + "https://bcr.bazel.build/modules/zlib/1.3.1/MODULE.bazel": "sha256:abc" + } + } + with open(lockfile, "w") as f: + json.dump(lock_data, f) + input_data = { + "external_repos": ["zlib"], + "exclude_patterns": [], + "config": self._DEFAULT_CONFIG, + "dep_module_files": [], + "module_lockfiles": [lockfile], + "external_dep_edges": [], + } + self._run(input_data=input_data, metadata={}) + with open(self._spdx_path) as f: + spdx = json.load(f) + zlib_pkg = next((p for p in spdx["packages"] if p.get("name") == "zlib"), None) + self.assertIsNotNone(zlib_pkg) + self.assertEqual(zlib_pkg.get("versionInfo"), "1.3.1") + + # ----------------------------------------------------------------------- + # --crates-cache + # ----------------------------------------------------------------------- + + def test_crates_cache_enriches_crate(self): + """--crates-cache provides license and version data for resolved crate repos.""" + cache = { + "serde": { + "version": "1.0.228", + "purl": "pkg:cargo/serde@1.0.228", + "license": "MIT OR Apache-2.0", + "description": "A serialization framework", + } + } + cache_path = os.path.join(self.tmpdir, "crates_cache.json") + with open(cache_path, "w") as f: + json.dump(cache, f) + repo = "rules_rust++crate+crate_index__serde-1.0.228" + input_data = {**self._DEFAULT_INPUT, "external_repos": [repo]} + self._run( + input_data=input_data, + metadata={}, + extra_args=["--crates-cache", cache_path], + ) + with open(self._spdx_path) as f: + spdx = json.load(f) + serde_pkg = next( + (p for p in spdx["packages"] if p.get("name") == "serde"), None + ) + self.assertIsNotNone(serde_pkg) + self.assertEqual(serde_pkg.get("versionInfo"), "1.0.228") + + # ----------------------------------------------------------------------- + # --cdxgen-sbom + # ----------------------------------------------------------------------- + + def test_cdxgen_sbom_enriches_cpp_description(self): + """--cdxgen-sbom fills in description for C++ components from cdxgen data.""" + cdxgen = { + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "components": [ + { + "name": "nlohmann_json", + "version": "3.11.3", + "purl": "pkg:generic/nlohmann_json@3.11.3", + "licenses": [{"license": {"id": "MIT"}}], + "description": "JSON for Modern C++", + } + ], + } + cdxgen_path = os.path.join(self.tmpdir, "cdxgen.cdx.json") + with open(cdxgen_path, "w") as f: + json.dump(cdxgen, f) + metadata = { + "modules": { + "nlohmann_json": { + "version": "3.11.3", + "purl": "pkg:generic/nlohmann_json@3.11.3", + "license": "MIT", + } + } + } + self._run(metadata=metadata, extra_args=["--cdxgen-sbom", cdxgen_path]) + with open(self._cdx_path) as f: + cdx = json.load(f) + comp = next( + (c for c in cdx["components"] if c.get("name") == "nlohmann_json"), None + ) + self.assertIsNotNone(comp) + self.assertEqual(comp.get("description"), "JSON for Modern C++") + + # ----------------------------------------------------------------------- + # Output file selection + # ----------------------------------------------------------------------- + + def test_only_spdx_output_does_not_create_cdx(self): + """Passing only --spdx-output must not create a CycloneDX file.""" + self._write_files() + argv = [ + "sbom_generator.py", + "--input", + self._input_path, + "--metadata", + self._metadata_path, + "--spdx-output", + self._spdx_path, + ] + with unittest.mock.patch("sys.argv", argv): + rc = main() + self.assertEqual(rc, 0) + self.assertTrue(os.path.exists(self._spdx_path)) + self.assertFalse(os.path.exists(self._cdx_path)) + + def test_only_cdx_output_does_not_create_spdx(self): + """Passing only --cyclonedx-output must not create an SPDX file.""" + self._write_files() + argv = [ + "sbom_generator.py", + "--input", + self._input_path, + "--metadata", + self._metadata_path, + "--cyclonedx-output", + self._cdx_path, + ] + with unittest.mock.patch("sys.argv", argv): + rc = main() + self.assertEqual(rc, 0) + self.assertTrue(os.path.exists(self._cdx_path)) + self.assertFalse(os.path.exists(self._spdx_path)) diff --git a/sbom/tests/test_spdx_formatter.py b/sbom/tests/test_spdx_formatter.py index 1df7dba..fe50ff3 100644 --- a/sbom/tests/test_spdx_formatter.py +++ b/sbom/tests/test_spdx_formatter.py @@ -1,4 +1,41 @@ -"""Tests for SPDX 2.3 formatter.""" +"""Tests for the SPDX 2.3 JSON formatter. + +What this file tests +--------------------- +Document structure + - spdxVersion = "SPDX-2.3", dataLicense = "CC0-1.0", + SPDXID = "SPDXRef-DOCUMENT". + - creationInfo: created timestamp, creators list contains + "Organization: " and "Tool: score-sbom-generator". + - documentNamespace is present. + +Package representation + - One root package + one package per component. + - PURL emitted as externalRef with + referenceCategory = "PACKAGE-MANAGER", referenceType = "purl". + - SHA-256 checksum emitted in checksums[] when provided. + - checksums field absent when no checksum is available. + +Relationships + - DESCRIBES: SPDXRef-DOCUMENT → root package (exactly one). + - DEPENDS_ON: root package → each component (one per component). + +LicenseRef-* declarations + - hasExtractedLicensingInfos is populated for every LicenseRef-* identifier + that appears in licenseConcluded or licenseDeclared. + - Each entry carries licenseId and extractedText. + +_normalize_spdx_license() unit tests + - or → OR, and → AND, with → WITH. + - Already-uppercase expressions unchanged. + - GPL-2.0-or-later unchanged (hyphen-delimited "or" must not be uppercased). + - Mixed compound expressions normalised correctly. + - End-to-end: lowercase "or" in component input → uppercase in SPDX output. + +Bazel target : //sbom/tests:test_spdx_formatter +Run : bazel test //sbom/tests:test_spdx_formatter + pytest sbom/tests/test_spdx_formatter.py -v +""" import unittest from datetime import datetime, timezone diff --git a/sbom/tests/test_spdx_to_github_snapshot.py b/sbom/tests/test_spdx_to_github_snapshot.py index 512e2dc..87f9831 100644 --- a/sbom/tests/test_spdx_to_github_snapshot.py +++ b/sbom/tests/test_spdx_to_github_snapshot.py @@ -1,4 +1,36 @@ -"""Tests for SPDX 2.3 → GitHub Dependency Submission snapshot conversion.""" +"""Tests for SPDX 2.3 → GitHub Dependency Submission snapshot conversion. + +What this file tests +--------------------- +Top-level snapshot fields + - version = 0, sha, ref, job, detector, scanned, manifests all present. + - detector.name = "score-sbom-generator"; version and url also present. + - job.correlator and job.id match what was passed. + +Package filtering + - Packages without a PURL are excluded from the resolved map. + - The root package (DESCRIBES target) is excluded from resolved. + +Direct vs. indirect dependency classification + - Package reached via root DEPENDS_ON → "direct". + - Package reached via a non-root DEPENDS_ON → "indirect". + - Misclassification is silent in the output, making this test critical: + GitHub Dependabot uses the relationship field to scope alerts. + +Package URL preservation + - package_url in the snapshot entry equals the PURL from the SPDX package. + +Manifest naming and structure + - The manifest key is the SPDX document name. + - Empty SPDX document produces an empty resolved dict. + +pkg:generic/ PURLs (BCR C++ modules) + - pkg:generic/ PURLs are accepted and included in the resolved map. + +Bazel target : //sbom/tests:test_spdx_to_github_snapshot +Run : bazel test //sbom/tests:test_spdx_to_github_snapshot + pytest sbom/tests/test_spdx_to_github_snapshot.py -v +""" import unittest