From 59885fa0776c9bcc2417f0fa6883f4088fccc2a5 Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 10:13:49 +0100 Subject: [PATCH 1/8] Use distribution platform as supplier instead of package author This refactors the enrichment module to set the supplier field to the distribution platform (PyPI, npm, crates.io, etc.) rather than the package author/maintainer. This better reflects NTIA semantics where "supplier" means the entity distributing the software. Changes: - Add PURL_TYPE_TO_SUPPLIER mapping in purl.py with 17 platform entries - Add get_supplier_for_purl() helper for unified supplier resolution - Update all enrichment sources to use centralized mapping: - pypi.py: "Python Package Index (PyPI)" - cratesio.py: "crates.io" - pubdev.py: "pub.dev" - conan.py: "Conan Center" - depsdev.py: Uses mapping based on PURL type - ecosystems.py: Uses mapping based on PURL type - Author info preserved in maintainer_name field (maps to SPDX originator) - Update tests to reflect new supplier behavior Co-Authored-By: Claude Opus 4.5 --- sbomify_action/_enrichment/sources/conan.py | 13 +++-- .../_enrichment/sources/cratesio.py | 9 ++-- sbomify_action/_enrichment/sources/depsdev.py | 16 ++++-- .../_enrichment/sources/ecosystems.py | 24 +++------ sbomify_action/_enrichment/sources/pubdev.py | 18 +++---- sbomify_action/_enrichment/sources/purl.py | 52 +++++++++++++++++++ sbomify_action/_enrichment/sources/pypi.py | 7 +-- tests/test_conan_source.py | 7 ++- tests/test_cratesio_source.py | 9 ++-- tests/test_enrichment_module.py | 36 +++++++++---- 10 files changed, 132 insertions(+), 59 deletions(-) diff --git a/sbomify_action/_enrichment/sources/conan.py b/sbomify_action/_enrichment/sources/conan.py index 44c96ba..dc32325 100644 --- a/sbomify_action/_enrichment/sources/conan.py +++ b/sbomify_action/_enrichment/sources/conan.py @@ -13,6 +13,7 @@ from ..metadata import NormalizedMetadata from ..sanitization import normalize_vcs_url +from .purl import PURL_TYPE_TO_SUPPLIER # Simple in-memory cache _cache: Dict[str, Optional[NormalizedMetadata]] = {} @@ -227,15 +228,17 @@ def _extract_metadata_from_graph(self, package_name: str, graph: Any) -> Optiona if repository_url: field_sources["repository_url"] = self.name - # Use author as supplier if available - supplier = author if author else None - if supplier: - field_sources["supplier"] = self.name + # Supplier is always the distribution platform + field_sources["supplier"] = self.name + + # Preserve author info as maintainer_name + maintainer_name = author if author else None metadata = NormalizedMetadata( description=description, licenses=licenses, - supplier=supplier, + supplier=PURL_TYPE_TO_SUPPLIER["conan"], + maintainer_name=maintainer_name, homepage=homepage, repository_url=repository_url, registry_url=f"https://conan.io/center/recipes/{package_name}", diff --git a/sbomify_action/_enrichment/sources/cratesio.py b/sbomify_action/_enrichment/sources/cratesio.py index 690efb3..6ce1972 100644 --- a/sbomify_action/_enrichment/sources/cratesio.py +++ b/sbomify_action/_enrichment/sources/cratesio.py @@ -11,6 +11,7 @@ from ..license_utils import normalize_license_list from ..metadata import NormalizedMetadata from ..sanitization import normalize_vcs_url +from .purl import PURL_TYPE_TO_SUPPLIER CRATESIO_API_BASE = "https://crates.io/api/v1/crates" DEFAULT_TIMEOUT = 10 # seconds @@ -161,8 +162,8 @@ def _normalize_response( field_sources["description"] = self.name if licenses: field_sources["licenses"] = self.name - if maintainer_name: - 
field_sources["supplier"] = self.name + # Supplier is always the distribution platform + field_sources["supplier"] = self.name if homepage: field_sources["homepage"] = self.name if repository_url: @@ -174,9 +175,7 @@ def _normalize_response( description=description, licenses=licenses, license_texts=license_texts, - # supplier is the NTIA-required field; maintainer_name provides additional detail. - # For crates.io, the publisher (published_by) serves as both. - supplier=maintainer_name, + supplier=PURL_TYPE_TO_SUPPLIER["cargo"], homepage=homepage, repository_url=repository_url, documentation_url=documentation, diff --git a/sbomify_action/_enrichment/sources/depsdev.py b/sbomify_action/_enrichment/sources/depsdev.py index ac6c12e..1e2decb 100644 --- a/sbomify_action/_enrichment/sources/depsdev.py +++ b/sbomify_action/_enrichment/sources/depsdev.py @@ -12,6 +12,7 @@ from ..metadata import NormalizedMetadata from ..sanitization import normalize_vcs_url from ..utils import get_qualified_name +from .purl import PURL_TYPE_TO_SUPPLIER DEPSDEV_API_BASE = "https://api.deps.dev/v3" DEFAULT_TIMEOUT = 10 # seconds - deps.dev is generally fast @@ -108,7 +109,7 @@ def fetch(self, purl: PackageURL, session: requests.Session) -> Optional[Normali metadata = None if response.status_code == 200: data = response.json() - metadata = self._normalize_response(purl.name, data) + metadata = self._normalize_response(purl.name, purl.type, data) elif response.status_code == 404: logger.debug(f"Package not found in deps.dev: {purl}") else: @@ -131,12 +132,15 @@ def fetch(self, purl: PackageURL, session: requests.Session) -> Optional[Normali _cache[cache_key] = None return None - def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Optional[NormalizedMetadata]: + def _normalize_response( + self, package_name: str, purl_type: str, data: Dict[str, Any] + ) -> Optional[NormalizedMetadata]: """ Normalize deps.dev API response to NormalizedMetadata. 
Args: package_name: Name of the package + purl_type: PURL type (e.g., "pypi", "npm", "cargo") data: Raw deps.dev API response Returns: @@ -179,17 +183,23 @@ def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Option if repository_url: repository_url = normalize_vcs_url(repository_url) + # Get supplier from PURL type mapping + supplier = PURL_TYPE_TO_SUPPLIER.get(purl_type) + # Build field_sources for attribution - field_sources = {} + field_sources: dict[str, str] = {} if licenses: field_sources["licenses"] = self.name if homepage: field_sources["homepage"] = self.name if repository_url: field_sources["repository_url"] = self.name + if supplier: + field_sources["supplier"] = self.name metadata = NormalizedMetadata( licenses=licenses, + supplier=supplier, homepage=homepage, repository_url=repository_url, source=self.name, diff --git a/sbomify_action/_enrichment/sources/ecosystems.py b/sbomify_action/_enrichment/sources/ecosystems.py index af8887a..1a566bb 100644 --- a/sbomify_action/_enrichment/sources/ecosystems.py +++ b/sbomify_action/_enrichment/sources/ecosystems.py @@ -11,6 +11,7 @@ from ..metadata import NormalizedMetadata from ..sanitization import normalize_vcs_url from ..utils import purl_to_string +from .purl import PURL_TYPE_TO_SUPPLIER ECOSYSTEMS_API_BASE = "https://packages.ecosyste.ms/api/v1" DEFAULT_TIMEOUT = 15 # seconds - ecosyste.ms can be slower @@ -87,9 +88,9 @@ def fetch(self, purl: PackageURL, session: requests.Session) -> Optional[Normali data = response.json() # API returns an array, take first result if isinstance(data, list) and len(data) > 0: - metadata = self._normalize_response(data[0]) + metadata = self._normalize_response(purl.type, data[0]) elif isinstance(data, dict): - metadata = self._normalize_response(data) + metadata = self._normalize_response(purl.type, data) else: logger.debug(f"No package data found in ecosyste.ms for: {purl_str}") elif response.status_code == 404: @@ -119,11 +120,12 @@ def fetch(self, purl: PackageURL, session: requests.Session) -> Optional[Normali _cache[cache_key] = None return None - def _normalize_response(self, data: Dict[str, Any]) -> Optional[NormalizedMetadata]: + def _normalize_response(self, purl_type: str, data: Dict[str, Any]) -> Optional[NormalizedMetadata]: """ Normalize ecosyste.ms API response to NormalizedMetadata. Args: + purl_type: PURL type (e.g., "pypi", "npm", "cargo") data: Raw ecosyste.ms API response Returns: @@ -151,20 +153,8 @@ def _normalize_response(self, data: Dict[str, Any]) -> Optional[NormalizedMetada maintainer_name = first_maintainer.get("name") or first_maintainer.get("login") maintainer_email = first_maintainer.get("email") - # Extract supplier from maintainer or repo owner - # NEVER use ecosystem name as supplier - "pypi", "npm", etc. 
are platforms, not suppliers - supplier = None - # Priority 1: Maintainer name or login (already extracted above) - if maintainer_name: - supplier = maintainer_name - # Priority 2: Repo owner name or login - elif data.get("repo_metadata") and data["repo_metadata"].get("owner"): - owner = data["repo_metadata"]["owner"] - if isinstance(owner, dict): - supplier = owner.get("name") or owner.get("login") - elif isinstance(owner, str): - supplier = owner - # Do NOT fall back to data["ecosystem"] - it's just the platform name + # Supplier is the distribution platform based on PURL type + supplier = PURL_TYPE_TO_SUPPLIER.get(purl_type) # Extract issue tracker URL from repo metadata issue_tracker_url = None diff --git a/sbomify_action/_enrichment/sources/pubdev.py b/sbomify_action/_enrichment/sources/pubdev.py index e32a356..693c7eb 100644 --- a/sbomify_action/_enrichment/sources/pubdev.py +++ b/sbomify_action/_enrichment/sources/pubdev.py @@ -12,6 +12,7 @@ from ..metadata import NormalizedMetadata from ..sanitization import normalize_vcs_url from ..utils import parse_author_string +from .purl import PURL_TYPE_TO_SUPPLIER PUBDEV_API_BASE = "https://pub.dev/api/packages" DEFAULT_TIMEOUT = 10 # seconds - pub.dev is generally fast @@ -134,8 +135,7 @@ def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Option documentation_url = pubspec.get("documentation") issue_tracker_url = pubspec.get("issue_tracker") - # Extract publisher/author info using shared utility - supplier = None + # Extract author info for maintainer_name field maintainer_name = None maintainer_email = None @@ -143,17 +143,15 @@ def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Option authors = pubspec.get("authors") if authors and isinstance(authors, list) and len(authors) > 0: maintainer_name, maintainer_email = parse_author_string(authors[0]) - supplier = maintainer_name elif pubspec.get("author"): maintainer_name, maintainer_email = parse_author_string(pubspec["author"]) - supplier = maintainer_name # Check for publisher in the top-level response (newer pub.dev API) - # Publisher takes precedence over author for supplier + # Use publisher ID as maintainer_name if available if data.get("publisher"): publisher_id = data["publisher"].get("publisherId") - if publisher_id: - supplier = publisher_id + if publisher_id and not maintainer_name: + maintainer_name = publisher_id logger.debug(f"Successfully fetched pub.dev metadata for: {package_name}") @@ -163,8 +161,8 @@ def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Option field_sources["description"] = self.name if licenses: field_sources["licenses"] = self.name - if supplier: - field_sources["supplier"] = self.name + # Supplier is always the distribution platform + field_sources["supplier"] = self.name if homepage: field_sources["homepage"] = self.name if repository_url: @@ -178,7 +176,7 @@ def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Option description=description, licenses=licenses, license_texts=license_texts, - supplier=supplier, + supplier=PURL_TYPE_TO_SUPPLIER["pub"], homepage=homepage, repository_url=repository_url, documentation_url=documentation_url, diff --git a/sbomify_action/_enrichment/sources/purl.py b/sbomify_action/_enrichment/sources/purl.py index e5ba2cb..4ac397d 100644 --- a/sbomify_action/_enrichment/sources/purl.py +++ b/sbomify_action/_enrichment/sources/purl.py @@ -49,6 +49,58 @@ "chainguard": "Chainguard, Inc.", } +# Mapping of PURL type to distribution platform 
supplier name +# For language packages, the distribution platform (not the author) is the supplier +PURL_TYPE_TO_SUPPLIER: dict[str, str] = { + # Language package registries + "pypi": "Python Package Index (PyPI)", + "npm": "npm", + "cargo": "crates.io", + "maven": "Maven Central", + "gem": "RubyGems.org", + "nuget": "NuGet Gallery", + "golang": "Go Modules", + "pub": "pub.dev", + "conan": "Conan Center", + "composer": "Packagist", + "hex": "Hex.pm", + "cocoapods": "CocoaPods", + "conda": "Anaconda", + "hackage": "Hackage", + "swift": "Swift Package Registry", + # Container registries + "docker": "Docker Hub", + "oci": "OCI Registry", +} + + +def get_supplier_for_purl(purl: PackageURL) -> str | None: + """Get the appropriate supplier for a PURL. + + For OS packages (deb, rpm, apk), uses NAMESPACE_TO_SUPPLIER based on the + distribution namespace (e.g., debian, ubuntu, alpine). + + For language packages (pypi, npm, cargo, etc.), uses PURL_TYPE_TO_SUPPLIER + to return the distribution platform as the supplier. + + Args: + purl: Parsed PackageURL + + Returns: + Supplier name or None if not found + """ + # OS packages use namespace-based supplier (distribution name) + if purl.type in OS_PACKAGE_TYPES and purl.namespace: + supplier = NAMESPACE_TO_SUPPLIER.get(purl.namespace.lower()) + if supplier: + return supplier + # Fallback for unknown namespaces + return f"{purl.namespace.title()} Project" + + # Language packages use type-based supplier (platform name) + return PURL_TYPE_TO_SUPPLIER.get(purl.type) + + # Mapping of PURL type/namespace to package tracker URL templates PACKAGE_TRACKER_URLS: Dict[str, Dict[str, str]] = { "deb": { diff --git a/sbomify_action/_enrichment/sources/pypi.py b/sbomify_action/_enrichment/sources/pypi.py index 242bb45..ae0021b 100644 --- a/sbomify_action/_enrichment/sources/pypi.py +++ b/sbomify_action/_enrichment/sources/pypi.py @@ -12,6 +12,7 @@ from ..metadata import NormalizedMetadata from ..sanitization import normalize_vcs_url from ..utils import parse_author_string +from .purl import PURL_TYPE_TO_SUPPLIER PYPI_API_BASE = "https://pypi.org/pypi" DEFAULT_TIMEOUT = 10 # seconds - PyPI is fast @@ -169,8 +170,8 @@ def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Normal field_sources["description"] = self.name if licenses: field_sources["licenses"] = self.name - if maintainer_name: - field_sources["supplier"] = self.name + # Supplier is always the distribution platform + field_sources["supplier"] = self.name if homepage: field_sources["homepage"] = self.name if repository_url: @@ -184,7 +185,7 @@ def _normalize_response(self, package_name: str, data: Dict[str, Any]) -> Normal description=info.get("summary"), licenses=licenses, license_texts=license_texts, - supplier=maintainer_name, # Use author/maintainer as supplier + supplier=PURL_TYPE_TO_SUPPLIER["pypi"], homepage=homepage, repository_url=repository_url, documentation_url=documentation_url, diff --git a/tests/test_conan_source.py b/tests/test_conan_source.py index 899d63c..ce1dd0d 100644 --- a/tests/test_conan_source.py +++ b/tests/test_conan_source.py @@ -191,7 +191,7 @@ def test_fetch_package_not_found(self, mock_session): assert metadata is None def test_fetch_with_author(self, mock_session): - """Test that author is used as supplier.""" + """Test that author is preserved as maintainer_name.""" source = ConanSource() purl = PackageURL.from_string("pkg:conan/testpkg@1.0.0") @@ -221,7 +221,10 @@ def test_fetch_with_author(self, mock_session): metadata = source.fetch(purl, mock_session) 
assert metadata is not None - assert metadata.supplier == "Test Author" + # Supplier is always the distribution platform + assert metadata.supplier == "Conan Center" + # Author is preserved as maintainer_name + assert metadata.maintainer_name == "Test Author" class TestConanSourceCaching: diff --git a/tests/test_cratesio_source.py b/tests/test_cratesio_source.py index 92e69d6..d43b23c 100644 --- a/tests/test_cratesio_source.py +++ b/tests/test_cratesio_source.py @@ -111,7 +111,7 @@ def test_fetch_success_with_version(self, mock_session): assert len(metadata.licenses) == 1 assert "MIT" in metadata.licenses[0] assert "Apache-2.0" in metadata.licenses[0] - assert metadata.supplier == "David Tolnay" + assert metadata.supplier == "crates.io" assert metadata.maintainer_name == "David Tolnay" assert metadata.homepage == "https://serde.rs" assert metadata.documentation_url == "https://docs.rs/serde" @@ -152,8 +152,8 @@ def test_fetch_success_without_version(self, mock_session): assert "github.com/tokio-rs/tokio" in metadata.repository_url # No license without version-specific endpoint assert metadata.licenses == [] - # No published_by without version-specific endpoint - assert metadata.supplier is None + # Supplier is always the distribution platform + assert metadata.supplier == "crates.io" # Verify API was called with crate URL (no version) mock_session.get.assert_called_once() @@ -452,7 +452,8 @@ def test_field_sources_partial(self, mock_session): assert metadata is not None assert metadata.field_sources.get("description") == "crates.io" assert "licenses" not in metadata.field_sources - assert "supplier" not in metadata.field_sources + # Supplier is always present (distribution platform) + assert metadata.field_sources.get("supplier") == "crates.io" assert "homepage" not in metadata.field_sources diff --git a/tests/test_enrichment_module.py b/tests/test_enrichment_module.py index 079c20f..612ce56 100644 --- a/tests/test_enrichment_module.py +++ b/tests/test_enrichment_module.py @@ -281,7 +281,9 @@ def test_fetch_success(self, mock_session): assert metadata.description == "A high-level Python web framework" assert metadata.homepage == "https://www.djangoproject.com/" assert "BSD-3-Clause" in metadata.licenses - assert metadata.supplier == "Django Software Foundation" + # Supplier is the distribution platform, not the author + assert metadata.supplier == "Python Package Index (PyPI)" + assert metadata.maintainer_name == "Django Software Foundation" assert metadata.repository_url == "git+https://github.com/django/django" def test_fetch_not_found(self, mock_session): @@ -335,8 +337,11 @@ def test_fetch_author_from_email_field(self, mock_session): metadata = source.fetch(purl, mock_session) assert metadata is not None - assert metadata.supplier == "Peter Linss", ( - f"Expected 'Peter Linss' extracted from author_email, got: {metadata.supplier}" + # Supplier is always the distribution platform + assert metadata.supplier == "Python Package Index (PyPI)" + # Author name extracted from email field is preserved in maintainer_name + assert metadata.maintainer_name == "Peter Linss", ( + f"Expected 'Peter Linss' extracted from author_email, got: {metadata.maintainer_name}" ) def test_fetch_author_from_maintainer_email_field(self, mock_session): @@ -361,8 +366,11 @@ def test_fetch_author_from_maintainer_email_field(self, mock_session): metadata = source.fetch(purl, mock_session) assert metadata is not None - assert metadata.supplier == "Jane Doe", ( - f"Expected 'Jane Doe' extracted from maintainer_email, 
got: {metadata.supplier}" + # Supplier is always the distribution platform + assert metadata.supplier == "Python Package Index (PyPI)" + # Author name extracted from maintainer_email is preserved in maintainer_name + assert metadata.maintainer_name == "Jane Doe", ( + f"Expected 'Jane Doe' extracted from maintainer_email, got: {metadata.maintainer_name}" ) def test_fetch_prefers_direct_author_over_email(self, mock_session): @@ -385,8 +393,11 @@ def test_fetch_prefers_direct_author_over_email(self, mock_session): metadata = source.fetch(purl, mock_session) assert metadata is not None - assert metadata.supplier == "Direct Author", ( - f"Expected 'Direct Author' from author field, got: {metadata.supplier}" + # Supplier is always the distribution platform + assert metadata.supplier == "Python Package Index (PyPI)" + # Direct author is preferred for maintainer_name + assert metadata.maintainer_name == "Direct Author", ( + f"Expected 'Direct Author' from author field, got: {metadata.maintainer_name}" ) @@ -455,7 +466,10 @@ def test_fetch_success(self, mock_session): assert metadata.homepage == "https://github.com/dart-lang/http" assert metadata.repository_url == "git+https://github.com/dart-lang/http" assert metadata.issue_tracker_url == "https://github.com/dart-lang/http/issues" - assert metadata.supplier == "dart.dev" + # Supplier is the distribution platform + assert metadata.supplier == "pub.dev" + # Publisher ID is preserved in maintainer_name + assert metadata.maintainer_name == "dart.dev" assert metadata.registry_url == "https://pub.dev/packages/http" assert metadata.source == "pub.dev" @@ -484,7 +498,8 @@ def test_fetch_with_author(self, mock_session): assert metadata is not None assert metadata.maintainer_name == "John Doe" assert metadata.maintainer_email == "john@example.com" - assert metadata.supplier == "John Doe" + # Supplier is always the distribution platform + assert metadata.supplier == "pub.dev" def test_fetch_with_authors_list(self, mock_session): """Test metadata fetch with authors list field.""" @@ -1025,7 +1040,8 @@ def test_enrich_cyclonedx_sbom(self, tmp_path): result = json.load(f) assert result["components"][0]["description"] == "Django web framework" - assert result["components"][0]["publisher"] == "Django Software Foundation" + # Publisher is the distribution platform + assert result["components"][0]["publisher"] == "Python Package Index (PyPI)" def test_enrich_spdx_sbom(self, tmp_path): """Test enriching an SPDX SBOM end-to-end.""" From f14bd0ba30ba4383c6e3223799b33cc848148aec Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 10:20:13 +0100 Subject: [PATCH 2/8] Update NTIA compliance tests for platform-as-supplier change Update test assertions to expect distribution platform as supplier: - test_pypi_author_email_without_author: expects "Python Package Index (PyPI)" - test_ecosystems_uses_platform_as_supplier: renamed from test_ecosystems_does_not_use_platform_as_supplier Co-Authored-By: Claude Opus 4.5 --- tests/test_ntia_compliance.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/tests/test_ntia_compliance.py b/tests/test_ntia_compliance.py index 5b20470..fe424e8 100644 --- a/tests/test_ntia_compliance.py +++ b/tests/test_ntia_compliance.py @@ -1708,10 +1708,10 @@ def mock_get(url, *args, **kwargs): with open(output_file) as f: enriched_data = json.load(f) - # Verify the component got supplier from author_email + # Verify the component got supplier as distribution platform component = 
enriched_data["components"][0] - assert component.get("publisher") == "Test Author", ( - f"Expected publisher 'Test Author' from author_email, got: {component.get('publisher')}" + assert component.get("publisher") == "Python Package Index (PyPI)", ( + f"Expected publisher 'Python Package Index (PyPI)', got: {component.get('publisher')}" ) def test_lockfile_components_have_version(self, tmp_path): @@ -1892,26 +1892,27 @@ def test_self_referencing_component_gets_supplier(self, tmp_path): f"Self-referencing component should inherit publisher from root. Got: {self_component.get('publisher')}" ) - def test_ecosystems_does_not_use_platform_as_supplier(self, tmp_path): - """Test that ecosyste.ms doesn't use platform name (pypi, npm) as supplier. + def test_ecosystems_uses_platform_as_supplier(self, tmp_path): + """Test that ecosyste.ms uses distribution platform as supplier. - Registry/platform names are not valid suppliers - they're distribution channels. + The distribution platform (PyPI, npm, etc.) is the supplier, not the + individual package author/maintainer. """ import requests from packageurl import PackageURL from sbomify_action._enrichment.sources.ecosystems import EcosystemsSource - # Create mock response with ecosystem but no maintainer name + # Create mock response with ecosystem and maintainer mock_response = Mock() mock_response.status_code = 200 mock_response.json.return_value = [ { - "ecosystem": "pypi", # Should NOT be used as supplier + "ecosystem": "pypi", "description": "Test package", "normalized_licenses": ["MIT"], "maintainers": [ - {"login": "testuser", "name": None} # No name, only login + {"login": "testuser", "name": None} # Maintainer info preserved in maintainer_name ], } ] @@ -1923,7 +1924,10 @@ def test_ecosystems_does_not_use_platform_as_supplier(self, tmp_path): purl = PackageURL.from_string("pkg:pypi/test-package@1.0.0") metadata = source.fetch(purl, session) - # Supplier should be the maintainer login, NOT "pypi" + # Supplier should be the distribution platform assert metadata is not None - assert metadata.supplier != "pypi", "Should not use ecosystem name as supplier" - assert metadata.supplier == "testuser", f"Should use maintainer login as supplier. Got: {metadata.supplier}" + assert metadata.supplier == "Python Package Index (PyPI)", ( + f"Should use platform as supplier. Got: {metadata.supplier}" + ) + # Maintainer info is preserved separately + assert metadata.maintainer_name == "testuser" From eda54b1d959d15a1a3e72e2a01469fdff1a1ea19 Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 10:37:20 +0100 Subject: [PATCH 3/8] Fix test isolation from local sbomify.json file Tests that mock the sbomify API were being affected by the local sbomify.json file loaded by the JsonConfigProvider. Add patches to disable the JsonConfigProvider in these tests to ensure proper isolation. 
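The isolation pattern, as used in the test diffs below (test name here
is illustrative):

    @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file")
    @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get")
    def test_example(self, mock_get, mock_find_config):
        mock_find_config.return_value = None  # disable json-config provider
        ...
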
Affected tests: - test_augmentation_module.py: 6 tests - test_container_sbom_ntia_compliance.py: 1 test - test_schema_compliance.py: 2 tests Co-Authored-By: Claude Opus 4.5 --- tests/test_augmentation_module.py | 36 ++++++++++++++++---- tests/test_container_sbom_ntia_compliance.py | 11 +++++- tests/test_schema_compliance.py | 26 +++++++++++--- 3 files changed, 62 insertions(+), 11 deletions(-) diff --git a/tests/test_augmentation_module.py b/tests/test_augmentation_module.py index d7ed0af..8ec41da 100644 --- a/tests/test_augmentation_module.py +++ b/tests/test_augmentation_module.py @@ -151,9 +151,15 @@ def test_component_overrides(self, sample_cyclonedx_bom, sample_backend_metadata assert enriched_bom.metadata.component.name == "overridden-name" assert enriched_bom.metadata.component.version == "2.0.0" + @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file") @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get") - def test_fetch_augmentation_metadata(self, mock_get, sample_backend_metadata_with_mixed_licenses): + def test_fetch_augmentation_metadata( + self, mock_get, mock_find_config, sample_backend_metadata_with_mixed_licenses + ): """Test fetching metadata from providers (sbomify API).""" + # Disable json-config provider to isolate sbomify API test + mock_find_config.return_value = None + # Setup mock mock_response = Mock() mock_response.ok = True @@ -172,11 +178,15 @@ def test_fetch_augmentation_metadata(self, mock_get, sample_backend_metadata_wit assert result["supplier"] == sample_backend_metadata_with_mixed_licenses["supplier"] assert result["authors"] == sample_backend_metadata_with_mixed_licenses["authors"] + @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file") @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get") def test_augment_sbom_from_file_cyclonedx( - self, mock_get, sample_cyclonedx_bom, sample_backend_metadata_with_mixed_licenses + self, mock_get, mock_find_config, sample_cyclonedx_bom, sample_backend_metadata_with_mixed_licenses ): """Test augmenting SBOM from file (CycloneDX).""" + # Disable json-config provider to isolate sbomify API test + mock_find_config.return_value = None + # Setup mock mock_response = Mock() mock_response.ok = True @@ -391,9 +401,13 @@ def test_spdx_component_overrides(self, spdx_document): assert enriched_doc.packages[0].name == "overridden-spdx-name" assert enriched_doc.packages[0].version == "2.0.0-spdx" + @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file") @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get") - def test_augment_sbom_from_file_spdx(self, mock_get, spdx_document): + def test_augment_sbom_from_file_spdx(self, mock_get, mock_find_config, spdx_document): """Test augmenting SPDX SBOM from file.""" + # Disable json-config provider to isolate sbomify API test + mock_find_config.return_value = None + backend_data = { "supplier": {"name": "SPDX Supplier"}, "authors": [{"name": "SPDX Author"}], @@ -1259,12 +1273,15 @@ def test_invalid_json_error(self, mock_get): assert "Invalid JSON in SBOM file" in str(exc_info.value) + @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file") @patch.dict(os.environ, {}, clear=True) @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get") - def test_api_connection_error(self, mock_get): + def test_api_connection_error(self, mock_get, mock_find_config): 
"""Test handling of API connection errors (provider returns None, not exception).""" import requests + # Disable json-config provider to isolate API error test + mock_find_config.return_value = None mock_get.side_effect = requests.exceptions.ConnectionError("Connection failed") # With the provider architecture, API errors are caught and logged, @@ -1278,12 +1295,15 @@ def test_api_connection_error(self, mock_get): # Provider catches the error and returns None, which results in empty dict assert result == {} + @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file") @patch.dict(os.environ, {}, clear=True) @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get") - def test_api_timeout_error(self, mock_get): + def test_api_timeout_error(self, mock_get, mock_find_config): """Test handling of API timeout errors (provider returns None, not exception).""" import requests + # Disable json-config provider to isolate API error test + mock_find_config.return_value = None mock_get.side_effect = requests.exceptions.Timeout("Timeout") # With the provider architecture, API errors are caught and logged @@ -1296,10 +1316,14 @@ def test_api_timeout_error(self, mock_get): # Provider catches the error and returns None, which results in empty dict assert result == {} + @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file") @patch.dict(os.environ, {}, clear=True) @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get") - def test_api_404_error(self, mock_get): + def test_api_404_error(self, mock_get, mock_find_config): """Test handling of API 404 errors (provider returns None, not exception).""" + # Disable json-config provider to isolate API error test + mock_find_config.return_value = None + mock_response = Mock() mock_response.ok = False mock_response.status_code = 404 diff --git a/tests/test_container_sbom_ntia_compliance.py b/tests/test_container_sbom_ntia_compliance.py index bdadcff..273b504 100644 --- a/tests/test_container_sbom_ntia_compliance.py +++ b/tests/test_container_sbom_ntia_compliance.py @@ -269,7 +269,16 @@ def test_augmented_trivy_cyclonedx(self, image, tmp_path, mock_backend_response) mock_api_response.ok = True mock_api_response.json.return_value = mock_backend_response - with patch("sbomify_action._augmentation.providers.sbomify_api.requests.get", return_value=mock_api_response): + with ( + patch( + "sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file", + return_value=None, + ), + patch( + "sbomify_action._augmentation.providers.sbomify_api.requests.get", + return_value=mock_api_response, + ), + ): sbom_format = augment_sbom_from_file( str(sbom_path), str(output_file), diff --git a/tests/test_schema_compliance.py b/tests/test_schema_compliance.py index 27263f1..db653a8 100644 --- a/tests/test_schema_compliance.py +++ b/tests/test_schema_compliance.py @@ -82,12 +82,21 @@ def test_cyclonedx_full_flow_compliance(version, tmp_path): "lifecycle_phase": "build", # CISA 2025 Generation Context } - # Mock the sbomify API provider + # Mock the sbomify API provider and disable json-config provider mock_api_response = Mock() mock_api_response.ok = True mock_api_response.json.return_value = augmentation_data - with patch("sbomify_action._augmentation.providers.sbomify_api.requests.get", return_value=mock_api_response): + with ( + patch( + "sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file", + return_value=None, + 
), + patch( + "sbomify_action._augmentation.providers.sbomify_api.requests.get", + return_value=mock_api_response, + ), + ): augment_sbom_from_file( input_file=str(input_file), output_file=str(augmented_file), @@ -207,12 +216,21 @@ def test_spdx_full_flow_compliance(version, tmp_path): "lifecycle_phase": "build", # CISA 2025 Generation Context } - # Mock the sbomify API provider + # Mock the sbomify API provider and disable json-config provider mock_api_response = Mock() mock_api_response.ok = True mock_api_response.json.return_value = augmentation_data - with patch("sbomify_action._augmentation.providers.sbomify_api.requests.get", return_value=mock_api_response): + with ( + patch( + "sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file", + return_value=None, + ), + patch( + "sbomify_action._augmentation.providers.sbomify_api.requests.get", + return_value=mock_api_response, + ), + ): augment_sbom_from_file( input_file=str(input_file), output_file=str(augmented_file), From 62108927576f6933e8c544fb695388aa643ea161 Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 10:39:31 +0100 Subject: [PATCH 4/8] Linting --- tests/test_augmentation_module.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/test_augmentation_module.py b/tests/test_augmentation_module.py index 8ec41da..59601fc 100644 --- a/tests/test_augmentation_module.py +++ b/tests/test_augmentation_module.py @@ -153,9 +153,7 @@ def test_component_overrides(self, sample_cyclonedx_bom, sample_backend_metadata @patch("sbomify_action._augmentation.providers.json_config.JsonConfigProvider._find_config_file") @patch("sbomify_action._augmentation.providers.sbomify_api.requests.get") - def test_fetch_augmentation_metadata( - self, mock_get, mock_find_config, sample_backend_metadata_with_mixed_licenses - ): + def test_fetch_augmentation_metadata(self, mock_get, mock_find_config, sample_backend_metadata_with_mixed_licenses): """Test fetching metadata from providers (sbomify API).""" # Disable json-config provider to isolate sbomify API test mock_find_config.return_value = None From 22f4f0b81239ca147796bb3f1fc180ff1c5f1f09 Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 11:00:30 +0100 Subject: [PATCH 5/8] Add component.supplier field for distribution platform (NTIA compliance) CycloneDX components now have both publisher and supplier fields: - publisher = package author/maintainer (e.g., "Django Software Foundation") - supplier = distribution platform (e.g., "Python Package Index (PyPI)") This addresses sbomqs comp_with_supplier compliance requirement. 
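After enrichment, a CycloneDX component carries both fields, e.g.
(a sketch with illustrative values matching the updated tests):

    {
      "name": "django",
      "publisher": "Django Software Foundation",
      "supplier": { "name": "Python Package Index (PyPI)" }
    }
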
Changes: - Import OrganizationalEntity for CycloneDX supplier field - Set component.supplier to distribution platform from NormalizedMetadata - For OS packages (deb/rpm/apk), set maintainer_name = supplier so publisher shows the distribution name (e.g., "Debian Project") - Update tests for correct publisher/supplier expectations Co-Authored-By: Claude Opus 4.5 --- sbomify_action/_enrichment/sources/purl.py | 4 ++++ sbomify_action/enrichment.py | 16 ++++++++++++---- tests/test_enrichment_module.py | 4 ++-- tests/test_ntia_compliance.py | 14 +++++++++----- 4 files changed, 27 insertions(+), 11 deletions(-) diff --git a/sbomify_action/_enrichment/sources/purl.py b/sbomify_action/_enrichment/sources/purl.py index 4ac397d..befc5c5 100644 --- a/sbomify_action/_enrichment/sources/purl.py +++ b/sbomify_action/_enrichment/sources/purl.py @@ -180,11 +180,15 @@ def fetch(self, purl: PackageURL, session: requests.Session) -> Optional[Normali field_sources = {} if supplier: field_sources["supplier"] = self.name + # For OS packages, the distribution is also the maintainer/publisher + field_sources["maintainer_name"] = self.name if homepage: field_sources["homepage"] = self.name return NormalizedMetadata( supplier=supplier, + # For OS packages, distribution is the publisher (maintainer_name -> component.publisher) + maintainer_name=supplier, homepage=homepage, source=self.name, field_sources=field_sources, diff --git a/sbomify_action/enrichment.py b/sbomify_action/enrichment.py index c2bd031..264d529 100644 --- a/sbomify_action/enrichment.py +++ b/sbomify_action/enrichment.py @@ -58,6 +58,7 @@ from cyclonedx.model import ExternalReference, ExternalReferenceType, Property, XsUri from cyclonedx.model.bom import Bom from cyclonedx.model.component import Component, ComponentType +from cyclonedx.model.contact import OrganizationalEntity from cyclonedx.model.license import LicenseExpression from spdx_tools.spdx.model import ( Actor, @@ -417,12 +418,19 @@ def _apply_metadata_to_cyclonedx_component( component.licenses.add(license_expr) added_fields.append("license") - # Publisher (sanitized) - if not component.publisher and metadata.supplier: + # Publisher - use maintainer_name (author), not supplier (distribution platform) + if not component.publisher and metadata.maintainer_name: + sanitized_publisher = sanitize_supplier(metadata.maintainer_name) + if sanitized_publisher: + component.publisher = sanitized_publisher + added_fields.append("publisher") + + # Supplier - use supplier (distribution platform like PyPI, npm, etc.) 
+ if not component.supplier and metadata.supplier: sanitized_supplier = sanitize_supplier(metadata.supplier) if sanitized_supplier: - component.publisher = sanitized_supplier - added_fields.append("publisher") + component.supplier = OrganizationalEntity(name=sanitized_supplier) + added_fields.append("supplier") # External references helper (with URL sanitization) def _add_external_ref(ref_type: ExternalReferenceType, url: str, field_name: str = "url") -> bool: diff --git a/tests/test_enrichment_module.py b/tests/test_enrichment_module.py index b29d280..9d8f772 100644 --- a/tests/test_enrichment_module.py +++ b/tests/test_enrichment_module.py @@ -1040,8 +1040,8 @@ def test_enrich_cyclonedx_sbom(self, tmp_path): result = json.load(f) assert result["components"][0]["description"] == "Django web framework" - # Publisher is the distribution platform - assert result["components"][0]["publisher"] == "Python Package Index (PyPI)" + # Publisher is the package author (maintainer_name), not distribution platform + assert result["components"][0]["publisher"] == "Django Software Foundation" def test_enrich_spdx_sbom(self, tmp_path): """Test enriching an SPDX SBOM end-to-end.""" diff --git a/tests/test_ntia_compliance.py b/tests/test_ntia_compliance.py index 2b00e03..dbdf264 100644 --- a/tests/test_ntia_compliance.py +++ b/tests/test_ntia_compliance.py @@ -418,7 +418,7 @@ def test_debian_package_purl_fallback(self, tmp_path): print(f" {component['name']}: publisher = {component.get('publisher')}") def test_alpine_package_purl_fallback(self, tmp_path): - """Test that Alpine packages get supplier from PURL namespace.""" + """Test that Alpine packages get supplier from PURL namespace when other sources fail.""" clear_cache() sbom_data = { @@ -449,7 +449,11 @@ def test_alpine_package_purl_fallback(self, tmp_path): # Mock API responses to 404 (simulating no data - force PURL fallback) mock_response = Mock() mock_response.status_code = 404 - with patch("requests.Session.get", return_value=mock_response): + with ( + patch("requests.Session.get", return_value=mock_response), + # Also disable LicenseDB so PURL fallback is truly tested + patch("sbomify_action._enrichment.sources.license_db.LicenseDBSource.fetch", return_value=None), + ): enrich_sbom(str(input_file), str(output_file)) with open(output_file) as f: @@ -1675,10 +1679,10 @@ def mock_get(url, *args, **kwargs): with open(output_file) as f: enriched_data = json.load(f) - # Verify the component got supplier as distribution platform + # Verify the component got publisher from author_email (extracted name: "Test Author") component = enriched_data["components"][0] - assert component.get("publisher") == "Python Package Index (PyPI)", ( - f"Expected publisher 'Python Package Index (PyPI)', got: {component.get('publisher')}" + assert component.get("publisher") == "Test Author", ( + f"Expected publisher 'Test Author' from author_email, got: {component.get('publisher')}" ) def test_lockfile_components_have_version(self, tmp_path): From bcf9065ba099beaa91f1bfb355d801e1f632bd15 Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 12:05:28 +0100 Subject: [PATCH 6/8] Add hash enrichment from lockfiles to SBOMs Extract cryptographic hashes from lockfiles and add them to SBOM components. This addresses the gap where SBOM generators don't capture hashes that exist in lockfiles. 
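For example, a Cargo.lock entry (Cargo checksums are always SHA-256):

    [[package]]
    name = "serde"
    version = "1.0.193"
    checksum = "abc123..."

is matched by normalized (name, version) and lands on the corresponding
CycloneDX component as:

    "hashes": [{ "alg": "SHA-256", "content": "abc123..." }]

SPDX documents get the equivalent "checksums" entry (algorithm "SHA256").
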
Supported lockfiles: - Python: uv.lock, Pipfile.lock, poetry.lock - Rust: Cargo.lock - Dart: pubspec.lock - JavaScript: package-lock.json, yarn.lock, pnpm-lock.yaml Key features: - One hash per package (prefers wheel over sdist, universal over platform-specific) - Deduplicates by (name, version) across nested dependencies - Feature parity between CycloneDX and SPDX formats - Audit trail integration for compliance tracking - Non-fatal errors (warns but continues pipeline) Co-Authored-By: Claude Opus 4.5 --- sbomify_action/_hash_enrichment/__init__.py | 41 ++ sbomify_action/_hash_enrichment/enricher.py | 345 ++++++++++++ sbomify_action/_hash_enrichment/models.py | 210 ++++++++ .../_hash_enrichment/parsers/__init__.py | 21 + .../_hash_enrichment/parsers/cargo_lock.py | 61 +++ .../_hash_enrichment/parsers/package_lock.py | 146 ++++++ .../_hash_enrichment/parsers/pipfile_lock.py | 118 +++++ .../_hash_enrichment/parsers/pnpm_lock.py | 206 ++++++++ .../_hash_enrichment/parsers/poetry_lock.py | 132 +++++ .../_hash_enrichment/parsers/pubspec_lock.py | 73 +++ .../_hash_enrichment/parsers/uv_lock.py | 114 ++++ .../_hash_enrichment/parsers/yarn_lock.py | 227 ++++++++ sbomify_action/_hash_enrichment/protocol.py | 84 +++ sbomify_action/_hash_enrichment/registry.py | 87 ++++ sbomify_action/cli/main.py | 29 ++ sbomify_action/console.py | 10 + sbomify_action/enrichment.py | 15 +- tests/test_hash_enrichment.py | 490 ++++++++++++++++++ 18 files changed, 2406 insertions(+), 3 deletions(-) create mode 100644 sbomify_action/_hash_enrichment/__init__.py create mode 100644 sbomify_action/_hash_enrichment/enricher.py create mode 100644 sbomify_action/_hash_enrichment/models.py create mode 100644 sbomify_action/_hash_enrichment/parsers/__init__.py create mode 100644 sbomify_action/_hash_enrichment/parsers/cargo_lock.py create mode 100644 sbomify_action/_hash_enrichment/parsers/package_lock.py create mode 100644 sbomify_action/_hash_enrichment/parsers/pipfile_lock.py create mode 100644 sbomify_action/_hash_enrichment/parsers/pnpm_lock.py create mode 100644 sbomify_action/_hash_enrichment/parsers/poetry_lock.py create mode 100644 sbomify_action/_hash_enrichment/parsers/pubspec_lock.py create mode 100644 sbomify_action/_hash_enrichment/parsers/uv_lock.py create mode 100644 sbomify_action/_hash_enrichment/parsers/yarn_lock.py create mode 100644 sbomify_action/_hash_enrichment/protocol.py create mode 100644 sbomify_action/_hash_enrichment/registry.py create mode 100644 tests/test_hash_enrichment.py diff --git a/sbomify_action/_hash_enrichment/__init__.py b/sbomify_action/_hash_enrichment/__init__.py new file mode 100644 index 0000000..47b18dc --- /dev/null +++ b/sbomify_action/_hash_enrichment/__init__.py @@ -0,0 +1,41 @@ +"""Lockfile hash extraction and SBOM enrichment. + +This module provides functionality to extract cryptographic hashes from +lockfiles and add them to SBOM components. It supports both CycloneDX +and SPDX formats. 
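+
+Matching is done on normalized (name, version) pairs, so spelling
+variants such as "Foo-Bar" vs "foo_bar" still line up between lockfile
+and SBOM (see normalize_package_name).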
+ +Supported lockfile formats: +- Python: uv.lock, Pipfile.lock, poetry.lock +- Rust: Cargo.lock +- Dart: pubspec.lock +- JavaScript: package-lock.json, yarn.lock, pnpm-lock.yaml + +Example usage: + from sbomify_action._hash_enrichment import enrich_sbom_with_hashes + + stats = enrich_sbom_with_hashes( + sbom_file="sbom.json", + lock_file="uv.lock", + ) + print(f"Added {stats['hashes_added']} hashes") +""" + +from .enricher import HashEnricher, create_default_registry, enrich_sbom_with_hashes +from .models import HashAlgorithm, PackageHash, normalize_package_name +from .protocol import LockfileHashParser +from .registry import ParserRegistry + +__all__ = [ + # Main API + "enrich_sbom_with_hashes", + # Classes for advanced usage + "HashEnricher", + "ParserRegistry", + "LockfileHashParser", + # Models + "PackageHash", + "HashAlgorithm", + "normalize_package_name", + # Factory + "create_default_registry", +] diff --git a/sbomify_action/_hash_enrichment/enricher.py b/sbomify_action/_hash_enrichment/enricher.py new file mode 100644 index 0000000..c9a1bcf --- /dev/null +++ b/sbomify_action/_hash_enrichment/enricher.py @@ -0,0 +1,345 @@ +"""Hash enrichment orchestration for SBOMs.""" + +import json +from pathlib import Path +from typing import Any + +from cyclonedx.model import HashAlgorithm as CdxHashAlgorithm +from cyclonedx.model import HashType +from cyclonedx.model.bom import Bom + +from ..console import get_audit_trail +from ..logging_config import logger +from ..serialization import serialize_cyclonedx_bom +from .models import HashAlgorithm, PackageHash, normalize_package_name +from .parsers import ( + CargoLockParser, + PackageLockParser, + PipfileLockParser, + PnpmLockParser, + PoetryLockParser, + PubspecLockParser, + UvLockParser, + YarnLockParser, +) +from .registry import ParserRegistry + + +def create_default_registry() -> ParserRegistry: + """Create registry with all default parsers.""" + registry = ParserRegistry() + + # Python parsers + registry.register(UvLockParser()) + registry.register(PipfileLockParser()) + registry.register(PoetryLockParser()) + + # Rust + registry.register(CargoLockParser()) + + # Dart + registry.register(PubspecLockParser()) + + # JavaScript/Node.js + registry.register(PackageLockParser()) + registry.register(YarnLockParser()) + registry.register(PnpmLockParser()) + + return registry + + +# Mapping from our HashAlgorithm to CycloneDX HashAlgorithm +_CDX_ALG_MAP = { + HashAlgorithm.MD5: CdxHashAlgorithm.MD5, + HashAlgorithm.SHA1: CdxHashAlgorithm.SHA_1, + HashAlgorithm.SHA256: CdxHashAlgorithm.SHA_256, + HashAlgorithm.SHA384: CdxHashAlgorithm.SHA_384, + HashAlgorithm.SHA512: CdxHashAlgorithm.SHA_512, + HashAlgorithm.SHA3_256: CdxHashAlgorithm.SHA3_256, + HashAlgorithm.SHA3_384: CdxHashAlgorithm.SHA3_384, + HashAlgorithm.SHA3_512: CdxHashAlgorithm.SHA3_512, + HashAlgorithm.BLAKE2B_256: CdxHashAlgorithm.BLAKE2B_256, + HashAlgorithm.BLAKE2B_384: CdxHashAlgorithm.BLAKE2B_384, + HashAlgorithm.BLAKE2B_512: CdxHashAlgorithm.BLAKE2B_512, + HashAlgorithm.BLAKE3: CdxHashAlgorithm.BLAKE3, +} + + +class HashEnricher: + """Orchestrates hash enrichment from lockfiles to SBOMs.""" + + def __init__(self, registry: ParserRegistry | None = None) -> None: + self._registry = registry or create_default_registry() + + def enrich_cyclonedx( + self, + bom: Bom, + lock_file_path: Path, + overwrite_existing: bool = False, + ) -> dict[str, int]: + """Enrich CycloneDX BOM components with hashes from lockfile. 
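+
+        The returned statistics use the same keys as
+        enrich_sbom_with_hashes: lockfile_packages, sbom_components,
+        components_matched, hashes_added, hashes_skipped.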
+ + Args: + bom: CycloneDX BOM to enrich (modified in place) + lock_file_path: Path to the lockfile + overwrite_existing: If True, replace existing hashes + + Returns: + Statistics dict with enrichment results. + """ + stats = { + "lockfile_packages": 0, + "sbom_components": 0, + "components_matched": 0, + "hashes_added": 0, + "hashes_skipped": 0, + } + + # Parse lockfile + lockfile_hashes = self._registry.parse_lockfile(lock_file_path) + stats["lockfile_packages"] = len(set((h.name, h.version) for h in lockfile_hashes)) + + if not lockfile_hashes: + logger.debug("No hashes found in lockfile") + return stats + + # Build lookup table by normalized (name, version) + parser = self._registry.get_parser_for(lock_file_path.name) + ecosystem = parser.ecosystem if parser else "unknown" + hash_lookup = self._build_hash_lookup(lockfile_hashes, ecosystem) + + # Process components + if not bom.components: + return stats + + stats["sbom_components"] = len(bom.components) + + for component in bom.components: + if not component.name or not component.version: + continue + + # Try to match component to lockfile hashes + normalized_name = normalize_package_name(component.name, ecosystem) + key = (normalized_name, component.version) + + pkg_hashes = hash_lookup.get(key) + if not pkg_hashes: + continue + + stats["components_matched"] += 1 + + # Check if component already has hashes + if component.hashes and not overwrite_existing: + stats["hashes_skipped"] += len(pkg_hashes) + continue + + # Add hashes to component + if overwrite_existing: + component.hashes = set() + + for pkg_hash in pkg_hashes: + cdx_alg = _CDX_ALG_MAP.get(pkg_hash.algorithm) + if cdx_alg is None: + continue + + # Check if this exact hash already exists + existing = any(h.alg == cdx_alg and h.content == pkg_hash.value for h in (component.hashes or [])) + if existing: + stats["hashes_skipped"] += 1 + continue + + hash_type = HashType(alg=cdx_alg, content=pkg_hash.value) + if component.hashes is None: + component.hashes = set() + component.hashes.add(hash_type) + stats["hashes_added"] += 1 + + # Record to audit trail + audit = get_audit_trail() + component_id = component.purl or f"{component.name}@{component.version}" + audit.record_hash_added(str(component_id), pkg_hash.algorithm.value, source="lockfile") + + return stats + + def enrich_spdx( + self, + spdx_data: dict[str, Any], + lock_file_path: Path, + overwrite_existing: bool = False, + ) -> dict[str, int]: + """Enrich SPDX document packages with checksums from lockfile. + + Args: + spdx_data: SPDX JSON dict to enrich (modified in place) + lock_file_path: Path to the lockfile + overwrite_existing: If True, replace existing checksums + + Returns: + Statistics dict with enrichment results. 
+ """ + stats = { + "lockfile_packages": 0, + "sbom_components": 0, + "components_matched": 0, + "hashes_added": 0, + "hashes_skipped": 0, + } + + # Parse lockfile + lockfile_hashes = self._registry.parse_lockfile(lock_file_path) + stats["lockfile_packages"] = len(set((h.name, h.version) for h in lockfile_hashes)) + + if not lockfile_hashes: + logger.debug("No hashes found in lockfile") + return stats + + # Build lookup table + parser = self._registry.get_parser_for(lock_file_path.name) + ecosystem = parser.ecosystem if parser else "unknown" + hash_lookup = self._build_hash_lookup(lockfile_hashes, ecosystem) + + # Process packages + packages = spdx_data.get("packages", []) + stats["sbom_components"] = len(packages) + + for package in packages: + name = package.get("name") + version = package.get("versionInfo") + + if not name or not version: + continue + + # Try to match package to lockfile hashes + normalized_name = normalize_package_name(name, ecosystem) + key = (normalized_name, version) + + pkg_hashes = hash_lookup.get(key) + if not pkg_hashes: + continue + + stats["components_matched"] += 1 + + # Check if package already has checksums + checksums = package.get("checksums", []) + if checksums and not overwrite_existing: + stats["hashes_skipped"] += len(pkg_hashes) + continue + + # Add checksums to package + if overwrite_existing: + checksums = [] + + for pkg_hash in pkg_hashes: + spdx_alg = pkg_hash.algorithm.spdx_alg + + # Check if this exact checksum already exists + existing = any( + c.get("algorithm") == spdx_alg and c.get("checksumValue") == pkg_hash.value for c in checksums + ) + if existing: + stats["hashes_skipped"] += 1 + continue + + checksums.append( + { + "algorithm": spdx_alg, + "checksumValue": pkg_hash.value, + } + ) + stats["hashes_added"] += 1 + + # Record to audit trail + audit = get_audit_trail() + component_id = package.get("SPDXID") or f"{name}@{version}" + audit.record_hash_added(component_id, pkg_hash.algorithm.value, source="lockfile") + + package["checksums"] = checksums + + return stats + + def _build_hash_lookup( + self, + hashes: list[PackageHash], + ecosystem: str, + ) -> dict[tuple[str, str], list[PackageHash]]: + """Build lookup table from list of hashes.""" + lookup: dict[tuple[str, str], list[PackageHash]] = {} + + for h in hashes: + normalized_name = normalize_package_name(h.name, ecosystem) + key = (normalized_name, h.version) + + if key not in lookup: + lookup[key] = [] + lookup[key].append(h) + + return lookup + + +def enrich_sbom_with_hashes( + sbom_file: str, + lock_file: str, + overwrite_existing: bool = False, +) -> dict[str, int]: + """Enrich SBOM file with hashes extracted from lockfile. + + This is the main public API for hash enrichment. 
+ + Args: + sbom_file: Path to SBOM file (modified in place) + lock_file: Path to lockfile to extract hashes from + overwrite_existing: If True, replace existing hashes + + Returns: + Statistics dict with: + - lockfile_packages: Number of packages found in lockfile + - sbom_components: Number of components in SBOM + - components_matched: Number of components matched to lockfile + - hashes_added: Number of hashes added + - hashes_skipped: Number of hashes skipped (already present) + """ + sbom_path = Path(sbom_file) + lock_path = Path(lock_file) + + # Load SBOM + with sbom_path.open("r") as f: + sbom_data = json.load(f) + + enricher = HashEnricher() + + # Detect format and enrich + if sbom_data.get("bomFormat") == "CycloneDX": + # CycloneDX format + bom = Bom.from_json(sbom_data) + stats = enricher.enrich_cyclonedx(bom, lock_path, overwrite_existing) + + # Serialize back + spec_version = sbom_data.get("specVersion", "1.6") + serialized = serialize_cyclonedx_bom(bom, spec_version) + with sbom_path.open("w") as f: + f.write(serialized) + + elif sbom_data.get("spdxVersion"): + # SPDX format + stats = enricher.enrich_spdx(sbom_data, lock_path, overwrite_existing) + + # Write back + with sbom_path.open("w") as f: + json.dump(sbom_data, f, indent=2) + + else: + logger.warning("Unknown SBOM format, skipping hash enrichment") + return { + "lockfile_packages": 0, + "sbom_components": 0, + "components_matched": 0, + "hashes_added": 0, + "hashes_skipped": 0, + } + + logger.info( + f"Hash enrichment: {stats['hashes_added']} hash(es) added to " + f"{stats['components_matched']}/{stats['sbom_components']} component(s)" + ) + + return stats diff --git a/sbomify_action/_hash_enrichment/models.py b/sbomify_action/_hash_enrichment/models.py new file mode 100644 index 0000000..ed0be54 --- /dev/null +++ b/sbomify_action/_hash_enrichment/models.py @@ -0,0 +1,210 @@ +"""Data models for lockfile hash extraction.""" + +from dataclasses import dataclass +from enum import Enum + + +class HashAlgorithm(Enum): + """Supported hash algorithms with CycloneDX/SPDX mappings. + + Algorithm names follow CycloneDX conventions (with hyphens). + Use cyclonedx_alg and spdx_alg properties for format-specific names. + """ + + MD5 = "MD5" + SHA1 = "SHA-1" + SHA256 = "SHA-256" + SHA384 = "SHA-384" + SHA512 = "SHA-512" + SHA3_256 = "SHA3-256" + SHA3_384 = "SHA3-384" + SHA3_512 = "SHA3-512" + BLAKE2B_256 = "BLAKE2b-256" + BLAKE2B_384 = "BLAKE2b-384" + BLAKE2B_512 = "BLAKE2b-512" + BLAKE3 = "BLAKE3" + + @property + def cyclonedx_alg(self) -> str: + """Return CycloneDX algorithm name.""" + return self.value + + @property + def spdx_alg(self) -> str: + """Return SPDX algorithm name. + + SPDX uses specific algorithm names: + - SHA-1 → SHA1, SHA-256 → SHA256, etc. (no hyphen) + - SHA3-256 → SHA3-256 (keep hyphen for SHA3) + - BLAKE2b-256 → BLAKE2b-256 (keep format with lowercase 'b') + - BLAKE3 → BLAKE3 + """ + # BLAKE algorithms keep their exact format + if self.value.startswith("BLAKE"): + return self.value + # SHA3 keeps the hyphen + if self.value.startswith("SHA3"): + return self.value + # SHA-1, SHA-256, etc. remove hyphen + return self.value.replace("-", "") + + @classmethod + def from_prefix(cls, prefix: str) -> "HashAlgorithm | None": + """Parse algorithm from common hash prefixes. + + Args: + prefix: Hash prefix like 'sha256', 'sha512', 'md5', etc. + + Returns: + HashAlgorithm if recognized, None otherwise. 
+ """ + mapping = { + "md5": cls.MD5, + "sha1": cls.SHA1, + "sha-1": cls.SHA1, + "sha256": cls.SHA256, + "sha-256": cls.SHA256, + "sha384": cls.SHA384, + "sha-384": cls.SHA384, + "sha512": cls.SHA512, + "sha-512": cls.SHA512, + "sha3-256": cls.SHA3_256, + "sha3-384": cls.SHA3_384, + "sha3-512": cls.SHA3_512, + "blake2b-256": cls.BLAKE2B_256, + "blake2b-384": cls.BLAKE2B_384, + "blake2b-512": cls.BLAKE2B_512, + "blake3": cls.BLAKE3, + } + return mapping.get(prefix.lower()) + + +@dataclass +class PackageHash: + """Hash extracted from a lockfile. + + Represents a single hash value for a specific package version. + A package may have multiple hashes (e.g., for sdist and wheels, + or multiple algorithms). + """ + + name: str + version: str + algorithm: HashAlgorithm + value: str # Hex-encoded hash value + artifact_type: str = "unknown" # e.g., "sdist", "wheel", "tarball" + + @classmethod + def from_prefixed( + cls, + name: str, + version: str, + prefixed_hash: str, + artifact_type: str = "unknown", + ) -> "PackageHash | None": + """Parse hash from prefixed format like 'sha256:abc123...'. + + Args: + name: Package name + version: Package version + prefixed_hash: Hash string with algorithm prefix (e.g., 'sha256:abc...') + artifact_type: Type of artifact (sdist, wheel, etc.) + + Returns: + PackageHash if parsing succeeds, None otherwise. + """ + if ":" not in prefixed_hash: + return None + + prefix, value = prefixed_hash.split(":", 1) + algorithm = HashAlgorithm.from_prefix(prefix) + if algorithm is None: + return None + + return cls( + name=name, + version=version, + algorithm=algorithm, + value=value, + artifact_type=artifact_type, + ) + + @classmethod + def from_sri( + cls, + name: str, + version: str, + sri_hash: str, + artifact_type: str = "unknown", + ) -> "PackageHash | None": + """Parse hash from SRI (Subresource Integrity) format. + + SRI format: algorithm-base64hash (e.g., 'sha512-abc...=') + Used by npm, yarn, pnpm. + + Args: + name: Package name + version: Package version + sri_hash: SRI hash string (e.g., 'sha512-abc...') + artifact_type: Type of artifact + + Returns: + PackageHash if parsing succeeds, None otherwise. + """ + import base64 + + if "-" not in sri_hash: + return None + + prefix, b64_value = sri_hash.split("-", 1) + algorithm = HashAlgorithm.from_prefix(prefix) + if algorithm is None: + return None + + try: + # Convert base64 to hex + hex_value = base64.b64decode(b64_value).hex() + except Exception: + return None + + return cls( + name=name, + version=version, + algorithm=algorithm, + value=hex_value, + artifact_type=artifact_type, + ) + + +def normalize_package_name(name: str, ecosystem: str) -> str: + """Normalize package name for matching across lockfile and SBOM. + + Different ecosystems have different normalization rules: + - PyPI: case-insensitive, underscores/hyphens/dots are equivalent + - npm: case-insensitive (scoped packages normalize scope and name separately) + - Cargo: case-insensitive, hyphens and underscores equivalent + - pub (Dart): case-insensitive, underscores only (no hyphens allowed) + + Args: + name: Package name to normalize + ecosystem: Ecosystem identifier (pypi, npm, cargo, pub, etc.) + + Returns: + Normalized package name for comparison. 
+ """ + if ecosystem == "pypi": + # PEP 503: normalize by lowercasing and replacing separators + return name.lower().replace("-", "_").replace(".", "_") + elif ecosystem == "npm": + # npm is case-insensitive for both scoped and unscoped packages + # Scoped: @scope/name -> @scope/name (lowercased) + return name.lower() + elif ecosystem == "cargo": + # Cargo is case-insensitive, hyphens and underscores equivalent + return name.lower().replace("-", "_") + elif ecosystem == "pub": + # Dart pub packages are lowercase with underscores only + return name.lower() + else: + # Default: lowercase + return name.lower() diff --git a/sbomify_action/_hash_enrichment/parsers/__init__.py b/sbomify_action/_hash_enrichment/parsers/__init__.py new file mode 100644 index 0000000..60d5196 --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/__init__.py @@ -0,0 +1,21 @@ +"""Lockfile hash parsers for various ecosystems.""" + +from .cargo_lock import CargoLockParser +from .package_lock import PackageLockParser +from .pipfile_lock import PipfileLockParser +from .pnpm_lock import PnpmLockParser +from .poetry_lock import PoetryLockParser +from .pubspec_lock import PubspecLockParser +from .uv_lock import UvLockParser +from .yarn_lock import YarnLockParser + +__all__ = [ + "CargoLockParser", + "PackageLockParser", + "PipfileLockParser", + "PnpmLockParser", + "PoetryLockParser", + "PubspecLockParser", + "UvLockParser", + "YarnLockParser", +] diff --git a/sbomify_action/_hash_enrichment/parsers/cargo_lock.py b/sbomify_action/_hash_enrichment/parsers/cargo_lock.py new file mode 100644 index 0000000..d877878 --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/cargo_lock.py @@ -0,0 +1,61 @@ +"""Parser for Cargo.lock files (Rust).""" + +from pathlib import Path + +import tomllib + +from ..models import HashAlgorithm, PackageHash + + +class CargoLockParser: + """Parser for Cargo.lock files. + + Cargo.lock is a TOML file with [[package]] sections: + [[package]] + name = "serde" + version = "1.0.193" + checksum = "abc123..." # Always SHA256, no prefix + """ + + name = "cargo-lock" + supported_files = ("Cargo.lock",) + ecosystem = "cargo" + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse Cargo.lock and extract hashes. + + Args: + lock_file_path: Path to Cargo.lock file + + Returns: + List of PackageHash objects for all packages with checksums. + """ + with lock_file_path.open("rb") as f: + data = tomllib.load(f) + + hashes: list[PackageHash] = [] + packages = data.get("package", []) + + for pkg in packages: + name = pkg.get("name") + version = pkg.get("version") + checksum = pkg.get("checksum") + + if not name or not version or not checksum: + continue + + # Cargo checksums are always SHA256, no prefix + hashes.append( + PackageHash( + name=name, + version=version, + algorithm=HashAlgorithm.SHA256, + value=checksum, + artifact_type="crate", + ) + ) + + return hashes diff --git a/sbomify_action/_hash_enrichment/parsers/package_lock.py b/sbomify_action/_hash_enrichment/parsers/package_lock.py new file mode 100644 index 0000000..35a7279 --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/package_lock.py @@ -0,0 +1,146 @@ +"""Parser for package-lock.json files (npm).""" + +import json +from pathlib import Path + +from ..models import PackageHash + + +class PackageLockParser: + """Parser for package-lock.json files. 
+ + package-lock.json v2/v3 is a JSON file with structure: + { + "packages": { + "node_modules/package-name": { + "version": "1.2.3", + "integrity": "sha512-base64hash..." + } + } + } + + v1 uses "dependencies" instead of "packages". + """ + + name = "npm-package-lock" + supported_files = ("package-lock.json",) + ecosystem = "npm" + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse package-lock.json and extract hashes. + + Returns one hash per unique (name, version) combination. + + Args: + lock_file_path: Path to package-lock.json file + + Returns: + List of PackageHash objects (one per package@version). + """ + with lock_file_path.open("r") as f: + data = json.load(f) + + hashes: list[PackageHash] = [] + seen: set[tuple[str, str]] = set() # (name, version) + + # Try v2/v3 format first (packages) + packages = data.get("packages", {}) + if packages: + for pkg_path, pkg_data in packages.items(): + if not pkg_path or not isinstance(pkg_data, dict): + continue + + # Skip the root package (empty path) + if pkg_path == "": + continue + + # Extract package name from path (e.g., "node_modules/@scope/name") + name = self._extract_package_name(pkg_path) + if not name: + continue + + version = pkg_data.get("version") + integrity = pkg_data.get("integrity") + + if not version or not integrity: + continue + + # Deduplicate by (name, version) + key = (name, version) + if key in seen: + continue + seen.add(key) + + pkg_hash = PackageHash.from_sri( + name=name, + version=version, + sri_hash=integrity, + artifact_type="tarball", + ) + if pkg_hash: + hashes.append(pkg_hash) + + # Fall back to v1 format (dependencies) + if not hashes: + dependencies = data.get("dependencies", {}) + hashes.extend(self._parse_dependencies(dependencies, seen=set())) + + return hashes + + def _extract_package_name(self, pkg_path: str) -> str | None: + """Extract package name from node_modules path.""" + # Handle paths like "node_modules/@scope/name" or "node_modules/name" + if not pkg_path.startswith("node_modules/"): + return None + + name_part = pkg_path[len("node_modules/") :] + + # Handle nested node_modules (take the last one) + if "node_modules/" in name_part: + name_part = name_part.rsplit("node_modules/", 1)[-1] + + return name_part if name_part else None + + def _parse_dependencies( + self, dependencies: dict, seen: set[tuple[str, str]] | None = None + ) -> list[PackageHash]: + """Parse v1 format dependencies recursively. + + Deduplicates by (name, version) to avoid returning multiple hashes + for the same package@version at different nesting levels. 
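+
+        Illustrative v1 input shape (abbreviated, hypothetical values):
+            {"lodash": {"version": "4.17.21",
+                        "integrity": "sha512-...",
+                        "dependencies": {...}}}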
+ """ + if seen is None: + seen = set() + + hashes: list[PackageHash] = [] + + for name, pkg_data in dependencies.items(): + if not isinstance(pkg_data, dict): + continue + + version = pkg_data.get("version") + integrity = pkg_data.get("integrity") + + if version and integrity: + # Deduplicate by (name, version) + key = (name, version) + if key not in seen: + seen.add(key) + pkg_hash = PackageHash.from_sri( + name=name, + version=version, + sri_hash=integrity, + artifact_type="tarball", + ) + if pkg_hash: + hashes.append(pkg_hash) + + # Recurse into nested dependencies + nested = pkg_data.get("dependencies", {}) + if nested: + hashes.extend(self._parse_dependencies(nested, seen)) + + return hashes diff --git a/sbomify_action/_hash_enrichment/parsers/pipfile_lock.py b/sbomify_action/_hash_enrichment/parsers/pipfile_lock.py new file mode 100644 index 0000000..f8f7921 --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/pipfile_lock.py @@ -0,0 +1,118 @@ +"""Parser for Pipfile.lock files (Python Pipenv).""" + +import json +from pathlib import Path + +from ..models import PackageHash + + +class PipfileLockParser: + """Parser for Pipfile.lock files. + + Pipfile.lock is a JSON file with structure: + { + "default": { + "package-name": { + "hashes": ["sha256:...", "sha256:..."], + "version": "==1.2.3" + } + }, + "develop": { ... } + } + """ + + name = "pipfile-lock" + supported_files = ("Pipfile.lock",) + ecosystem = "pypi" + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse Pipfile.lock and extract hashes. + + For each package, returns ONE hash. If multiple algorithms are present, + prefers SHA-512 > SHA-384 > SHA-256. + + Args: + lock_file_path: Path to Pipfile.lock file + + Returns: + List of PackageHash objects (one per package). + """ + with lock_file_path.open("r") as f: + data = json.load(f) + + hashes: list[PackageHash] = [] + seen_packages: set[tuple[str, str]] = set() # (name, version) + + # Process both default and develop sections + for section in ["default", "develop"]: + packages = data.get(section, {}) + for name, pkg_data in packages.items(): + if not isinstance(pkg_data, dict): + continue + + # Version has == prefix, e.g., "==5.1.1" + version = pkg_data.get("version", "") + if version.startswith("=="): + version = version[2:] + elif version.startswith("="): + version = version[1:] + + if not version: + continue + + # Skip if we've already processed this package + pkg_key = (name.lower(), version) + if pkg_key in seen_packages: + continue + seen_packages.add(pkg_key) + + # Hashes are prefixed, e.g., "sha256:abc123..." + # Pick the best hash (prefer stronger algorithms) + pkg_hashes = pkg_data.get("hashes", []) + best_hash = self._select_best_hash(pkg_hashes) + if best_hash: + pkg_hash = PackageHash.from_prefixed( + name=name, + version=version, + prefixed_hash=best_hash, + artifact_type="wheel", # Pipenv typically installs wheels + ) + if pkg_hash: + hashes.append(pkg_hash) + + return hashes + + @staticmethod + def _select_best_hash(hash_strings: list[str]) -> str | None: + """Select the best hash from available hashes. + + Prefers stronger algorithms: SHA-512 > SHA-384 > SHA-256 > SHA-1 > MD5. 
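+
+        Example (illustrative doctest; hypothetical hash values):
+            >>> PipfileLockParser._select_best_hash(
+            ...     ["md5:aa", "sha256:bb", "sha512:cc"])
+            'sha512:cc'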
+ """ + if not hash_strings: + return None + + # Priority order (higher is better) + priority = { + "sha512": 5, + "sha384": 4, + "sha256": 3, + "sha1": 2, + "md5": 1, + } + + best_hash = None + best_priority = 0 + + for hash_str in hash_strings: + if ":" not in hash_str: + continue + prefix = hash_str.split(":")[0].lower() + hash_priority = priority.get(prefix, 0) + if hash_priority > best_priority: + best_priority = hash_priority + best_hash = hash_str + + return best_hash or (hash_strings[0] if hash_strings else None) diff --git a/sbomify_action/_hash_enrichment/parsers/pnpm_lock.py b/sbomify_action/_hash_enrichment/parsers/pnpm_lock.py new file mode 100644 index 0000000..b12f4bc --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/pnpm_lock.py @@ -0,0 +1,206 @@ +"""Parser for pnpm-lock.yaml files (pnpm).""" + +from pathlib import Path + +import yaml + +from ..models import PackageHash + + +class PnpmLockParser: + """Parser for pnpm-lock.yaml files. + + pnpm-lock.yaml v6+ structure: + packages: + /@scope/name@1.2.3: + resolution: {integrity: sha512-...} + ... + + Or for newer versions (v9+): + packages: + '@scope/name@1.2.3': + resolution: + integrity: sha512-... + + Or snapshots format: + snapshots: + package@version: + ... + """ + + name = "pnpm-lock" + supported_files = ("pnpm-lock.yaml",) + ecosystem = "npm" + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse pnpm-lock.yaml and extract hashes. + + Args: + lock_file_path: Path to pnpm-lock.yaml file + + Returns: + List of PackageHash objects for all packages with integrity hashes. + """ + with lock_file_path.open("r") as f: + data = yaml.safe_load(f) + + hashes: list[PackageHash] = [] + + if not isinstance(data, dict): + return hashes + + seen: set[tuple[str, str]] = set() + + # Try packages section (v5-v8) + packages = data.get("packages", {}) + if packages: + hashes.extend(self._parse_packages(packages, seen)) + + # Try snapshots section (v9+) + snapshots = data.get("snapshots", {}) + if snapshots and not hashes: + hashes.extend(self._parse_snapshots(snapshots, data, seen)) + + return hashes + + def _parse_packages(self, packages: dict, seen: set[tuple[str, str]] | None = None) -> list[PackageHash]: + """Parse packages section. + + Deduplicates by (name, version) to return one hash per package@version. + """ + if seen is None: + seen = set() + + hashes: list[PackageHash] = [] + + for pkg_key, pkg_data in packages.items(): + if not isinstance(pkg_data, dict): + continue + + # Extract name and version from key + # Formats: "/@scope/name@1.2.3" or "/name@1.2.3" or "@scope/name@1.2.3" + name, version = self._parse_package_key(pkg_key) + if not name or not version: + continue + + # Deduplicate by (name, version) + key = (name, version) + if key in seen: + continue + seen.add(key) + + # Get integrity from resolution + resolution = pkg_data.get("resolution", {}) + if isinstance(resolution, dict): + integrity = resolution.get("integrity") + else: + integrity = None + + if not integrity: + continue + + pkg_hash = PackageHash.from_sri( + name=name, + version=version, + sri_hash=integrity, + artifact_type="tarball", + ) + if pkg_hash: + hashes.append(pkg_hash) + + return hashes + + def _parse_snapshots( + self, snapshots: dict, data: dict, seen: set[tuple[str, str]] | None = None + ) -> list[PackageHash]: + """Parse snapshots section (pnpm v9+). 
+ + In v9+, the integrity is in the packages section keyed by name@version, + while snapshots just reference them. + """ + if seen is None: + seen = set() + + hashes: list[PackageHash] = [] + packages = data.get("packages", {}) + + for snap_key in snapshots: + # Parse name and version from snapshot key + name, version = self._parse_package_key(snap_key) + if not name or not version: + continue + + # Deduplicate by (name, version) + key = (name, version) + if key in seen: + continue + seen.add(key) + + # Look up integrity in packages section + # Try different key formats + pkg_data = None + for key_format in [f"{name}@{version}", f"/{name}@{version}"]: + if key_format in packages: + pkg_data = packages[key_format] + break + + if not pkg_data or not isinstance(pkg_data, dict): + continue + + resolution = pkg_data.get("resolution", {}) + if isinstance(resolution, dict): + integrity = resolution.get("integrity") + else: + integrity = None + + if not integrity: + continue + + pkg_hash = PackageHash.from_sri( + name=name, + version=version, + sri_hash=integrity, + artifact_type="tarball", + ) + if pkg_hash: + hashes.append(pkg_hash) + + return hashes + + @staticmethod + def _parse_package_key(key: str) -> tuple[str | None, str | None]: + """Parse package name and version from pnpm key. + + Formats: + - "/@scope/name@1.2.3" + - "/name@1.2.3" + - "@scope/name@1.2.3" + - "name@1.2.3" + - "/@scope/name@1.2.3(peer@2.0.0)" # with peer deps + """ + # Remove leading slash if present + if key.startswith("/"): + key = key[1:] + + # Remove peer dependency suffix if present + if "(" in key: + key = key.split("(")[0] + + # Find the @ that separates name from version + if key.startswith("@"): + # Scoped package: @scope/name@version + at_pos = key.find("@", 1) + else: + # Unscoped package: name@version + at_pos = key.find("@") + + if at_pos == -1: + return None, None + + name = key[:at_pos] + version = key[at_pos + 1 :] + + return name, version diff --git a/sbomify_action/_hash_enrichment/parsers/poetry_lock.py b/sbomify_action/_hash_enrichment/parsers/poetry_lock.py new file mode 100644 index 0000000..af31473 --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/poetry_lock.py @@ -0,0 +1,132 @@ +"""Parser for poetry.lock files (Python Poetry).""" + +from pathlib import Path + +import tomllib + +from ..models import PackageHash + + +class PoetryLockParser: + """Parser for poetry.lock files. + + poetry.lock is a TOML file with [[package]] sections: + [[package]] + name = "django" + version = "5.1.1" + + [[package.files]] + file = "django-5.1.1-py3-none-any.whl" + hash = "sha256:abc123..." + + Or in newer versions: + [package.files] + "django-5.1.1-py3-none-any.whl" = "sha256:abc123..." + """ + + name = "poetry-lock" + supported_files = ("poetry.lock",) + ecosystem = "pypi" + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse poetry.lock and extract hashes. + + For each package, returns ONE hash with preference: + 1. Universal wheel (py3-none-any) if available + 2. First wheel hash if any wheels exist + 3. sdist hash as fallback + + Args: + lock_file_path: Path to poetry.lock file + + Returns: + List of PackageHash objects (one per package). 
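+
+        Illustrative selection: given files "pkg-1.0-py3-none-any.whl",
+        "pkg-1.0-cp311-manylinux_x86_64.whl" and "pkg-1.0.tar.gz"
+        (hypothetical names), the universal wheel's hash is the one kept.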
+ """ + with lock_file_path.open("rb") as f: + data = tomllib.load(f) + + hashes: list[PackageHash] = [] + packages = data.get("package", []) + + for pkg in packages: + name = pkg.get("name") + version = pkg.get("version") + + if not name or not version: + continue + + # Collect all file entries with their hashes + file_entries: list[tuple[str, str]] = [] # (filename, hash) + + files = pkg.get("files", []) + + # Handle files array format (newer poetry versions) + if isinstance(files, list): + for file_entry in files: + if isinstance(file_entry, dict): + hash_str = file_entry.get("hash") + filename = file_entry.get("file", "") + if hash_str and filename: + file_entries.append((filename, hash_str)) + + # Handle dict format (some poetry versions) + elif isinstance(files, dict): + for filename, hash_str in files.items(): + if isinstance(hash_str, str): + file_entries.append((filename, hash_str)) + + # Select the best hash + best = self._select_best_file_hash(file_entries) + if best: + filename, hash_str = best + artifact_type = self._detect_artifact_type(filename) + pkg_hash = PackageHash.from_prefixed( + name=name, + version=version, + prefixed_hash=hash_str, + artifact_type=artifact_type, + ) + if pkg_hash: + hashes.append(pkg_hash) + + return hashes + + def _select_best_file_hash(self, file_entries: list[tuple[str, str]]) -> tuple[str, str] | None: + """Select the best file hash from available entries. + + Prefers universal wheels (py3-none-any) over platform-specific ones, + and wheels over sdists. + """ + if not file_entries: + return None + + universal_wheel = None + first_wheel = None + first_sdist = None + + for filename, hash_str in file_entries: + is_wheel = filename.endswith(".whl") + is_sdist = filename.endswith((".tar.gz", ".tar.bz2", ".zip")) + + if is_wheel: + if first_wheel is None: + first_wheel = (filename, hash_str) + if "py3-none-any" in filename or "py2.py3-none-any" in filename: + universal_wheel = (filename, hash_str) + break # Found universal, stop searching + elif is_sdist and first_sdist is None: + first_sdist = (filename, hash_str) + + return universal_wheel or first_wheel or first_sdist + + @staticmethod + def _detect_artifact_type(filename: str) -> str: + """Detect artifact type from filename.""" + if filename.endswith(".whl"): + return "wheel" + elif filename.endswith((".tar.gz", ".tar.bz2", ".zip")): + return "sdist" + return "unknown" diff --git a/sbomify_action/_hash_enrichment/parsers/pubspec_lock.py b/sbomify_action/_hash_enrichment/parsers/pubspec_lock.py new file mode 100644 index 0000000..a2ccf10 --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/pubspec_lock.py @@ -0,0 +1,73 @@ +"""Parser for pubspec.lock files (Dart/Flutter).""" + +from pathlib import Path + +import yaml + +from ..models import HashAlgorithm, PackageHash + + +class PubspecLockParser: + """Parser for pubspec.lock files. + + pubspec.lock is a YAML file with structure: + packages: + package_name: + dependency: "direct main" + description: + name: package_name + sha256: abc123... + url: "https://pub.dev" + source: hosted + version: "1.2.3" + """ + + name = "pubspec-lock" + supported_files = ("pubspec.lock",) + ecosystem = "pub" + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse pubspec.lock and extract hashes. + + Args: + lock_file_path: Path to pubspec.lock file + + Returns: + List of PackageHash objects for all packages with hashes. 
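+
+        Note: only hosted packages typically carry description.sha256;
+        git and path dependencies have no hash entry and are skipped.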
+ """ + with lock_file_path.open("r") as f: + data = yaml.safe_load(f) + + hashes: list[PackageHash] = [] + packages = data.get("packages", {}) + + for name, pkg_data in packages.items(): + if not isinstance(pkg_data, dict): + continue + + version = pkg_data.get("version") + if version and version.startswith('"') and version.endswith('"'): + version = version[1:-1] + + if not version: + continue + + # Hash is in description.sha256 + description = pkg_data.get("description", {}) + if isinstance(description, dict): + sha256 = description.get("sha256") + if sha256: + hashes.append( + PackageHash( + name=name, + version=version, + algorithm=HashAlgorithm.SHA256, + value=sha256, + artifact_type="package", + ) + ) + + return hashes diff --git a/sbomify_action/_hash_enrichment/parsers/uv_lock.py b/sbomify_action/_hash_enrichment/parsers/uv_lock.py new file mode 100644 index 0000000..3c614e1 --- /dev/null +++ b/sbomify_action/_hash_enrichment/parsers/uv_lock.py @@ -0,0 +1,114 @@ +"""Parser for uv.lock files (Python uv package manager).""" + +from pathlib import Path + +import tomllib + +from ..models import PackageHash + + +class UvLockParser: + """Parser for uv.lock files. + + uv.lock is a TOML file with [[package]] sections containing: + - name, version + - sdist = { hash = "sha256:...", ... } + - wheels = [{ hash = "sha256:...", ... }, ...] + """ + + name = "uv-lock" + supported_files = ("uv.lock",) + ecosystem = "pypi" + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse uv.lock and extract hashes. + + For each package, returns ONE hash with preference: + 1. Universal wheel (py3-none-any) if available + 2. First wheel hash if any wheels exist + 3. sdist hash as fallback + + Args: + lock_file_path: Path to uv.lock file + + Returns: + List of PackageHash objects (one per package). + """ + with lock_file_path.open("rb") as f: + data = tomllib.load(f) + + hashes: list[PackageHash] = [] + packages = data.get("package", []) + + for pkg in packages: + name = pkg.get("name") + version = pkg.get("version") + + if not name or not version: + continue + + # Try to find the best wheel hash (prefer universal wheels) + wheels = pkg.get("wheels", []) + wheel_hash = self._select_best_wheel_hash(wheels) + + if wheel_hash: + pkg_hash = PackageHash.from_prefixed( + name=name, + version=version, + prefixed_hash=wheel_hash, + artifact_type="wheel", + ) + if pkg_hash: + hashes.append(pkg_hash) + continue + + # Fall back to sdist hash if no wheels + sdist = pkg.get("sdist") + if sdist and isinstance(sdist, dict): + hash_str = sdist.get("hash") + if hash_str: + pkg_hash = PackageHash.from_prefixed( + name=name, + version=version, + prefixed_hash=hash_str, + artifact_type="sdist", + ) + if pkg_hash: + hashes.append(pkg_hash) + + return hashes + + def _select_best_wheel_hash(self, wheels: list) -> str | None: + """Select the best wheel hash from available wheels. + + Prefers universal wheels (py3-none-any) over platform-specific ones. 
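+
+        Example (illustrative doctest; hypothetical URLs and hashes):
+            >>> UvLockParser()._select_best_wheel_hash([
+            ...     {"url": "https://example.org/p-1.0-cp311-manylinux_x86_64.whl",
+            ...      "hash": "sha256:aa"},
+            ...     {"url": "https://example.org/p-1.0-py3-none-any.whl",
+            ...      "hash": "sha256:bb"},
+            ... ])
+            'sha256:bb'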
+        """
+        if not wheels:
+            return None
+
+        universal_hash = None
+        first_hash = None
+
+        for wheel in wheels:
+            if not isinstance(wheel, dict):
+                continue
+
+            hash_str = wheel.get("hash")
+            if not hash_str:
+                continue
+
+            # Track first valid hash as fallback
+            if first_hash is None:
+                first_hash = hash_str
+
+            # Check if this is a universal wheel
+            # (parenthesized so a missing URL cannot discard an explicit filename)
+            url = wheel.get("url", "")
+            filename = wheel.get("filename", "") or (url.split("/")[-1] if url else "")
+            if "py3-none-any" in filename or "py2.py3-none-any" in filename:
+                universal_hash = hash_str
+                break  # Found universal, stop searching
+
+        return universal_hash or first_hash
diff --git a/sbomify_action/_hash_enrichment/parsers/yarn_lock.py b/sbomify_action/_hash_enrichment/parsers/yarn_lock.py
new file mode 100644
index 0000000..30e058c
--- /dev/null
+++ b/sbomify_action/_hash_enrichment/parsers/yarn_lock.py
@@ -0,0 +1,227 @@
+"""Parser for yarn.lock files (Yarn v1 and v2+)."""
+
+import re
+from pathlib import Path
+
+from ..models import PackageHash
+
+
+class YarnLockParser:
+    """Parser for yarn.lock files.
+
+    Yarn v1 format (custom text format):
+        package-name@^1.0.0:
+          version "1.2.3"
+          resolved "https://..."
+          integrity sha512-base64...
+
+    Yarn v2+ (Berry) uses YAML format:
+        "package-name@npm:^1.0.0":
+          version: 1.2.3
+          resolution: "package-name@npm:1.2.3"
+          checksum: sha512-base64...
+    """
+
+    name = "yarn-lock"
+    supported_files = ("yarn.lock",)
+    ecosystem = "npm"
+
+    def supports(self, lock_file_name: str) -> bool:
+        return lock_file_name in self.supported_files
+
+    def parse(self, lock_file_path: Path) -> list[PackageHash]:
+        """Parse yarn.lock and extract hashes.
+
+        Args:
+            lock_file_path: Path to yarn.lock file
+
+        Returns:
+            List of PackageHash objects for all packages with integrity hashes.
+        """
+        content = lock_file_path.read_text()
+
+        # Detect format by the Berry header comment (v1 uses a different banner)
+        if content.startswith("# This file is generated by running"):
+            # Yarn v2+ (Berry) YAML format
+            return self._parse_berry_format(content)
+        else:
+            # Yarn v1 custom format
+            return self._parse_v1_format(content)
+
+    def _parse_v1_format(self, content: str) -> list[PackageHash]:
+        """Parse Yarn v1 lockfile format.
+
+        Deduplicates by (name, version) to return one hash per package@version.
+        """
+        hashes: list[PackageHash] = []
+        seen: set[tuple[str, str]] = set()
+
+        # Pattern for package entries
+        # package-name@^1.0.0, package-name@~1.0.0:
+        #   version "1.2.3"
+        #   ...
+        #   integrity sha512-...
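+        #
+        # The scan below is a small state machine: a non-indented line
+        # opens a new entry, indented "version"/"integrity" lines fill it
+        # in, and _add_package() flushes on each new header and at EOF.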
+ + current_name: str | None = None + current_version: str | None = None + current_integrity: str | None = None + + def _add_package() -> None: + nonlocal current_name, current_version, current_integrity + if current_name and current_version and current_integrity: + key = (current_name, current_version) + if key not in seen: + seen.add(key) + pkg_hash = PackageHash.from_sri( + name=current_name, + version=current_version, + sri_hash=current_integrity, + artifact_type="tarball", + ) + if pkg_hash: + hashes.append(pkg_hash) + + for line in content.split("\n"): + # Package header line (e.g., "lodash@^4.17.21:") + if line and not line.startswith(" ") and not line.startswith("#"): + # Save previous package if complete + _add_package() + + # Parse new package name from header + current_name = self._extract_name_from_header(line) + current_version = None + current_integrity = None + + # Version line + elif line.strip().startswith('version "'): + match = re.search(r'version "([^"]+)"', line) + if match: + current_version = match.group(1) + + # Integrity line + elif line.strip().startswith("integrity "): + match = re.search(r"integrity (sha\d+-[A-Za-z0-9+/=]+)", line) + if match: + current_integrity = match.group(1) + + # Don't forget the last package + _add_package() + + return hashes + + def _parse_berry_format(self, content: str) -> list[PackageHash]: + """Parse Yarn v2+ (Berry) YAML lockfile format. + + Deduplicates by (name, version) to return one hash per package@version. + """ + import yaml + + hashes: list[PackageHash] = [] + seen: set[tuple[str, str]] = set() + + try: + data = yaml.safe_load(content) + except yaml.YAMLError: + return hashes + + if not isinstance(data, dict): + return hashes + + for key, pkg_data in data.items(): + if not isinstance(pkg_data, dict): + continue + + # Skip metadata keys + if key.startswith("__"): + continue + + # Extract package name from key (e.g., "lodash@npm:^4.17.21") + name = self._extract_name_from_berry_key(key) + if not name: + continue + + version = pkg_data.get("version") + checksum = pkg_data.get("checksum") + + if not version or not checksum: + continue + + # Deduplicate by (name, version) + pkg_key = (name, str(version)) + if pkg_key in seen: + continue + seen.add(pkg_key) + + # Berry checksums may have a prefix like "10/sha512-..." + # or just "sha512-..." 
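+            # (the numeric prefix is Yarn's cache-key version, not part
+            # of the SRI value, so only the part after "/" is kept)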
+ if "/" in checksum: + checksum = checksum.split("/", 1)[-1] + + pkg_hash = PackageHash.from_sri( + name=name, + version=str(version), + sri_hash=checksum, + artifact_type="tarball", + ) + if pkg_hash: + hashes.append(pkg_hash) + + return hashes + + @staticmethod + def _extract_name_from_header(header: str) -> str | None: + """Extract package name from v1 header line.""" + # Handle formats like: + # "lodash@^4.17.21:" + # '"@scope/name@^1.0.0":' + # "lodash@^4.17.21, lodash@~4.17.0:" + + header = header.strip().rstrip(":") + + # Remove quotes if present + if header.startswith('"') and header.endswith('"'): + header = header[1:-1] + + # Take first specifier if multiple + if ", " in header: + header = header.split(", ")[0] + + # Find the last @ that's not the start of a scoped package + if header.startswith("@"): + # Scoped package: @scope/name@version + at_pos = header.find("@", 1) + else: + # Unscoped package: name@version + at_pos = header.find("@") + + if at_pos == -1: + return None + + return header[:at_pos] + + @staticmethod + def _extract_name_from_berry_key(key: str) -> str | None: + """Extract package name from Berry key.""" + # Handle formats like: + # "lodash@npm:^4.17.21" + # "@scope/name@npm:^1.0.0" + + # Remove quotes if present + if key.startswith('"') and key.endswith('"'): + key = key[1:-1] + + # Find @npm: or @workspace: etc. + for protocol in ["@npm:", "@workspace:", "@patch:", "@portal:", "@link:"]: + if protocol in key: + return key.split(protocol)[0] + + # Fall back to finding last @ for version + if key.startswith("@"): + at_pos = key.find("@", 1) + else: + at_pos = key.find("@") + + if at_pos == -1: + return None + + return key[:at_pos] diff --git a/sbomify_action/_hash_enrichment/protocol.py b/sbomify_action/_hash_enrichment/protocol.py new file mode 100644 index 0000000..fea7740 --- /dev/null +++ b/sbomify_action/_hash_enrichment/protocol.py @@ -0,0 +1,84 @@ +"""Protocol definition for lockfile hash parsers.""" + +from pathlib import Path +from typing import Protocol + +from .models import PackageHash + + +class LockfileHashParser(Protocol): + """Protocol for lockfile hash extraction plugins. + + Each parser implements this protocol to extract hashes from a + specific lockfile format. Parsers are registered with ParserRegistry + and selected based on the lockfile name. + + Example: + class UvLockParser: + name = "uv-lock" + supported_files = ("uv.lock",) + + def supports(self, lock_file_name: str) -> bool: + return lock_file_name in self.supported_files + + def parse(self, lock_file_path: Path) -> list[PackageHash]: + # Parse uv.lock and return hashes + ... + """ + + @property + def name(self) -> str: + """Human-readable name of this parser. + + Used for logging and diagnostics. + Examples: "uv-lock", "cargo-lock", "npm-package-lock" + """ + ... + + @property + def supported_files(self) -> tuple[str, ...]: + """Lock file names this parser handles. + + Each entry is a filename (not a path), e.g., "uv.lock", "Cargo.lock". + A parser may support multiple filenames. + """ + ... + + @property + def ecosystem(self) -> str: + """Ecosystem identifier for package name normalization. + + Used by normalize_package_name() for matching. + Examples: "pypi", "npm", "cargo", "pub" + """ + ... + + def supports(self, lock_file_name: str) -> bool: + """Check if this parser can handle the given lockfile. + + Args: + lock_file_name: Filename (not full path) to check + + Returns: + True if this parser can parse the file. + """ + ... 
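+
+    # Note: concrete parsers satisfy the read-only properties above with
+    # plain class attributes (e.g. name = "uv-lock"), which structural
+    # typing accepts for a Protocol.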
+ + def parse(self, lock_file_path: Path) -> list[PackageHash]: + """Parse lockfile and extract all package hashes. + + Implementations should: + 1. Read the lockfile + 2. Extract package name, version, and hash(es) for each package + 3. Return PackageHash objects for all found hashes + + Args: + lock_file_path: Full path to the lockfile + + Returns: + List of PackageHash objects. Empty list if no hashes found. + + Raises: + FileProcessingError: If lockfile cannot be read or parsed. + """ + ... diff --git a/sbomify_action/_hash_enrichment/registry.py b/sbomify_action/_hash_enrichment/registry.py new file mode 100644 index 0000000..5de6761 --- /dev/null +++ b/sbomify_action/_hash_enrichment/registry.py @@ -0,0 +1,87 @@ +"""Registry for lockfile hash parsers.""" + +from pathlib import Path + +from ..logging_config import logger +from .models import PackageHash +from .protocol import LockfileHashParser + + +class ParserRegistry: + """Registry for lockfile hash parsers. + + Manages parser instances and dispatches parsing to the appropriate + parser based on lockfile name. + + Example: + registry = ParserRegistry() + registry.register(UvLockParser()) + registry.register(CargoLockParser()) + + hashes = registry.parse_lockfile(Path("uv.lock")) + """ + + def __init__(self) -> None: + self._parsers: list[LockfileHashParser] = [] + + def register(self, parser: LockfileHashParser) -> None: + """Register a parser. + + Args: + parser: Parser instance implementing LockfileHashParser protocol. + """ + self._parsers.append(parser) + logger.debug(f"Registered hash parser: {parser.name} for {parser.supported_files}") + + def get_parser_for(self, lock_file_name: str) -> LockfileHashParser | None: + """Get the parser that supports this lockfile. + + Args: + lock_file_name: Filename (not full path) to find parser for + + Returns: + Parser instance if found, None otherwise. + """ + for parser in self._parsers: + if parser.supports(lock_file_name): + return parser + return None + + def parse_lockfile(self, lock_file_path: Path) -> list[PackageHash]: + """Parse a lockfile using the appropriate parser. + + Args: + lock_file_path: Full path to the lockfile + + Returns: + List of PackageHash objects extracted from the lockfile. + Empty list if no parser found or no hashes extracted. 
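+
+        Example (illustrative; assumes parsers were registered as in the
+        class docstring):
+            >>> registry.parse_lockfile(Path("uv.lock"))  # doctest: +SKIP
+            [PackageHash(name='django', version='5.1.1', ...), ...]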
+ """ + lock_file_name = lock_file_path.name + parser = self.get_parser_for(lock_file_name) + + if parser is None: + logger.debug(f"No hash parser found for lockfile: {lock_file_name}") + return [] + + logger.debug(f"Using {parser.name} to parse {lock_file_name}") + try: + hashes = parser.parse(lock_file_path) + logger.debug(f"Extracted {len(hashes)} hash(es) from {lock_file_name}") + return hashes + except Exception as e: + logger.warning(f"Failed to parse {lock_file_name} for hashes: {e}") + return [] + + @property + def registered_parsers(self) -> list[str]: + """Get names of all registered parsers.""" + return [p.name for p in self._parsers] + + @property + def supported_files(self) -> set[str]: + """Get all supported lockfile names.""" + result: set[str] = set() + for parser in self._parsers: + result.update(parser.supported_files) + return result diff --git a/sbomify_action/cli/main.py b/sbomify_action/cli/main.py index a2ed36d..6f5ed78 100644 --- a/sbomify_action/cli/main.py +++ b/sbomify_action/cli/main.py @@ -1071,6 +1071,35 @@ def run_pipeline(config: Config) -> None: ) # Don't fail the entire process for additional packages injection issues + # Step 1.5: Hash Enrichment from Lockfile (if lockfile was used for generation) + if config.lock_file: + _log_step_header(1.5, "Hash Enrichment from Lockfile") + try: + from sbomify_action._hash_enrichment import enrich_sbom_with_hashes + + logger.info(f"Extracting hashes from lockfile: {config.lock_file}") + + stats = enrich_sbom_with_hashes( + sbom_file=STEP_1_FILE, + lock_file=config.lock_file, + overwrite_existing=False, + ) + + if stats["hashes_added"] > 0: + logger.info( + f"Added {stats['hashes_added']} hash(es) to " + f"{stats['components_matched']}/{stats['sbom_components']} component(s)" + ) + else: + logger.info("No additional hashes to add from lockfile") + + _log_step_end(1.5) + + except Exception as e: + logger.warning(f"Hash enrichment failed (non-fatal): {e}") + _log_step_end(1.5, success=False) + # Don't fail the entire process for hash enrichment issues + # Step 2: Augmentation if config.augment: _log_step_header(2, "SBOM Augmentation with Backend Metadata") diff --git a/sbomify_action/console.py b/sbomify_action/console.py index 7f89c8f..c56cd29 100644 --- a/sbomify_action/console.py +++ b/sbomify_action/console.py @@ -625,6 +625,16 @@ def record_component_enriched( self._add_entry("ENRICHMENT", "added", field_name, component=purl, source=source) self._enrichment_count += len(fields_added) + def record_hash_added( + self, + component: str, + algorithm: str, + source: str = "lockfile", + ) -> None: + """Record a hash added to a component from lockfile.""" + self._add_entry("ENRICHMENT", "added", f"hash.{algorithm}", component=component, source=source) + self._enrichment_count += 1 + # ========================================================================== # Sanitization Recording (Legacy compatibility + new interface) # ========================================================================== diff --git a/sbomify_action/enrichment.py b/sbomify_action/enrichment.py index c2bd031..c0a411c 100644 --- a/sbomify_action/enrichment.py +++ b/sbomify_action/enrichment.py @@ -492,7 +492,9 @@ def _is_spdx_license_empty(license_value) -> bool: return False -def _apply_metadata_to_spdx_package(package: Package, metadata: NormalizedMetadata) -> List[str]: +def _apply_metadata_to_spdx_package( + package: Package, metadata: NormalizedMetadata, source: str = "unknown" +) -> List[str]: """ Apply NormalizedMetadata to an SPDX package. 
@@ -501,11 +503,14 @@ def _apply_metadata_to_spdx_package(package: Package, metadata: NormalizedMetada Args: package: Package to enrich metadata: Normalized metadata to apply + source: Data source name for audit trail Returns: List of added field names for logging """ added_fields = [] + audit_trail = get_audit_trail() + purl_str = package.spdx_id or package.name # Description (sanitized) if not package.description and metadata.description: @@ -635,6 +640,10 @@ def _add_external_ref(category: ExternalPackageRefCategory, ref_type: str, locat package.comment = cle_comment added_fields.append("comment (CLE)") + # Record to audit trail if any fields were added + if added_fields: + audit_trail.record_component_enriched(purl_str, added_fields, source) + return added_fields @@ -1009,10 +1018,10 @@ def _enrich_spdx_document_with_plugin_architecture(document: Document, enricher: if purl_str: metadata = enricher.fetch_metadata(purl_str, merge_results=True) if metadata and metadata.has_data(): - added_fields = _apply_metadata_to_spdx_package(package, metadata) + primary_source = metadata.source.split(", ")[0] if metadata.source else "unknown" + added_fields = _apply_metadata_to_spdx_package(package, metadata, source=primary_source) if added_fields: enrichment_source = metadata.source - primary_source = metadata.source.split(", ")[0] if metadata.source else "unknown" stats["sources"][primary_source] = stats["sources"].get(primary_source, 0) + 1 if added_fields: diff --git a/tests/test_hash_enrichment.py b/tests/test_hash_enrichment.py new file mode 100644 index 0000000..26d8901 --- /dev/null +++ b/tests/test_hash_enrichment.py @@ -0,0 +1,490 @@ +"""Tests for the hash enrichment subsystem.""" + +import json +import tempfile +from pathlib import Path + +import pytest + +from sbomify_action._hash_enrichment import ( + HashAlgorithm, + HashEnricher, + PackageHash, + create_default_registry, + enrich_sbom_with_hashes, + normalize_package_name, +) +from sbomify_action._hash_enrichment.parsers import ( + CargoLockParser, + PipfileLockParser, + PubspecLockParser, + UvLockParser, +) + + +class TestHashAlgorithm: + """Tests for HashAlgorithm enum.""" + + def test_from_prefix_sha256(self): + """Test parsing sha256 prefix.""" + assert HashAlgorithm.from_prefix("sha256") == HashAlgorithm.SHA256 + assert HashAlgorithm.from_prefix("SHA256") == HashAlgorithm.SHA256 + assert HashAlgorithm.from_prefix("sha-256") == HashAlgorithm.SHA256 + + def test_from_prefix_sha512(self): + """Test parsing sha512 prefix.""" + assert HashAlgorithm.from_prefix("sha512") == HashAlgorithm.SHA512 + assert HashAlgorithm.from_prefix("SHA512") == HashAlgorithm.SHA512 + + def test_from_prefix_unknown(self): + """Test unknown prefix returns None.""" + assert HashAlgorithm.from_prefix("unknown") is None + assert HashAlgorithm.from_prefix("") is None + + def test_cyclonedx_alg(self): + """Test CycloneDX algorithm name conversion.""" + assert HashAlgorithm.SHA256.cyclonedx_alg == "SHA-256" + assert HashAlgorithm.SHA512.cyclonedx_alg == "SHA-512" + assert HashAlgorithm.SHA1.cyclonedx_alg == "SHA-1" + + def test_spdx_alg(self): + """Test SPDX algorithm name conversion.""" + assert HashAlgorithm.SHA256.spdx_alg == "SHA256" + assert HashAlgorithm.SHA512.spdx_alg == "SHA512" + assert HashAlgorithm.SHA1.spdx_alg == "SHA1" + + +class TestPackageHash: + """Tests for PackageHash dataclass.""" + + def test_from_prefixed_sha256(self): + """Test parsing sha256:... 
format.""" + pkg_hash = PackageHash.from_prefixed( + "django", "5.1.1", "sha256:abc123def456" + ) + assert pkg_hash is not None + assert pkg_hash.name == "django" + assert pkg_hash.version == "5.1.1" + assert pkg_hash.algorithm == HashAlgorithm.SHA256 + assert pkg_hash.value == "abc123def456" + + def test_from_prefixed_sha512(self): + """Test parsing sha512:... format.""" + pkg_hash = PackageHash.from_prefixed( + "requests", "2.31.0", "sha512:fedcba987654" + ) + assert pkg_hash is not None + assert pkg_hash.algorithm == HashAlgorithm.SHA512 + assert pkg_hash.value == "fedcba987654" + + def test_from_prefixed_no_colon(self): + """Test that missing colon returns None.""" + assert PackageHash.from_prefixed("pkg", "1.0", "nocolon") is None + + def test_from_prefixed_unknown_algorithm(self): + """Test that unknown algorithm returns None.""" + assert PackageHash.from_prefixed("pkg", "1.0", "unknown:hash") is None + + def test_from_sri_sha512(self): + """Test parsing SRI format (sha512-base64).""" + # Valid SHA512 SRI hash + pkg_hash = PackageHash.from_sri( + "lodash", + "4.17.21", + "sha512-Dh4h7PEF7IU9JNcohnrXBhPCFmOkaTB0sqNhnBvTnWa1iMM3I7tGbHJCToDjymPCSQeKs0e6uUKFAOfuQwWdDQ==", + ) + assert pkg_hash is not None + assert pkg_hash.name == "lodash" + assert pkg_hash.version == "4.17.21" + assert pkg_hash.algorithm == HashAlgorithm.SHA512 + # Value should be hex-encoded + assert len(pkg_hash.value) == 128 # SHA512 = 64 bytes = 128 hex chars + + def test_from_sri_invalid(self): + """Test that invalid SRI returns None.""" + assert PackageHash.from_sri("pkg", "1.0", "nohyphen") is None + assert PackageHash.from_sri("pkg", "1.0", "unknown-base64") is None + + +class TestNormalizePackageName: + """Tests for package name normalization.""" + + def test_pypi_normalization(self): + """Test PyPI name normalization.""" + assert normalize_package_name("Django", "pypi") == "django" + assert normalize_package_name("django-rest-framework", "pypi") == "django_rest_framework" + assert normalize_package_name("Pillow", "pypi") == "pillow" + assert normalize_package_name("zope.interface", "pypi") == "zope_interface" + + def test_npm_normalization(self): + """Test npm name normalization.""" + assert normalize_package_name("Lodash", "npm") == "lodash" + # Scoped packages are also case-insensitive + assert normalize_package_name("@types/Node", "npm") == "@types/node" + assert normalize_package_name("@Scope/Package", "npm") == "@scope/package" + + def test_cargo_normalization(self): + """Test Cargo name normalization.""" + assert normalize_package_name("serde-json", "cargo") == "serde_json" + assert normalize_package_name("Tokio", "cargo") == "tokio" + + +class TestUvLockParser: + """Tests for uv.lock parser.""" + + @pytest.fixture + def uv_lock_content(self): + return ''' +version = 1 + +[[package]] +name = "django" +version = "5.1.1" +sdist = { url = "...", hash = "sha256:abc123def", size = 100 } +wheels = [ + { url = "...", hash = "sha256:wheel1hash", size = 50 }, + { url = "...", hash = "sha256:wheel2hash", size = 50 }, +] + +[[package]] +name = "requests" +version = "2.31.0" +sdist = { url = "...", hash = "sha256:reqhash", size = 100 } +''' + + def test_parse_uv_lock(self, uv_lock_content, tmp_path): + """Test parsing uv.lock file.""" + lock_file = tmp_path / "uv.lock" + lock_file.write_text(uv_lock_content) + + parser = UvLockParser() + assert parser.supports("uv.lock") + assert not parser.supports("Cargo.lock") + + hashes = parser.parse(lock_file) + + # Should have 2 hashes: 1 per package (wheel preferred over 
sdist) + assert len(hashes) == 2 + + django_hash = next(h for h in hashes if h.name == "django") + # Should prefer wheel over sdist + assert django_hash.artifact_type == "wheel" + assert django_hash.value == "wheel1hash" + assert django_hash.algorithm == HashAlgorithm.SHA256 + + # requests only has sdist + requests_hash = next(h for h in hashes if h.name == "requests") + assert requests_hash.artifact_type == "sdist" + assert requests_hash.value == "reqhash" + + def test_parse_real_uv_lock(self): + """Test parsing actual uv.lock from test data.""" + lock_file = Path("tests/test-data/uv.lock") + if not lock_file.exists(): + pytest.skip("Test data file not found") + + parser = UvLockParser() + hashes = parser.parse(lock_file) + + assert len(hashes) > 0 + # All hashes should be SHA256 (uv uses sha256) + assert all(h.algorithm == HashAlgorithm.SHA256 for h in hashes) + + +class TestPipfileLockParser: + """Tests for Pipfile.lock parser.""" + + def test_parse_real_pipfile_lock(self): + """Test parsing actual Pipfile.lock from test data.""" + lock_file = Path("tests/test-data/Pipfile.lock") + if not lock_file.exists(): + pytest.skip("Test data file not found") + + parser = PipfileLockParser() + assert parser.supports("Pipfile.lock") + + hashes = parser.parse(lock_file) + + assert len(hashes) > 0 + # Should find django + django_hashes = [h for h in hashes if h.name == "django"] + assert len(django_hashes) > 0 + assert django_hashes[0].version == "5.1.1" + + +class TestCargoLockParser: + """Tests for Cargo.lock parser.""" + + def test_parse_real_cargo_lock(self): + """Test parsing actual Cargo.lock from test data.""" + lock_file = Path("tests/test-data/Cargo.lock") + if not lock_file.exists(): + pytest.skip("Test data file not found") + + parser = CargoLockParser() + assert parser.supports("Cargo.lock") + + hashes = parser.parse(lock_file) + + assert len(hashes) > 0 + # All Cargo hashes are SHA256 + assert all(h.algorithm == HashAlgorithm.SHA256 for h in hashes) + # All should be crate type + assert all(h.artifact_type == "crate" for h in hashes) + + +class TestPubspecLockParser: + """Tests for pubspec.lock parser.""" + + def test_parse_real_pubspec_lock(self): + """Test parsing actual pubspec.lock from test data.""" + lock_file = Path("tests/test-data/pubspec.lock") + if not lock_file.exists(): + pytest.skip("Test data file not found") + + parser = PubspecLockParser() + assert parser.supports("pubspec.lock") + + hashes = parser.parse(lock_file) + + assert len(hashes) > 0 + # All pubspec hashes are SHA256 + assert all(h.algorithm == HashAlgorithm.SHA256 for h in hashes) + + +class TestHashEnricher: + """Tests for HashEnricher class.""" + + @pytest.fixture + def sample_cyclonedx_sbom(self): + """Create a sample CycloneDX SBOM.""" + return { + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "version": 1, + "components": [ + { + "type": "library", + "name": "django", + "version": "5.1.1", + "purl": "pkg:pypi/django@5.1.1", + }, + { + "type": "library", + "name": "requests", + "version": "2.31.0", + "purl": "pkg:pypi/requests@2.31.0", + }, + ], + } + + @pytest.fixture + def sample_spdx_sbom(self): + """Create a sample SPDX SBOM.""" + return { + "spdxVersion": "SPDX-2.3", + "SPDXID": "SPDXRef-DOCUMENT", + "name": "test-sbom", + "packages": [ + { + "SPDXID": "SPDXRef-Package-django", + "name": "django", + "versionInfo": "5.1.1", + }, + { + "SPDXID": "SPDXRef-Package-requests", + "name": "requests", + "versionInfo": "2.31.0", + }, + ], + } + + @pytest.fixture + def sample_uv_lock(self, tmp_path): + 
"""Create a sample uv.lock file.""" + content = ''' +version = 1 + +[[package]] +name = "django" +version = "5.1.1" +sdist = { hash = "sha256:abc123" } + +[[package]] +name = "requests" +version = "2.31.0" +sdist = { hash = "sha256:def456" } +''' + lock_file = tmp_path / "uv.lock" + lock_file.write_text(content) + return lock_file + + def test_enrich_cyclonedx_adds_hashes( + self, sample_cyclonedx_sbom, sample_uv_lock, tmp_path + ): + """Test that CycloneDX components get hashes added.""" + # Write SBOM to file + sbom_file = tmp_path / "sbom.json" + sbom_file.write_text(json.dumps(sample_cyclonedx_sbom)) + + # Enrich + stats = enrich_sbom_with_hashes( + sbom_file=str(sbom_file), + lock_file=str(sample_uv_lock), + ) + + assert stats["hashes_added"] == 2 + assert stats["components_matched"] == 2 + + # Verify hashes in output + enriched = json.loads(sbom_file.read_text()) + django_comp = next( + c for c in enriched["components"] if c["name"] == "django" + ) + assert "hashes" in django_comp + assert len(django_comp["hashes"]) == 1 + assert django_comp["hashes"][0]["alg"] == "SHA-256" + assert django_comp["hashes"][0]["content"] == "abc123" + + def test_enrich_spdx_adds_checksums( + self, sample_spdx_sbom, sample_uv_lock, tmp_path + ): + """Test that SPDX packages get checksums added.""" + # Write SBOM to file + sbom_file = tmp_path / "sbom.json" + sbom_file.write_text(json.dumps(sample_spdx_sbom)) + + # Enrich + stats = enrich_sbom_with_hashes( + sbom_file=str(sbom_file), + lock_file=str(sample_uv_lock), + ) + + assert stats["hashes_added"] == 2 + assert stats["components_matched"] == 2 + + # Verify checksums in output + enriched = json.loads(sbom_file.read_text()) + django_pkg = next( + p for p in enriched["packages"] if p["name"] == "django" + ) + assert "checksums" in django_pkg + assert len(django_pkg["checksums"]) == 1 + assert django_pkg["checksums"][0]["algorithm"] == "SHA256" + assert django_pkg["checksums"][0]["checksumValue"] == "abc123" + + def test_enrich_skips_existing_hashes(self, tmp_path): + """Test that existing hashes are not overwritten by default.""" + # SBOM with existing hash + sbom = { + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "version": 1, + "components": [ + { + "type": "library", + "name": "django", + "version": "5.1.1", + "hashes": [{"alg": "SHA-256", "content": "existing"}], + }, + ], + } + sbom_file = tmp_path / "sbom.json" + sbom_file.write_text(json.dumps(sbom)) + + # Lock file with different hash + lock_content = ''' +version = 1 + +[[package]] +name = "django" +version = "5.1.1" +sdist = { hash = "sha256:newvalue" } +''' + lock_file = tmp_path / "uv.lock" + lock_file.write_text(lock_content) + + # Enrich without overwrite + stats = enrich_sbom_with_hashes( + sbom_file=str(sbom_file), + lock_file=str(lock_file), + overwrite_existing=False, + ) + + assert stats["hashes_added"] == 0 + assert stats["hashes_skipped"] == 1 + + # Verify original hash preserved + enriched = json.loads(sbom_file.read_text()) + django_comp = enriched["components"][0] + assert django_comp["hashes"][0]["content"] == "existing" + + def test_enrich_with_overwrite(self, tmp_path): + """Test that overwrite_existing=True replaces hashes.""" + # SBOM with existing hash + sbom = { + "bomFormat": "CycloneDX", + "specVersion": "1.6", + "version": 1, + "components": [ + { + "type": "library", + "name": "django", + "version": "5.1.1", + "hashes": [{"alg": "SHA-256", "content": "existing"}], + }, + ], + } + sbom_file = tmp_path / "sbom.json" + sbom_file.write_text(json.dumps(sbom)) + + # 
Lock file with different hash + lock_content = ''' +version = 1 + +[[package]] +name = "django" +version = "5.1.1" +sdist = { hash = "sha256:newvalue" } +''' + lock_file = tmp_path / "uv.lock" + lock_file.write_text(lock_content) + + # Enrich with overwrite + stats = enrich_sbom_with_hashes( + sbom_file=str(sbom_file), + lock_file=str(lock_file), + overwrite_existing=True, + ) + + assert stats["hashes_added"] == 1 + + # Verify new hash replaced old + enriched = json.loads(sbom_file.read_text()) + django_comp = enriched["components"][0] + assert len(django_comp["hashes"]) == 1 + assert django_comp["hashes"][0]["content"] == "newvalue" + + +class TestParserRegistry: + """Tests for ParserRegistry.""" + + def test_default_registry_has_parsers(self): + """Test that default registry has all expected parsers.""" + registry = create_default_registry() + + # Check some expected parsers + assert registry.get_parser_for("uv.lock") is not None + assert registry.get_parser_for("Pipfile.lock") is not None + assert registry.get_parser_for("poetry.lock") is not None + assert registry.get_parser_for("Cargo.lock") is not None + assert registry.get_parser_for("pubspec.lock") is not None + assert registry.get_parser_for("package-lock.json") is not None + assert registry.get_parser_for("yarn.lock") is not None + assert registry.get_parser_for("pnpm-lock.yaml") is not None + + def test_registry_returns_none_for_unknown(self): + """Test that registry returns None for unknown lockfiles.""" + registry = create_default_registry() + assert registry.get_parser_for("unknown.lock") is None + assert registry.get_parser_for("requirements.txt") is None From af82b19d179fa5e279c1ecf348a0659131f24c04 Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 12:14:32 +0100 Subject: [PATCH 7/8] Fix linting: formatting and unused imports Co-Authored-By: Claude Opus 4.5 --- .../_hash_enrichment/parsers/package_lock.py | 4 +- tests/test_hash_enrichment.py | 42 +++++++------------ 2 files changed, 15 insertions(+), 31 deletions(-) diff --git a/sbomify_action/_hash_enrichment/parsers/package_lock.py b/sbomify_action/_hash_enrichment/parsers/package_lock.py index 35a7279..79ba303 100644 --- a/sbomify_action/_hash_enrichment/parsers/package_lock.py +++ b/sbomify_action/_hash_enrichment/parsers/package_lock.py @@ -104,9 +104,7 @@ def _extract_package_name(self, pkg_path: str) -> str | None: return name_part if name_part else None - def _parse_dependencies( - self, dependencies: dict, seen: set[tuple[str, str]] | None = None - ) -> list[PackageHash]: + def _parse_dependencies(self, dependencies: dict, seen: set[tuple[str, str]] | None = None) -> list[PackageHash]: """Parse v1 format dependencies recursively. Deduplicates by (name, version) to avoid returning multiple hashes diff --git a/tests/test_hash_enrichment.py b/tests/test_hash_enrichment.py index 26d8901..cc51fda 100644 --- a/tests/test_hash_enrichment.py +++ b/tests/test_hash_enrichment.py @@ -1,14 +1,12 @@ """Tests for the hash enrichment subsystem.""" import json -import tempfile from pathlib import Path import pytest from sbomify_action._hash_enrichment import ( HashAlgorithm, - HashEnricher, PackageHash, create_default_registry, enrich_sbom_with_hashes, @@ -59,9 +57,7 @@ class TestPackageHash: def test_from_prefixed_sha256(self): """Test parsing sha256:... 
format.""" - pkg_hash = PackageHash.from_prefixed( - "django", "5.1.1", "sha256:abc123def456" - ) + pkg_hash = PackageHash.from_prefixed("django", "5.1.1", "sha256:abc123def456") assert pkg_hash is not None assert pkg_hash.name == "django" assert pkg_hash.version == "5.1.1" @@ -70,9 +66,7 @@ def test_from_prefixed_sha256(self): def test_from_prefixed_sha512(self): """Test parsing sha512:... format.""" - pkg_hash = PackageHash.from_prefixed( - "requests", "2.31.0", "sha512:fedcba987654" - ) + pkg_hash = PackageHash.from_prefixed("requests", "2.31.0", "sha512:fedcba987654") assert pkg_hash is not None assert pkg_hash.algorithm == HashAlgorithm.SHA512 assert pkg_hash.value == "fedcba987654" @@ -134,7 +128,7 @@ class TestUvLockParser: @pytest.fixture def uv_lock_content(self): - return ''' + return """ version = 1 [[package]] @@ -150,7 +144,7 @@ def uv_lock_content(self): name = "requests" version = "2.31.0" sdist = { url = "...", hash = "sha256:reqhash", size = 100 } -''' +""" def test_parse_uv_lock(self, uv_lock_content, tmp_path): """Test parsing uv.lock file.""" @@ -302,7 +296,7 @@ def sample_spdx_sbom(self): @pytest.fixture def sample_uv_lock(self, tmp_path): """Create a sample uv.lock file.""" - content = ''' + content = """ version = 1 [[package]] @@ -314,14 +308,12 @@ def sample_uv_lock(self, tmp_path): name = "requests" version = "2.31.0" sdist = { hash = "sha256:def456" } -''' +""" lock_file = tmp_path / "uv.lock" lock_file.write_text(content) return lock_file - def test_enrich_cyclonedx_adds_hashes( - self, sample_cyclonedx_sbom, sample_uv_lock, tmp_path - ): + def test_enrich_cyclonedx_adds_hashes(self, sample_cyclonedx_sbom, sample_uv_lock, tmp_path): """Test that CycloneDX components get hashes added.""" # Write SBOM to file sbom_file = tmp_path / "sbom.json" @@ -338,17 +330,13 @@ def test_enrich_cyclonedx_adds_hashes( # Verify hashes in output enriched = json.loads(sbom_file.read_text()) - django_comp = next( - c for c in enriched["components"] if c["name"] == "django" - ) + django_comp = next(c for c in enriched["components"] if c["name"] == "django") assert "hashes" in django_comp assert len(django_comp["hashes"]) == 1 assert django_comp["hashes"][0]["alg"] == "SHA-256" assert django_comp["hashes"][0]["content"] == "abc123" - def test_enrich_spdx_adds_checksums( - self, sample_spdx_sbom, sample_uv_lock, tmp_path - ): + def test_enrich_spdx_adds_checksums(self, sample_spdx_sbom, sample_uv_lock, tmp_path): """Test that SPDX packages get checksums added.""" # Write SBOM to file sbom_file = tmp_path / "sbom.json" @@ -365,9 +353,7 @@ def test_enrich_spdx_adds_checksums( # Verify checksums in output enriched = json.loads(sbom_file.read_text()) - django_pkg = next( - p for p in enriched["packages"] if p["name"] == "django" - ) + django_pkg = next(p for p in enriched["packages"] if p["name"] == "django") assert "checksums" in django_pkg assert len(django_pkg["checksums"]) == 1 assert django_pkg["checksums"][0]["algorithm"] == "SHA256" @@ -393,14 +379,14 @@ def test_enrich_skips_existing_hashes(self, tmp_path): sbom_file.write_text(json.dumps(sbom)) # Lock file with different hash - lock_content = ''' + lock_content = """ version = 1 [[package]] name = "django" version = "5.1.1" sdist = { hash = "sha256:newvalue" } -''' +""" lock_file = tmp_path / "uv.lock" lock_file.write_text(lock_content) @@ -439,14 +425,14 @@ def test_enrich_with_overwrite(self, tmp_path): sbom_file.write_text(json.dumps(sbom)) # Lock file with different hash - lock_content = ''' + lock_content = """ version = 1 
[[package]] name = "django" version = "5.1.1" sdist = { hash = "sha256:newvalue" } -''' +""" lock_file = tmp_path / "uv.lock" lock_file.write_text(lock_content) From f14e53e947d209ab89c308fb4d501e32e4c9e428 Mon Sep 17 00:00:00 2001 From: Viktor Petersson Date: Sat, 31 Jan 2026 12:23:42 +0100 Subject: [PATCH 8/8] Require Python 3.11+ for tomllib module support The hash enrichment parsers use tomllib which is only available in Python 3.11+. Updated requirements and CI workflow accordingly. Co-Authored-By: Claude Opus 4.5 --- .github/workflows/sbomify.yaml | 2 +- pyproject.toml | 2 +- uv.lock | 137 +-------------------------------- 3 files changed, 3 insertions(+), 138 deletions(-) diff --git a/.github/workflows/sbomify.yaml b/.github/workflows/sbomify.yaml index 026ab77..a98a7e8 100644 --- a/.github/workflows/sbomify.yaml +++ b/.github/workflows/sbomify.yaml @@ -53,7 +53,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.10' + python-version: '3.11' - name: Install UV run: | diff --git a/pyproject.toml b/pyproject.toml index 3babf0c..f98c0ac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "sbomify-action" version = "0.12" description = "Generate, augment, enrich, and manage SBOMs in your CI/CD pipeline" authors = [{ name = "sbomify", email = "hello@sbomify.com" }] -requires-python = ">=3.10" +requires-python = ">=3.11" readme = "README.md" license = "Apache-2.0" keywords = ["sbom", "cyclonedx", "spdx", "supply-chain", "security", "bom", "software-composition-analysis"] diff --git a/uv.lock b/uv.lock index 48d2673..7101e37 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,6 @@ version = 1 revision = 3 -requires-python = ">=3.10" +requires-python = ">=3.11" [[package]] name = "arrow" @@ -69,14 +69,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/bd/b1a6362b80628111e6653c961f987faa55262b4002fcec42308cad1db680/cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c", size = 208811, upload-time = "2025-09-08T23:22:12.267Z" }, - { url = "https://files.pythonhosted.org/packages/4f/27/6933a8b2562d7bd1fb595074cf99cc81fc3789f6a6c05cdabb46284a3188/cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb", size = 216402, upload-time = "2025-09-08T23:22:13.455Z" }, - { url = "https://files.pythonhosted.org/packages/05/eb/b86f2a2645b62adcfff53b0dd97e8dfafb5c8aa864bd0d9a2c2049a0d551/cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0", size = 203217, upload-time = "2025-09-08T23:22:14.596Z" }, - { url = "https://files.pythonhosted.org/packages/9f/e0/6cbe77a53acf5acc7c08cc186c9928864bd7c005f9efd0d126884858a5fe/cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4", size = 203079, upload-time = "2025-09-08T23:22:15.769Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/29/9b366e70e243eb3d14a5cb488dfd3a0b6b2f1fb001a203f653b93ccfac88/cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453", size = 216475, upload-time = "2025-09-08T23:22:17.427Z" }, - { url = "https://files.pythonhosted.org/packages/21/7a/13b24e70d2f90a322f2900c5d8e1f14fa7e2a6b3332b7309ba7b2ba51a5a/cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495", size = 218829, upload-time = "2025-09-08T23:22:19.069Z" }, - { url = "https://files.pythonhosted.org/packages/60/99/c9dc110974c59cc981b1f5b66e1d8af8af764e00f0293266824d9c4254bc/cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5", size = 211211, upload-time = "2025-09-08T23:22:20.588Z" }, - { url = "https://files.pythonhosted.org/packages/49/72/ff2d12dbf21aca1b32a40ed792ee6b40f6dc3a9cf1644bd7ef6e95e0ac5e/cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb", size = 218036, upload-time = "2025-09-08T23:22:22.143Z" }, { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, @@ -137,22 +129,6 @@ version = "3.4.4" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, - { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, - { url = "https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, - { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, - { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, - { url = "https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, - { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, - { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, - { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, - { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, - { url = 
"https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, - { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 107198, upload-time = "2025-10-14T04:40:30.644Z" }, - { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, @@ -264,18 +240,6 @@ version = "7.13.1" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" }, - { url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" }, - { url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" }, - { url = "https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" }, - { url = "https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" }, - { url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" }, - { url = "https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" }, - { url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" }, - { url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" }, - { url = "https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" }, - { url = "https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" }, - { url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" }, { url = "https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" }, { url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" }, { url = "https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" }, @@ -368,7 +332,6 @@ version = "46.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } wheels = [ @@ -421,7 +384,6 @@ dependencies = [ { name = "packageurl-python" }, { name = "packaging" }, { name = "pip-requirements-parser" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/18/b4/d6a3eee8622389893480758ada629842b8667e326ec8da311dbc7f5087f4/cyclonedx_bom-7.2.1.tar.gz", hash = "sha256:ead9923a23c71426bcc83ea371c87945b85f76c31728625dde35ecfe0fa2e712", size = 4416994, upload-time = "2025-10-29T15:31:47.238Z" } wheels = [ @@ -487,18 +449,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/02/10/5da547df7a391dcde17f59520a231527b8571e6f46fc8efb02ccb370ab12/docutils-0.22.4-py3-none-any.whl", hash = "sha256:d0013f540772d1420576855455d050a2180186c91c15779301ac2ccb3eeb68de", size = 633196, upload-time = "2025-12-18T19:00:18.077Z" }, ] -[[package]] -name = "exceptiongroup" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, -] - [[package]] name = "fasteners" version = "0.20" @@ -577,15 +527,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] -[[package]] -name = "isodate" -version = "0.7.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/4d/e940025e2ce31a8ce1202635910747e5a87cc3a6a6bb2d00973375014749/isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6", size = 29705, upload-time = "2024-10-08T23:04:11.5Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/15/aa/0aca39a37d3c7eb941ba736ede56d689e7be91cab5d9ca846bde3999eba6/isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15", size = 22320, upload-time = "2024-10-08T23:04:09.501Z" }, -] - [[package]] name = "isoduration" version = "20.11.0" @@ -749,22 +690,6 @@ version = "6.0.2" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426, upload-time = "2025-09-22T04:04:59.287Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/db/8a/f8192a08237ef2fb1b19733f709db88a4c43bc8ab8357f01cb41a27e7f6a/lxml-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e77dd455b9a16bbd2a5036a63ddbd479c19572af81b624e79ef422f929eef388", size = 8590589, upload-time = "2025-09-22T04:00:10.51Z" }, - { url = "https://files.pythonhosted.org/packages/12/64/27bcd07ae17ff5e5536e8d88f4c7d581b48963817a13de11f3ac3329bfa2/lxml-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d444858b9f07cefff6455b983aea9a67f7462ba1f6cbe4a21e8bf6791bf2153", size = 4629671, upload-time = "2025-09-22T04:00:15.411Z" }, - { url = "https://files.pythonhosted.org/packages/02/5a/a7d53b3291c324e0b6e48f3c797be63836cc52156ddf8f33cd72aac78866/lxml-6.0.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f952dacaa552f3bb8834908dddd500ba7d508e6ea6eb8c52eb2d28f48ca06a31", size = 4999961, upload-time = "2025-09-22T04:00:17.619Z" }, - { url = "https://files.pythonhosted.org/packages/f5/55/d465e9b89df1761674d8672bb3e4ae2c47033b01ec243964b6e334c6743f/lxml-6.0.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:71695772df6acea9f3c0e59e44ba8ac50c4f125217e84aab21074a1a55e7e5c9", size = 5157087, upload-time = "2025-09-22T04:00:19.868Z" }, - { url = "https://files.pythonhosted.org/packages/62/38/3073cd7e3e8dfc3ba3c3a139e33bee3a82de2bfb0925714351ad3d255c13/lxml-6.0.2-cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:17f68764f35fd78d7c4cc4ef209a184c38b65440378013d24b8aecd327c3e0c8", size = 5067620, upload-time = "2025-09-22T04:00:21.877Z" }, - { url = "https://files.pythonhosted.org/packages/4a/d3/1e001588c5e2205637b08985597827d3827dbaaece16348c8822bfe61c29/lxml-6.0.2-cp310-cp310-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:058027e261afed589eddcfe530fcc6f3402d7fd7e89bfd0532df82ebc1563dba", size = 5406664, upload-time = "2025-09-22T04:00:23.714Z" }, - { url = "https://files.pythonhosted.org/packages/20/cf/cab09478699b003857ed6ebfe95e9fb9fa3d3c25f1353b905c9b73cfb624/lxml-6.0.2-cp310-cp310-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8ffaeec5dfea5881d4c9d8913a32d10cfe3923495386106e4a24d45300ef79c", size = 5289397, upload-time = "2025-09-22T04:00:25.544Z" }, - { url = "https://files.pythonhosted.org/packages/a3/84/02a2d0c38ac9a8b9f9e5e1bbd3f24b3f426044ad618b552e9549ee91bd63/lxml-6.0.2-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:f2e3b1a6bb38de0bc713edd4d612969dd250ca8b724be8d460001a387507021c", size = 4772178, upload-time = "2025-09-22T04:00:27.602Z" }, - { url = "https://files.pythonhosted.org/packages/56/87/e1ceadcc031ec4aa605fe95476892d0b0ba3b7f8c7dcdf88fdeff59a9c86/lxml-6.0.2-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d6690ec5ec1cce0385cb20896b16be35247ac8c2046e493d03232f1c2414d321", size = 5358148, upload-time = "2025-09-22T04:00:29.323Z" }, - { url = "https://files.pythonhosted.org/packages/fe/13/5bb6cf42bb228353fd4ac5f162c6a84fd68a4d6f67c1031c8cf97e131fc6/lxml-6.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2a50c3c1d11cad0ebebbac357a97b26aa79d2bcaf46f256551152aa85d3a4d1", size = 5112035, upload-time = "2025-09-22T04:00:31.061Z" }, - { url = "https://files.pythonhosted.org/packages/e4/e2/ea0498552102e59834e297c5c6dff8d8ded3db72ed5e8aad77871476f073/lxml-6.0.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3efe1b21c7801ffa29a1112fab3b0f643628c30472d507f39544fd48e9549e34", size = 4799111, upload-time = "2025-09-22T04:00:33.11Z" }, - { url = "https://files.pythonhosted.org/packages/6a/9e/8de42b52a73abb8af86c66c969b3b4c2a96567b6ac74637c037d2e3baa60/lxml-6.0.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:59c45e125140b2c4b33920d21d83681940ca29f0b83f8629ea1a2196dc8cfe6a", size = 5351662, upload-time = "2025-09-22T04:00:35.237Z" }, - { url = "https://files.pythonhosted.org/packages/28/a2/de776a573dfb15114509a37351937c367530865edb10a90189d0b4b9b70a/lxml-6.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:452b899faa64f1805943ec1c0c9ebeaece01a1af83e130b69cdefeda180bb42c", size = 5314973, upload-time = "2025-09-22T04:00:37.086Z" }, - { url = "https://files.pythonhosted.org/packages/50/a0/3ae1b1f8964c271b5eec91db2043cf8c6c0bce101ebb2a633b51b044db6c/lxml-6.0.2-cp310-cp310-win32.whl", hash = "sha256:1e786a464c191ca43b133906c6903a7e4d56bef376b75d97ccbb8ec5cf1f0a4b", size = 3611953, upload-time = "2025-09-22T04:00:39.224Z" }, - { url = "https://files.pythonhosted.org/packages/d1/70/bd42491f0634aad41bdfc1e46f5cff98825fb6185688dc82baa35d509f1a/lxml-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:dacf3c64ef3f7440e3167aa4b49aa9e0fb99e0aa4f9ff03795640bf94531bcb0", size = 4032695, upload-time = "2025-09-22T04:00:41.402Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d0/05c6a72299f54c2c561a6c6cbb2f512e047fca20ea97a05e57931f194ac4/lxml-6.0.2-cp310-cp310-win_arm64.whl", hash = "sha256:45f93e6f75123f88d7f0cfd90f2d05f441b808562bf0bc01070a00f53f5028b5", size = 3680051, upload-time = "2025-09-22T04:00:43.525Z" }, { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365, upload-time = "2025-09-22T04:00:45.672Z" }, { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793, upload-time = "2025-09-22T04:00:47.783Z" }, { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362, upload-time = "2025-09-22T04:00:49.845Z" }, @@ -853,12 +778,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312", size = 3911420, upload-time = "2025-09-22T04:03:32.198Z" }, { url = 
"https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca", size = 4406837, upload-time = "2025-09-22T04:03:34.027Z" }, { url = "https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c", size = 3822205, upload-time = "2025-09-22T04:03:36.249Z" }, - { url = "https://files.pythonhosted.org/packages/e7/9c/780c9a8fce3f04690b374f72f41306866b0400b9d0fdf3e17aaa37887eed/lxml-6.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e748d4cf8fef2526bb2a589a417eba0c8674e29ffcb570ce2ceca44f1e567bf6", size = 3939264, upload-time = "2025-09-22T04:04:32.892Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5a/1ab260c00adf645d8bf7dec7f920f744b032f69130c681302821d5debea6/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4ddb1049fa0579d0cbd00503ad8c58b9ab34d1254c77bc6a5576d96ec7853dba", size = 4216435, upload-time = "2025-09-22T04:04:34.907Z" }, - { url = "https://files.pythonhosted.org/packages/f2/37/565f3b3d7ffede22874b6d86be1a1763d00f4ea9fc5b9b6ccb11e4ec8612/lxml-6.0.2-pp310-pypy310_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cb233f9c95f83707dae461b12b720c1af9c28c2d19208e1be03387222151daf5", size = 4325913, upload-time = "2025-09-22T04:04:37.205Z" }, - { url = "https://files.pythonhosted.org/packages/22/ec/f3a1b169b2fb9d03467e2e3c0c752ea30e993be440a068b125fc7dd248b0/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc456d04db0515ce3320d714a1eac7a97774ff0849e7718b492d957da4631dd4", size = 4269357, upload-time = "2025-09-22T04:04:39.322Z" }, - { url = "https://files.pythonhosted.org/packages/77/a2/585a28fe3e67daa1cf2f06f34490d556d121c25d500b10082a7db96e3bcd/lxml-6.0.2-pp310-pypy310_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2613e67de13d619fd283d58bda40bff0ee07739f624ffee8b13b631abf33083d", size = 4412295, upload-time = "2025-09-22T04:04:41.647Z" }, - { url = "https://files.pythonhosted.org/packages/7b/d9/a57dd8bcebd7c69386c20263830d4fa72d27e6b72a229ef7a48e88952d9a/lxml-6.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:24a8e756c982c001ca8d59e87c80c4d9dcd4d9b44a4cbeb8d9be4482c514d41d", size = 3516913, upload-time = "2025-09-22T04:04:43.602Z" }, { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829, upload-time = "2025-09-22T04:04:45.608Z" }, { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277, upload-time = "2025-09-22T04:04:47.754Z" }, { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433, upload-time = 
"2025-09-22T04:04:49.907Z" }, @@ -885,17 +804,6 @@ version = "3.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, - { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, - { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, - { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, - { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, - { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, - { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, - { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, @@ -1161,12 +1069,10 @@ version = "8.4.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "iniconfig" }, { name = "packaging" }, { name = "pluggy" }, { name = "pygments" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } wheels = [ @@ -1225,15 +1131,6 @@ version = "6.0.3" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, - { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, - { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, - { url = 
"https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, - { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, - { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, - { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, - { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, @@ -1300,7 +1197,6 @@ name = "rdflib" version = "7.5.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "isodate", marker = "python_full_version < '3.11'" }, { name = "pyparsing" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ec/1b/4cd9a29841951371304828d13282e27a5f25993702c7c87dcb7e0604bd25/rdflib-7.5.0.tar.gz", hash = "sha256:663083443908b1830e567350d72e74d9948b310f827966358d76eebdc92bf592", size = 4903859, upload-time = "2025-11-28T05:51:54.562Z" } @@ -1424,20 +1320,6 @@ version = "0.30.0" source = { registry = "https://pypi.org/simple" } sdist = { url = 
"https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/0c/0c411a0ec64ccb6d104dcabe0e713e05e153a9a2c3c2bd2b32ce412166fe/rpds_py-0.30.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:679ae98e00c0e8d68a7fda324e16b90fd5260945b45d3b824c892cec9eea3288", size = 370490, upload-time = "2025-11-30T20:21:33.256Z" }, - { url = "https://files.pythonhosted.org/packages/19/6a/4ba3d0fb7297ebae71171822554abe48d7cab29c28b8f9f2c04b79988c05/rpds_py-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4cc2206b76b4f576934f0ed374b10d7ca5f457858b157ca52064bdfc26b9fc00", size = 359751, upload-time = "2025-11-30T20:21:34.591Z" }, - { url = "https://files.pythonhosted.org/packages/cd/7c/e4933565ef7f7a0818985d87c15d9d273f1a649afa6a52ea35ad011195ea/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389a2d49eded1896c3d48b0136ead37c48e221b391c052fba3f4055c367f60a6", size = 389696, upload-time = "2025-11-30T20:21:36.122Z" }, - { url = "https://files.pythonhosted.org/packages/5e/01/6271a2511ad0815f00f7ed4390cf2567bec1d4b1da39e2c27a41e6e3b4de/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:32c8528634e1bf7121f3de08fa85b138f4e0dc47657866630611b03967f041d7", size = 403136, upload-time = "2025-11-30T20:21:37.728Z" }, - { url = "https://files.pythonhosted.org/packages/55/64/c857eb7cd7541e9b4eee9d49c196e833128a55b89a9850a9c9ac33ccf897/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f207f69853edd6f6700b86efb84999651baf3789e78a466431df1331608e5324", size = 524699, upload-time = "2025-11-30T20:21:38.92Z" }, - { url = "https://files.pythonhosted.org/packages/9c/ed/94816543404078af9ab26159c44f9e98e20fe47e2126d5d32c9d9948d10a/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:67b02ec25ba7a9e8fa74c63b6ca44cf5707f2fbfadae3ee8e7494297d56aa9df", size = 412022, upload-time = "2025-11-30T20:21:40.407Z" }, - { url = "https://files.pythonhosted.org/packages/61/b5/707f6cf0066a6412aacc11d17920ea2e19e5b2f04081c64526eb35b5c6e7/rpds_py-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0e95f6819a19965ff420f65578bacb0b00f251fefe2c8b23347c37174271f3", size = 390522, upload-time = "2025-11-30T20:21:42.17Z" }, - { url = "https://files.pythonhosted.org/packages/13/4e/57a85fda37a229ff4226f8cbcf09f2a455d1ed20e802ce5b2b4a7f5ed053/rpds_py-0.30.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:a452763cc5198f2f98898eb98f7569649fe5da666c2dc6b5ddb10fde5a574221", size = 404579, upload-time = "2025-11-30T20:21:43.769Z" }, - { url = "https://files.pythonhosted.org/packages/f9/da/c9339293513ec680a721e0e16bf2bac3db6e5d7e922488de471308349bba/rpds_py-0.30.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e0b65193a413ccc930671c55153a03ee57cecb49e6227204b04fae512eb657a7", size = 421305, upload-time = "2025-11-30T20:21:44.994Z" }, - { url = "https://files.pythonhosted.org/packages/f9/be/522cb84751114f4ad9d822ff5a1aa3c98006341895d5f084779b99596e5c/rpds_py-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:858738e9c32147f78b3ac24dc0edb6610000e56dc0f700fd5f651d0a0f0eb9ff", size = 572503, upload-time = "2025-11-30T20:21:46.91Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/9b/de879f7e7ceddc973ea6e4629e9b380213a6938a249e94b0cdbcc325bb66/rpds_py-0.30.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:da279aa314f00acbb803da1e76fa18666778e8a8f83484fba94526da5de2cba7", size = 598322, upload-time = "2025-11-30T20:21:48.709Z" }, - { url = "https://files.pythonhosted.org/packages/48/ac/f01fc22efec3f37d8a914fc1b2fb9bcafd56a299edbe96406f3053edea5a/rpds_py-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7c64d38fb49b6cdeda16ab49e35fe0da2e1e9b34bc38bd78386530f218b37139", size = 560792, upload-time = "2025-11-30T20:21:50.024Z" }, - { url = "https://files.pythonhosted.org/packages/e2/da/4e2b19d0f131f35b6146425f846563d0ce036763e38913d917187307a671/rpds_py-0.30.0-cp310-cp310-win32.whl", hash = "sha256:6de2a32a1665b93233cde140ff8b3467bdb9e2af2b91079f0333a0974d12d464", size = 221901, upload-time = "2025-11-30T20:21:51.32Z" }, - { url = "https://files.pythonhosted.org/packages/96/cb/156d7a5cf4f78a7cc571465d8aec7a3c447c94f6749c5123f08438bcf7bc/rpds_py-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:1726859cd0de969f88dc8673bdd954185b9104e05806be64bcd87badbe313169", size = 235823, upload-time = "2025-11-30T20:21:52.505Z" }, { url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" }, { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" }, { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" }, @@ -1819,7 +1701,6 @@ dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } wheels = [ @@ -1868,22 +1749,6 @@ version = "0.25.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/7a/28efd1d371f1acd037ac64ed1c5e2b41514a6cc937dd6ab6a13ab9f0702f/zstandard-0.25.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e59fdc271772f6686e01e1b3b74537259800f57e24280be3f29c8a0deb1904dd", size = 795256, upload-time = "2025-09-14T22:15:56.415Z" }, - { url = "https://files.pythonhosted.org/packages/96/34/ef34ef77f1ee38fc8e4f9775217a613b452916e633c4f1d98f31db52c4a5/zstandard-0.25.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:4d441506e9b372386a5271c64125f72d5df6d2a8e8a2a45a0ae09b03cb781ef7", size = 640565, upload-time = "2025-09-14T22:15:58.177Z" }, - { url = "https://files.pythonhosted.org/packages/9d/1b/4fdb2c12eb58f31f28c4d28e8dc36611dd7205df8452e63f52fb6261d13e/zstandard-0.25.0-cp310-cp310-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:ab85470ab54c2cb96e176f40342d9ed41e58ca5733be6a893b730e7af9c40550", size = 5345306, upload-time = "2025-09-14T22:16:00.165Z" }, - { url = "https://files.pythonhosted.org/packages/73/28/a44bdece01bca027b079f0e00be3b6bd89a4df180071da59a3dd7381665b/zstandard-0.25.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e05ab82ea7753354bb054b92e2f288afb750e6b439ff6ca78af52939ebbc476d", size = 5055561, upload-time = "2025-09-14T22:16:02.22Z" }, - { url = "https://files.pythonhosted.org/packages/e9/74/68341185a4f32b274e0fc3410d5ad0750497e1acc20bd0f5b5f64ce17785/zstandard-0.25.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:78228d8a6a1c177a96b94f7e2e8d012c55f9c760761980da16ae7546a15a8e9b", size = 5402214, upload-time = "2025-09-14T22:16:04.109Z" }, - { url = "https://files.pythonhosted.org/packages/8b/67/f92e64e748fd6aaffe01e2b75a083c0c4fd27abe1c8747fee4555fcee7dd/zstandard-0.25.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:2b6bd67528ee8b5c5f10255735abc21aa106931f0dbaf297c7be0c886353c3d0", size = 5449703, upload-time = "2025-09-14T22:16:06.312Z" }, - { url = "https://files.pythonhosted.org/packages/fd/e5/6d36f92a197c3c17729a2125e29c169f460538a7d939a27eaaa6dcfcba8e/zstandard-0.25.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4b6d83057e713ff235a12e73916b6d356e3084fd3d14ced499d84240f3eecee0", size = 5556583, upload-time = "2025-09-14T22:16:08.457Z" }, - { url = "https://files.pythonhosted.org/packages/d7/83/41939e60d8d7ebfe2b747be022d0806953799140a702b90ffe214d557638/zstandard-0.25.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9174f4ed06f790a6869b41cba05b43eeb9a35f8993c4422ab853b705e8112bbd", size = 5045332, upload-time = "2025-09-14T22:16:10.444Z" }, - { url = "https://files.pythonhosted.org/packages/b3/87/d3ee185e3d1aa0133399893697ae91f221fda79deb61adbe998a7235c43f/zstandard-0.25.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:25f8f3cd45087d089aef5ba3848cd9efe3ad41163d3400862fb42f81a3a46701", size = 5572283, upload-time = "2025-09-14T22:16:12.128Z" }, - { url = "https://files.pythonhosted.org/packages/0a/1d/58635ae6104df96671076ac7d4ae7816838ce7debd94aecf83e30b7121b0/zstandard-0.25.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3756b3e9da9b83da1796f8809dd57cb024f838b9eeafde28f3cb472012797ac1", size = 4959754, upload-time = "2025-09-14T22:16:14.225Z" }, - { url = "https://files.pythonhosted.org/packages/75/d6/57e9cb0a9983e9a229dd8fd2e6e96593ef2aa82a3907188436f22b111ccd/zstandard-0.25.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:81dad8d145d8fd981b2962b686b2241d3a1ea07733e76a2f15435dfb7fb60150", size = 5266477, upload-time = "2025-09-14T22:16:16.343Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a9/ee891e5edf33a6ebce0a028726f0bbd8567effe20fe3d5808c42323e8542/zstandard-0.25.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a5a419712cf88862a45a23def0ae063686db3d324cec7edbe40509d1a79a0aab", size = 5440914, upload-time = "2025-09-14T22:16:18.453Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/08/a8522c28c08031a9521f27abc6f78dbdee7312a7463dd2cfc658b813323b/zstandard-0.25.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e7360eae90809efd19b886e59a09dad07da4ca9ba096752e61a2e03c8aca188e", size = 5819847, upload-time = "2025-09-14T22:16:20.559Z" }, - { url = "https://files.pythonhosted.org/packages/6f/11/4c91411805c3f7b6f31c60e78ce347ca48f6f16d552fc659af6ec3b73202/zstandard-0.25.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:75ffc32a569fb049499e63ce68c743155477610532da1eb38e7f24bf7cd29e74", size = 5363131, upload-time = "2025-09-14T22:16:22.206Z" }, - { url = "https://files.pythonhosted.org/packages/ef/d6/8c4bd38a3b24c4c7676a7a3d8de85d6ee7a983602a734b9f9cdefb04a5d6/zstandard-0.25.0-cp310-cp310-win32.whl", hash = "sha256:106281ae350e494f4ac8a80470e66d1fe27e497052c8d9c3b95dc4cf1ade81aa", size = 436469, upload-time = "2025-09-14T22:16:25.002Z" }, - { url = "https://files.pythonhosted.org/packages/93/90/96d50ad417a8ace5f841b3228e93d1bb13e6ad356737f42e2dde30d8bd68/zstandard-0.25.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea9d54cc3d8064260114a0bbf3479fc4a98b21dffc89b3459edd506b69262f6e", size = 506100, upload-time = "2025-09-14T22:16:23.569Z" }, { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" },