From bf99487e1bd0fdb21dbf00750cdddf6af37882e1 Mon Sep 17 00:00:00 2001 From: Pedro Brochado Date: Tue, 4 Feb 2025 13:59:58 -0300 Subject: [PATCH 1/2] Bump pulpcore's bounds to >=3.70, <3.85 and apply plugin-template --- .github/workflows/scripts/install.sh | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/scripts/install.sh b/.github/workflows/scripts/install.sh index 605f7e5a4..677df1d1c 100755 --- a/.github/workflows/scripts/install.sh +++ b/.github/workflows/scripts/install.sh @@ -107,7 +107,7 @@ minio_access_key: "'$MINIO_ACCESS_KEY'"\ minio_secret_key: "'$MINIO_SECRET_KEY'"\ pulp_scenario_settings: null\ pulp_scenario_env: {}\ -test_storages_compat_layer: false\ +test_storages_compat_layer: true\ ' vars/main.yaml export PULP_API_ROOT="/rerouted/djnd/" fi diff --git a/pyproject.toml b/pyproject.toml index 13977dc85..dcadfbfdb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ dependencies = [ "jsonschema>=4.6,<5.0", "libcomps>=0.1.20.post1,<0.2", "productmd~=1.33.0", - "pulpcore>=3.49.11,<3.70", + "pulpcore>=3.70.0,<3.85", "solv~=0.7.21", "aiohttp_xmlrpc~=1.5.0", "importlib-resources~=6.4.0", From 08a510072a3a821d0ebfd852602c24794edd1a9b Mon Sep 17 00:00:00 2001 From: Pedro Brochado Date: Wed, 5 Feb 2025 17:43:31 -0300 Subject: [PATCH 2/2] Add compatibility with pulpcore 3.70 breaking changes This does: * Bumps lower and upper bounds for pulpcore: >=3.70, <3.85 * Removes pulp_smash and replaces it with similar fixtures * Adapts to the new bindings changes caused by openapi-generator bump * Reverts the usage of CustomJSONField (to serializer.JSONField) due to bindings changes. 
--- functest_requirements.txt | 2 +- pulp_rpm/app/fields.py | 7 - pulp_rpm/app/serializers/advisory.py | 3 +- pulp_rpm/app/serializers/comps.py | 27 +- pulp_rpm/app/serializers/modulemd.py | 9 +- pulp_rpm/app/serializers/package.py | 21 +- pulp_rpm/app/serializers/repository.py | 22 +- pulp_rpm/tests/conftest.py | 121 ++++++++- pulp_rpm/tests/functional/api/test_acs.py | 5 +- .../functional/api/test_advisory_upload.py | 5 +- .../functional/api/test_character_encoding.py | 7 +- pulp_rpm/tests/functional/api/test_copy.py | 23 +- .../functional/api/test_crud_content_unit.py | 59 ++-- .../tests/functional/api/test_crud_remotes.py | 57 ++-- pulp_rpm/tests/functional/api/test_domains.py | 20 +- .../functional/api/test_download_policies.py | 14 +- pulp_rpm/tests/functional/api/test_prune.py | 2 - pulp_rpm/tests/functional/api/test_publish.py | 45 ++-- .../tests/functional/api/test_rbac_crud.py | 60 +++-- .../functional/api/test_retention_policy.py | 19 +- pulp_rpm/tests/functional/api/test_sync.py | 253 +++++++++--------- pulp_rpm/tests/functional/conftest.py | 6 +- pulp_rpm/tests/functional/constants.py | 90 ++++--- .../content_handler/test_config_repo.py | 10 +- pulp_rpm/tests/functional/utils.py | 152 +---------- unittest_requirements.txt | 1 + 26 files changed, 514 insertions(+), 526 deletions(-) diff --git a/functest_requirements.txt b/functest_requirements.txt index 32e5c1bd6..450b32f2e 100644 --- a/functest_requirements.txt +++ b/functest_requirements.txt @@ -1,5 +1,4 @@ django # TODO: test_sync.py has a dependency on date parsing functions -git+https://github.com/pulp/pulp-smash.git#egg=pulp-smash productmd>=1.25 pytest<8 dictdiffer @@ -10,3 +9,4 @@ pyzstd requests pytest-xdist pytest-timeout +pytest-custom_exit_code diff --git a/pulp_rpm/app/fields.py b/pulp_rpm/app/fields.py index 354ff950c..54bae202d 100644 --- a/pulp_rpm/app/fields.py +++ b/pulp_rpm/app/fields.py @@ -1,6 +1,4 @@ from rest_framework import serializers -from drf_spectacular.utils import 
extend_schema_field -from drf_spectacular.types import OpenApiTypes from pulp_rpm.app.constants import ADVISORY_SUM_TYPE_TO_NAME from pulp_rpm.app.models import UpdateReference @@ -75,8 +73,3 @@ def to_representation(self, value): } ) return ret - - -@extend_schema_field(OpenApiTypes.OBJECT) -class CustomJSONField(serializers.JSONField): - """A (drf) JSONField override to force openapi schema to use 'object' type.""" diff --git a/pulp_rpm/app/serializers/advisory.py b/pulp_rpm/app/serializers/advisory.py index eddc7f86f..6c9e99238 100644 --- a/pulp_rpm/app/serializers/advisory.py +++ b/pulp_rpm/app/serializers/advisory.py @@ -5,7 +5,6 @@ import createrepo_c from django.db import IntegrityError, transaction from rest_framework import serializers -from pulp_rpm.app.fields import CustomJSONField from pulpcore.plugin.serializers import ( ModelSerializer, @@ -44,7 +43,7 @@ class UpdateCollectionSerializer(ModelSerializer): help_text=_("Collection short name."), allow_blank=True, allow_null=True ) - module = CustomJSONField(help_text=_("Collection modular NSVCA."), allow_null=True) + module = serializers.JSONField(help_text=_("Collection modular NSVCA."), allow_null=True) packages = UpdateCollectionPackagesField( source="*", read_only=True, help_text=_("List of packages") diff --git a/pulp_rpm/app/serializers/comps.py b/pulp_rpm/app/serializers/comps.py index bc995102b..4b94ea9c5 100644 --- a/pulp_rpm/app/serializers/comps.py +++ b/pulp_rpm/app/serializers/comps.py @@ -1,7 +1,6 @@ from gettext import gettext as _ from rest_framework import serializers -from pulp_rpm.app.fields import CustomJSONField from pulpcore.plugin.models import Repository from pulpcore.plugin.serializers import DetailRelatedField @@ -32,12 +31,14 @@ class PackageGroupSerializer(NoArtifactContentSerializer): ) name = serializers.CharField(help_text=_("PackageGroup name."), allow_blank=True) description = serializers.CharField(help_text=_("PackageGroup description."), allow_blank=True) - packages = 
CustomJSONField(help_text=_("PackageGroup package list."), allow_null=True) + packages = serializers.JSONField(help_text=_("PackageGroup package list."), allow_null=True) biarch_only = serializers.BooleanField(help_text=_("PackageGroup biarch only."), required=False) - desc_by_lang = CustomJSONField( + desc_by_lang = serializers.JSONField( help_text=_("PackageGroup description by language."), allow_null=True ) - name_by_lang = CustomJSONField(help_text=_("PackageGroup name by language."), allow_null=True) + name_by_lang = serializers.JSONField( + help_text=_("PackageGroup name by language."), allow_null=True + ) digest = serializers.CharField( help_text=_("PackageGroup digest."), ) @@ -72,11 +73,11 @@ class PackageCategorySerializer(NoArtifactContentSerializer): display_order = serializers.IntegerField( help_text=_("Category display order."), allow_null=True ) - group_ids = CustomJSONField(help_text=_("Category group list."), allow_null=True) - desc_by_lang = CustomJSONField( + group_ids = serializers.JSONField(help_text=_("Category group list."), allow_null=True) + desc_by_lang = serializers.JSONField( help_text=_("Category description by language."), allow_null=True ) - name_by_lang = CustomJSONField(help_text=_("Category name by language."), allow_null=True) + name_by_lang = serializers.JSONField(help_text=_("Category name by language."), allow_null=True) digest = serializers.CharField( help_text=_("Category digest."), ) @@ -108,12 +109,14 @@ class PackageEnvironmentSerializer(NoArtifactContentSerializer): display_order = serializers.IntegerField( help_text=_("Environment display order."), allow_null=True ) - group_ids = CustomJSONField(help_text=_("Environment group list."), allow_null=True) - option_ids = CustomJSONField(help_text=_("Environment option ids"), allow_null=True) - desc_by_lang = CustomJSONField( + group_ids = serializers.JSONField(help_text=_("Environment group list."), allow_null=True) + option_ids = 
serializers.JSONField(help_text=_("Environment option ids"), allow_null=True) + desc_by_lang = serializers.JSONField( help_text=_("Environment description by language."), allow_null=True ) - name_by_lang = CustomJSONField(help_text=_("Environment name by language."), allow_null=True) + name_by_lang = serializers.JSONField( + help_text=_("Environment name by language."), allow_null=True + ) digest = serializers.CharField(help_text=_("Environment digest.")) class Meta: @@ -136,7 +139,7 @@ class PackageLangpacksSerializer(NoArtifactContentSerializer): PackageLangpacks serializer. """ - matches = CustomJSONField(help_text=_("Langpacks matches."), allow_null=True) + matches = serializers.JSONField(help_text=_("Langpacks matches."), allow_null=True) digest = serializers.CharField(help_text=_("Langpacks digest."), allow_null=True) class Meta: diff --git a/pulp_rpm/app/serializers/modulemd.py b/pulp_rpm/app/serializers/modulemd.py index 596651365..1b35e39d2 100644 --- a/pulp_rpm/app/serializers/modulemd.py +++ b/pulp_rpm/app/serializers/modulemd.py @@ -3,7 +3,6 @@ from pulpcore.plugin.serializers import DetailRelatedField, NoArtifactContentSerializer from rest_framework import serializers -from pulp_rpm.app.fields import CustomJSONField from pulp_rpm.app.models import Modulemd, ModulemdDefaults, ModulemdObsolete, Package @@ -32,8 +31,8 @@ class ModulemdSerializer(NoArtifactContentSerializer): arch = serializers.CharField( help_text=_("Modulemd architecture."), ) - artifacts = CustomJSONField(help_text=_("Modulemd artifacts."), allow_null=True) - dependencies = CustomJSONField(help_text=_("Modulemd dependencies."), allow_null=True) + artifacts = serializers.JSONField(help_text=_("Modulemd artifacts."), allow_null=True) + dependencies = serializers.JSONField(help_text=_("Modulemd dependencies."), allow_null=True) # TODO: The performance of this is not great, there's a noticable difference in response # time before/after. 
Since this will only return Package content hrefs, we might benefit # from creating a specialized version of this Field that can skip some of the work. @@ -46,7 +45,7 @@ class ModulemdSerializer(NoArtifactContentSerializer): view_name="content-rpm/packages-detail", many=True, ) - profiles = CustomJSONField(help_text=_("Modulemd profiles."), allow_null=True) + profiles = serializers.JSONField(help_text=_("Modulemd profiles."), allow_null=True) snippet = serializers.CharField(help_text=_("Modulemd snippet"), write_only=True) def create(self, validated_data): @@ -88,7 +87,7 @@ class ModulemdDefaultsSerializer(NoArtifactContentSerializer): module = serializers.CharField(help_text=_("Modulemd name.")) stream = serializers.CharField(help_text=_("Modulemd default stream.")) - profiles = CustomJSONField(help_text=_("Default profiles for modulemd streams.")) + profiles = serializers.JSONField(help_text=_("Default profiles for modulemd streams.")) snippet = serializers.CharField(help_text=_("Modulemd default snippet"), write_only=True) def create(self, validated_data): diff --git a/pulp_rpm/app/serializers/package.py b/pulp_rpm/app/serializers/package.py index 9e9afc5b6..819fd2078 100644 --- a/pulp_rpm/app/serializers/package.py +++ b/pulp_rpm/app/serializers/package.py @@ -8,7 +8,6 @@ ) from pulpcore.plugin.util import get_domain_pk from rest_framework import serializers -from pulp_rpm.app.fields import CustomJSONField from rest_framework.exceptions import NotAcceptable from pulp_rpm.app.models import Package @@ -78,62 +77,62 @@ class PackageSerializer(SingleArtifactContentUploadSerializer, ContentChecksumSe read_only=True, ) - changelogs = CustomJSONField( + changelogs = serializers.JSONField( help_text=_("Changelogs that package contains"), default="[]", required=False, read_only=True, ) - files = CustomJSONField( + files = serializers.JSONField( help_text=_("Files that package contains"), default="[]", required=False, read_only=True, ) - requires = CustomJSONField( + 
requires = serializers.JSONField( help_text=_("Capabilities the package requires"), default="[]", required=False, read_only=True, ) - provides = CustomJSONField( + provides = serializers.JSONField( help_text=_("Capabilities the package provides"), default="[]", required=False, read_only=True, ) - conflicts = CustomJSONField( + conflicts = serializers.JSONField( help_text=_("Capabilities the package conflicts"), default="[]", required=False, read_only=True, ) - obsoletes = CustomJSONField( + obsoletes = serializers.JSONField( help_text=_("Capabilities the package obsoletes"), default="[]", required=False, read_only=True, ) - suggests = CustomJSONField( + suggests = serializers.JSONField( help_text=_("Capabilities the package suggests"), default="[]", required=False, read_only=True, ) - enhances = CustomJSONField( + enhances = serializers.JSONField( help_text=_("Capabilities the package enhances"), default="[]", required=False, read_only=True, ) - recommends = CustomJSONField( + recommends = serializers.JSONField( help_text=_("Capabilities the package recommends"), default="[]", required=False, read_only=True, ) - supplements = CustomJSONField( + supplements = serializers.JSONField( help_text=_("Capabilities the package supplements"), default="[]", required=False, diff --git a/pulp_rpm/app/serializers/repository.py b/pulp_rpm/app/serializers/repository.py index f98f51569..1df6dcfd4 100644 --- a/pulp_rpm/app/serializers/repository.py +++ b/pulp_rpm/app/serializers/repository.py @@ -22,7 +22,6 @@ ) from pulpcore.plugin.util import get_domain, resolve_prn from rest_framework import serializers -from pulp_rpm.app.fields import CustomJSONField from pulp_rpm.app.constants import ( ALLOWED_CHECKSUM_ERROR_MSG, @@ -147,11 +146,26 @@ class RpmRepositorySerializer(RepositorySerializer): ), read_only=True, ) - repo_config = CustomJSONField( + repo_config = serializers.JSONField( required=False, help_text=_("A JSON document describing config.repo file"), ) + def 
to_representation(self, instance): + data = super().to_representation(instance) + # Import workflow may cause these fields to be stored as "" in the database + # This ensure the correct type of None | Enum in the response + for field in ( + "checksum_type", + "metadata_checksum_type", + "package_checksum_type", + "compression_type", + ): + field_data = data.get(field) + if field_data == "": + data[field] = None + return data + def validate(self, data): """Validate data.""" for field in ("checksum_type", "metadata_checksum_type", "package_checksum_type"): @@ -410,7 +424,7 @@ class RpmPublicationSerializer(PublicationSerializer): ), read_only=True, ) - repo_config = CustomJSONField( + repo_config = serializers.JSONField( required=False, help_text=_("A JSON document describing config.repo file"), ) @@ -549,7 +563,7 @@ class CopySerializer(ValidateFieldsMixin, serializers.Serializer): A serializer for Content Copy API. """ - config = CustomJSONField( + config = serializers.JSONField( help_text=_( dedent( """\ diff --git a/pulp_rpm/tests/conftest.py b/pulp_rpm/tests/conftest.py index c1d4ff311..3714c5128 100644 --- a/pulp_rpm/tests/conftest.py +++ b/pulp_rpm/tests/conftest.py @@ -1,6 +1,8 @@ import uuid import pytest +from urllib.parse import urljoin +import requests from pulpcore.client.pulp_rpm import ( ApiClient as RpmApiClient, @@ -14,9 +16,7 @@ RpmRepositorySyncURL, ) -from pulp_rpm.tests.functional.constants import ( - RPM_UNSIGNED_FIXTURE_URL, -) +from pulp_rpm.tests.functional.constants import RPM_UNSIGNED_FIXTURE_URL, RPM_CONTENT_NAMES @pytest.fixture(scope="session") @@ -160,3 +160,118 @@ def _init_and_sync( return (repository, remote) if not return_task else (repository, remote, task) return _init_and_sync + + +class BaseURLSession(requests.Session): + def __init__(self, base_url, *args, **kwargs): + self.base_url = base_url + super().__init__(*args, **kwargs) + + def request(self, method, url, **kwargs): + return super().request(method, 
urljoin(self.base_url, url), **kwargs) + + +@pytest.fixture(scope="module") +def pulp_requests(bindings_cfg): + """Uses requests lib to issue an http request to pulp server using pulp_href. + + Example: + >>> response = pulp_requests.get("/pulp/api/v3/.../?repository_version=...") + >>> type(response) + requests.Response + """ + with BaseURLSession(bindings_cfg.host) as session: + session.auth = (bindings_cfg.username, bindings_cfg.password) + yield session + + +@pytest.fixture +def get_content_summary(rpm_repository_version_api): + """A fixture that fetches the content summary from a repository.""" + + def _get_content_summary(repo, version_href=None, dump=True): + """Fetches the content summary from a given repository. + + Args: + repo: The repository where the content is fetched from. + version_href: The repository version from where the content should be fetched from. + Default: latest repository version. + dump: If true, return a dumped dictionary with convenient filters (default). + Otherwise, return the response object. + + Returns: + The content summary of the repository. + """ + version_href = version_href or repo.latest_version_href + if version_href is None: + return {} + content_summary = rpm_repository_version_api.read(version_href).content_summary + if not dump: + return content_summary + else: + # removes the hrefs, which is may get in the way of data comparision + # https://docs.pydantic.dev/latest/concepts/serialization/#pickledumpsmodel + exclude_fields = {"__all__": {"__all__": {"href"}}} + return content_summary.model_dump(exclude=exclude_fields) + + return _get_content_summary + + +@pytest.fixture +def get_content( + rpm_repository_version_api, + pulp_requests, +): + """A fixture that fetches the content from a repository.""" + + def _get_content(repo, version_href=None): + """Fetches the content from a given repository. + + Args: + repo: The repository where the content is fetched from. 
+ version_href: The repository version from where the content should be fetched from. + Default: latest repository version. + + Returns: + A dictionary with lists of packages by content_type (package, modulemd, etc) + for 'present', 'added' and 'removed' content. E.g: + + ```python + >>> get_content(repository) + { + 'present': { + 'rpm.package': [{'arch', 'noarch', 'artifact': ...}], + 'rpm.packagegroup': [{'arch', 'noarch', 'artifact': ...}], + ... + }, + 'added': { ... }, + 'removed': { ... }, + } + ``` + """ + + def fetch_content(pulp_href) -> list: + result = pulp_requests.get(pulp_href) + result.raise_for_status() + return result.json()["results"] + + # Select verion_href + version_href = version_href or repo.latest_version_href + if version_href is None: + return {} + content_summary = rpm_repository_version_api.read(version_href).content_summary + + result = {} + for key in ("present", "added", "removed"): + content = {} + # ensure every content type returns at least an empty list + for k in RPM_CONTENT_NAMES: + content[k] = [] + # fetch content details for each content type + summary_entry = getattr(content_summary, key) + for content_type, content_dict in summary_entry.items(): + content[content_type] = fetch_content(content_dict["href"]) + result[key] = content + return result + + return _get_content diff --git a/pulp_rpm/tests/functional/api/test_acs.py b/pulp_rpm/tests/functional/api/test_acs.py index fe84d0b0d..f66cb2144 100644 --- a/pulp_rpm/tests/functional/api/test_acs.py +++ b/pulp_rpm/tests/functional/api/test_acs.py @@ -35,6 +35,7 @@ def test_acs_simple( rpm_acs_api, rpm_repository_factory, rpm_rpmremote_factory, + get_content_summary, monitor_task, monitor_task_group, gen_object_with_cleanup, @@ -72,7 +73,5 @@ def test_acs_simple( monitor_task(sync_response.task) repo = rpm_repository_api.read(repo.pulp_href) - repo_ver = rpm_repository_version_api.read(repo.latest_version_href) - - present_summary = {k: v["count"] for k, v in 
repo_ver.content_summary.present.items()} + present_summary = get_content_summary(repo)["present"] assert present_summary == content_summary diff --git a/pulp_rpm/tests/functional/api/test_advisory_upload.py b/pulp_rpm/tests/functional/api/test_advisory_upload.py index 89216237a..d9043f93c 100644 --- a/pulp_rpm/tests/functional/api/test_advisory_upload.py +++ b/pulp_rpm/tests/functional/api/test_advisory_upload.py @@ -70,8 +70,9 @@ def test_merging( # Third upload, two pkgs, intersects with existing, expect AdvisoryConflict failure with pytest.raises(PulpTaskError) as ctx: _, _, _ = upload_advisory_factory(advisory=CAMEL_BIRD_JSON, repository=repo, use_id=an_id) - assert "neither package list is a proper subset of the other" in str(ctx.value) - assert "ALLOW_AUTOMATIC_UNSAFE_ADVISORY_CONFLICT_RESOLUTION" in str(ctx.value) + error_msg = ctx.value.task.error["description"] + assert "neither package list is a proper subset of the other" in error_msg + assert "ALLOW_AUTOMATIC_UNSAFE_ADVISORY_CONFLICT_RESOLUTION" in error_msg # Fourth upload, intersecting pkglists, expecting three pkgs cambeardog, vers_href, _ = upload_advisory_factory( diff --git a/pulp_rpm/tests/functional/api/test_character_encoding.py b/pulp_rpm/tests/functional/api/test_character_encoding.py index 0e36ff885..72b11d5c0 100644 --- a/pulp_rpm/tests/functional/api/test_character_encoding.py +++ b/pulp_rpm/tests/functional/api/test_character_encoding.py @@ -29,7 +29,7 @@ def test_upload_non_ascii( """Test whether one can upload an RPM with non-ascii metadata.""" temp_file = tmp_path / str(uuid.uuid4()) temp_file.write_bytes(requests.get(RPM_WITH_NON_ASCII_URL).content) - artifact = pulpcore_bindings.ArtifactsApi.create(temp_file) + artifact = pulpcore_bindings.ArtifactsApi.create(str(temp_file)) response = rpm_package_api.create( artifact=artifact.pulp_href, relative_path=RPM_WITH_NON_ASCII_NAME, @@ -44,7 +44,7 @@ def test_upload_non_utf8( """Test whether an exception is raised when non-utf-8 is 
uploaded.""" temp_file = tmp_path / str(uuid.uuid4()) temp_file.write_bytes(requests.get(RPM_WITH_NON_UTF_8_URL).content) - artifact = pulpcore_bindings.ArtifactsApi.create(temp_file) + artifact = pulpcore_bindings.ArtifactsApi.create(str(temp_file)) with pytest.raises(PulpTaskError) as ctx: response = rpm_package_api.create( artifact=artifact.pulp_href, @@ -52,4 +52,5 @@ def test_upload_non_utf8( ) monitor_task(response.task) - assert "'utf-8' codec can't decode byte 0x80 in position 168: invalid start" in str(ctx.value) + error_msg = ctx.value.task.error["description"] + assert "'utf-8' codec can't decode byte 0x80 in position 168: invalid start" in error_msg diff --git a/pulp_rpm/tests/functional/api/test_copy.py b/pulp_rpm/tests/functional/api/test_copy.py index 6ab81f2a4..4531a016a 100644 --- a/pulp_rpm/tests/functional/api/test_copy.py +++ b/pulp_rpm/tests/functional/api/test_copy.py @@ -2,11 +2,6 @@ import pytest -from pulp_smash.pulp3.utils import ( - get_added_content_summary, - get_content_summary, -) - from pulp_rpm.tests.functional.constants import ( PULP_TYPE_PACKAGE, RPM_FIXTURE_SUMMARY, @@ -44,6 +39,7 @@ def test_modular_static_context_copy( rpm_repository_factory, rpm_repository_api, get_id, + get_content_summary, ): """Test copying a static_context-using repo to an empty destination.""" src, _ = init_and_sync(url=RPM_MODULES_STATIC_CONTEXT_FIXTURE_URL) @@ -62,8 +58,9 @@ def test_modular_static_context_copy( # Check that we have the correct content counts. 
dest = rpm_repository_api.read(dest.pulp_href) - assert get_content_summary(dest.to_dict()) == RPM_MODULAR_STATIC_FIXTURE_SUMMARY - assert get_added_content_summary(dest.to_dict()) == RPM_MODULAR_STATIC_FIXTURE_SUMMARY + content_summary = get_content_summary(dest) + assert content_summary["present"] == RPM_MODULAR_STATIC_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_MODULAR_STATIC_FIXTURE_SUMMARY modules = rpm_modulemd_api.list(repository_version=get_id(dest.latest_version_href)).results module_static_contexts = [ @@ -80,6 +77,7 @@ def test_basic_copy_all( rpm_repository_factory, rpm_repository_api, rpm_unsigned_repo_immediate, + get_content_summary, ): """Test copying all the content from one repo to another.""" src = rpm_unsigned_repo_immediate @@ -93,8 +91,9 @@ def test_basic_copy_all( # Check that we have the correct content counts. dest = rpm_repository_api.read(dest.pulp_href) - assert get_content_summary(dest.to_dict()) == RPM_FIXTURE_SUMMARY - assert get_added_content_summary(dest.to_dict()) == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(dest) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_FIXTURE_SUMMARY def test_copy_none( self, @@ -463,6 +462,7 @@ def test_kickstart_copy_all( rpm_kickstart_repo_immediate, rpm_repository_api, rpm_repository_factory, + get_content_summary, ): """Test copying all the content from one repo to another.""" src = rpm_kickstart_repo_immediate @@ -476,8 +476,9 @@ def test_kickstart_copy_all( # Check that we have the correct content counts. 
dest = rpm_repository_api.read(dest.pulp_href) - assert get_content_summary(dest.to_dict()) == RPM_KICKSTART_FIXTURE_SUMMARY - assert get_added_content_summary(dest.to_dict()) == RPM_KICKSTART_FIXTURE_SUMMARY + content_summary = get_content_summary(dest) + assert content_summary["present"] == RPM_KICKSTART_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_KICKSTART_FIXTURE_SUMMARY def test_strict_copy_module_to_empty_repo( diff --git a/pulp_rpm/tests/functional/api/test_crud_content_unit.py b/pulp_rpm/tests/functional/api/test_crud_content_unit.py index 2baa93e25..b79687cb1 100644 --- a/pulp_rpm/tests/functional/api/test_crud_content_unit.py +++ b/pulp_rpm/tests/functional/api/test_crud_content_unit.py @@ -1,10 +1,8 @@ """Tests that perform actions over content unit.""" from textwrap import dedent -from urllib.parse import urljoin import pytest -import requests from pulpcore.client.pulp_rpm import RpmModulemdDefaults, RpmModulemd from pulp_rpm.tests.functional.constants import ( @@ -30,7 +28,6 @@ def test_crud_content_unit( ): """Test creating, reading, updating, and deleting a content unit of package type.""" # Create content unit - content_unit_original = {} attrs = gen_rpm_content_attrs(signed_artifact, RPM_PACKAGE_FILENAME) response = rpm_package_api.create(**attrs) @@ -38,38 +35,35 @@ def test_crud_content_unit( # rpm package doesn't keep relative_path but the location href del attrs["relative_path"] - content_unit_original.update(content_unit.to_dict()) for key, val in attrs.items(): - assert content_unit_original[key] == val + assert getattr(content_unit, key) == val # Read a content unit by its href - content_unit = rpm_package_api.read(content_unit_original["pulp_href"]).to_dict() - for key, val in content_unit_original.items(): - assert content_unit[key] == val + response = rpm_package_api.read(content_unit.pulp_href) + assert response == content_unit # Read a content unit by its pkg_id - page = 
rpm_package_api.list(pkg_id=content_unit_original["pkg_id"]) + page = rpm_package_api.list(pkg_id=content_unit.pkg_id) assert len(page.results) == 1 - for key, val in content_unit_original.items(): - assert page.results[0].to_dict()[key] == val + assert page.results[0] == content_unit # Attempt to update a content unit using HTTP PATCH attrs = gen_rpm_content_attrs(signed_artifact, RPM_PACKAGE_FILENAME2) with pytest.raises(AttributeError) as exc: - rpm_package_api.partial_update(content_unit_original["pulp_href"], attrs) + rpm_package_api.partial_update(content_unit.pulp_href, attrs) msg = "object has no attribute 'partial_update'" assert msg in str(exc) # Attempt to update a content unit using HTTP PUT attrs = gen_rpm_content_attrs(signed_artifact, RPM_PACKAGE_FILENAME2) with pytest.raises(AttributeError) as exc: - rpm_package_api.update(content_unit_original["pulp_href"], attrs) + rpm_package_api.update(content_unit.pulp_href, attrs) msg = "object has no attribute 'update'" assert msg in str(exc) # Attempt to delete a content unit using HTTP DELETE with pytest.raises(AttributeError) as exc: - rpm_package_api.delete(content_unit_original["pulp_href"]) + rpm_package_api.delete(content_unit.pulp_href) msg = "object has no attribute 'delete'" assert msg in str(exc) @@ -77,7 +71,7 @@ def test_crud_content_unit( attrs = gen_rpm_content_attrs(signed_artifact, RPM_PACKAGE_FILENAME) response = rpm_package_api.create(**attrs) duplicate = rpm_package_api.read(monitor_task(response.task).created_resources[0]) - assert duplicate.pulp_href == content_unit_original["pulp_href"] + assert duplicate.pulp_href == content_unit.pulp_href # Attempt to create duplicate package while specifying a repository repo = rpm_repository_factory() @@ -87,7 +81,7 @@ def test_crud_content_unit( monitored_response = monitor_task(response.task) duplicate = rpm_package_api.read(monitored_response.created_resources[1]) - assert duplicate.pulp_href == content_unit_original["pulp_href"] + assert 
duplicate.pulp_href == content_unit.pulp_href repo = rpm_repository_api.read(repo.pulp_href) assert repo.latest_version_href.endswith("/versions/1/") @@ -103,7 +97,7 @@ def test_crud_content_unit( [RPM_MODULAR_FIXTURE_URL, RPM_KICKSTART_FIXTURE_URL, RPM_REPO_METADATA_FIXTURE_URL], ids=["MODULAR_FIXTURE_URL", "KICKSTART_FIXTURE_URL", "REPO_METADATA_FIXTURE_URL"], ) -def test_remove_content_unit(url, init_and_sync, rpm_repository_version_api, bindings_cfg): +def test_remove_content_unit(url, init_and_sync, get_content, pulp_requests): """ Sync a repository and test that content of any type cannot be removed directly. @@ -118,24 +112,15 @@ def test_remove_content_unit(url, init_and_sync, rpm_repository_version_api, bin - packagelangpacks - repo metadata """ - repo, _ = init_and_sync(url=url, policy="on_demand") - # Test remove content by types contained in repository. - version = rpm_repository_version_api.read(repo.latest_version_href) - - # iterate over content filtered by repository versions - for content_units in version.content_summary.added.values(): - auth = (bindings_cfg.username, bindings_cfg.password) - url = urljoin(bindings_cfg.host, content_units["href"]) - response = requests.get(url, auth=auth).json() - - # iterate over particular content units and issue delete requests - for content_unit in response["results"]: - url = urljoin(bindings_cfg.host, content_unit["pulp_href"]) - resp = requests.delete(url, auth=auth) + repo, _ = init_and_sync(url=url, policy="on_demand") + added_content = get_content(repo)["added"] - # check that '405' (method not allowed) is returned - assert resp.status_code == 405 + # iterate over content units and issue delete requests + for content_type, content_list in added_content.items(): + for content_unit in content_list: + resp = pulp_requests.delete(content_unit["pulp_href"]) + assert resp.status_code == 405 # method not allowed def test_create_modulemd_defaults(monitor_task, gen_object_with_cleanup, rpm_modulemd_defaults_api): 
@@ -147,7 +132,7 @@ def test_create_modulemd_defaults(monitor_task, gen_object_with_cleanup, rpm_mod request_1 = { "module": "squid", "stream": "4", - "profiles": '{"4": ["common"]}', + "profiles": {"4": ["common"]}, "snippet": dedent( """\ --- @@ -179,10 +164,11 @@ def test_create_modulemd_defaults(monitor_task, gen_object_with_cleanup, rpm_mod # Cant create duplicate request_3 = request_1.copy() request_3["module"] = "squid-mod2" # not in unique_togheter - with pytest.raises(PulpTaskError, match="duplicate key value violates unique constraint"): + with pytest.raises(PulpTaskError) as exc: modulemd_default = gen_object_with_cleanup( rpm_modulemd_defaults_api, RpmModulemdDefaults(**request_3) ) + assert "duplicate key value violates unique constraint" in exc.value.task.error["description"] def test_create_modulemds( @@ -208,8 +194,9 @@ def test_create_modulemds( assert modulemd.name == request["name"] # Cant create duplicate - with pytest.raises(PulpTaskError, match="duplicate key value violates unique constraint"): + with pytest.raises(PulpTaskError) as exc: modulemd = gen_object_with_cleanup(rpm_modulemd_api, RpmModulemd(**request)) + assert "duplicate key value violates unique constraint" in exc.value.task.error["description"] # Can upload variation request2 = request.copy() diff --git a/pulp_rpm/tests/functional/api/test_crud_remotes.py b/pulp_rpm/tests/functional/api/test_crud_remotes.py index 3364c409f..4ee78327f 100644 --- a/pulp_rpm/tests/functional/api/test_crud_remotes.py +++ b/pulp_rpm/tests/functional/api/test_crud_remotes.py @@ -5,16 +5,16 @@ import pytest from pulpcore.client.pulp_rpm.exceptions import ApiException +from pydantic import ValidationError -from pulp_rpm.tests.functional.constants import DOWNLOAD_POLICIES -from pulp_rpm.tests.functional.utils import gen_rpm_remote +from pulp_rpm.tests.functional.constants import DOWNLOAD_POLICIES, RPM_UNSIGNED_FIXTURE_URL @pytest.mark.parallel def test_basic_crud_remote(rpm_rpmremote_api, 
rpm_rpmremote_factory, monitor_task): """Test CRUD operations for remotes.""" # Create a remote - body = _gen_verbose_remote() + body = gen_verbose_remote_data() remote = rpm_rpmremote_factory(**body) for key in ("username", "password"): del body[key] @@ -23,10 +23,8 @@ def test_basic_crud_remote(rpm_rpmremote_api, rpm_rpmremote_factory, monitor_tas assert remote.to_dict()[key] == val, key # Try to create a second remote with an identical name - body = gen_rpm_remote() - body["name"] = remote.name with pytest.raises(ApiException): - rpm_rpmremote_api.create(body) + rpm_rpmremote_factory(name=remote.name) # Read a remote by its href remote = rpm_rpmremote_api.read(remote.pulp_href) @@ -40,7 +38,7 @@ def test_basic_crud_remote(rpm_rpmremote_api, rpm_rpmremote_factory, monitor_tas assert results[0].to_dict()[key] == val, key # Update a remote using HTTP PATCH - body = _gen_verbose_remote() + body = gen_verbose_remote_data() response = rpm_rpmremote_api.partial_update(remote.pulp_href, body) monitor_task(response.task) for key in ("username", "password"): @@ -50,7 +48,7 @@ def test_basic_crud_remote(rpm_rpmremote_api, rpm_rpmremote_factory, monitor_tas assert remote.to_dict()[key] == val, key # Update a remote using HTTP PUT - body = _gen_verbose_remote() + body = gen_verbose_remote_data() response = rpm_rpmremote_api.update(remote.pulp_href, body) monitor_task(response.task) for key in ("username", "password"): @@ -67,7 +65,7 @@ def test_basic_crud_remote(rpm_rpmremote_api, rpm_rpmremote_factory, monitor_tas @pytest.mark.parallel -def test_missing_url(rpm_rpmremote_api): +def test_missing_url(rpm_rpmremote_api, rpm_rpmremote_factory): """Verify whether is possible to create a remote without a URL. 
This test targets the following issues: @@ -75,9 +73,9 @@ def test_missing_url(rpm_rpmremote_api): * `Pulp #3395 `_ * `Pulp Smash #984 `_ """ - body = gen_rpm_remote() + body = gen_remote_data() del body["url"] - with pytest.raises(ApiException): + with pytest.raises(ValidationError): rpm_rpmremote_api.create(body) @@ -108,22 +106,22 @@ def test_policy_update_changes(rpm_rpmremote_api, rpm_rpmremote_factory, monitor * `Pulp #3763 `_ """ # Verify the default policy `immediate` - body = _gen_verbose_remote() + body = gen_verbose_remote_data() del body["policy"] - remote = rpm_rpmremote_factory(**body).to_dict() - assert remote["policy"] == "immediate", remote + remote = rpm_rpmremote_factory(**body) + assert remote.policy == "immediate", remote # Verify ability to change policy to value other than the default changed_policy = choice([item for item in DOWNLOAD_POLICIES if item != "immediate"]) - response = rpm_rpmremote_api.partial_update(remote["pulp_href"], {"policy": changed_policy}) + response = rpm_rpmremote_api.partial_update(remote.pulp_href, {"policy": changed_policy}) monitor_task(response.task) - remote = rpm_rpmremote_api.read(remote["pulp_href"]).to_dict() - assert remote["policy"] == changed_policy, remote + remote = rpm_rpmremote_api.read(remote.pulp_href) + assert remote.policy == changed_policy, remote # Verify an invalid policy does not update the remote policy - with pytest.raises(ApiException): - rpm_rpmremote_api.partial_update(remote["pulp_href"], {"policy": str(uuid4())}) + with pytest.raises(ValidationError): + rpm_rpmremote_api.partial_update(remote.pulp_href, {"policy": str(uuid4())}) def test_raise_on_invalid_remote_url( @@ -132,19 +130,28 @@ def test_raise_on_invalid_remote_url( # Cant create invalid RpmRemote for protocol in ("uln", "sftp", "grpc"): with pytest.raises(ApiException, match=" is not valid. 
It must start with: "): - body = _gen_verbose_remote() + body = gen_verbose_remote_data() body["url"] = f"{protocol}://some/rpm/remote" gen_object_with_cleanup(rpm_rpmremote_api, body) # Cant create invalid UlnRemote for protocol in ("http", "https", "file"): with pytest.raises(ApiException, match=" is not valid. It must start with: "): - body = _gen_verbose_remote() + body = gen_verbose_remote_data() body["url"] = f"{protocol}://some/uln/remote" gen_object_with_cleanup(rpm_ulnremote_api, body) -def _gen_verbose_remote(): +def gen_remote_data(): + """Return a minimum semi-random dict for use in defining a remote.""" + return { + "name": str(uuid4()), + "url": RPM_UNSIGNED_FIXTURE_URL, + "policy": "immediate", + } + + +def gen_verbose_remote_data(): """Return a semi-random dict for use in defining a remote. For most tests, it"s desirable to create remotes with as few attributes @@ -154,8 +161,8 @@ def _gen_verbose_remote(): Note that 'username' and 'password' are write-only attributes. """ - attrs = gen_rpm_remote() - attrs.update( + data = gen_remote_data() + data.update( {"password": str(uuid4()), "username": str(uuid4()), "policy": choice(DOWNLOAD_POLICIES)} ) - return attrs + return data diff --git a/pulp_rpm/tests/functional/api/test_domains.py b/pulp_rpm/tests/functional/api/test_domains.py index fed161543..72cad336a 100644 --- a/pulp_rpm/tests/functional/api/test_domains.py +++ b/pulp_rpm/tests/functional/api/test_domains.py @@ -9,7 +9,6 @@ from pulpcore.client.pulpcore.exceptions import ApiException as CoreApiException from pulp_rpm.tests.functional.utils import ( - gen_rpm_remote, get_package_repo_path, ) from pulp_rpm.tests.functional.constants import ( @@ -27,6 +26,7 @@ def test_domain_create( rpm_package_api, rpm_repository_api, rpm_rpmremote_api, + rpm_rpmremote_factory, ): """Test repo-creation in a domain.""" body = { @@ -38,7 +38,7 @@ def test_domain_create( domain_name = domain.name # create and sync in default domain (not specified) - remote = 
gen_object_with_cleanup(rpm_rpmremote_api, gen_rpm_remote(RPM_SIGNED_FIXTURE_URL)) + remote = rpm_rpmremote_factory(url=RPM_SIGNED_FIXTURE_URL) repo_body = {"name": str(uuid.uuid4()), "remote": remote.pulp_href} repo = gen_object_with_cleanup(rpm_repository_api, repo_body) # Check that we can "find" the new repo in the default-domain @@ -63,6 +63,7 @@ def test_domain_sync( rpm_package_lang_packs_api, rpm_repository_api, rpm_rpmremote_api, + rpm_rpmremote_factory, ): """Test repo-sync in a domain.""" @@ -76,13 +77,12 @@ def test_domain_sync( domain_name = domain.name # create and sync in the newly-created domain - remote = gen_object_with_cleanup( - rpm_rpmremote_api, - {"name": str(uuid.uuid4()), "url": RPM_SIGNED_FIXTURE_URL}, - pulp_domain=domain_name, + remote = rpm_rpmremote_factory( + name=str(uuid.uuid4()), url=RPM_SIGNED_FIXTURE_URL, pulp_domain=domain_name ) repo_body = {"name": str(uuid.uuid4()), "remote": remote.pulp_href} repo = gen_object_with_cleanup(rpm_repository_api, repo_body, pulp_domain=domain_name) + # Check that we can "find" the new repo in the new domain via filtering repos = rpm_repository_api.list(name=repo.name, pulp_domain=domain_name).results assert len(repos) == 1 @@ -130,7 +130,11 @@ def test_domain_sync( @pytest.mark.parallel def test_object_creation( - pulpcore_bindings, gen_object_with_cleanup, rpm_repository_api, rpm_rpmremote_api + pulpcore_bindings, + gen_object_with_cleanup, + rpm_repository_api, + rpm_rpmremote_api, + rpm_rpmremote_factory, ): """Test basic object creation in a separate domain.""" body = { @@ -154,7 +158,7 @@ def test_object_creation( assert default_repos.count == 0 # Try to create an object w/ cross domain relations - default_remote = gen_object_with_cleanup(rpm_rpmremote_api, gen_rpm_remote()) + default_remote = rpm_rpmremote_factory() with pytest.raises(ApiException) as e: repo_body = {"name": str(uuid.uuid4()), "remote": default_remote.pulp_href} rpm_repository_api.create(repo_body, pulp_domain=domain.name) 
diff --git a/pulp_rpm/tests/functional/api/test_download_policies.py b/pulp_rpm/tests/functional/api/test_download_policies.py index 294c206a8..287ad4524 100644 --- a/pulp_rpm/tests/functional/api/test_download_policies.py +++ b/pulp_rpm/tests/functional/api/test_download_policies.py @@ -17,6 +17,7 @@ def test_download_policies( rpm_publication_api, gen_object_with_cleanup, delete_orphans_pre, + get_content_summary, ): """Sync repositories with the different ``download_policy``. @@ -39,11 +40,9 @@ def test_download_policies( # Step 3, 4 assert repo.latest_version_href.endswith("/1/") - repo_ver = rpm_repository_version_api.read(repo.latest_version_href) - present_summary = {k: v["count"] for k, v in repo_ver.content_summary.present.items()} - assert present_summary == RPM_FIXTURE_SUMMARY - added_summary = {k: v["count"] for k, v in repo_ver.content_summary.added.items()} - assert added_summary == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repo) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_FIXTURE_SUMMARY # Step 5 latest_version_href = repo.latest_version_href @@ -51,9 +50,8 @@ def test_download_policies( # Step 6, 7 assert latest_version_href == repo.latest_version_href - repo_ver = rpm_repository_version_api.read(repo.latest_version_href) - present_summary = {k: v["count"] for k, v in repo_ver.content_summary.present.items()} - assert present_summary == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repo) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY # Step 8 publish_data = RpmRpmPublication(repository=repo.pulp_href) diff --git a/pulp_rpm/tests/functional/api/test_prune.py b/pulp_rpm/tests/functional/api/test_prune.py index 543909361..5fc7a3a8d 100644 --- a/pulp_rpm/tests/functional/api/test_prune.py +++ b/pulp_rpm/tests/functional/api/test_prune.py @@ -1,7 +1,5 @@ import pytest -from pulp_rpm.tests.functional.utils import set_up_module as setUpModule # noqa:F401 - 
from pulpcore.client.pulp_rpm import PrunePackages from pulpcore.client.pulp_rpm.exceptions import ApiException diff --git a/pulp_rpm/tests/functional/api/test_publish.py b/pulp_rpm/tests/functional/api/test_publish.py index 142f6a880..0269644c9 100644 --- a/pulp_rpm/tests/functional/api/test_publish.py +++ b/pulp_rpm/tests/functional/api/test_publish.py @@ -12,8 +12,6 @@ import xmltodict import dictdiffer -from pulp_smash.pulp3.utils import gen_repo, gen_distribution - from pulp_rpm.tests.functional.constants import ( RPM_ALT_LAYOUT_FIXTURE_URL, RPM_COMPLEX_FIXTURE_URL, @@ -29,8 +27,7 @@ RPM_UNSIGNED_FIXTURE_URL, SRPM_UNSIGNED_FIXTURE_URL, ) -from pulp_rpm.tests.functional.utils import gen_rpm_remote, download_and_decompress_file -from pulp_rpm.tests.functional.utils import set_up_module as setUpModule # noqa:F401 +from pulp_rpm.tests.functional.utils import download_and_decompress_file from pulpcore.client.pulp_rpm import RpmRepositorySyncURL, RpmRpmPublication from pulpcore.client.pulp_rpm.exceptions import ApiException @@ -81,9 +78,9 @@ def test_publish_with_compression_types( compression_ext, rpm_unsigned_repo_immediate, rpm_publication_api, - gen_object_with_cleanup, rpm_distribution_api, monitor_task, + rpm_distribution_factory, ): """Sync and publish an RPM repository w/ zstd compression and verify it exists.""" # 1. Publish and distribute @@ -94,8 +91,7 @@ def test_publish_with_compression_types( created_resources = monitor_task(publish_response.task).created_resources publication_href = created_resources[0] - body = gen_distribution(publication=publication_href) - distribution = gen_object_with_cleanup(rpm_distribution_api, body) + distribution = rpm_distribution_factory(publication=publication_href) # 2. 
Check "primary", "filelists", "other", "updateinfo" have correct compression ext for md_type, md_href in self.get_repomd_metadata_urls(distribution.base_url).items(): @@ -107,9 +103,9 @@ def test_validate_no_checksum_tag( self, rpm_unsigned_repo_immediate, rpm_publication_api, - gen_object_with_cleanup, rpm_distribution_api, monitor_task, + rpm_distribution_factory, ): """Sync and publish an RPM repository and verify the checksum. @@ -121,8 +117,7 @@ def test_validate_no_checksum_tag( created_resources = monitor_task(publish_response.task).created_resources publication_href = created_resources[0] - body = gen_distribution(publication=publication_href) - distribution = gen_object_with_cleanup(rpm_distribution_api, body) + distribution = rpm_distribution_factory(publication=publication_href) # 2. check the tag 'sum' is not present in updateinfo.xml update_xml_url = self.get_repomd_metadata_urls(distribution.base_url)["updateinfo"] @@ -228,10 +223,10 @@ def test_complex_repo_core_metadata( repo_url, init_and_sync, rpm_publication_api, - gen_object_with_cleanup, rpm_distribution_api, monitor_task, delete_orphans_pre, + rpm_distribution_factory, ): """Test the "complex" fixture that covers more of the metadata cases. @@ -250,8 +245,7 @@ def test_complex_repo_core_metadata( publication_href = created_resources[0] # distribute - body = gen_distribution(publication=publication_href) - distribution = gen_object_with_cleanup(rpm_distribution_api, body) + distribution = rpm_distribution_factory(publication=publication_href) # Download and parse the metadata. 
original_repomd = ElementTree.fromstring( @@ -412,11 +406,13 @@ def _compare_xml_metadata_file(original_metadata_text, generated_metadata_text, @pytest.mark.parametrize("mirror", [True, False], ids=["mirror", "standard"]) def test_distribution_tree_metadata_publish( mirror, - gen_object_with_cleanup, rpm_repository_api, rpm_rpmremote_api, rpm_distribution_api, monitor_task, + rpm_rpmremote_factory, + rpm_repository_factory, + rpm_distribution_factory, ): """Test the "complex" fixture that covers more of the metadata cases. @@ -427,10 +423,8 @@ def test_distribution_tree_metadata_publish( from configparser import ConfigParser # 1. create repo and remote - repo = gen_object_with_cleanup(rpm_repository_api, gen_repo(autopublish=not mirror)) - - body = gen_rpm_remote(RPM_KICKSTART_FIXTURE_URL, policy="on_demand") - remote = gen_object_with_cleanup(rpm_rpmremote_api, body) + repo = rpm_repository_factory(autopublish=not mirror) + remote = rpm_rpmremote_factory(url=RPM_KICKSTART_FIXTURE_URL, policy="on_demand") # 2, 3. Sync and publish repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href, mirror=mirror) @@ -440,8 +434,7 @@ def test_distribution_tree_metadata_publish( publication_href = [r for r in created_resources if "publication" in r][0] - body = gen_distribution(publication=publication_href) - distribution = gen_object_with_cleanup(rpm_distribution_api, body) + distribution = rpm_distribution_factory(publication=publication_href) # 4. Download and parse the metadata. 
original_treeinfo = requests.get(os.path.join(RPM_KICKSTART_FIXTURE_URL, ".treeinfo")).text @@ -502,9 +495,9 @@ def test_distribution_tree_metadata_publish( def get_checksum_types( init_and_sync, rpm_publication_api, - gen_object_with_cleanup, rpm_distribution_api, monitor_task, + rpm_distribution_factory, ): """Sync and publish an RPM repository.""" @@ -527,8 +520,7 @@ def _get_checksum_types(**kwargs): created_resources = monitor_task(publish_response.task).created_resources publication_href = created_resources[0] - body = gen_distribution(publication=publication_href) - distribution = gen_object_with_cleanup(rpm_distribution_api, body) + distribution = rpm_distribution_factory(publication=publication_href) repomd = ElementTree.fromstring( requests.get(os.path.join(distribution.base_url, "repodata/repomd.xml")).text @@ -755,10 +747,10 @@ def test_directory_layout_distribute_with_treeinfo(generate_distribution): @pytest.fixture(scope="class") def generate_distribution( init_and_sync, - gen_object_with_cleanup, rpm_distribution_api, rpm_publication_api, monitor_task, + rpm_distribution_factory, ): def _generate_distribution(url=None): """Sync and publish an RPM repository. 
@@ -783,9 +775,8 @@ def _generate_distribution(url=None): created_resources = monitor_task(publish_response.task).created_resources publication_href = created_resources[0] - body = gen_distribution(publication=publication_href) - distribution = gen_object_with_cleanup(rpm_distribution_api, body) + distribution = rpm_distribution_factory(publication=publication_href) - return distribution.to_dict()["base_url"] + return distribution.base_url return _generate_distribution diff --git a/pulp_rpm/tests/functional/api/test_rbac_crud.py b/pulp_rpm/tests/functional/api/test_rbac_crud.py index a8347fd75..3af0f7690 100644 --- a/pulp_rpm/tests/functional/api/test_rbac_crud.py +++ b/pulp_rpm/tests/functional/api/test_rbac_crud.py @@ -4,7 +4,6 @@ from pulpcore.client.pulp_rpm import RpmRepositorySyncURL from pulpcore.client.pulp_rpm.exceptions import ApiException -from pulp_rpm.tests.functional.utils import gen_rpm_remote from pulp_rpm.tests.functional.constants import ( RPM_SIGNED_FIXTURE_URL, RPM_UNSIGNED_FIXTURE_URL, @@ -52,14 +51,13 @@ def test_rbac_repositories(gen_user, rpm_repository_factory, rpm_repository_api, with user_creator: repo_data = repo.to_dict() repo_data.update(name="rpm_repo_test_modify") - response = rpm_repository_api.update(repo_data["pulp_href"], repo_data) - monitor_task(response.task) + monitor_task(rpm_repository_api.update(repo.pulp_href, repo_data).task) assert rpm_repository_api.read(repo.pulp_href).name == "rpm_repo_test_modify" with user_no, pytest.raises(ApiException) as exc: repo_data = repo.to_dict() repo_data.update(name="rpm_repo_test_modify_without_perms") - rpm_repository_api.update(repo_data["pulp_href"], repo_data) + rpm_repository_api.update(repo.pulp_href, repo_data) # Here is response `404` as user doesn't have even permission to retrieve repo data # so pulp response with not found instead `access denied` to not expose it exists assert exc.value.status == 404 @@ -67,7 +65,7 @@ def test_rbac_repositories(gen_user, 
rpm_repository_factory, rpm_repository_api, with user_viewer, pytest.raises(ApiException) as exc: repo_data = repo.to_dict() repo_data.update(name="rpm_repo_test_modify_with_view_perms") - rpm_repository_api.update(repo_data["pulp_href"], repo_data) + rpm_repository_api.update(repo.pulp_href, repo_data) # Fails with '403' as a repo can be seen but not updated. assert exc.value.status == 403 @@ -89,7 +87,12 @@ def test_rbac_repositories(gen_user, rpm_repository_factory, rpm_repository_api, @pytest.mark.parallel def test_rbac_remotes_and_sync( - gen_user, rpm_rpmremote_api, rpm_repository_api, rpm_repository_factory, monitor_task + gen_user, + rpm_rpmremote_api, + rpm_repository_api, + rpm_repository_factory, + rpm_rpmremote_factory, + monitor_task, ): """ Test creation of remotes with user with permissions and without. @@ -106,23 +109,23 @@ def test_rbac_remotes_and_sync( user_no = gen_user(model_roles=["rpm.rpmrepository_creator"]) remote = None - remote_data = gen_rpm_remote(RPM_SIGNED_FIXTURE_URL) # Create with user_no, pytest.raises(ApiException) as exc: - rpm_rpmremote_api.create(remote_data) + rpm_rpmremote_factory(url=RPM_SIGNED_FIXTURE_URL) assert exc.value.status == 403 with user_viewer, pytest.raises(ApiException) as exc: - rpm_rpmremote_api.create(remote_data) + rpm_rpmremote_factory(url=RPM_SIGNED_FIXTURE_URL) assert exc.value.status == 403 with user_creator: - remote = rpm_rpmremote_api.create(remote_data) + remote = rpm_rpmremote_factory(url=RPM_SIGNED_FIXTURE_URL) assert rpm_rpmremote_api.list(name=remote.name).count == 1 # Update - remote_data_update = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL) + remote_data_update = remote.to_dict() + remote_data_update["url"] = RPM_UNSIGNED_FIXTURE_URL with user_no, pytest.raises(ApiException) as exc: rpm_rpmremote_api.update(remote.pulp_href, remote_data_update) @@ -181,7 +184,7 @@ def test_rbac_remotes_and_sync( @pytest.mark.parallel -def test_rbac_acs(gen_user, rpm_acs_api, rpm_rpmremote_api, monitor_task): +def 
test_rbac_acs(gen_user, rpm_acs_api, rpm_rpmremote_api, rpm_rpmremote_factory, monitor_task): """Test RPM ACS CRUD.""" user_creator = gen_user( model_roles=[ @@ -202,8 +205,7 @@ def test_rbac_acs(gen_user, rpm_acs_api, rpm_rpmremote_api, monitor_task): ) acs = None - remote_data = gen_rpm_remote(policy="on_demand") - remote = rpm_rpmremote_api.create(remote_data) + remote = rpm_rpmremote_factory(policy="on_demand") acs_data = { "name": str(uuid.uuid4()), @@ -229,15 +231,15 @@ def test_rbac_acs(gen_user, rpm_acs_api, rpm_rpmremote_api, monitor_task): assert exc.value.status == 404 with user_viewer, pytest.raises(ApiException) as exc: - acs_to_update = rpm_acs_api.read(acs.pulp_href) - acs_to_update.paths[0] = "files/" - rpm_acs_api.update(acs_to_update.pulp_href, acs_to_update) + acs_to_update = rpm_acs_api.read(acs.pulp_href).to_dict() + acs_to_update["paths"][0] = "files/" + rpm_acs_api.update(acs.pulp_href, acs_to_update) assert exc.value.status == 403 with user_creator: - acs_to_update = rpm_acs_api.read(acs.pulp_href) - acs_to_update.paths[0] = "files/" - response = rpm_acs_api.update(acs_to_update.pulp_href, acs_to_update) + acs_to_update = rpm_acs_api.read(acs.pulp_href).to_dict() + acs_to_update["paths"][0] = "files/" + response = rpm_acs_api.update(acs.pulp_href, acs_to_update) monitor_task(response.task) assert rpm_acs_api.list(name=acs.name).count == 1 assert "files/" in rpm_acs_api.read(acs.pulp_href).paths @@ -359,9 +361,9 @@ def test_rbac_distribution( assert distribution.publication == publication.pulp_href # Update - dist_data_to_update = rpm_distribution_api.read(distribution.pulp_href) + dist_data_to_update = rpm_distribution_api.read(distribution.pulp_href).to_dict() new_name = str(uuid.uuid4()) - dist_data_to_update.name = new_name + dist_data_to_update["name"] = new_name with user_no, pytest.raises(ApiException) as exc: rpm_distribution_api.update(distribution.pulp_href, dist_data_to_update) @@ -407,6 +409,7 @@ def test_rbac_content_scoping( 
rpm_repository_api, rpm_repository_factory, rpm_rpmremote_api, + rpm_rpmremote_factory, monitor_task, ): """ @@ -424,11 +427,10 @@ def test_rbac_content_scoping( user_no = gen_user(model_roles=["rpm.rpmrepository_creator"]) remote = None - remote_data = gen_rpm_remote(RPM_SIGNED_FIXTURE_URL) # Create with user_creator: - remote = rpm_rpmremote_api.create(remote_data) + remote = rpm_rpmremote_factory(url=RPM_SIGNED_FIXTURE_URL) assert rpm_rpmremote_api.list().count == 1 # Sync the remote @@ -442,25 +444,25 @@ def test_rbac_content_scoping( def _assert_listed_content(): packages_count = rpm_package_api.list(repository_version=repo.latest_version_href).count - assert RPM_FIXTURE_SUMMARY["rpm.package"] == packages_count + assert RPM_FIXTURE_SUMMARY["rpm.package"]["count"] == packages_count advisories_count = rpm_advisory_api.list(repository_version=repo.latest_version_href).count - assert RPM_FIXTURE_SUMMARY["rpm.advisory"] == advisories_count + assert RPM_FIXTURE_SUMMARY["rpm.advisory"]["count"] == advisories_count package_categories_count = rpm_package_category_api.list( repository_version=repo.latest_version_href ).count - assert RPM_FIXTURE_SUMMARY["rpm.packagecategory"] == package_categories_count + assert RPM_FIXTURE_SUMMARY["rpm.packagecategory"]["count"] == package_categories_count package_groups_count = rpm_package_groups_api.list( repository_version=repo.latest_version_href ).count - assert RPM_FIXTURE_SUMMARY["rpm.packagegroup"] == package_groups_count + assert RPM_FIXTURE_SUMMARY["rpm.packagegroup"]["count"] == package_groups_count package_lang_packs_count = rpm_package_lang_packs_api.list( repository_version=repo.latest_version_href ).count - assert RPM_FIXTURE_SUMMARY["rpm.packagelangpacks"] == package_lang_packs_count + assert RPM_FIXTURE_SUMMARY["rpm.packagelangpacks"]["count"] == package_lang_packs_count # Test content visibility # TODO: modules diff --git a/pulp_rpm/tests/functional/api/test_retention_policy.py 
b/pulp_rpm/tests/functional/api/test_retention_policy.py index 70e32f48f..41ba36002 100644 --- a/pulp_rpm/tests/functional/api/test_retention_policy.py +++ b/pulp_rpm/tests/functional/api/test_retention_policy.py @@ -22,6 +22,7 @@ def test_sync_with_retention( rpm_repository_version_api, rpm_package_api, monitor_task, + get_content_summary, ): """Verify functionality with sync. @@ -38,13 +39,12 @@ def test_sync_with_retention( 7. Assert the repository version we end with has only one version of each package. """ repo, remote, task = init_and_sync(policy="on_demand", optimize=False, return_task=True) + summary = get_content_summary(repo) # Test that, by default, everything is retained / nothing is tossed out. - version = rpm_repository_version_api.read(repo.latest_version_href) - present = {k: v["count"] for k, v in version.content_summary.present.items()} - added = {k: v["count"] for k, v in version.content_summary.added.items()} - assert present == RPM_FIXTURE_SUMMARY - assert added == RPM_FIXTURE_SUMMARY + assert summary["present"] == RPM_FIXTURE_SUMMARY + assert summary["added"] == RPM_FIXTURE_SUMMARY + # Test that the # of packages processed is correct reports = get_progress_reports_by_code(task) assert reports["sync.parsing.packages"].total == RPM_PACKAGE_COUNT @@ -82,6 +82,7 @@ def test_sync_with_retention_and_modules( rpm_repository_api, rpm_repository_version_api, monitor_task, + get_content_summary, ): """Verify functionality with sync. @@ -105,11 +106,9 @@ def test_sync_with_retention_and_modules( ) # Test that, by default, everything is retained / nothing is tossed out. 
- version = rpm_repository_version_api.read(repo.latest_version_href) - present = {k: v["count"] for k, v in version.content_summary.present.items()} - added = {k: v["count"] for k, v in version.content_summary.added.items()} - assert present == RPM_MODULAR_STATIC_FIXTURE_SUMMARY - assert added == RPM_MODULAR_STATIC_FIXTURE_SUMMARY + summary = get_content_summary(repo) + assert summary["present"] == RPM_MODULAR_STATIC_FIXTURE_SUMMARY + assert summary["added"] == RPM_MODULAR_STATIC_FIXTURE_SUMMARY # Test that the # of packages processed is correct reports = get_progress_reports_by_code(task) assert reports["sync.parsing.packages"].total == RPM_MODULAR_PACKAGE_COUNT diff --git a/pulp_rpm/tests/functional/api/test_sync.py b/pulp_rpm/tests/functional/api/test_sync.py index 103606df3..034d2a8ef 100644 --- a/pulp_rpm/tests/functional/api/test_sync.py +++ b/pulp_rpm/tests/functional/api/test_sync.py @@ -9,15 +9,6 @@ from django.utils.dateparse import parse_datetime from pulpcore.tests.functional.utils import PulpTaskError -from pulp_smash.pulp3.utils import ( - gen_repo, - get_added_content_summary, - get_added_content, - get_content, - get_content_summary, - get_removed_content, - wget_download_on_host, -) from pulp_rpm.tests.functional.constants import ( AMAZON_MIRROR, @@ -71,55 +62,51 @@ RPM_MODULEMDS_DATA, RPM_ZSTD_METADATA_FIXTURE_URL, ) -from pulp_rpm.tests.functional.utils import gen_rpm_remote -from pulp_rpm.tests.functional.utils import set_up_module as setUpModule # noqa:F401 - from pulpcore.client.pulp_rpm import RpmRepositorySyncURL from pulpcore.client.pulp_rpm.exceptions import ApiException @pytest.mark.parallel -def test_sync(init_and_sync): +def test_sync(init_and_sync, get_content_summary): """Sync repositories with the rpm plugin.""" # Create a remote (default) and empty repository + # Then Assert that it's synced properly repository, remote = init_and_sync() - - # Assert that it's synced properly - latest_version_href = repository.latest_version_href - 
assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + original_latest_version_href = repository.latest_version_href + assert get_content_summary(repository)["present"] == RPM_FIXTURE_SUMMARY # Sync the same repository again + # Then assert that the repository has not changed, the latest version stays the same repository, _ = init_and_sync(repository=repository, remote=remote) - - # Assert that the repository has not changed, the latest version stays the same - assert latest_version_href == repository.latest_version_href - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + assert original_latest_version_href == repository.latest_version_href + assert get_content_summary(repository)["present"] == RPM_FIXTURE_SUMMARY @pytest.mark.parallel -def test_sync_zstd(init_and_sync): +def test_sync_zstd(init_and_sync, get_content_summary): """Test syncing non-gzip metadata.""" repository, _ = init_and_sync(url=RPM_ZSTD_METADATA_FIXTURE_URL) - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_FIXTURE_SUMMARY @pytest.mark.parallel -def test_sync_local(init_and_sync, tmpdir): +def test_sync_local(init_and_sync, tmpdir, wget_recursive_download_on_host): """Test syncing from the local filesystem.""" - wget_download_on_host(RPM_UNSIGNED_FIXTURE_URL, str(tmpdir)) + wget_recursive_download_on_host(RPM_UNSIGNED_FIXTURE_URL, str(tmpdir)) init_and_sync(url=f"file://{tmpdir}/rpm-unsigned/") @pytest.mark.parallel -def test_sync_from_valid_mirror_list_feed(init_and_sync): +def test_sync_from_valid_mirror_list_feed(init_and_sync, get_content_summary): """Sync RPM content from a mirror list feed which contains a valid 
remote URL.""" repository, _ = init_and_sync(url=RPM_MIRROR_LIST_GOOD_FIXTURE_URL) - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_FIXTURE_SUMMARY @pytest.mark.parallel @@ -134,20 +121,21 @@ def test_sync_from_invalid_mirror_list_feed(init_and_sync): with pytest.raises(PulpTaskError) as exc: init_and_sync(url=RPM_MIRROR_LIST_BAD_FIXTURE_URL) - assert "An invalid remote URL was provided" in exc.value.task.to_dict()["error"]["description"] + assert "An invalid remote URL was provided" in exc.value.task.error["description"] @pytest.mark.parallel -def test_sync_modular(init_and_sync): +def test_sync_modular(init_and_sync, get_content_summary): """Sync RPM modular content.""" repository, _ = init_and_sync(url=RPM_MODULAR_FIXTURE_URL) - assert get_content_summary(repository.to_dict()) == RPM_MODULAR_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_MODULAR_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_MODULAR_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_MODULAR_FIXTURE_SUMMARY @pytest.mark.parallel -def test_checksum_constraint(init_and_sync): +def test_checksum_constraint(init_and_sync, get_content_summary): """Verify checksum constraint test case. 
Do the following: @@ -161,14 +149,14 @@ def test_checksum_constraint(init_and_sync): """ for url in [RPM_REFERENCES_UPDATEINFO_URL, RPM_UNSIGNED_FIXTURE_URL]: repository, _ = init_and_sync(url=url) - - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_FIXTURE_SUMMARY @pytest.mark.parallel @pytest.mark.parametrize("policy", ["on_demand", "immediate"]) -def test_kickstart(policy, init_and_sync, rpm_content_distribution_trees_api): +def test_kickstart(policy, init_and_sync, rpm_content_distribution_trees_api, get_content_summary): """Sync repositories with the rpm plugin. Do the following: @@ -182,14 +170,14 @@ def test_kickstart(policy, init_and_sync, rpm_content_distribution_trees_api): 6. Assert that the same number of packages are present. 
""" repository, remote = init_and_sync(url=RPM_KICKSTART_FIXTURE_URL, policy=policy) - latest_version_href = repository.latest_version_href - assert get_content_summary(repository.to_dict()) == RPM_KICKSTART_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_KICKSTART_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_KICKSTART_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_KICKSTART_FIXTURE_SUMMARY repository, _ = init_and_sync(repository=repository, remote=remote) - - assert get_content_summary(repository.to_dict()) == RPM_KICKSTART_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_KICKSTART_FIXTURE_SUMMARY assert latest_version_href == repository.latest_version_href distribution_tree = rpm_content_distribution_trees_api.list( @@ -198,7 +186,7 @@ def test_kickstart(policy, init_and_sync, rpm_content_distribution_trees_api): assert "RHEL" == distribution_tree.release_short -def test_mutated_packages(init_and_sync): +def test_mutated_packages(init_and_sync, get_content_summary, get_content): """Sync two copies of the same packages. Make sure we end up with only one copy. @@ -216,9 +204,9 @@ def test_mutated_packages(init_and_sync): 7. Assert that the packages have changed since the last sync. """ repository, _ = init_and_sync(url=RPM_UNSIGNED_FIXTURE_URL) - - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_FIXTURE_SUMMARY # Save the copy of the original packages. 
original_packages = { @@ -229,21 +217,18 @@ def test_mutated_packages(init_and_sync): content["release"], content["arch"], ): content - for content in get_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME] + for content in get_content(repository)["present"][RPM_PACKAGE_CONTENT_NAME] } # Create a remote with a different test fixture with the same NEVRA but # different digests. repository, _ = init_and_sync(repository=repository, url=RPM_SIGNED_FIXTURE_URL) + content_summary = get_content_summary(repository) # In case of "duplicates" the most recent one is chosen, so the old # package is removed from and the new one is added to a repo version. - assert ( - len(get_added_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME]) - ) == RPM_PACKAGE_COUNT - assert ( - len(get_removed_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME]) - ) == RPM_PACKAGE_COUNT + assert (content_summary["added"][RPM_PACKAGE_CONTENT_NAME]["count"]) == RPM_PACKAGE_COUNT + assert (content_summary["removed"][RPM_PACKAGE_CONTENT_NAME]["count"]) == RPM_PACKAGE_COUNT # Test that the packages have been modified. mutated_packages = { @@ -254,14 +239,14 @@ def test_mutated_packages(init_and_sync): content["release"], content["arch"], ): content - for content in get_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME] + for content in get_content(repository)["present"][RPM_PACKAGE_CONTENT_NAME] } for nevra in original_packages: assert original_packages[nevra]["pkgId"] != mutated_packages[nevra]["pkgId"] -def test_sync_diff_checksum_packages(init_and_sync): +def test_sync_diff_checksum_packages(init_and_sync, get_content, get_content_summary): """Sync two fixture content with same NEVRA and different checksum. Make sure we end up with the most recently synced content. 
@@ -277,9 +262,10 @@ def test_sync_diff_checksum_packages(init_and_sync): repository, _ = init_and_sync(url=RPM_UNSIGNED_FIXTURE_URL, policy="on_demand") repository, _ = init_and_sync(repository=repository, url=RPM_SHA512_FIXTURE_URL) + content_in_repo = get_content(repository) - added_content = get_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME] - removed_content = get_removed_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME] + added_content = content_in_repo["added"][RPM_PACKAGE_CONTENT_NAME] + removed_content = content_in_repo["removed"][RPM_PACKAGE_CONTENT_NAME] # In case of "duplicates" the most recent one is chosen, so the old # package is removed from and the new one is added to a repo version. @@ -293,7 +279,7 @@ def test_sync_diff_checksum_packages(init_and_sync): @pytest.mark.parallel -def test_mutated_advisory_metadata(init_and_sync): +def test_mutated_advisory_metadata(init_and_sync, get_content_summary, get_content): """Sync two copies of the same Advisory (only description is updated). Make sure we end up with only one copy. @@ -312,25 +298,25 @@ def test_mutated_advisory_metadata(init_and_sync): 7. Assert that the updaterecords have changed since the last sync. 
""" repository, _ = init_and_sync(url=RPM_UNSIGNED_FIXTURE_URL, policy="on_demand") - - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY - assert get_added_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"] == RPM_FIXTURE_SUMMARY original_updaterecords = { content["id"]: content - for content in get_content(repository.to_dict())[RPM_ADVISORY_CONTENT_NAME] + for content in get_content(repository)["present"][RPM_ADVISORY_CONTENT_NAME] } repository, _ = init_and_sync(repository=repository, url=RPM_UPDATED_UPDATEINFO_FIXTURE_URL) - - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY - assert len(get_added_content(repository.to_dict())[RPM_ADVISORY_CONTENT_NAME]) == 4 - assert len(get_removed_content(repository.to_dict())[RPM_ADVISORY_CONTENT_NAME]) == 4 + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY + assert content_summary["added"][RPM_ADVISORY_CONTENT_NAME]["count"] == 4 + assert content_summary["removed"][RPM_ADVISORY_CONTENT_NAME]["count"] == 4 # Test that the updateinfo have been modified. mutated_updaterecords = { content["id"]: content - for content in get_content(repository.to_dict())[RPM_ADVISORY_CONTENT_NAME] + for content in get_content(repository)["present"][RPM_ADVISORY_CONTENT_NAME] } assert mutated_updaterecords != original_updaterecords @@ -346,6 +332,9 @@ def test_optimize( rpm_repository_api, rpm_rpmremote_api, monitor_task, + get_content, + rpm_repository_factory, + rpm_rpmremote_factory, ): """Tests that sync is skipped when no critical parameters of the sync change. 
@@ -377,7 +366,7 @@ def test_optimize( # create a new repo version, sync again, assert not optimized repository = rpm_repository_api.read(repository.pulp_href) - content = choice(get_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME]) + content = choice(get_content(repository)["present"][RPM_PACKAGE_CONTENT_NAME]) response = rpm_repository_api.modify( repository.pulp_href, {"remove_content_units": [content["pulp_href"]]} ) @@ -401,7 +390,7 @@ def test_optimize( assert all(report.code != "sync.was_skipped" for report in task.progress_reports) # create new remote with the same URL and download_policy as the first and run a sync task - new_remote = gen_object_with_cleanup(rpm_rpmremote_api, gen_rpm_remote()) + new_remote = rpm_rpmremote_factory() repository_sync_data = RpmRepositorySyncURL( remote=new_remote.pulp_href, sync_policy="mirror_content_only" ) @@ -451,7 +440,7 @@ def test_optimize( @pytest.mark.parallel -def test_sync_advisory_new_version(init_and_sync): +def test_sync_advisory_new_version(init_and_sync, get_content): """Sync a repository and re-sync with newer version of Advisory. Test if advisory with same ID and pkglist, but newer version is updated. 
@@ -463,15 +452,16 @@ def test_sync_advisory_new_version(init_and_sync): repository, _ = init_and_sync(url=RPM_UNSIGNED_FIXTURE_URL) repository, _ = init_and_sync(repository=repository, url=RPM_ADVISORY_UPDATED_VERSION_URL) + repo_content = get_content(repository) # check if newer version advisory was added and older removed - added_advisories = get_added_content(repository.to_dict())[PULP_TYPE_ADVISORY] + added_advisories = repo_content["added"][PULP_TYPE_ADVISORY] added_advisory = [ advisory["version"] for advisory in added_advisories if advisory["id"] == RPM_ADVISORY_TEST_ID ] - removed_advisories = get_removed_content(repository.to_dict())[PULP_TYPE_ADVISORY] + removed_advisories = repo_content["removed"][PULP_TYPE_ADVISORY] removed_advisory = [ advisory["version"] for advisory in removed_advisories @@ -481,7 +471,7 @@ def test_sync_advisory_new_version(init_and_sync): @pytest.mark.parallel -def test_sync_advisory_old_version(init_and_sync): +def test_sync_advisory_old_version(init_and_sync, get_content): """Sync a repository and re-sync with older version of Advisory. Test if advisory with same ID and pkglist, but older version is not updated. @@ -496,7 +486,7 @@ def test_sync_advisory_old_version(init_and_sync): repository, _ = init_and_sync(repository=repository, url=RPM_UNSIGNED_FIXTURE_URL) repository_version_new = repository.latest_version_href - present_advisories = get_content(repository.to_dict())[PULP_TYPE_ADVISORY] + present_advisories = get_content(repository)["present"][PULP_TYPE_ADVISORY] advisory_version = [ advisory["version"] for advisory in present_advisories @@ -511,22 +501,23 @@ def test_sync_advisory_old_version(init_and_sync): @pytest.mark.parallel -def test_sync_merge_advisories(init_and_sync): +def test_sync_merge_advisories(init_and_sync, get_content): """Sync two advisories with same ID, version and different pkglist. Test if two advisories are merged. 
""" repository, _ = init_and_sync(url=RPM_UNSIGNED_FIXTURE_URL) repository, _ = init_and_sync(repository=repository, url=RPM_ADVISORY_DIFFERENT_PKGLIST_URL) + repo_content = get_content(repository) # check advisories were merged - added_advisories = get_added_content(repository.to_dict())[PULP_TYPE_ADVISORY] + added_advisories = repo_content["added"][PULP_TYPE_ADVISORY] added_advisory_pkglist = [ advisory["pkglist"] for advisory in added_advisories if advisory["id"] == RPM_ADVISORY_TEST_ID ] - removed_advisories = get_removed_content(repository.to_dict())[PULP_TYPE_ADVISORY] + removed_advisories = repo_content["removed"][PULP_TYPE_ADVISORY] removed_advisory_pkglist = [ advisory["pkglist"] for advisory in removed_advisories @@ -569,7 +560,7 @@ def test_sync_advisory_diff_repo(init_and_sync): "ALLOW_AUTOMATIC_UNSAFE_ADVISORY_CONFLICT_RESOLUTION = True (q.v.) " "in your configuration. Advisory id: {}".format(RPM_ADVISORY_TEST_ID) ) - assert error_msg in exc.value.task.to_dict()["error"]["description"] + assert error_msg in exc.value.task.error["description"] @pytest.mark.parallel @@ -607,7 +598,7 @@ def test_sync_advisory_incomplete_pgk_list(init_and_sync): "At least one of the advisories is wrong. 
" "Advisory id: {}".format(RPM_ADVISORY_TEST_ID) ) - assert error_msg in exc.value.task.to_dict()["error"]["description"] + assert error_msg in exc.value.task.error["description"] @pytest.mark.parallel @@ -647,19 +638,21 @@ def test_sync_advisory_no_updated_date(init_and_sync): @pytest.mark.parallel -def test_sync_advisory_updated_update_date(init_and_sync): +def test_sync_advisory_updated_update_date(init_and_sync, get_content): """Test sync advisory with updated update_date.""" repository, _ = init_and_sync(url=RPM_UNSIGNED_FIXTURE_URL) repository, _ = init_and_sync(repository=repository, url=RPM_UPDATED_UPDATEINFO_FIXTURE_URL) + content_summary = get_content(repository) + # check advisories were merged added_advisory_date = [ advisory["updated_date"] - for advisory in get_added_content(repository.to_dict())[PULP_TYPE_ADVISORY] + for advisory in content_summary["added"][PULP_TYPE_ADVISORY] if RPM_ADVISORY_TEST_ID_NEW in advisory["id"] ] removed_advisory_date = [ advisory["updated_date"] - for advisory in get_removed_content(repository.to_dict())[PULP_TYPE_ADVISORY] + for advisory in content_summary["removed"][PULP_TYPE_ADVISORY] if RPM_ADVISORY_TEST_ID_NEW in advisory["id"] ] @@ -667,24 +660,24 @@ def test_sync_advisory_updated_update_date(init_and_sync): @pytest.mark.parallel -def test_sync_advisory_older_update_date(init_and_sync): +def test_sync_advisory_older_update_date(init_and_sync, get_content): """Test sync advisory with older update_date.""" repository, _ = init_and_sync(url=RPM_UPDATED_UPDATEINFO_FIXTURE_URL) + advisory_date = [ advisory["updated_date"] - for advisory in get_content(repository.to_dict())[PULP_TYPE_ADVISORY] + for advisory in get_content(repository)["present"][PULP_TYPE_ADVISORY] if advisory["id"] == RPM_ADVISORY_TEST_ID ] repository, _ = init_and_sync(repository, url=RPM_UNSIGNED_FIXTURE_URL) + repo_content = get_content(repository) advisory_date_new = [ advisory["updated_date"] - for advisory in 
get_content(repository.to_dict())[PULP_TYPE_ADVISORY] + for advisory in repo_content["present"][PULP_TYPE_ADVISORY] if advisory["id"] == RPM_ADVISORY_TEST_ID ] - added_advisories = [ - advisory["id"] for advisory in get_added_content(repository.to_dict())[PULP_TYPE_ADVISORY] - ] + added_advisories = [advisory["id"] for advisory in repo_content["added"][PULP_TYPE_ADVISORY]] # check if advisory is preserved and no advisory with same id was added assert parse_datetime(advisory_date[0]) == parse_datetime(advisory_date_new[0]) @@ -692,27 +685,28 @@ def test_sync_advisory_older_update_date(init_and_sync): @pytest.mark.parallel -def test_sync_repo_metadata_change(init_and_sync): +def test_sync_repo_metadata_change(init_and_sync, get_content): """Sync RPM modular content.""" repository, _ = init_and_sync(url=RPM_CUSTOM_REPO_METADATA_FIXTURE_URL) repository, _ = init_and_sync( repository=repository, url=RPM_CUSTOM_REPO_METADATA_CHANGED_FIXTURE_URL ) + repo_content = get_content(repository) # Check if repository was updated with repository metadata assert repository.latest_version_href.rstrip("/")[-1] == "2" - assert PULP_TYPE_REPOMETADATA in get_added_content(repository.to_dict()) + assert PULP_TYPE_REPOMETADATA in repo_content["added"] @pytest.mark.parallel -def test_sync_modular_static_context(init_and_sync): +def test_sync_modular_static_context(init_and_sync, get_content_summary, get_content): """Sync RPM modular content that includes the new static_context_field.""" repository, _ = init_and_sync(url=RPM_MODULES_STATIC_CONTEXT_FIXTURE_URL) + content_summary = get_content_summary(repository) + summary = content_summary["present"] + added = content_summary["added"] - summary = get_content_summary(repository.to_dict()) - added = get_added_content_summary(repository.to_dict()) - - modules = get_content(repository.to_dict())[PULP_TYPE_MODULEMD] + modules = get_content(repository)["present"][PULP_TYPE_MODULEMD] module_static_contexts = [ (module["name"], module["version"]) 
for module in modules if module["static_context"] ] @@ -723,14 +717,14 @@ def test_sync_modular_static_context(init_and_sync): @pytest.mark.parallel @pytest.mark.parametrize("sync_policy", ["mirror_content_only", "additive"]) -def test_sync_skip_srpm(init_and_sync, sync_policy): +def test_sync_skip_srpm(init_and_sync, sync_policy, get_content): """In mirror_content_only mode, skip_types is allowed.""" repository, _ = init_and_sync( url=SRPM_UNSIGNED_FIXTURE_URL, skip_types=["srpm"], sync_policy=sync_policy ) - - present_package_count = len(get_content(repository.to_dict())[PULP_TYPE_PACKAGE]) - present_advisory_count = len(get_content(repository.to_dict())[PULP_TYPE_ADVISORY]) + present_repo_content = get_content(repository)["present"] + present_package_count = len(present_repo_content[PULP_TYPE_PACKAGE]) + present_advisory_count = len(present_repo_content[PULP_TYPE_ADVISORY]) assert present_package_count == 0 assert present_advisory_count == SRPM_UNSIGNED_FIXTURE_ADVISORY_COUNT @@ -773,7 +767,7 @@ def test_invalid_url(init_and_sync): with pytest.raises(PulpTaskError) as exc: init_and_sync(url="http://i-am-an-invalid-url.com/invalid/") - assert exc.value.task.to_dict()["error"]["description"] is not None + assert exc.value.task.error["description"] is not None @pytest.mark.parallel @@ -783,7 +777,7 @@ def test_invalid_rpm_content(init_and_sync): init_and_sync(url=RPM_INVALID_FIXTURE_URL) for key in ("missing", "filelists.xml"): - assert key in exc.value.task.to_dict()["error"]["description"] + assert key in exc.value.task.error["description"] @pytest.mark.parallel @@ -804,7 +798,7 @@ def test_sync_metadata_with_unsupported_checksum_type(init_and_sync): assert ( "does not contain at least one trusted hasher which " "is specified in the 'ALLOWED_CONTENT_CHECKSUMS'" - ) in exc.value.task.to_dict()["error"]["description"] + ) in exc.value.task.error["description"] @pytest.mark.parallel @@ -828,7 +822,7 @@ def 
test_sync_packages_with_unsupported_checksum_type(init_and_sync): with pytest.raises(PulpTaskError) as exc: init_and_sync(url="https://fixtures.com/packages_with_unsupported_checksum") - error_description = exc.value.task.to_dict()["error"]["description"] + error_description = exc.value.task.error["description"] assert "rpm-with-md5/bear-4.1.noarch.rpm contains forbidden checksum type" in error_description @@ -838,7 +832,7 @@ def test_complete_mirror_with_xml_base_fails(init_and_sync): with pytest.raises(PulpTaskError) as exc: init_and_sync(url=REPO_WITH_XML_BASE_URL, sync_policy="mirror_complete") - error_description = exc.value.task.to_dict()["error"]["description"] + error_description = exc.value.task.error["description"] assert "features which are incompatible with 'mirror' sync" in error_description @@ -857,7 +851,7 @@ def test_complete_mirror_with_external_location_href_fails(init_and_sync): url="https://fixtures.com/repo_with_external_data", sync_policy="mirror_complete" ) - error_description = exc.value.task.to_dict()["error"]["description"] + error_description = exc.value.task.error["description"] assert "features which are incompatible with 'mirror' sync" in error_description @@ -875,7 +869,7 @@ def test_complete_mirror_with_delta_metadata_fails(init_and_sync): pass # init_and_sync(url=DRPM_UNSIGNED_FIXTURE_URL, sync_policy="mirror_complete") - error_description = exc.value.task.to_dict()["error"]["description"] + error_description = exc.value.task.error["description"] assert "features which are incompatible with 'mirror' sync" in error_description @@ -884,14 +878,14 @@ def test_mirror_and_sync_policy_provided_simultaneously_fails( gen_object_with_cleanup, rpm_repository_api, rpm_rpmremote_api, + rpm_repository_factory, + rpm_rpmremote_factory, ): """ Test that syncing fails if both the "mirror" and "sync_policy" params are provided. 
""" - repository = gen_object_with_cleanup(rpm_repository_api, gen_repo()) - remote = gen_object_with_cleanup( - rpm_rpmremote_api, gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL, policy="on_demand") - ) + repository = rpm_repository_factory() + remote = rpm_rpmremote_factory(url=RPM_UNSIGNED_FIXTURE_URL, policy="on_demand") repository_sync_data = RpmRepositorySyncURL( remote=remote.pulp_href, sync_policy="mirror_complete", mirror=True @@ -911,7 +905,7 @@ def test_sync_skip_srpm_fails_mirror_complete(init_and_sync): @pytest.mark.parallel -def test_core_metadata(init_and_sync, rpm_package_api): +def test_core_metadata(init_and_sync, rpm_package_api, get_content): """Test that the metadata returned by the Pulp API post-sync matches what we expect. Do the following: @@ -925,11 +919,11 @@ def test_core_metadata(init_and_sync, rpm_package_api): package = rpm_package_api.list( name=RPM_COMPLEX_PACKAGE_DATA["name"], repository_version=repository.latest_version_href ).results[0] - package = package.to_dict() + package_dict = package.model_dump() # sort file and changelog metadata - package["changelogs"].sort(reverse=True) - for metadata in [package, RPM_COMPLEX_PACKAGE_DATA]: + package_dict["changelogs"].sort(reverse=True) + for metadata in [package_dict, RPM_COMPLEX_PACKAGE_DATA]: # the list-of-lists can't be sorted easily so we produce a string representation files = [] for f in metadata["files"]: @@ -942,14 +936,14 @@ def test_core_metadata(init_and_sync, rpm_package_api): # TODO: figure out how to un-ignore "time_file" without breaking the tests diff = dictdiffer.diff( - package, + package_dict, RPM_COMPLEX_PACKAGE_DATA, ignore={"time_file", "pulp_created", "pulp_last_updated", "pulp_href", "prn"}, ) assert list(diff) == [], list(diff) # assert no package is marked modular - for pkg in get_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME]: + for pkg in get_content(repository)["present"][RPM_PACKAGE_CONTENT_NAME]: assert pkg["is_modular"] is False @@ -969,9 +963,6 @@ 
def test_treeinfo_metadata(init_and_sync, rpm_content_distribution_trees_api): repository_version=repository.latest_version_href ).results[0] distribution_tree = distribution_tree.to_dict() - # delete pulp-specific metadata - distribution_tree.pop("pulp_href") - distribution_tree.pop("prn") # sort kickstart metadata so that we can compare the dicts properly for d in [distribution_tree, RPM_KICKSTART_DATA]: @@ -993,6 +984,7 @@ def test_modular_metadata( rpm_modulemd_defaults_api, rpm_modulemd_obsoletes_api, delete_orphans_pre, + get_content, ): """Test that the metadata returned by the Pulp API post-sync matches what we expect. @@ -1005,17 +997,17 @@ def test_modular_metadata( repository, _ = init_and_sync(url=RPM_MODULAR_FIXTURE_URL, policy="on_demand") modules = [ - md.to_dict() + md.model_dump() for md in rpm_modulemd_api.list(repository_version=repository.latest_version_href).results ] module_defaults = [ - md.to_dict() + md.model_dump() for md in rpm_modulemd_defaults_api.list( repository_version=repository.latest_version_href ).results ] module_obsoletes = [ - md.to_dict() + md.model_dump() for md in rpm_modulemd_obsoletes_api.list( repository_version=repository.latest_version_href ).results @@ -1057,12 +1049,12 @@ def module_obsolete_key(m): assert list(diff) == [], list(diff) # assert all package from modular repo is marked as modular - for pkg in get_content(repository.to_dict())[RPM_PACKAGE_CONTENT_NAME]: + for pkg in get_content(repository)["present"][RPM_PACKAGE_CONTENT_NAME]: assert pkg["is_modular"] is True @pytest.mark.parallel -def test_additive_mode(init_and_sync): +def test_additive_mode(init_and_sync, get_content): """Test of additive mode. 1. 
Create repository, remote and sync it @@ -1077,8 +1069,8 @@ def test_additive_mode(init_and_sync): sync_policy="additive", ) - present_package_count = len(get_content(repository.to_dict())[PULP_TYPE_PACKAGE]) - present_advisory_count = len(get_content(repository.to_dict())[PULP_TYPE_ADVISORY]) + present_package_count = len(get_content(repository)["present"][PULP_TYPE_PACKAGE]) + present_advisory_count = len(get_content(repository)["present"][PULP_TYPE_ADVISORY]) assert (RPM_PACKAGE_COUNT + SRPM_UNSIGNED_FIXTURE_PACKAGE_COUNT) == present_package_count assert (RPM_ADVISORY_COUNT + SRPM_UNSIGNED_FIXTURE_ADVISORY_COUNT) == present_advisory_count @@ -1086,7 +1078,7 @@ def test_additive_mode(init_and_sync): @pytest.mark.parallel @pytest.mark.parametrize("sync_policy", ["mirror_complete", "mirror_content_only"]) -def test_mirror_mode(sync_policy, init_and_sync, rpm_publication_api): +def test_mirror_mode(sync_policy, init_and_sync, rpm_publication_api, get_content_summary): """Test of mirror mode.""" repository, remote = init_and_sync(url=SRPM_UNSIGNED_FIXTURE_URL, policy="on_demand") @@ -1099,7 +1091,8 @@ def test_mirror_mode(sync_policy, init_and_sync, rpm_publication_api): # check that one publication was created w/ no repository versions # and only the new content is present - assert get_content_summary(repository.to_dict()) == RPM_FIXTURE_SUMMARY + content_summary = get_content_summary(repository) + assert content_summary["present"] == RPM_FIXTURE_SUMMARY assert repository.latest_version_href == f"{repository.pulp_href}versions/2/" if sync_policy == "mirror_complete": diff --git a/pulp_rpm/tests/functional/conftest.py b/pulp_rpm/tests/functional/conftest.py index 60cec36b4..46765c77e 100644 --- a/pulp_rpm/tests/functional/conftest.py +++ b/pulp_rpm/tests/functional/conftest.py @@ -118,13 +118,13 @@ def signed_artifact(pulpcore_bindings, tmp_path): sha256=hashlib.sha256(data).hexdigest(), limit=1 ) try: - return artifacts.results[0].to_dict() + return 
artifacts.results[0] except IndexError: pass temp_file = tmp_path / str(uuid.uuid4()) temp_file.write_bytes(data) - return pulpcore_bindings.ArtifactsApi.create(temp_file).to_dict() + return pulpcore_bindings.ArtifactsApi.create(str(temp_file)) @pytest.fixture @@ -139,7 +139,7 @@ def _rpm_artifact_factory(url=RPM_SIGNED_URL, pulp_domain=None): if not pulp_domain_enabled: raise RuntimeError("Server does not have domains enabled.") kwargs["pulp_domain"] = pulp_domain - return gen_object_with_cleanup(pulpcore_bindings.ArtifactsApi, temp_file, **kwargs) + return gen_object_with_cleanup(pulpcore_bindings.ArtifactsApi, str(temp_file), **kwargs) return _rpm_artifact_factory diff --git a/pulp_rpm/tests/functional/constants.py b/pulp_rpm/tests/functional/constants.py index 2ac5a3487..5547539cb 100644 --- a/pulp_rpm/tests/functional/constants.py +++ b/pulp_rpm/tests/functional/constants.py @@ -3,20 +3,30 @@ # flake8: noqa from urllib.parse import urljoin -from pulp_smash import config -from pulp_smash.pulp3.constants import ( - BASE_CONTENT_PATH, - BASE_DISTRIBUTION_PATH, - BASE_REPO_PATH, - BASE_PATH, - BASE_PUBLICATION_PATH, - BASE_REMOTE_PATH, -) +import django + +django.setup() +from django.conf import settings + +# start from-pulp-smash +BASE_PATH = settings.V3_API_ROOT or "/pulp/api/v3/" + +BASE_CONTENT_PATH = urljoin(BASE_PATH, "content/") + +BASE_DISTRIBUTION_PATH = urljoin(BASE_PATH, "distributions/") + +BASE_REPO_PATH = urljoin(BASE_PATH, "repositories/") + +BASE_PUBLICATION_PATH = urljoin(BASE_PATH, "publications/") + +BASE_REMOTE_PATH = urljoin(BASE_PATH, "remotes/") + +# end from-pulp-smash RPM_COPY_PATH = urljoin(BASE_PATH, "rpm/copy/") """The URL used for copying RPM content between repos.""" -PULP_FIXTURES_BASE_URL = config.get_config().get_fixtures_url() +PULP_FIXTURES_BASE_URL = "https://fixtures.pulpproject.org/" DOWNLOAD_POLICIES = ["immediate", "on_demand", "streamed"] @@ -113,11 +123,11 @@ """The number of updated record units.""" RPM_FIXTURE_SUMMARY = 
{ - RPM_PACKAGE_CONTENT_NAME: RPM_PACKAGE_COUNT, - RPM_ADVISORY_CONTENT_NAME: RPM_ADVISORY_COUNT, - RPM_PACKAGECATEGORY_CONTENT_NAME: RPM_PACKAGECATEGORY_COUNT, - RPM_PACKAGEGROUP_CONTENT_NAME: RPM_PACKAGEGROUP_COUNT, - RPM_PACKAGELANGPACKS_CONTENT_NAME: RPM_PACKAGELANGPACKS_COUNT, + RPM_PACKAGE_CONTENT_NAME: {"count": RPM_PACKAGE_COUNT}, + RPM_ADVISORY_CONTENT_NAME: {"count": RPM_ADVISORY_COUNT}, + RPM_PACKAGECATEGORY_CONTENT_NAME: {"count": RPM_PACKAGECATEGORY_COUNT}, + RPM_PACKAGEGROUP_CONTENT_NAME: {"count": RPM_PACKAGEGROUP_COUNT}, + RPM_PACKAGELANGPACKS_CONTENT_NAME: {"count": RPM_PACKAGELANGPACKS_COUNT}, } """The breakdown of how many of each type of content unit are present in the standard repositories, i.e. :data:`RPM_SIGNED_FIXTURE_URL` and @@ -164,11 +174,11 @@ """The number of modules-default present on `RPM_MODULAR_FIXTURE_URL`.""" RPM_MODULAR_FIXTURE_SUMMARY = { - RPM_PACKAGE_CONTENT_NAME: RPM_MODULAR_PACKAGE_COUNT, - RPM_MODULAR_MODULES_CONTENT_NAME: RPM_MODULAR_MODULES_COUNT, - RPM_MODULAR_DEFAULTS_CONTENT_NAME: RPM_MODULAR_DEFAULTS_COUNT, - RPM_MODULES_OBSOLETE_CONTENT_NAME: RPM_MODULES_OBSOLETE_COUNT, - RPM_ADVISORY_CONTENT_NAME: RPM_MODULAR_ADVISORY_COUNT, + RPM_PACKAGE_CONTENT_NAME: {"count": RPM_MODULAR_PACKAGE_COUNT}, + RPM_MODULAR_MODULES_CONTENT_NAME: {"count": RPM_MODULAR_MODULES_COUNT}, + RPM_MODULAR_DEFAULTS_CONTENT_NAME: {"count": RPM_MODULAR_DEFAULTS_COUNT}, + RPM_MODULES_OBSOLETE_CONTENT_NAME: {"count": RPM_MODULES_OBSOLETE_COUNT}, + RPM_ADVISORY_CONTENT_NAME: {"count": RPM_MODULAR_ADVISORY_COUNT}, } RPM_MODULES_STATIC_CONTEXT_FIXTURE_URL = urljoin( @@ -177,11 +187,11 @@ """The URL to a modular RPM repository that uses the static_context field.""" RPM_MODULAR_STATIC_FIXTURE_SUMMARY = { - RPM_PACKAGE_CONTENT_NAME: RPM_MODULAR_PACKAGE_COUNT, - RPM_MODULAR_MODULES_CONTENT_NAME: RPM_MODULAR_MODULES_COUNT, - RPM_MODULAR_DEFAULTS_CONTENT_NAME: RPM_MODULAR_DEFAULTS_COUNT, - RPM_MODULES_OBSOLETE_CONTENT_NAME: RPM_MODULES_OBSOLETE_COUNT, - 
RPM_ADVISORY_CONTENT_NAME: RPM_MODULAR_ADVISORY_COUNT, + RPM_PACKAGE_CONTENT_NAME: {"count": RPM_MODULAR_PACKAGE_COUNT}, + RPM_MODULAR_MODULES_CONTENT_NAME: {"count": RPM_MODULAR_MODULES_COUNT}, + RPM_MODULAR_DEFAULTS_CONTENT_NAME: {"count": RPM_MODULAR_DEFAULTS_COUNT}, + RPM_MODULES_OBSOLETE_CONTENT_NAME: {"count": RPM_MODULES_OBSOLETE_COUNT}, + RPM_ADVISORY_CONTENT_NAME: {"count": RPM_MODULAR_ADVISORY_COUNT}, } """The breakdown of how many of each type of content unit are present in the @@ -458,12 +468,12 @@ RPM_KICKSTART_COUNT = 1 RPM_KICKSTART_FIXTURE_SUMMARY = { - RPM_KICKSTART_CONTENT_NAME: RPM_KICKSTART_COUNT, - RPM_PACKAGE_CONTENT_NAME: 1, - RPM_PACKAGECATEGORY_CONTENT_NAME: 1, - RPM_PACKAGEENVIRONMENT_CONTENT_NAME: 1, - RPM_PACKAGEGROUP_CONTENT_NAME: 1, - RPM_PACKAGELANGPACKS_CONTENT_NAME: 1, + RPM_KICKSTART_CONTENT_NAME: {"count": RPM_KICKSTART_COUNT}, + RPM_PACKAGE_CONTENT_NAME: {"count": 1}, + RPM_PACKAGECATEGORY_CONTENT_NAME: {"count": 1}, + RPM_PACKAGEENVIRONMENT_CONTENT_NAME: {"count": 1}, + RPM_PACKAGEGROUP_CONTENT_NAME: {"count": 1}, + RPM_PACKAGELANGPACKS_CONTENT_NAME: {"count": 1}, } RPM_KICKSTART_FIXTURE_SIZE = 9917733 @@ -2280,3 +2290,21 @@ } ] }""" # noqa + +RPM_CONTENT_NAMES = [ + # Package + RPM_PACKAGE_CONTENT_NAME, + # UpdateRecord/Advisory + RPM_ADVISORY_CONTENT_NAME, + # Kickstart + RPM_KICKSTART_CONTENT_NAME, + # Groups + RPM_PACKAGECATEGORY_CONTENT_NAME, + RPM_PACKAGEENVIRONMENT_CONTENT_NAME, + RPM_PACKAGEGROUP_CONTENT_NAME, + RPM_PACKAGELANGPACKS_CONTENT_NAME, + # Modularity + RPM_MODULAR_MODULES_CONTENT_NAME, + RPM_MODULAR_DEFAULTS_CONTENT_NAME, + RPM_MODULES_OBSOLETE_CONTENT_NAME, +] diff --git a/pulp_rpm/tests/functional/content_handler/test_config_repo.py b/pulp_rpm/tests/functional/content_handler/test_config_repo.py index e6f11e916..c657144c4 100644 --- a/pulp_rpm/tests/functional/content_handler/test_config_repo.py +++ b/pulp_rpm/tests/functional/content_handler/test_config_repo.py @@ -44,14 +44,20 @@ def 
test_config_repo_auto_distribute(
     """Whether config.repo is properly served using auto-distribute."""
     repo, pub, dist = setup_empty_distribution
 
-    body = {"repository": repo.pulp_href, "publication": None}
+    # NOTE(core-3.70):
+    # If generate_repo_config=True isn't passed here, the default of False is used, even
+    # though the repository previously had it set to True.
+    body = {"repository": repo.pulp_href, "publication": None, "generate_repo_config": True}
     monitor_task(rpm_distribution_api.partial_update(dist.pulp_href, body).task)
 
     # Check that distribution is now using repository to auto-distribute
     dist = rpm_distribution_api.read(dist.pulp_href)
     assert repo.pulp_href == dist.repository
     assert dist.publication is None
 
-    content = requests.get(f"{dist.base_url}config.repo").content
+    response = requests.get(f"{dist.base_url}config.repo")
+    response.raise_for_status()
+
+    content = response.content
     assert bytes(f"[{dist.name}]\n", "utf-8") in content
     assert bytes(f"baseurl={dist.base_url}\n", "utf-8") in content
     assert bytes("gpgcheck=0\n", "utf-8") in content
diff --git a/pulp_rpm/tests/functional/utils.py b/pulp_rpm/tests/functional/utils.py
index 41823b874..7da5cf71e 100644
--- a/pulp_rpm/tests/functional/utils.py
+++ b/pulp_rpm/tests/functional/utils.py
@@ -3,75 +3,15 @@
 import gzip
 import os
 import subprocess
-from io import StringIO
-from functools import partial
-from unittest import SkipTest
 
 import pyzstd
 import requests
 
-from pulp_smash import api, cli, config, selectors
-from pulp_smash.pulp3.utils import gen_remote, get_content, require_pulp_3, require_pulp_plugins
-
 from pulp_rpm.tests.functional.constants import (
     PRIVATE_GPG_KEY_URL,
-    RPM_COPY_PATH,
-    RPM_PACKAGE_CONTENT_NAME,
-    RPM_UNSIGNED_FIXTURE_URL,
-    RPM_PUBLICATION_PATH,
     PACKAGES_DIRECTORY,
 )
-from pulpcore.client.pulpcore import ApiClient as CoreApiClient, TasksApi
-from pulpcore.client.pulp_rpm import ApiClient as RpmApiClient
-
-
-cfg = config.get_config()
-configuration = cfg.get_bindings_config()
- - -skip_if = partial(selectors.skip_if, exc=SkipTest) # pylint:disable=invalid-name -"""The ``@skip_if`` decorator, customized for unittest. - -:func:`pulp_smash.selectors.skip_if` is test runner agnostic. This function is -identical, except that ``exc`` has been set to ``unittest.SkipTest``. -""" - -core_client = CoreApiClient(configuration) -tasks = TasksApi(core_client) - - -def set_up_module(): - """Skip tests Pulp 3 isn't under test or if pulp_rpm isn't installed.""" - require_pulp_3(SkipTest) - require_pulp_plugins({"rpm"}, SkipTest) - - -def gen_rpm_client(): - """Return an OBJECT for rpm client.""" - return RpmApiClient(configuration) - - -def gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL, **kwargs): - """Return a semi-random dict for use in creating a rpm Remote. - - :param url: The URL of an external content source. - """ - return gen_remote(url, **kwargs) - - -def get_rpm_package_paths(repo): - """Return the relative path of content units present in a RPM repository. - - :param repo: A dict of information about the repository. - :returns: A list with the paths of units present in a given repository. - """ - return [ - content_unit["location_href"] - for content_unit in get_content(repo)[RPM_PACKAGE_CONTENT_NAME] - if "location_href" in content_unit - ] - def gen_rpm_content_attrs(artifact, rpm_name): """Generate a dict with content unit attributes. @@ -79,97 +19,7 @@ def gen_rpm_content_attrs(artifact, rpm_name): :param artifact: A dict of info about the artifact. :returns: A semi-random dict for use in creating a content unit. """ - return {"artifact": artifact["pulp_href"], "relative_path": rpm_name} - - -def rpm_copy(cfg, config, recursive=False): - """Sync a repository. - - :param pulp_smash.config.PulpSmashConfig cfg: Information about the Pulp - host. - :param remote: A dict of information about the remote of the repository - to be synced. - :param config: A dict of information about the copy. 
- :param kwargs: Keyword arguments to be merged in to the request data. - :returns: The server's response. A dict of information about the just - created sync. - """ - client = api.Client(cfg) - data = {"config": config, "dependency_solving": recursive} - return client.post(RPM_COPY_PATH, data) - - -def publish(cfg, repo, version_href=None, repo_config=None): - """Publish a repository. - - :param pulp_smash.config.PulpSmashConfig cfg: Information about the Pulp - host. - :param repo: A dict of information about the repository. - :param version_href: A href for the repo version to be published. - :param repo_config: An option specifying config for .repo file - :returns: A publication. A dict of information about the just created - publication. - """ - if version_href: - body = {"repository_version": version_href} - else: - body = {"repository": repo["pulp_href"]} - - body.update({"repo_config": repo_config}) - - client = api.Client(cfg, api.json_handler) - call_report = client.post(RPM_PUBLICATION_PATH, body) - tasks = tuple(api.poll_spawned_tasks(cfg, call_report)) - return client.get(tasks[-1]["created_resources"][0]) - - -def gen_yum_config_file(cfg, repositoryid, baseurl, name, **kwargs): - """Generate a yum configuration file and write it to ``/etc/yum.repos.d/``. - - Generate a yum configuration file containing a single repository section, - and write it to ``/etc/yum.repos.d/{repositoryid}.repo``. - :param cfg: The system on which to create - a yum configuration file. - :param repositoryid: The section's ``repositoryid``. Used when naming the - configuration file and populating the brackets at the head of the file. - For details, see yum.conf(5). - :param baseurl: The required option ``baseurl`` specifying the url of repo. - For details, see yum.conf(5) - :param name: The required option ``name`` specifying the name of repo. - For details, see yum.conf(5). - :param kwargs: Section options. Each kwarg corresponds to one option. For - details, see yum.conf(5). 
- :returns: The path to the yum configuration file. - """ - # required repo options - kwargs.setdefault("name", name) - kwargs.setdefault("baseurl", baseurl) - # assume some common used defaults - kwargs.setdefault("enabled", 1) - kwargs.setdefault("gpgcheck", 0) - kwargs.setdefault("metadata_expire", 0) # force metadata load every time - - # Check if the settings specifies a content host role else assume ``api`` - try: - content_host = cfg.get_hosts("content")[0].roles["content"] - except IndexError: - content_host = cfg.get_hosts("api")[0].roles["api"] - - # if sslverify is not provided in kwargs it is inferred from cfg - kwargs.setdefault("sslverify", content_host.get("verify") and "yes" or "no") - - path = os.path.join("/etc/yum.repos.d/", repositoryid + ".repo") - with StringIO() as section: - section.write("[{}]\n".format(repositoryid)) - for key, value in kwargs.items(): - section.write("{} = {}\n".format(key, value)) - # machine.session is used here to keep SSH session open - cli.Client(cfg).machine.session().run( - 'echo "{}" | {}tee {} > /dev/null'.format( - section.getvalue(), "" if cli.is_root(cfg) else "sudo ", path - ) - ) - return path + return {"artifact": artifact.pulp_href, "relative_path": rpm_name} def init_signed_repo_configuration(): diff --git a/unittest_requirements.txt b/unittest_requirements.txt index e5715b49d..e841c047d 100644 --- a/unittest_requirements.txt +++ b/unittest_requirements.txt @@ -3,3 +3,4 @@ pytest<8 asynctest mock pytest-django +pytest-custom_exit_code