diff --git a/.github/workflows/ci-quick.yaml b/.github/workflows/ci-quick.yaml index d9860b57d3..cc868b3901 100644 --- a/.github/workflows/ci-quick.yaml +++ b/.github/workflows/ci-quick.yaml @@ -14,7 +14,7 @@ jobs: timeout-minutes: 5 steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: persist-credentials: false - name: Install tox & poetry @@ -37,6 +37,30 @@ jobs: run: | poetry run pre-commit run --all-files + check-pyproject-dynamic-versioning: + name: Poetry dynamic versioning check + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + persist-credentials: false + - name: Install deps + run: sudo snap install yq + - name: Check versioning + run: | + VERSION=$(yq -p toml -oy '.tool.poetry.version' ./pyproject.toml) + DYNAMIC_FIELDS=$(yq -p toml -oc '.project.dynamic' ./pyproject.toml) + if [[ $VERSION != *"0.0.0"* ]] + then + exit 1 + fi + if [[ $DYNAMIC_FIELDS != *"version"* ]] + then + exit 1 + fi + actionlint: name: Lint .github/workflows/ runs-on: ubuntu-latest diff --git a/single_kernel_mongo/managers/cluster.py b/single_kernel_mongo/managers/cluster.py index e6df012ef7..8096fd5f68 100644 --- a/single_kernel_mongo/managers/cluster.py +++ b/single_kernel_mongo/managers/cluster.py @@ -7,7 +7,7 @@ import json from logging import getLogger -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, final from data_platform_helpers.advanced_statuses.models import StatusObject from ops.framework import Object @@ -234,6 +234,7 @@ def update_ldap_user_to_dn_mapping(self) -> None: ) +@final class ClusterRequirer(Object): """Manage relations between the config server and mongos router on the mongos side.""" @@ -272,6 +273,7 @@ def assert_pass_hook_checks(self) -> None: raise DeferrableFailedHookChecksError( "Mongos was waiting for config-server to enable TLS. Wait for TLS to be enabled until starting mongos." 
) + if self.dependent.refresh_in_progress: logger.warning( "Processing client applications is not supported during an upgrade. The charm may be in a broken, unrecoverable state." @@ -309,6 +311,11 @@ def share_credentials_to_clients(self, username: str | None, password: str | Non def update_mongos_and_restart(self) -> None: """Start/restarts mongos with config server information.""" self.assert_pass_hook_checks() + + # Wait for the config-server to share credentials before proceeding. + if not self.state.cluster.username or not self.state.cluster.password: + raise WaitingForSecretsError("Waiting for username and password.") + key_file_contents = self.state.cluster.keyfile config_server_db_uri = self.state.cluster.config_server_uri @@ -493,7 +500,7 @@ def is_client_ca_compatible(self) -> bool: def mongos_and_config_server_peer_tls_status(self) -> tuple[bool, bool]: """Returns the peer TLS integration status for mongos and config-server.""" if self.state.mongos_cluster_relation: - mongos_has_tls = self.state.peer_tls_relation is not None + mongos_has_tls = self.state.tls.peer_enabled config_server_has_tls = self.state.cluster.internal_ca_secret is not None return mongos_has_tls, config_server_has_tls @@ -502,7 +509,7 @@ def mongos_and_config_server_peer_tls_status(self) -> tuple[bool, bool]: def mongos_and_config_server_client_tls_status(self) -> tuple[bool, bool]: """Returns the client TLS integration status for mongos and config-server.""" if self.state.mongos_cluster_relation: - mongos_has_tls = self.state.client_tls_relation is not None + mongos_has_tls = self.state.tls.client_enabled config_server_has_tls = self.state.cluster.external_ca_secret is not None return mongos_has_tls, config_server_has_tls diff --git a/single_kernel_mongo/managers/config.py b/single_kernel_mongo/managers/config.py index e1470f0a94..cb3f172c6a 100644 --- a/single_kernel_mongo/managers/config.py +++ b/single_kernel_mongo/managers/config.py @@ -657,13 +657,9 @@ def config_server_db_parameter(self) -> dict[str, Any]: """The config server DB 
parameter.""" # In case we are integrated with a config-server, we need to provide # it's URI to mongos so it can configure_and_restart to it. - if uri := self.state.cluster.config_server_uri: + if uri := self.state.config_server_uri: return {"sharding": {"configDB": uri}} - return { - "sharding": { - "configDB": f"{self.state.app_peer_data.replica_set}/{self.state.unit_peer_data.internal_address}:{MongoPorts.MONGODB_PORT.value}" - } - } + return {} @property @override diff --git a/single_kernel_mongo/state/charm_state.py b/single_kernel_mongo/state/charm_state.py index a677620bc8..b356bd8500 100644 --- a/single_kernel_mongo/state/charm_state.py +++ b/single_kernel_mongo/state/charm_state.py @@ -595,6 +595,15 @@ def config_server_name(self) -> str | None: ) return None + @property + def config_server_uri(self) -> str | None: + """Gets the config-server URI for Mongos.""" + if self.charm_role.name == CharmKind.MONGOS: + return self.cluster.config_server_uri + if not self.is_role(MongoDBRoles.CONFIG_SERVER): + return None + return f"{self.app_peer_data.replica_set}/{self.unit_peer_data.internal_address}:{MongoPorts.MONGODB_PORT.value}" + def get_subject_name(self) -> str: """Generate the subject name for CSR.""" # In sharded MongoDB deployments it is a requirement that all subject names match across diff --git a/single_kernel_mongo/state/cluster_state.py b/single_kernel_mongo/state/cluster_state.py index 20f568f2b7..4334143c95 100644 --- a/single_kernel_mongo/state/cluster_state.py +++ b/single_kernel_mongo/state/cluster_state.py @@ -27,6 +27,8 @@ class ClusterStateKeys(str, Enum): EXT_CA_SECRET = "ext-ca-secret" LDAP_USER_TO_DN_MAPPING = "ldap-user-to-dn-mapping" LDAP_HASH = "ldap-hash" + USERNAME = "username" + PASSWORD = "password" class ClusterState(AbstractRelationState[Data]): @@ -43,6 +45,16 @@ def config_server_uri(self) -> str: """Return config-server URI in the databag.""" return self.relation_data.get(ClusterStateKeys.CONFIG_SERVER_DB.value, "") + 
@property + def username(self) -> str: + """Return the username in the databag.""" + return self.relation_data.get(ClusterStateKeys.USERNAME.value, "") + + @property + def password(self) -> str: + """Return the password in the databag.""" + return self.relation_data.get(ClusterStateKeys.PASSWORD.value, "") + @property def database(self) -> str: """Return database value in the databag.""" diff --git a/tests/integration/mongos/ldap/test_ldap.py b/tests/integration/mongos/ldap/test_ldap.py index 9a14ce354d..5f5261110b 100644 --- a/tests/integration/mongos/ldap/test_ldap.py +++ b/tests/integration/mongos/ldap/test_ldap.py @@ -2,7 +2,6 @@ # Copyright 2025 Canonical Ltd. # See LICENSE file for licensing details. -from pathlib import Path import pytest from juju.model import Model @@ -93,7 +92,11 @@ async def test_build_and_deploy_mongodb_cluster( @pytest.mark.abort_on_fail async def test_build_and_deploy_mongos( - ops_test: OpsTest, mongos_charm: Path, substrate: Substrate, mongod_resource, base_app_name + ops_test: OpsTest, + mongos_charm: str, + substrate: Substrate, + mongos_resource: dict[str, str], + base_app_name: str, ) -> None: """Deploys mongos and data integrator, and integrates both. 
@@ -106,7 +109,7 @@ async def test_build_and_deploy_mongos( ops_test=ops_test, charm=mongos_charm, substrate=substrate, - mongod_resource=mongod_resource, + mongod_resource=mongos_resource, app_name=base_app_name, num_units=1, subordinate=(substrate == "lxd"), @@ -135,11 +138,46 @@ async def test_build_and_deploy_mongos( subordinate=(substrate == "lxd"), ) + +@pytest.mark.abort_on_fail +async def test_config_server_only_integrated_with_mongos(ops_test: OpsTest, substrate: Substrate): + app_name = await get_app_name(ops_test, charm_name="mongos") + + await ops_test.model.integrate(f"{LDAP_OFFER}:ldap", f"{CONFIG_SERVER_APP_NAME}:ldap") + await ops_test.model.integrate( + f"{LDAP_CERT_OFFER}:send-ca-cert", f"{CONFIG_SERVER_APP_NAME}:ldap-certificate-transfer" + ) + await ops_test.model.wait_for_idle( + apps=[CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME, SHARD_TWO_APP_NAME], + idle_period=20, + status="active", + ) + # connect sharded cluster to mongos await ops_test.model.integrate( f"{app_name}:{CLUSTER_REL_NAME}", f"{CONFIG_SERVER_APP_NAME}:{CLUSTER_REL_NAME}", ) + await ops_test.model.wait_for_idle( + apps=[CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME, SHARD_TWO_APP_NAME], + idle_period=20, + status="active", + ) + await wait_for_mongodb_units_blocked( + ops_test, + substrate, + app_name, + status="mongos and config-server not integrated with the same ldap server.", + timeout=300, + subordinate=(substrate == "lxd"), + ) + await ops_test.model.applications[CONFIG_SERVER_APP_NAME].remove_relation( + f"{LDAP_OFFER}:ldap", f"{CONFIG_SERVER_APP_NAME}:ldap" + ) + await ops_test.model.applications[CONFIG_SERVER_APP_NAME].remove_relation( + f"{LDAP_CERT_OFFER}:send-ca-cert", f"{CONFIG_SERVER_APP_NAME}:ldap-certificate-transfer" + ) + await ops_test.model.wait_for_idle( apps=[CONFIG_SERVER_APP_NAME, SHARD_ONE_APP_NAME, SHARD_TWO_APP_NAME, app_name], idle_period=20, @@ -250,10 +288,10 @@ async def test_teardown(ops_test: OpsTest, kubernetes_model: Model): await 
ops_test.model.applications[app_name].remove_relation( f"{LDAP_CERT_OFFER}:send-ca-cert", f"{app_name}:ldap-certificate-transfer" ) - await ops_test.model.applications[app_name].remove_relation( + await ops_test.model.applications[CONFIG_SERVER_APP_NAME].remove_relation( f"{LDAP_OFFER}:ldap", f"{CONFIG_SERVER_APP_NAME}:ldap" ) - await ops_test.model.applications[app_name].remove_relation( + await ops_test.model.applications[CONFIG_SERVER_APP_NAME].remove_relation( f"{LDAP_CERT_OFFER}:send-ca-cert", f"{CONFIG_SERVER_APP_NAME}:ldap-certificate-transfer" ) diff --git a/tests/spread/mongodb/lxd/test_encryption_invalid_scenario.py/task.yaml b/tests/spread/mongodb/lxd/test_encryption_invalid_scenario.py/task.yaml index c10611e512..c4db274c1f 100644 --- a/tests/spread/mongodb/lxd/test_encryption_invalid_scenario.py/task.yaml +++ b/tests/spread/mongodb/lxd/test_encryption_invalid_scenario.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large # TODO: Enable this when we have a stable arm64 vault charm. - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/lxd/test_encryption_valid_scenario.py/task.yaml b/tests/spread/mongodb/lxd/test_encryption_valid_scenario.py/task.yaml index 200802e89a..95f7f41f10 100644 --- a/tests/spread/mongodb/lxd/test_encryption_valid_scenario.py/task.yaml +++ b/tests/spread/mongodb/lxd/test_encryption_valid_scenario.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large # TODO: Enable this when we have a stable arm64 vault charm. 
- #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/lxd/test_ldap.py/task.yaml b/tests/spread/mongodb/lxd/test_ldap.py/task.yaml index 93322172a0..8605851e5e 100644 --- a/tests/spread/mongodb/lxd/test_ldap.py/task.yaml +++ b/tests/spread/mongodb/lxd/test_ldap.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - ubuntu-22.04 + - self-hosted-linux-amd64-noble-large # TODO: Re-enable this when glauth charm supports arm64 - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/lxd/test_major_upgrades.py/task.yaml b/tests/spread/mongodb/lxd/test_major_upgrades.py/task.yaml index e8d68732e7..6b1c71eb30 100644 --- a/tests/spread/mongodb/lxd/test_major_upgrades.py/task.yaml +++ b/tests/spread/mongodb/lxd/test_major_upgrades.py/task.yaml @@ -6,4 +6,4 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large diff --git a/tests/spread/mongodb/lxd/test_sharding_ldap.py/task.yaml b/tests/spread/mongodb/lxd/test_sharding_ldap.py/task.yaml index cf7332f5dc..c331d295f4 100644 --- a/tests/spread/mongodb/lxd/test_sharding_ldap.py/task.yaml +++ b/tests/spread/mongodb/lxd/test_sharding_ldap.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large # TODO: Re-enable this when glauth charm supports arm64 - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/lxd/test_sharding_major_upgrades.py/task.yaml b/tests/spread/mongodb/lxd/test_sharding_major_upgrades.py/task.yaml index ef1587dd8f..2c5c2f365a 100644 --- a/tests/spread/mongodb/lxd/test_sharding_major_upgrades.py/task.yaml +++ b/tests/spread/mongodb/lxd/test_sharding_major_upgrades.py/task.yaml @@ -6,4 +6,4 @@ execute: | artifacts: - allure-results 
systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large diff --git a/tests/spread/mongodb/microk8s/test_encryption_invalid_scenario.py/task.yaml b/tests/spread/mongodb/microk8s/test_encryption_invalid_scenario.py/task.yaml index 3d02d4d603..c60ba606b2 100644 --- a/tests/spread/mongodb/microk8s/test_encryption_invalid_scenario.py/task.yaml +++ b/tests/spread/mongodb/microk8s/test_encryption_invalid_scenario.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large # TODO: Enable this when we have a stable arm64 vault charm. - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/microk8s/test_encryption_valid_scenario.py/task.yaml b/tests/spread/mongodb/microk8s/test_encryption_valid_scenario.py/task.yaml index b8faecc3b2..ddc0b89e3d 100644 --- a/tests/spread/mongodb/microk8s/test_encryption_valid_scenario.py/task.yaml +++ b/tests/spread/mongodb/microk8s/test_encryption_valid_scenario.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large # TODO: Enable this when we have a stable arm64 vault charm. 
- #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/microk8s/test_ldap.py/task.yaml b/tests/spread/mongodb/microk8s/test_ldap.py/task.yaml index 917e218c8d..e4f5b60350 100644 --- a/tests/spread/mongodb/microk8s/test_ldap.py/task.yaml +++ b/tests/spread/mongodb/microk8s/test_ldap.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - ubuntu-22.04 + - self-hosted-linux-amd64-noble-large # TODO: Re-enable this when glauth charm supports arm64 - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/microk8s/test_major_upgrades.py/task.yaml b/tests/spread/mongodb/microk8s/test_major_upgrades.py/task.yaml index 37211a6fea..6b6ece5584 100644 --- a/tests/spread/mongodb/microk8s/test_major_upgrades.py/task.yaml +++ b/tests/spread/mongodb/microk8s/test_major_upgrades.py/task.yaml @@ -6,4 +6,4 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large diff --git a/tests/spread/mongodb/microk8s/test_sharding_ldap.py/task.yaml b/tests/spread/mongodb/microk8s/test_sharding_ldap.py/task.yaml index 127a5647a1..792c4950f1 100644 --- a/tests/spread/mongodb/microk8s/test_sharding_ldap.py/task.yaml +++ b/tests/spread/mongodb/microk8s/test_sharding_ldap.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large # TODO: Re-enable this when glauth charm supports arm64 - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongodb/microk8s/test_sharding_major_upgrades.py/task.yaml b/tests/spread/mongodb/microk8s/test_sharding_major_upgrades.py/task.yaml index a0df8fd59d..7eaa7173ae 100644 --- a/tests/spread/mongodb/microk8s/test_sharding_major_upgrades.py/task.yaml +++ 
b/tests/spread/mongodb/microk8s/test_sharding_major_upgrades.py/task.yaml @@ -6,4 +6,4 @@ execute: | artifacts: - allure-results systems: - - self-hosted-linux-amd64-noble-medium + - self-hosted-linux-amd64-noble-large diff --git a/tests/spread/mongos/lxd/test_ldap.py/task.yaml b/tests/spread/mongos/lxd/test_ldap.py/task.yaml index cbbbc0479f..54bb638396 100644 --- a/tests/spread/mongos/lxd/test_ldap.py/task.yaml +++ b/tests/spread/mongos/lxd/test_ldap.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - ubuntu-22.04 + - self-hosted-linux-amd64-noble-large # TODO: Re-enable this when glauth charm supports arm64 - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/spread/mongos/microk8s/test_ldap.py/task.yaml b/tests/spread/mongos/microk8s/test_ldap.py/task.yaml index c5c663ed23..d92ab187be 100644 --- a/tests/spread/mongos/microk8s/test_ldap.py/task.yaml +++ b/tests/spread/mongos/microk8s/test_ldap.py/task.yaml @@ -6,6 +6,6 @@ execute: | artifacts: - allure-results systems: - - ubuntu-22.04 + - self-hosted-linux-amd64-noble-large # TODO: Re-enable this when glauth charm supports arm64 - #- self-hosted-linux-arm64-noble-medium + #- self-hosted-linux-arm64-noble-large diff --git a/tests/unit/test_cluster_manager.py b/tests/unit/test_cluster_manager.py index d0f0fa8b47..7c4e37b0f9 100644 --- a/tests/unit/test_cluster_manager.py +++ b/tests/unit/test_cluster_manager.py @@ -22,7 +22,10 @@ NonDeferrableFailedHookChecksError, WaitingForSecretsError, ) -from single_kernel_mongo.state.tls_state import SECRET_CA_LABEL +from single_kernel_mongo.state.tls_state import ( + SECRET_CA_LABEL, + SECRET_CERT_LABEL, +) from tests.charms.mongodb_test_charm.src.charm import MongoTestCharm from tests.charms.mongos_test_charm.src.charm import MongosTestCharm from tests.integration.helpers.types import Substrate @@ -310,8 +313,6 @@ def test_cluster_requirer_update_mongos_and_restart( 
mongos_harness.update_relation_data(rel_id_proxy, "test-application", {"database": "test-db"}) - manager.share_credentials_to_clients("charmed-operator", "password") - data = Path("tests/unit/data/mongos.conf").read_text().splitlines() mocker.patch("single_kernel_mongo.managers.mongo.MongoManager.reconcile_mongo_users_and_dbs") @@ -328,7 +329,12 @@ def test_cluster_requirer_update_mongos_and_restart( mongos_harness.update_relation_data( rel_id_cluster, "mongodb", - {"key-file": "deadbeef", "config-server-db": "mongodb/2.2.2.2:27017"}, + { + "key-file": "deadbeef", + "config-server-db": "mongodb/2.2.2.2:27017", + "username": "charmed-operator", + "password": "password", # nosec: B105 + }, ) statuses = mongos_harness.charm.operator.state.statuses.get( @@ -382,7 +388,7 @@ def test_cluster_requirer_update_mongos_and_restart_fail_missing_data( mongos_harness.update_relation_data( rel_id_cluster, "mongodb", - databag, + databag | {"username": "unused", "password": "unused"}, ) with pytest.raises(WaitingForSecretsError) as err: manager.update_mongos_and_restart() @@ -418,7 +424,12 @@ def test_cluster_requirer_update_mongos_and_restart_mongos_not_running( mongos_harness.update_relation_data( rel_id_cluster, "mongodb", - {"key-file": "deadbeef", "config-server-db": "mongodb/2.2.2.2:27017"}, + { + "key-file": "deadbeef", + "config-server-db": "mongodb/2.2.2.2:27017", + "username": "unused", + "password": "unused", + }, ) # Check that we raise a deferrable error because mongos is not running after restart @@ -592,6 +603,14 @@ def test_cluster_requirer_tls_status( mongos_harness.add_relation( ExternalRequirerRelations.CLIENT_TLS.value, "self-signed-certificates" ) + mocker.patch( + "single_kernel_mongo.state.tls_state.TLSState.peer_enabled", + new_callable=mocker.PropertyMock(return_value=True), + ) + mocker.patch( + "single_kernel_mongo.state.tls_state.TLSState.client_enabled", + new_callable=mocker.PropertyMock(return_value=True), + ) # Ensure some credentials are present 
manager.share_credentials_to_clients("charmed-operator", "password") @@ -681,10 +700,14 @@ def test_cluster_requirer_get_tls_statuses( mongos_harness.add_relation( ExternalRequirerRelations.PEER_TLS.value, "self-signed-certificates" ) - # Local certificate + # Local CA certificate manager.state.tls.set_secret( internal=True, label_name=SECRET_CA_LABEL, contents=mongos_peer_ca_secret ) + # Local cert + manager.state.tls.set_secret( + internal=True, label_name=SECRET_CERT_LABEL, contents="useless" + ) if mongos_client_ca_secret: mongos_harness.add_relation( ExternalRequirerRelations.CLIENT_TLS.value, "self-signed-certificates" @@ -692,6 +715,10 @@ def test_cluster_requirer_get_tls_statuses( manager.state.tls.set_secret( internal=False, label_name=SECRET_CA_LABEL, contents=mongos_client_ca_secret ) + # Local cert + manager.state.tls.set_secret( + internal=False, label_name=SECRET_CERT_LABEL, contents="useless" + ) # Ensure some credentials are present manager.share_credentials_to_clients("charmed-operator", "password") diff --git a/tests/unit/test_config_manager.py b/tests/unit/test_config_manager.py index f6d5997a97..7153c5003a 100644 --- a/tests/unit/test_config_manager.py +++ b/tests/unit/test_config_manager.py @@ -19,7 +19,6 @@ ) from single_kernel_mongo.state.app_peer_state import AppPeerReplicaSet from single_kernel_mongo.state.charm_state import CharmState -from single_kernel_mongo.state.cluster_state import ClusterState from single_kernel_mongo.state.ldap_state import LdapState from single_kernel_mongo.state.tls_state import TLSState from single_kernel_mongo.state.vault_state import VaultState @@ -247,8 +246,7 @@ def test_mongos_config_manager(mocker): mock_state.app_peer_data = mocker.MagicMock(AppPeerReplicaSet) mock_state.charm_role = ROLES[Substrates.VM][CharmKind.MONGOS] mock_state.substrate = Substrates.VM - mock_state.cluster = mocker.MagicMock(ClusterState) - mock_state.cluster.config_server_uri = "mongodb://config-server-url" + 
mock_state.config_server_uri = "mongodb://config-server-url" mock_state.tls = mocker.MagicMock(TLSState) mock_state.app_peer_data.external_connectivity = False mock_state.tls.peer_enabled = False @@ -406,12 +404,13 @@ def test_mongodb_config_manager_tls_enabled(mocker): def test_mongos_default_config_server(mocker): - mock_state = mocker.MagicMock(CharmState) - mock_state.app_peer_data = mocker.MagicMock(AppPeerReplicaSet) + mock_state = mocker.create_autospec(CharmState) + mock_state.app_peer_data = mocker.Mock(AppPeerReplicaSet) mock_state.app_peer_data.replica_set = "deadbeef" mock_state.unit_peer_data.internal_address = "127.0.0.1" - mock_state.cluster = mocker.MagicMock(ClusterState) - mock_state.cluster.config_server_uri = "" + mock_state.config_server_uri = ( + f"{mock_state.app_peer_data.replica_set}/{mock_state.unit_peer_data.internal_address}:27017" + ) mock_state.tls = mocker.MagicMock(TLSState) mock_state.app_peer_data.external_connectivity = False mock_state.tls.peer_enabled = False