From 6a3c8d3329624fbc54de91ddd6bcda15c929fe55 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 16 Apr 2026 11:10:24 +0800 Subject: [PATCH 01/42] test: migrate pytest-operator to jubilant --- tests/conftest.py | 6 + tests/integration/conftest.py | 206 ++++++++++++++++------------- tests/integration/helpers.py | 30 ++--- tests/integration/requirements.txt | 1 - tests/integration/test_charm.py | 141 +++++++++----------- tests/integration/test_upgrade.py | 74 +++++------ tests/integration/types.py | 6 +- tox.ini | 7 +- 8 files changed, 228 insertions(+), 243 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 96f5e0d2..5bc9ddbf 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -12,6 +12,12 @@ def pytest_addoption(parser: Parser): Args: parser: The pytest argument parser. """ + parser.addoption( + "--keep-models", + action="store_true", + default=False, + help="Keep temporarily-created Juju models after tests complete.", + ) parser.addoption( "--charm-file", action="append", diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 1d130d93..5ac94f21 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -13,21 +13,17 @@ from dataclasses import dataclass from datetime import datetime, timezone from pathlib import Path -from typing import AsyncGenerator, Generator, Optional +from typing import Generator, Optional from uuid import uuid4 -import nest_asyncio +import jubilant import openstack import pytest -import pytest_asyncio import yaml -from juju.application import Application -from juju.model import Model from openstack.compute.v2.keypair import Keypair from openstack.connection import Connection from openstack.image.v2.image import Image from openstack.network.v2.security_group import SecurityGroup -from pytest_operator.plugin import OpsTest import state from state import ( @@ -57,9 +53,6 @@ logger = logging.getLogger(__name__) -# This is required to dynamically load async fixtures in 
async def model_fixture() -nest_asyncio.apply() - TEST_CHARM_FILE = "./test_ubuntu-22.04-amd64.charm" @@ -92,24 +85,29 @@ def proxy_fixture(pytestconfig: pytest.Config) -> ProxyConfig: return ProxyConfig(http=proxy, https=proxy, no_proxy=no_proxy) -@pytest_asyncio.fixture(scope="module", name="model") -async def model_fixture(proxy: ProxyConfig, ops_test: OpsTest) -> AsyncGenerator[Model, None]: - """Juju model used in the test.""" - assert ops_test.model is not None - # Set model proxy for the runners - if proxy.http: - logger.info("Setting model proxy: %s", proxy.http) - await ops_test.model.set_config( - { - "juju-http-proxy": proxy.http, - "juju-https-proxy": proxy.https, - "apt-http-proxy": proxy.http, - "apt-https-proxy": proxy.https, - "snap-http-proxy": proxy.http, - "snap-https-proxy": proxy.https, - } - ) - yield ops_test.model +@pytest.fixture(scope="module", name="juju") +def juju_fixture( + proxy: ProxyConfig, request: pytest.FixtureRequest +) -> Generator[jubilant.Juju, None, None]: + """Juju instance with a temporary model for testing.""" + keep_models = bool(request.config.getoption("--keep-models")) + with jubilant.temp_model(keep=keep_models) as juju: + if proxy.http: + logger.info("Setting model proxy: %s", proxy.http) + juju.model_config( + { + "juju-http-proxy": proxy.http, + "juju-https-proxy": proxy.https, + "apt-http-proxy": proxy.http, + "apt-https-proxy": proxy.https, + "snap-http-proxy": proxy.http, + "snap-https-proxy": proxy.https, + } + ) + yield juju + if request.session.testsfailed: + log = juju.debug_log(limit=1000) + print(log, end="") @pytest.fixture(scope="module", name="dispatch_time") @@ -118,54 +116,56 @@ def dispatch_time_fixture(): return datetime.now(tz=timezone.utc) -@pytest_asyncio.fixture(scope="module", name="test_charm") -async def test_charm_fixture( - model: Model, +@pytest.fixture(scope="module", name="test_charm") +def test_charm_fixture( + juju: jubilant.Juju, test_id: str, private_endpoint_configs: 
PrivateEndpointConfigs, -) -> AsyncGenerator[Application, None]: +) -> Generator[str, None, None]: """The test charm that becomes active when valid relation data is given.""" app_name = f"test-{test_id}" - app = await _deploy_test_charm(app_name, model, private_endpoint_configs) + _deploy_test_charm(juju, app_name, private_endpoint_configs) - yield app + yield app_name - await model.remove_application(app_name=app_name) + juju.remove_application(app_name) logger.info("Test charm application %s removed.", app_name) -@pytest_asyncio.fixture(scope="module", name="test_charm_2") -async def test_charm_2( - model: Model, +@pytest.fixture(scope="module", name="test_charm_2") +def test_charm_2_fixture( + juju: jubilant.Juju, test_id: str, private_endpoint_configs: PrivateEndpointConfigs, -) -> AsyncGenerator[Application, None]: +) -> Generator[str, None, None]: """A second test charm that becomes active when valid relation data is given.""" app_name = f"test2-{test_id}" - app = await _deploy_test_charm(app_name, model, private_endpoint_configs) + _deploy_test_charm(juju, app_name, private_endpoint_configs) - yield app + yield app_name logger.info("Cleaning up test charm.") - await model.remove_application(app_name=app_name) + juju.remove_application(app_name) logger.info("Test charm application %s removed.", app_name) -async def _deploy_test_charm( +def _deploy_test_charm( + juju: jubilant.Juju, app_name: str, - model: Model, private_endpoint_configs: PrivateEndpointConfigs, -): +) -> str: """Deploy the test charm with the given application name. Args: + juju: The jubilant Juju instance. app_name: The name of the application to deploy. - model: The Juju model to deploy the charm in. private_endpoint_configs: The OpenStack private endpoint configurations. + Returns: + The application name. 
""" logger.info("Deploying built test charm") - app: Application = await model.deploy( + juju.deploy( TEST_CHARM_FILE, app_name, config={ @@ -176,9 +176,9 @@ async def _deploy_test_charm( "openstack-user-domain-name": private_endpoint_configs["user_domain_name"], "openstack-user-name": private_endpoint_configs["username"], }, - constraints="virt-type=virtual-machine", + constraints={"virt-type": "virtual-machine"}, ) - return app + return app_name @pytest.fixture(scope="module", name="arch") @@ -299,14 +299,14 @@ def test_id_fixture() -> str: @pytest.fixture(scope="module", name="test_configs") def test_configs_fixture( - model: Model, + juju: jubilant.Juju, charm_file: str, test_id: str, dispatch_time: datetime, ) -> TestConfigs: """The test configuration values.""" return TestConfigs( - model=model, + juju=juju, charm_file=charm_file, dispatch_time=dispatch_time, test_id=test_id, @@ -321,15 +321,15 @@ def image_configs_fixture(): ) -@pytest_asyncio.fixture(scope="module", name="script_secret") -async def script_secret_fixture(test_configs) -> _Secret: +@pytest.fixture(scope="module", name="script_secret") +def script_secret_fixture(juju: jubilant.Juju) -> _Secret: """The script secret.""" secret_name = f"script-{uuid4().hex}" - secret_id = await test_configs.model.add_secret( - name=secret_name, - data_args=["testsecret=TEST_VALUE"], - ) # note secret_id already contains "secret:" prefix - return _Secret(id=secret_id, name=secret_name) + secret_uri = juju.add_secret( + secret_name, + {"testsecret": "TEST_VALUE"}, + ) + return _Secret(id=str(secret_uri), name=secret_name) @pytest.fixture(scope="module", name="app_config") @@ -357,62 +357,74 @@ def app_config_fixture( @pytest.fixture(scope="module", name="base_machine_constraint") -def base_machine_constraint_fixture() -> str: +def base_machine_constraint_fixture() -> dict: """The base machine constraint.""" num_cores = max(1, multiprocessing.cpu_count() - 1) - base_machine_constraint = ( - f"arch=amd64 
cores={num_cores} mem=4G root-disk=20G virt-type=virtual-machine" - ) - return base_machine_constraint + return { + "arch": "amd64", + "cores": num_cores, + "mem": "4G", + "root-disk": "20G", + "virt-type": "virtual-machine", + } -@pytest_asyncio.fixture(scope="module", name="app") -async def app_fixture( +@pytest.fixture(scope="module", name="app") +def app_fixture( app_config: dict, - base_machine_constraint: str, + base_machine_constraint: dict, test_configs: TestConfigs, script_secret: _Secret, -) -> AsyncGenerator[Application, None]: +) -> Generator[str, None, None]: """The deployed application fixture.""" + app_name = f"image-builder-operator-{test_configs.test_id}" logger.info("Deploying image builder: %s", test_configs.dispatch_time) - app: Application = await test_configs.model.deploy( + test_configs.juju.deploy( test_configs.charm_file, - application_name=f"image-builder-operator-{test_configs.test_id}", + app_name, constraints=base_machine_constraint, config=app_config, ) - await app.model.grant_secret(script_secret.name, app.name) - await app.set_config( + test_configs.juju.grant_secret(script_secret.name, app_name) + test_configs.juju.config( + app_name, { SCRIPT_URL_CONFIG_NAME: "https://raw.githubusercontent.com/canonical/" "github-runner-image-builder/refs/heads/main/tests/integration/" "testdata/test_script.sh", state.SCRIPT_SECRET_ID_CONFIG_NAME: script_secret.id, - } + }, ) # This takes long due to having to wait for the machine to come up. 
- await test_configs.model.wait_for_idle(apps=[app.name], idle_period=30, timeout=60 * 30) + test_configs.juju.wait( + lambda s: jubilant.all_agents_idle(s, app_name), + timeout=60 * 30, + ) - yield app + yield app_name - await test_configs.model.remove_application(app_name=app.name) + test_configs.juju.remove_application(app_name) -@pytest_asyncio.fixture(scope="module", name="app_on_charmhub") -async def app_on_charmhub_fixture( +@pytest.fixture(scope="module", name="app_on_charmhub") +def app_on_charmhub_fixture( test_configs: TestConfigs, app_config: dict, - base_machine_constraint: str, - ops_test, -) -> AsyncGenerator[Application, None]: + base_machine_constraint: dict, +) -> Generator[str, None, None]: """Fixture for deploying the charm from charmhub.""" # Normally we would use latest/stable, but upgrading # from stable is currently broken, and therefore we are using edge. Change this in the future. charmhub_channel = "edge" - ret_code, stdout, stderr = await ops_test.juju( - "info", "--format", "json", "--channel", charmhub_channel, "github-runner-image-builder" + stdout = test_configs.juju.cli( + "info", + "--format", + "json", + "--channel", + charmhub_channel, + "github-runner-image-builder", + include_model=False, ) - assert ret_code == 0, f"Failed to get charm info: {stderr}" charmhub_info = json.loads(stdout.strip()) charmhub_config_options = charmhub_info["charm"]["config"]["Options"].keys() @@ -422,19 +434,23 @@ async def app_on_charmhub_fixture( for opt in (EXTERNAL_BUILD_FLAVOR_CONFIG_NAME, EXTERNAL_BUILD_NETWORK_CONFIG_NAME): if (legacy_opt := f"{legacy_config_prefix}{opt}") in charmhub_config_options: charmhub_app_config[legacy_opt] = app_config[opt] - app: Application = await test_configs.model.deploy( + + app_name = f"image-builder-operator-{test_configs.test_id}" + test_configs.juju.deploy( "github-runner-image-builder", - application_name=f"image-builder-operator-{test_configs.test_id}", + app_name, constraints=base_machine_constraint, 
config=charmhub_app_config, channel=charmhub_channel, ) + test_configs.juju.wait( + lambda s: jubilant.all_agents_idle(s, app_name), + timeout=60 * 30, + ) - await test_configs.model.wait_for_idle(apps=[app.name], idle_period=30, timeout=60 * 30) - - yield app + yield app_name - await test_configs.model.remove_application(app_name=app.name) + test_configs.juju.remove_application(app_name) @pytest.fixture(scope="module", name="ssh_key") @@ -522,27 +538,27 @@ def openstack_metadata_fixture( @pytest.fixture(scope="module", name="image_names") -def image_names_fixture(image_configs: ImageConfigs, app: Application, arch: state.Arch): +def image_names_fixture(image_configs: ImageConfigs, app: str, arch: state.Arch): """Expected image names after imagebuilder run.""" image_names = [] for base in image_configs.bases: - image_names.append(f"{app.name}-{base}-{arch.value}") + image_names.append(f"{app}-{base}-{arch.value}") return image_names -@pytest_asyncio.fixture(scope="module", name="bare_image_id") -async def bare_image_id_fixture( +@pytest.fixture(scope="module", name="bare_image_id") +def bare_image_id_fixture( openstack_connection: Connection, dispatch_time: datetime, image_configs: ImageConfigs, - app: Application, + app: str, arch: state.Arch, ): """The bare image expected from builder application.""" - image: Image | None = await wait_for( + image: Image | None = wait_for( functools.partial( image_created_from_dispatch, - image_name=f"{app.name}-{image_configs.bases[0]}-{arch.value}", + image_name=f"{app}-{image_configs.bases[0]}-{arch.value}", connection=openstack_connection, dispatch_time=dispatch_time, ), diff --git a/tests/integration/helpers.py b/tests/integration/helpers.py index 763a2cf2..0e27fb7d 100644 --- a/tests/integration/helpers.py +++ b/tests/integration/helpers.py @@ -5,12 +5,11 @@ import dataclasses import functools -import inspect import logging import time from datetime import datetime, timezone from pathlib import Path -from typing import 
Awaitable, Callable, ParamSpec, TypeVar, cast +from typing import Callable, TypeVar from openstack.connection import Connection from openstack.image.v2.image import Image @@ -22,7 +21,7 @@ CREATE_SERVER_TIMEOUT_IN_SECONDS = 15 * 60 -async def wait_for_images( +def wait_for_images( openstack_connection: Connection, dispatch_time: datetime, image_names: list[str] ): """Wait for images to be created. @@ -33,7 +32,7 @@ async def wait_for_images( image_names: The image names to check for. """ for image_name in image_names: - await wait_for( + wait_for( functools.partial( image_created_from_dispatch, connection=openstack_connection, @@ -92,13 +91,11 @@ class OpenStackConnectionParams: ssh_key: Path -P = ParamSpec("P") R = TypeVar("R") -S = Callable[P, R] | Callable[P, Awaitable[R]] -async def wait_for( - func: S, +def wait_for( + func: Callable[[], R], timeout: int | float = 300, check_interval: int = 10, ) -> R: @@ -116,23 +113,14 @@ async def wait_for( The result of the function if any. """ deadline = time.time() + timeout - is_awaitable = inspect.iscoroutinefunction(func) while time.time() < deadline: - if is_awaitable: - if result := await cast(Awaitable, func()): - return result - else: - if result := func(): - return cast(R, result) + if result := func(): + return result time.sleep(check_interval) # final check before raising TimeoutError. 
- if is_awaitable: - if result := await cast(Awaitable, func()): - return result - else: - if result := func(): - return cast(R, result) + if result := func(): + return result raise TimeoutError() diff --git a/tests/integration/requirements.txt b/tests/integration/requirements.txt index dc650170..40766529 100644 --- a/tests/integration/requirements.txt +++ b/tests/integration/requirements.txt @@ -1,3 +1,2 @@ fabric types-paramiko -nest_asyncio diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index 32665248..0a5b973d 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -5,17 +5,14 @@ """Integration testing module.""" +import contextlib import json import logging -from contextlib import asynccontextmanager from datetime import datetime, timezone +import jubilant import pytest -from juju.application import Application -from juju.model import Model -from juju.unit import Unit from openstack.connection import Connection -from pytest_operator.plugin import OpsTest from builder import CRON_BUILD_SCHEDULE_PATH from state import BUILD_INTERVAL_CONFIG_NAME @@ -24,40 +21,36 @@ logger = logging.getLogger(__name__) -@pytest.mark.asyncio -async def test_image_relation(app: Application, test_charm: Application): +def test_image_relation(juju: jubilant.Juju, app: str, test_charm: str): """ arrange: An active charm and a test charm that becomes active when valid relation data is set. act: When the relation is joined. assert: The test charm becomes active due to proper relation data. 
""" - model: Model = app.model - await model.integrate(app.name, test_charm.name) - await model.wait_for_idle([app.name], wait_for_active=True, timeout=60 * 60) + juju.integrate(app, test_charm) + juju.wait(lambda s: jubilant.all_active(s, app), timeout=60 * 60) -@pytest.mark.asyncio @pytest.mark.abort_on_fail -async def test_cos_agent_relation(app: Application): +def test_cos_agent_relation(juju: jubilant.Juju, app: str): """ arrange: An active charm. act: When the cos-agent relation is joined. assert: The test charm becomes active. """ - model: Model = app.model - grafana_agent = await model.deploy( + grafana_agent_name = f"grafana-agent-{app}" + juju.deploy( "grafana-agent", - application_name=f"grafana-agent-{app.name}", + grafana_agent_name, channel="1/edge", - series="jammy", + base="ubuntu@22.04", ) - await model.relate(f"{app.name}:cos-agent", f"{grafana_agent.name}:cos-agent") - await model.wait_for_idle(apps=[app.name], status="active", timeout=30 * 60) + juju.integrate(f"{app}:cos-agent", f"{grafana_agent_name}:cos-agent") + juju.wait(lambda s: jubilant.all_active(s, app), timeout=30 * 60) -@pytest.mark.asyncio @pytest.mark.abort_on_fail -async def test_build_image( +def test_build_image( openstack_connection: Connection, dispatch_time: datetime, image_names: list[str], @@ -67,19 +60,19 @@ async def test_build_image( act: When openstack images are listed. assert: An image is built successfully. """ - await wait_for_images(openstack_connection, dispatch_time, image_names) + wait_for_images(openstack_connection, dispatch_time, image_names) # Ignore the "too many arguments" warning, as this is not significant for a test function where # the arguments are fixtures and the function is not expected to be called directly. 
@pytest.mark.abort_on_fail -async def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R0913,R0917 - app: Application, - test_charm: Application, - test_charm_2: Application, +def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R0913,R0917 + juju: jubilant.Juju, + app: str, + test_charm: str, + test_charm_2: str, openstack_connection: Connection, image_names: list[str], - ops_test: OpsTest, ): """ arrange: A test_charm that has already been integrated. @@ -87,11 +80,10 @@ async def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R091 act: Integrate the test_charm_2 with the app. assert: No additional image is created but instead the already created ones are reused. """ - model: Model = app.model time_before_relation = datetime.now(tz=timezone.utc) - await model.integrate(app.name, test_charm_2.name) - await model.wait_for_idle(apps=(test_charm_2.name,), status="active", timeout=30 * 60) + juju.integrate(app, test_charm_2) + juju.wait(lambda s: jubilant.all_active(s, test_charm_2), timeout=30 * 60) # Check that no new image is created for image_name in image_names: @@ -105,20 +97,17 @@ async def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R091 ) # Check that images in relation data is same for both test charms - image_builder_unit_name = app.units[0].name - test_charm_unit_name = test_charm.units[0].name - _, test_charm_unit_data, _ = await ops_test.juju( - "show-unit", test_charm_unit_name, "--format", "json" - ) - logger.info("Test charm unit data: %s", test_charm_unit_data) - test_charm_unit_data = json.loads(test_charm_unit_data) - - test_charm_2_unit_name = test_charm_2.units[0].name - _, test_charm_2_unit_data, _ = await ops_test.juju( - "show-unit", test_charm_2_unit_name, "--format", "json" - ) - logger.info("Test charm 2 unit data: %s", test_charm_2_unit_data) - test_charm_2_unit_data = json.loads(test_charm_2_unit_data) + status = juju.status() + image_builder_unit_name = 
next(iter(status.apps[app].units)) + test_charm_unit_name = next(iter(status.apps[test_charm].units)) + test_charm_unit_data_str = juju.cli("show-unit", test_charm_unit_name, "--format", "json") + logger.info("Test charm unit data: %s", test_charm_unit_data_str) + test_charm_unit_data = json.loads(test_charm_unit_data_str) + + test_charm_2_unit_name = next(iter(status.apps[test_charm_2].units)) + test_charm_2_unit_data_str = juju.cli("show-unit", test_charm_2_unit_name, "--format", "json") + logger.info("Test charm 2 unit data: %s", test_charm_2_unit_data_str) + test_charm_2_unit_data = json.loads(test_charm_2_unit_data_str) assert ( test_charm_unit_data[test_charm_unit_name]["relation-info"][0]["related-units"][ @@ -130,10 +119,10 @@ async def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R091 ) -@pytest.mark.asyncio @pytest.mark.abort_on_fail -async def test_periodic_rebuilt( - app: Application, +def test_periodic_rebuilt( + juju: jubilant.Juju, + app: str, app_config: dict, openstack_connection: Connection, image_names: list[str], @@ -143,68 +132,70 @@ async def test_periodic_rebuilt( act: Modify the crontab to run every minute. assert: An image is built successfully. 
""" - unit: Unit = next(iter(app.units)) - - await app.model.wait_for_idle(apps=(app.name,), status="active", timeout=30 * 60) + juju.wait(lambda s: jubilant.all_active(s, app), timeout=30 * 60) dispatch_time = datetime.now(tz=timezone.utc) - async with _change_cronjob_to_minutes( - unit, current_hour_interval=app_config[BUILD_INTERVAL_CONFIG_NAME] + status = juju.status() + unit_name = next(iter(status.apps[app].units)) + with _change_cronjob_to_minutes( + juju, unit_name, current_hour_interval=app_config[BUILD_INTERVAL_CONFIG_NAME] ): - - await wait_for_images( + wait_for_images( openstack_connection=openstack_connection, dispatch_time=dispatch_time, image_names=image_names, ) -@asynccontextmanager -async def _change_cronjob_to_minutes(unit: Unit, current_hour_interval: int): +@contextlib.contextmanager +def _change_cronjob_to_minutes(juju: jubilant.Juju, unit_name: str, current_hour_interval: int): """Context manager to change the crontab to run every minute.""" minute_interval = 1 - await unit.ssh( - command=rf"sudo sed -i 's/0 \*\/{current_hour_interval}/\*\/{minute_interval} \*/g' " - f"{CRON_BUILD_SCHEDULE_PATH}" + juju.ssh( + unit_name, + rf"sudo sed -i 's/0 \*\/{current_hour_interval}/\*\/{minute_interval} \*/g' " + f"{CRON_BUILD_SCHEDULE_PATH}", ) - cron_content = await unit.ssh(command=f"cat {CRON_BUILD_SCHEDULE_PATH}") + cron_content = juju.ssh(unit_name, f"cat {CRON_BUILD_SCHEDULE_PATH}") logger.info("Cron file content: %s", cron_content) - await unit.ssh(command="sudo systemctl restart cron") + juju.ssh(unit_name, "sudo systemctl restart cron") yield - await unit.ssh( - command=rf"sudo sed -i 's/\*\/{minute_interval} \*/0 \*\/{current_hour_interval}/g' " - f"{CRON_BUILD_SCHEDULE_PATH}" + juju.ssh( + unit_name, + rf"sudo sed -i 's/\*\/{minute_interval} \*/0 \*\/{current_hour_interval}/g' " + f"{CRON_BUILD_SCHEDULE_PATH}", ) - cron_content = await unit.ssh(command=f"cat {CRON_BUILD_SCHEDULE_PATH}") + cron_content = juju.ssh(unit_name, f"cat 
{CRON_BUILD_SCHEDULE_PATH}") logger.info("Cronfile content: %s", cron_content) - await unit.ssh(command="sudo systemctl restart cron") + juju.ssh(unit_name, "sudo systemctl restart cron") -@pytest.mark.asyncio @pytest.mark.abort_on_fail -async def test_log_rotated(app: Application): +def test_log_rotated(juju: jubilant.Juju, app: str): """ arrange: A deployed active charm and manually write something to the log file. act: trigger logrotate manually assert: The log is rotated successfully. """ - unit: Unit = next(iter(app.units)) - await app.model.wait_for_idle(apps=(app.name,), timeout=30 * 60) + juju.wait(lambda s: jubilant.all_agents_idle(s, app), timeout=30 * 60) + status = juju.status() + unit_name = next(iter(status.apps[app].units)) test_log = "this log should be rotated" - await unit.ssh( - command=f"echo '{test_log}' | " "sudo tee -a /var/log/github-runner-image-builder/info.log" + juju.ssh( + unit_name, + f"echo '{test_log}' | sudo tee -a /var/log/github-runner-image-builder/info.log", ) # Test that the configuration is loaded successfully using --debug flag - logrotate_debug_output = await unit.ssh( - command="sudo /usr/sbin/logrotate /etc/logrotate.conf --debug 2>&1" + logrotate_debug_output = juju.ssh( + unit_name, "sudo /usr/sbin/logrotate /etc/logrotate.conf --debug 2>&1" ) assert ( "rotating pattern: /var/log/github-runner-image-builder/info.log" in logrotate_debug_output ) # Manually trigger logrotate using --force flag - await unit.ssh(command="sudo /usr/sbin/logrotate /etc/logrotate.conf --force") - log_output = await unit.ssh(command="sudo cat /var/log/github-runner-image-builder/info.log") + juju.ssh(unit_name, "sudo /usr/sbin/logrotate /etc/logrotate.conf --force") + log_output = juju.ssh(unit_name, "sudo cat /var/log/github-runner-image-builder/info.log") assert test_log not in log_output diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index 920d8580..4edd0dc4 100644 --- a/tests/integration/test_upgrade.py 
+++ b/tests/integration/test_upgrade.py @@ -7,40 +7,34 @@ import logging from datetime import datetime, timezone +import jubilant import pytest -import pytest_asyncio -from juju.application import Application -from juju.model import Model -from juju.unit import Unit -from openstack.connection import Connection from tests.integration.helpers import wait_for, wait_for_images from tests.integration.types import OpenstackMeta, TestConfigs -@pytest_asyncio.fixture(scope="module", name="app") -async def app_fixture( - app_on_charmhub: Application, +@pytest.fixture(scope="module", name="app") +def app_fixture( + juju: jubilant.Juju, + app_on_charmhub: str, test_configs: TestConfigs, openstack_metadata: OpenstackMeta, - ops_test, -) -> Application: +) -> str: """Upgrade the charm from the local charm file.""" logging.info("Refreshing the charm from the local charm file.") - await ops_test.juju( - "refresh", - "--path", - test_configs.charm_file, - "--config", - f"build-flavor={openstack_metadata.flavor}", - "--config", - f"build-network={openstack_metadata.network}", - app_on_charmhub.name, + juju.refresh( + app_on_charmhub, + path=test_configs.charm_file, + config={ + "build-flavor": openstack_metadata.flavor, + "build-network": openstack_metadata.network, + }, ) - app = app_on_charmhub - unit = app.units[0] + status = juju.status() + unit_name = next(iter(status.apps[app_on_charmhub].units)) - async def is_upgrade_charm_event_emitted(unit: Unit) -> bool: + def is_upgrade_charm_event_emitted() -> bool: """Check if the upgrade_charm event is emitted. This is to ensure false positives from only waiting for ACTIVE status or @@ -48,35 +42,30 @@ async def is_upgrade_charm_event_emitted(unit: Unit) -> bool: We cannot rely on the juju status containing revision zero, because it changes instantly, and the hook upgrade-charm can run with a significant delay. - Args: - unit: The unit to check for upgrade charm event. - Returns: bool: True if the event is emitted, False otherwise. 
""" - unit_name_without_slash = unit.name.replace("/", "-") + unit_name_without_slash = unit_name.replace("/", "-") juju_unit_log_file = f"/var/log/juju/unit-{unit_name_without_slash}.log" - stdout = await unit.ssh(command=f"cat {juju_unit_log_file}") + stdout = juju.ssh(unit_name, f"cat {juju_unit_log_file}") return "Emitting Juju event upgrade_charm." in stdout - await wait_for( - functools.partial(is_upgrade_charm_event_emitted, unit), timeout=360, check_interval=60 - ) - await app.model.wait_for_idle( - apps=[app.name], - raise_on_error=True, + wait_for(is_upgrade_charm_event_emitted, timeout=360, check_interval=60) + juju.wait( + lambda s: jubilant.all_agents_idle(s, app_on_charmhub), + error=jubilant.any_error, timeout=180 * 60, - check_freq=30, + delay=30, ) - return app + return app_on_charmhub -@pytest.mark.asyncio -async def test_image_build( - app: Application, - test_charm: Application, - openstack_connection: Connection, +def test_image_build( + juju: jubilant.Juju, + app: str, + test_charm: str, + openstack_connection, image_names: list[str], ): """ @@ -84,11 +73,10 @@ async def test_image_build( act: Integrate the refreshed charm with the test charm. assert: Image building is working. 
""" - model: Model = app.model dispatch_time = datetime.now(tz=timezone.utc) - await model.integrate(app.name, test_charm.name) + juju.integrate(app, test_charm) - await wait_for_images( + wait_for_images( openstack_connection=openstack_connection, dispatch_time=dispatch_time, image_names=image_names, diff --git a/tests/integration/types.py b/tests/integration/types.py index 00f79cc4..64d82d87 100644 --- a/tests/integration/types.py +++ b/tests/integration/types.py @@ -8,7 +8,7 @@ from datetime import datetime from pathlib import Path -from juju.model import Model +import jubilant from openstack.compute.v2.keypair import Keypair from openstack.connection import Connection from openstack.network.v2.security_group import SecurityGroup @@ -71,13 +71,13 @@ class TestConfigs(typing.NamedTuple): """Test configuration values. Attributes: - model: The juju test model. + juju: The jubilant Juju instance. charm_file: The charm file path. dispatch_time: The test start time. test_id: The test unique identifier. 
""" - model: Model + juju: jubilant.Juju charm_file: str | Path dispatch_time: datetime test_id: str diff --git a/tox.ini b/tox.ini index 7cda05cf..b93303d9 100644 --- a/tox.ini +++ b/tox.ini @@ -50,8 +50,7 @@ deps = pylint pyproject-flake8 pytest - pytest-asyncio - pytest-operator + jubilant requests types-PyYAML types-requests @@ -153,10 +152,8 @@ deps = -r{toxinidir}/requirements.txt allure-pytest>=2.8.18 git+https://github.com/canonical/data-platform-workflows@v24.0.0\#subdirectory=python/pytest_plugins/allure_pytest_collection_report - juju + jubilant~=1.0 pytest - pytest-asyncio - pytest-operator commands = pytest -v --tb native --ignore={[vars]app_path} --ignore={[vars]tst_path}unit --log-cli-level=INFO -s {posargs} ; Testing with microstack From 2fac78e8218946002819153f352de8521a965c2b Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 16 Apr 2026 11:36:18 +0800 Subject: [PATCH 02/42] Remove unused functools import from test_upgrade.py Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_upgrade.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index 4edd0dc4..e72d0e63 100644 --- a/tests/integration/test_upgrade.py +++ b/tests/integration/test_upgrade.py @@ -3,7 +3,6 @@ """Test that no breaking change occurs when upgrading the charm.""" -import functools import logging from datetime import datetime, timezone From 65bca40e1196b4bfe1d9009aaf61c521d37a73ec Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 16 Apr 2026 11:39:34 +0800 Subject: [PATCH 03/42] Disable too-many-locals pylint warning for test_charm_another_app_does_not_rebuild_image Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index 0a5b973d..f45936a0 100644 --- 
a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -66,7 +66,7 @@ def test_build_image( # Ignore the "too many arguments" warning, as this is not significant for a test function where # the arguments are fixtures and the function is not expected to be called directly. @pytest.mark.abort_on_fail -def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R0913,R0917 +def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R0913,R0914,R0917 juju: jubilant.Juju, app: str, test_charm: str, From a53df9ba762e2b9c186e85cac639a549b1731cef Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 16 Apr 2026 12:14:21 +0800 Subject: [PATCH 04/42] Address PR review comments - Use jubilant~=1.0 in lint env for consistent versioning with integration env - Wrap _change_cronjob_to_minutes yield in try/finally to ensure cron cleanup on test failure Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_charm.py | 21 +++++++++++---------- tox.ini | 2 +- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index f45936a0..c01cfadc 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -160,16 +160,17 @@ def _change_cronjob_to_minutes(juju: jubilant.Juju, unit_name: str, current_hour logger.info("Cron file content: %s", cron_content) juju.ssh(unit_name, "sudo systemctl restart cron") - yield - - juju.ssh( - unit_name, - rf"sudo sed -i 's/\*\/{minute_interval} \*/0 \*\/{current_hour_interval}/g' " - f"{CRON_BUILD_SCHEDULE_PATH}", - ) - cron_content = juju.ssh(unit_name, f"cat {CRON_BUILD_SCHEDULE_PATH}") - logger.info("Cronfile content: %s", cron_content) - juju.ssh(unit_name, "sudo systemctl restart cron") + try: + yield + finally: + juju.ssh( + unit_name, + rf"sudo sed -i 's/\*\/{minute_interval} \*/0 \*\/{current_hour_interval}/g' " + f"{CRON_BUILD_SCHEDULE_PATH}", + ) + cron_content = 
juju.ssh(unit_name, f"cat {CRON_BUILD_SCHEDULE_PATH}") + logger.info("Cronfile content: %s", cron_content) + juju.ssh(unit_name, "sudo systemctl restart cron") @pytest.mark.abort_on_fail diff --git a/tox.ini b/tox.ini index b93303d9..be161620 100644 --- a/tox.ini +++ b/tox.ini @@ -50,7 +50,7 @@ deps = pylint pyproject-flake8 pytest - jubilant + jubilant~=1.0 requests types-PyYAML types-requests From 5962c15eb09270a2056401c174ab1b9fe4a9e7ef Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 16 Apr 2026 13:35:31 +0800 Subject: [PATCH 05/42] Remove invalid encoding arg from basicConfig when using handlers encoding is only valid with filename, not handlers. The WatchedFileHandler is already created with encoding='utf-8'. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- app/src/github_runner_image_builder/logging.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/src/github_runner_image_builder/logging.py b/app/src/github_runner_image_builder/logging.py index 771da30e..b3fa134c 100644 --- a/app/src/github_runner_image_builder/logging.py +++ b/app/src/github_runner_image_builder/logging.py @@ -27,5 +27,4 @@ def configure(log_level: str | int) -> None: logging.basicConfig( level=log_level_normalized, handlers=(log_handler,), - encoding="utf-8", ) From db4e2d3ca434b1cae38bebaf34430f9817fa07a6 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 16 Apr 2026 14:11:40 +0800 Subject: [PATCH 06/42] test: use os-agnostic workflow --- .github/workflows/integration_test.yaml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 801904c7..3c9f800d 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -3,17 +3,16 @@ name: Integration tests on: pull_request: schedule: - - cron: "0 15 * * SAT" + - cron: "0 15 * * SAT" concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number 
|| + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true jobs: integration-tests: - uses: - canonical/operator-workflows/.github/workflows/integration_test.yaml@main + uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@fix/workflow-version-action-deps-sys-packages secrets: inherit with: juju-channel: 3.6/stable @@ -28,5 +27,5 @@ jobs: allure-report: if: ${{ !cancelled() && github.event_name == 'schedule' }} needs: - - integration-tests + - integration-tests uses: canonical/operator-workflows/.github/workflows/allure_report.yaml@main From 953578e42186edac73c9d0c583a88b87227e372e Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 16 Apr 2026 18:18:05 +0800 Subject: [PATCH 07/42] chore: disable deploy logs --- tests/integration/conftest.py | 3 +++ tests/integration/test_charm.py | 1 + 2 files changed, 4 insertions(+) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 5ac94f21..62f2a911 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -177,6 +177,7 @@ def _deploy_test_charm( "openstack-user-name": private_endpoint_configs["username"], }, constraints={"virt-type": "virtual-machine"}, + log=False, ) return app_name @@ -384,6 +385,7 @@ def app_fixture( app_name, constraints=base_machine_constraint, config=app_config, + log=False, ) test_configs.juju.grant_secret(script_secret.name, app_name) test_configs.juju.config( @@ -442,6 +444,7 @@ def app_on_charmhub_fixture( constraints=base_machine_constraint, config=charmhub_app_config, channel=charmhub_channel, + log=False, ) test_configs.juju.wait( lambda s: jubilant.all_agents_idle(s, app_name), diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index c01cfadc..f680b104 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -44,6 +44,7 @@ def test_cos_agent_relation(juju: jubilant.Juju, app: str): grafana_agent_name, 
channel="1/edge", base="ubuntu@22.04", + log=False, ) juju.integrate(f"{app}:cos-agent", f"{grafana_agent_name}:cos-agent") juju.wait(lambda s: jubilant.all_active(s, app), timeout=30 * 60) From ecb787ee7fb3519d26c152f96d0ff23b7740e3fc Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 21 Apr 2026 12:29:18 +0800 Subject: [PATCH 08/42] test: resolve review comments - migrate fixtures to jubilant sync API - Pin integration_test.yaml to operator-workflows@main instead of mutable feature branch ref - Convert openstack_password_secret_fixture from async pytest_asyncio to sync pytest fixture using juju.add_secret() - Convert app_config_fixture from async pytest_asyncio to sync pytest fixture (body was already synchronous) - Fix app_fixture return type (AsyncGenerator[Application] -> Generator[str]) and replace await calls with jubilant API: grant_secret(), config() - Fix _prepare_charmhub_app_config: remove async, replace ops_test param with juju: jubilant.Juju, fix NameError on test_configs - Fix app_on_charmhub_fixture: convert to sync pytest fixture, remove ops_test param, define missing app_name, replace all async/model calls with jubilant API, yield app name string instead of Application Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .github/workflows/integration_test.yaml | 2 +- tests/integration/conftest.py | 63 +++++++++++++------------ 2 files changed, 35 insertions(+), 30 deletions(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 3c9f800d..26e5c991 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -12,7 +12,7 @@ concurrency: jobs: integration-tests: - uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@fix/workflow-version-action-deps-sys-packages + uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main secrets: inherit with: juju-channel: 3.6/stable diff --git 
a/tests/integration/conftest.py b/tests/integration/conftest.py index f2f0056e..191d426c 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -334,22 +334,22 @@ def script_secret_fixture(juju: jubilant.Juju) -> _Secret: return _Secret(id=str(secret_uri), name=secret_name) -@pytest_asyncio.fixture(scope="module", name="openstack_password_secret") -async def openstack_password_secret_fixture( +@pytest.fixture(scope="module", name="openstack_password_secret") +def openstack_password_secret_fixture( test_configs: TestConfigs, private_endpoint_configs: PrivateEndpointConfigs, ) -> _Secret: """The OpenStack password Juju secret.""" secret_name = f"openstack-password-{uuid4().hex}" - secret_id = await test_configs.model.add_secret( - name=secret_name, - data_args=[f"password={private_endpoint_configs['password']}"], - ) # note secret_id already contains "secret:" prefix - return _Secret(id=secret_id, name=secret_name) + secret_uri = test_configs.juju.add_secret( + secret_name, + {"password": private_endpoint_configs["password"]}, + ) + return _Secret(id=str(secret_uri), name=secret_name) -@pytest_asyncio.fixture(scope="module", name="app_config") -async def app_config_fixture( +@pytest.fixture(scope="module", name="app_config") +def app_config_fixture( private_endpoint_configs: PrivateEndpointConfigs, image_configs: ImageConfigs, openstack_metadata: OpenstackMeta, @@ -393,7 +393,7 @@ def app_fixture( test_configs: TestConfigs, script_secret: _Secret, openstack_password_secret: _Secret, -) -> AsyncGenerator[Application, None]: +) -> Generator[str, None, None]: """The deployed application fixture.""" app_name = f"image-builder-operator-{test_configs.test_id}" logger.info("Deploying image builder: %s", test_configs.dispatch_time) @@ -404,9 +404,10 @@ def app_fixture( config=app_config, log=False, ) - await app.model.grant_secret(openstack_password_secret.name, app.name) - await app.model.grant_secret(script_secret.name, app.name) - await 
app.set_config( + test_configs.juju.grant_secret(openstack_password_secret.name, app_name) + test_configs.juju.grant_secret(script_secret.name, app_name) + test_configs.juju.config( + app_name, { SCRIPT_URL_CONFIG_NAME: "https://raw.githubusercontent.com/canonical/" "github-runner-image-builder/refs/heads/main/tests/integration/" @@ -425,13 +426,13 @@ def app_fixture( test_configs.juju.remove_application(app_name) -async def _prepare_charmhub_app_config( - ops_test, app_config: dict, openstack_password: str +def _prepare_charmhub_app_config( + juju: jubilant.Juju, app_config: dict, openstack_password: str ) -> tuple[str, dict, set[str]]: """Prepare the application config for charmhub deployment. Args: - ops_test: The pytest operator test instance. + juju: The jubilant Juju instance. app_config: The base application configuration. openstack_password: The plaintext OpenStack password, used as a fallback when the charmhub revision does not yet expose openstack-password-secret. @@ -441,7 +442,7 @@ async def _prepare_charmhub_app_config( """ charmhub_channel = "edge" - stdout = test_configs.juju.cli( + stdout = juju.cli( "info", "--format", "json", @@ -471,21 +472,21 @@ async def _prepare_charmhub_app_config( return charmhub_channel, charmhub_app_config, charmhub_config_options -@pytest_asyncio.fixture(scope="module", name="app_on_charmhub") -async def app_on_charmhub_fixture( # pylint: disable=too-many-arguments,too-many-positional-arguments +@pytest.fixture(scope="module", name="app_on_charmhub") +def app_on_charmhub_fixture( # pylint: disable=too-many-arguments,too-many-positional-arguments test_configs: TestConfigs, app_config: dict, base_machine_constraint: str, - ops_test, openstack_password_secret: _Secret, private_endpoint_configs: PrivateEndpointConfigs, -) -> AsyncGenerator[Application, None]: +) -> Generator[str, None, None]: """Fixture for deploying the charm from charmhub.""" + app_name = f"image-builder-charmhub-{test_configs.test_id}" # Normally we would 
use latest/stable, but upgrading # from stable is currently broken, and therefore we are using edge. Change this in the future. charmhub_channel, charmhub_app_config, charmhub_config_options = ( - await _prepare_charmhub_app_config( - ops_test, app_config, private_endpoint_configs["password"] + _prepare_charmhub_app_config( + test_configs.juju, app_config, private_endpoint_configs["password"] ) ) @@ -494,13 +495,12 @@ async def app_on_charmhub_fixture( # pylint: disable=too-many-arguments,too-man deploy_config = { k: v for k, v in charmhub_app_config.items() if k != OPENSTACK_PASSWORD_SECRET_CONFIG_NAME } - app: Application = await test_configs.model.deploy( + test_configs.juju.deploy( "github-runner-image-builder", app_name, constraints=base_machine_constraint, config=deploy_config, channel=charmhub_channel, - log=False, ) test_configs.juju.wait( lambda s: jubilant.all_agents_idle(s, app_name), @@ -510,12 +510,17 @@ async def app_on_charmhub_fixture( # pylint: disable=too-many-arguments,too-man if OPENSTACK_PASSWORD_SECRET_CONFIG_NAME in charmhub_config_options: # Grant access first, then set the config to trigger a config-changed hook # after the charm already has read permissions for the secret. 
- await app.model.grant_secret(openstack_password_secret.name, app.name) - await app.set_config({OPENSTACK_PASSWORD_SECRET_CONFIG_NAME: openstack_password_secret.id}) + test_configs.juju.grant_secret(openstack_password_secret.name, app_name) + test_configs.juju.config( + app_name, {OPENSTACK_PASSWORD_SECRET_CONFIG_NAME: openstack_password_secret.id} + ) - await test_configs.model.wait_for_idle(apps=[app.name], idle_period=30, timeout=60 * 30) + test_configs.juju.wait( + lambda s: jubilant.all_agents_idle(s, app_name), + timeout=60 * 30, + ) - yield app + yield app_name test_configs.juju.remove_application(app_name) From 1708a9800783113e7e4c79a2cf630c858c6f3a88 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Tue, 21 Apr 2026 13:01:37 +0800 Subject: [PATCH 09/42] test: remove invalid log=False from jubilant deploy calls jubilant.Juju.deploy() does not accept a log parameter (it only exists on the private _cli method). Remove it from _deploy_test_charm and app_fixture to avoid TypeError at runtime. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 191d426c..483c9535 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -178,7 +178,6 @@ def _deploy_test_charm( "openstack-user-name": private_endpoint_configs["username"], }, constraints={"virt-type": "virtual-machine"}, - log=False, ) return app_name @@ -402,7 +401,6 @@ def app_fixture( app_name, constraints=base_machine_constraint, config=app_config, - log=False, ) test_configs.juju.grant_secret(openstack_password_secret.name, app_name) test_configs.juju.grant_secret(script_secret.name, app_name) From c76d28e9e94e4e2c5548677ebd482f72f45e004c Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 00:48:06 +0800 Subject: [PATCH 10/42] test: fix black formatting and remove invalid log=False from deploy calls - Flatten parenthesized tuple assignment for _prepare_charmhub_app_config to match black's preferred style - Remove log=False from juju.deploy() in test_cos_agent_relation; jubilant's deploy() does not accept a log parameter Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/conftest.py | 6 ++---- tests/integration/test_charm.py | 1 - 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 483c9535..84fd3315 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -482,10 +482,8 @@ def app_on_charmhub_fixture( # pylint: disable=too-many-arguments,too-many-posi app_name = f"image-builder-charmhub-{test_configs.test_id}" # Normally we would use latest/stable, but upgrading # from stable is currently broken, and therefore we are using edge. Change this in the future. 
- charmhub_channel, charmhub_app_config, charmhub_config_options = ( - _prepare_charmhub_app_config( - test_configs.juju, app_config, private_endpoint_configs["password"] - ) + charmhub_channel, charmhub_app_config, charmhub_config_options = _prepare_charmhub_app_config( + test_configs.juju, app_config, private_endpoint_configs["password"] ) # Deploy without the secret-backed config so the charm doesn't try to read the secret diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index f680b104..c01cfadc 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -44,7 +44,6 @@ def test_cos_agent_relation(juju: jubilant.Juju, app: str): grafana_agent_name, channel="1/edge", base="ubuntu@22.04", - log=False, ) juju.integrate(f"{app}:cos-agent", f"{grafana_agent_name}:cos-agent") juju.wait(lambda s: jubilant.all_active(s, app), timeout=30 * 60) From 120754ef2fb9e641b4eb4122e2b57e1a32358033 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 01:40:42 +0800 Subject: [PATCH 11/42] test: fix type annotation for base_machine_constraint in app_on_charmhub_fixture Was annotated as str instead of dict, causing mypy to error: Argument 'constraints' to 'deploy' of 'Juju' has incompatible type 'str'; expected 'Mapping[str, str] | None' Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 84fd3315..739835ba 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -474,7 +474,7 @@ def _prepare_charmhub_app_config( def app_on_charmhub_fixture( # pylint: disable=too-many-arguments,too-many-positional-arguments test_configs: TestConfigs, app_config: dict, - base_machine_constraint: str, + base_machine_constraint: dict, openstack_password_secret: _Secret, private_endpoint_configs: PrivateEndpointConfigs, ) -> 
Generator[str, None, None]: From c0a2322167d43fa60a666640d54e2ce4482dcf0f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 12:19:56 +0800 Subject: [PATCH 12/42] ci: trigger ci From 71c2d6b51429c7f0b07b4cc6d9b3a5ed24cc2103 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 14:07:11 +0800 Subject: [PATCH 13/42] refactor: migrate to openstack-password-secret for authentication --- tests/integration/conftest.py | 11 ++++++++--- tests/integration/data/charm/charmcraft.yaml | 11 +++++------ tests/integration/data/charm/src/charm.py | 8 +++++++- 3 files changed, 20 insertions(+), 10 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 739835ba..975991fe 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -122,10 +122,11 @@ def test_charm_fixture( juju: jubilant.Juju, test_id: str, private_endpoint_configs: PrivateEndpointConfigs, + openstack_password_secret: _Secret, ) -> Generator[str, None, None]: """The test charm that becomes active when valid relation data is given.""" app_name = f"test-{test_id}" - _deploy_test_charm(juju, app_name, private_endpoint_configs) + _deploy_test_charm(juju, app_name, private_endpoint_configs, openstack_password_secret) yield app_name @@ -138,10 +139,11 @@ def test_charm_2_fixture( juju: jubilant.Juju, test_id: str, private_endpoint_configs: PrivateEndpointConfigs, + openstack_password_secret: _Secret, ) -> Generator[str, None, None]: """A second test charm that becomes active when valid relation data is given.""" app_name = f"test2-{test_id}" - _deploy_test_charm(juju, app_name, private_endpoint_configs) + _deploy_test_charm(juju, app_name, private_endpoint_configs, openstack_password_secret) yield app_name @@ -154,6 +156,7 @@ def _deploy_test_charm( juju: jubilant.Juju, app_name: str, private_endpoint_configs: PrivateEndpointConfigs, + openstack_password_secret: _Secret, ) -> str: """Deploy the test charm with the given application 
name. @@ -161,6 +164,7 @@ def _deploy_test_charm( juju: The jubilant Juju instance. app_name: The name of the application to deploy. private_endpoint_configs: The OpenStack private endpoint configurations. + openstack_password_secret: The juju secret containing the OpenStack password. Returns: The application name. @@ -171,7 +175,7 @@ def _deploy_test_charm( app_name, config={ "openstack-auth-url": private_endpoint_configs["auth_url"], - "openstack-password": private_endpoint_configs["password"], + "openstack-password-secret": openstack_password_secret.id, "openstack-project-domain-name": private_endpoint_configs["project_domain_name"], "openstack-project-name": private_endpoint_configs["project_name"], "openstack-user-domain-name": private_endpoint_configs["user_domain_name"], @@ -179,6 +183,7 @@ def _deploy_test_charm( }, constraints={"virt-type": "virtual-machine"}, ) + juju.grant_secret(openstack_password_secret.name, app_name) return app_name diff --git a/tests/integration/data/charm/charmcraft.yaml b/tests/integration/data/charm/charmcraft.yaml index 008e4ef5..12bbd2a5 100644 --- a/tests/integration/data/charm/charmcraft.yaml +++ b/tests/integration/data/charm/charmcraft.yaml @@ -55,13 +55,12 @@ config: The auth_url section of the clouds.yaml contents, used to authenticate the OpenStack \ client (e.g. http://my-openstack-deployment/openstack-keystone). See https://docs.\ openstack.org/python-openstackclient/queens/configuration/index.html for more information. - openstack-password: - type: string - default: "" + openstack-password-secret: + type: secret description: | - The password section of the clouds.yaml contents, used to authenticate the OpenStack \ - client (e.g. myverysecurepassword). See https://docs.openstack.org/python-openstackclient/\ - queens/configuration/index.html for more information. + The juju secret ID containing the OpenStack password used to authenticate the OpenStack \ + client. The secret should contain a "password" key. 
See https://docs.openstack.org/\ + python-openstackclient/queens/configuration/index.html for more information. openstack-project-domain-name: type: string default: "" diff --git a/tests/integration/data/charm/src/charm.py b/tests/integration/data/charm/src/charm.py index c4e0a5cd..45c9003a 100755 --- a/tests/integration/data/charm/src/charm.py +++ b/tests/integration/data/charm/src/charm.py @@ -46,12 +46,18 @@ def _on_image_relation_joined(self, event: ops.RelationJoinedEvent): event: The event fired when relation is joined. """ logger.info("Relation joined.") + secret_id = self.config.get("openstack-password-secret") + if not secret_id: + logger.warning("openstack-password-secret not set.") + return + secret = self.model.get_secret(id=secret_id) + password = secret.get_content()["password"] event.relation.data[self.unit].update( typing.cast( dict[str, str], { "auth_url": self.config["openstack-auth-url"], - "password": self.config["openstack-password"], + "password": password, "project_domain_name": self.config["openstack-project-domain-name"], "project_name": self.config["openstack-project-name"], "user_domain_name": self.config["openstack-user-domain-name"], From 888b000c8d3558717a5abffe769c4a05be101530 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 14:27:33 +0800 Subject: [PATCH 14/42] fix: cast config secret id to str for mypy compatibility self.config.get() returns int | float | str, but Model.get_secret(id=...) expects str | None. Cast to str explicitly since we already guard on truthiness. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/data/charm/src/charm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/data/charm/src/charm.py b/tests/integration/data/charm/src/charm.py index 45c9003a..a3ca77bc 100755 --- a/tests/integration/data/charm/src/charm.py +++ b/tests/integration/data/charm/src/charm.py @@ -50,7 +50,7 @@ def _on_image_relation_joined(self, event: ops.RelationJoinedEvent): if not secret_id: logger.warning("openstack-password-secret not set.") return - secret = self.model.get_secret(id=secret_id) + secret = self.model.get_secret(id=str(secret_id)) password = secret.get_content()["password"] event.relation.data[self.unit].update( typing.cast( From 7fefe62c371aa0fb6aa059721cd2e80b56eb9ad4 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 14:35:34 +0800 Subject: [PATCH 15/42] refactor: migrate to openstack-password-secret for secure password handling --- README.md | 53 ++-- charmcraft.yaml | 14 +- docs/changelog.md | 270 ++++++++++++------- docs/how-to/configure-base-image.md | 8 +- docs/how-to/configure-build-interval.md | 8 +- docs/how-to/configure-revision-history.md | 8 +- docs/how-to/pin-github-runner-version.md | 10 +- docs/tutorial/quick-start.md | 31 ++- src/state.py | 54 ++-- tests/integration/conftest.py | 23 +- tests/integration/data/charm/charmcraft.yaml | 32 +-- tests/unit/conftest.py | 2 - tests/unit/factories.py | 2 - tests/unit/test_state.py | 22 +- 14 files changed, 293 insertions(+), 244 deletions(-) diff --git a/README.md b/README.md index 094b4f84..3c089ce0 100644 --- a/README.md +++ b/README.md @@ -1,33 +1,39 @@ + # GitHub runner image builder operator + -A Juju charm that provides the GitHub runner workload embedded snapshot image to the +A Juju charm that provides the GitHub runner workload embedded snapshot image to the [GitHub runner](https://charmhub.io/github-runner) charm. 
This charm is deployed as a VM and works on top of OpenStack infrastructure. Like any Juju charm, this charm supports one-line deployment, configuration, integration, scaling, and more. For Charmed GitHub runner image builder, this includes support for configuring: -* Multi-arch -* Multi Ubuntu bases -* Juju/MicroK8s snap channels -* External scripts -For information about how to deploy, integrate, and manage this charm, see the Official +- Multi-arch +- Multi Ubuntu bases +- Juju/MicroK8s snap channels +- External scripts + +For information about how to deploy, integrate, and manage this charm, see the Official [CharmHub Documentation](https://charmhub.io/github-runner-image-builder). ## Get started + + Deploy GitHub runner image builder with GitHub runners. + You'll need a working [OpenStack installation](https://microstack.run/docs/single-node) with flavors with a minimum of 2 CPU cores, 8GB RAM and 10GB disk. ### Set up -Follow [MicroStack's single-node](https://microstack.run/docs/single-node) starting guide to set +Follow [MicroStack's single-node](https://microstack.run/docs/single-node) starting guide to set up MicroStack. Follow the [tutorial on GitHub runner](https://charmhub.io/github-runner) to deploy the GitHub @@ -38,11 +44,13 @@ runner. Deploy the charm. 
``` +juju add-secret openstack-password password= +OPENSTACK_PASSWORD_SECRET=$(juju show-secret openstack-password --format json | jq -r 'keys[0]') + juju deploy github-runner-image-builder \ ---config experimental-external-build=True \ ---config experimental-external-build-network= \ +--config build-network= \ --config openstack-auth-url= \ ---config openstack-password= \ +--config openstack-password-secret=$OPENSTACK_PASSWORD_SECRET \ --config openstack-project-domain-name= \ --config openstack-project-name= \ --config openstack-user-name= @@ -51,6 +59,7 @@ juju integrate github-runner-image-builder github-runner ``` ### Basic operations + After having deployed and integrated the charm with the GitHub runner charm, the image should start @@ -58,10 +67,12 @@ to build and be provided to the GitHub runner automatically. The whole process t minutes. ## Integrations - -* image: The image relation provides the OpenStack image ID to the GitHub runners. -* cos-agent: The COS agent subordinate charm provides observability using the Canonical -Observability Stack (COS). + + + +- image: The image relation provides the OpenStack image ID to the GitHub runners. +- cos-agent: The COS agent subordinate charm provides observability using the Canonical + Observability Stack (COS). For a full list of integrations, please refer to the [Charmhub documentation](https://charmhub.io/github-runner-image-builder/integrations). @@ -71,11 +82,13 @@ This repository contains the charm in the root directory and the `github-runner- application in the `app` directory. Refer to [Contributing](CONTRIBUTING.md) for more information. 
## Learn more -* [Read more](https://charmhub.io/github-runner-image-builder) -* [Developer documentation](https://github.com/canonical/github-runner-image-builder-operator/blob/main/CONTRIBUTING.md) -* [Troubleshooting](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) + +- [Read more](https://charmhub.io/github-runner-image-builder) +- [Developer documentation](https://github.com/canonical/github-runner-image-builder-operator/blob/main/CONTRIBUTING.md) +- [Troubleshooting](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) ## Project and community -* [Issues](https://github.com/canonical/github-runner-image-builder-operator/issues) -* [Contributing](https://github.com/canonical/github-runner-image-builder-operator/blob/main/CONTRIBUTING.md) -* [Matrix](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) + +- [Issues](https://github.com/canonical/github-runner-image-builder-operator/issues) +- [Contributing](https://github.com/canonical/github-runner-image-builder-operator/blob/main/CONTRIBUTING.md) +- [Matrix](https://matrix.to/#/#charmhub-charmdev:ubuntu.com) diff --git a/charmcraft.yaml b/charmcraft.yaml index f166394e..3b14d47e 100644 --- a/charmcraft.yaml +++ b/charmcraft.yaml @@ -47,10 +47,9 @@ parts: tar -cvzf app.tar.gz app cp app.tar.gz /root/stage organize: - app.tar.gz: app.tar.gz + app.tar.gz: app.tar.gz prime: - - app.tar.gz - + - app.tar.gz bases: - build-on: @@ -107,14 +106,6 @@ config: The auth_url section of the clouds.yaml contents, used to authenticate the OpenStack \ client (e.g. http://my-openstack-deployment/openstack-keystone). See https://docs.\ openstack.org/python-openstackclient/queens/configuration/index.html for more information. - openstack-password: - type: string - default: "" - description: | - The password section of the clouds.yaml contents, used to authenticate the OpenStack \ - client (e.g. myverysecurepassword). See https://docs.openstack.org/python-openstackclient/\ - queens/configuration/index.html for more information. 
- DEPRECATED: Use openstack-password-secret instead for better security. openstack-password-secret: type: secret description: | @@ -122,7 +113,6 @@ config: client. A Juju user secret ID should be passed in the format of secret:. The secret must contain a 'password' key with the OpenStack password as its value. Example: juju add-secret openstack-password password=. - This option takes precedence over openstack-password if both are set. openstack-project-domain-name: type: string default: "" diff --git a/docs/changelog.md b/docs/changelog.md index 4d65d883..592a98d4 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,318 +1,390 @@ - ## [#219 Use Juju secrets](https://github.com/canonical/github-runner-image-builder-operator/pull/219) (2026-04-17) -* Add new `openstack-password-secret` configuration option to securely store OpenStack passwords using Juju secrets. -* Deprecated `openstack-password` configuration option (still supported for backward compatibility). -* Users can migrate to the new secret-based approach by setting `openstack-password-secret` instead of `openstack-password`. + +- Require `openstack-password-secret` configuration option to securely store OpenStack passwords using Juju secrets. +- Removed deprecated `openstack-password` configuration option. ## [#206 Add resolute image support] -* Add resolute image support. + +- Add resolute image support. ## [#198 Update integration tests](https://github.com/canonical/github-runner-image-builder-operator/pull/198) (2026-02-17) -* Update integration tests that were intentionally not done in #185. + +- Update integration tests that were intentionally not done in #185. ## [#185 Remove aproxy installation and add proxy support in workload](https://github.com/canonical/github-runner-image-builder-operator/pull/185) (2026-01-20) -* Remove `aproxy` snap installation in the charm and inject proxy values from the model config into the workload process. 
+ +- Remove `aproxy` snap installation in the charm and inject proxy values from the model config into the workload process. ## [#195 feat: otel-collector snap](https://github.com/canonical/github-runner-image-builder-operator/pull/195) (2026-02-11) -* Install opentelemetry collector snap in the runner image. + +- Install opentelemetry collector snap in the runner image. ## [#172 feat: apt upgrade](https://github.com/canonical/github-runner-image-builder-operator/pull/172) (2025-11-26) -* Apply apt-update and apt-upgrade to GH runner images by applying them during cloud-init. + +- Apply apt-update and apt-upgrade to GH runner images by applying them during cloud-init. ## [#165 fix: raise on image download/SHASUM download failure](https://github.com/canonical/github-runner-image-builder-operator/pull/165) (2025-11-18) -* Catch image/SHASUM download failure early to handle error early in the pipeline. -* Unpin ARM64 base image and use latest. + +- Catch image/SHASUM download failure early to handle error early in the pipeline. +- Unpin ARM64 base image and use latest. ## [#160 fix: run install YQ in bare cloud-init environment](https://github.com/canonical/github-runner-image-builder-operator/pull/160) (2025-10-15) + > Fix `install_yq` function silently failing. ### Bug Fixes -* Fix `install_yq` function running within another bash shell, causing any errors to go undetected. +- Fix `install_yq` function running within another bash shell, causing any errors to go undetected. ## [#155 feat: add packages to build crypto lib from source](https://github.com/canonical/github-runner-image-builder-operator/pull/155) (2025-09-29) + > Add packages to build crypto lib from source. ### New features -* Includes `cargo`, `rustc` and `pkg-config` apt packages which allows building `cryptography` library from source. +- Includes `cargo`, `rustc` and `pkg-config` apt packages which allows building `cryptography` library from source. 
## [#150 Add proxy configuration to snap install during building image](https://github.com/canonical/github-runner-image-builder-operator/pull/150) (2025-09-12) ### Bug Fixes -* The proxy configuration was not set for the snap install of aproxy during image building, causing the image building to fail if snapstore need to be accessed through a proxy. This is fixed. +- The proxy configuration was not set for the snap install of aproxy during image building, causing the image building to fail if snapstore need to be accessed through a proxy. This is fixed. ## [#144 feat(docs): Evolve and standardize the documentation workflow](https://github.com/canonical/github-runner-image-builder-operator/pull/144) (2025-08-22) -* Update documentation workflows to inject local word list and check links. +- Update documentation workflows to inject local word list and check links. ## [#140 chore: use Canonical built runner binaries](https://github.com/canonical/github-runner-image-builder-operator/pull/140)(2025-08-18) -> Use Canonical built runner binaries for all architectures. +> Use Canonical built runner binaries for all architectures. ## [#142 Use release from 2025-07-25 for Noble ARM64](https://github.com/canonical/github-runner-image-builder-operator/pull/142) (2025-08-15) + > Use release from 2025-07-25 for Noble ARM64 base image. ### Bug Fixes -* Pin Noble ARM64 base image to release from 2025-07-25 due to issues with the latest image. +- Pin Noble ARM64 base image to release from 2025-07-25 due to issues with the latest image. ### Upgrade Steps -* Deployments that are currently effected, need to be redeployed. Other deployments do not require redeployment. - +- Deployments that are currently affected, need to be redeployed. Other deployments do not require redeployment. ## [#123 feat: enable logrotate](https://github.com/canonical/github-runner-image-builder-operator/pull/123)(2025-06-06) + > Enable log rotation on the GitHub runner image builder application. 
## [#121 reuse image id on relation changed](https://github.com/canonical/github-runner-image-builder-operator/pull/121)(2025-05-28) + > Reuse the already existing image in an cloud and instead of rebuilding on image relation changed. ### Performance Improvements -* Image build propagation to newly joined units should be faster as they are not rebuilt. +- Image build propagation to newly joined units should be faster as they are not rebuilt. ## [#113 fix: skip run if relation data is not ready](https://github.com/canonical/github-runner-image-builder-operator/pull/113)(2025-04-29) + > Fix: Skip image build run if relation data is not ready ### Bug Fixes -* Fixed unnecessary image build runs where unit relation data was not ready. + +- Fixed unnecessary image build runs where unit relation data was not ready. ### Performance Improvements -* Image build propagation to newly joined units should be faster. +- Image build propagation to newly joined units should be faster. ## [#101 feat: ppc64le images](https://github.com/canonical/github-runner-image-builder-operator/pull/101) (2025-04-02) + > Add support for building ppc64le images. ### Upgrade Steps -* Nothing in particular to consider. If PPC64LE architecture is desired, the config option `ppc64le` or `ppc64el` has to be specified. + +- Nothing in particular to consider. If PPC64LE architecture is desired, the config option `ppc64le` or `ppc64el` has to be specified. ### Breaking Changes -* None + +- None ### New Features -* The charm is now able to build images for the `ppc64le` (`ppc64el`) architecture. `ppc64le` is not officially supported -by GitHub, but a fork of the actions runner binary has been created, which is used in the image. Note -that ppc64le support is experimental and may be removed in the future. + +- The charm is now able to build images for the `ppc64le` (`ppc64el`) architecture. 
`ppc64le` is not officially supported + by GitHub, but a fork of the actions runner binary has been created, which is used in the image. Note + that ppc64le support is experimental and may be removed in the future. ### Bug Fixes -* None + +- None ### Performance Improvements -* None + +- None ### Other Changes -* None -* + +- None +- + ## [#91 Feature: Add focal support](https://github.com/canonical/github-runner-image-builder-operator/pull/91) (2025-03-07) + > Add support for building focal images. ### Upgrade Steps -* Nothing in particular to consider. + +- Nothing in particular to consider. ### Breaking Changes -* None + +- None ### New Features -* Add focal as a option for base image. To build focal images specify "focal" as the `base-image` in charm configuration. Note, the focal image does not have yarn pre-installed. + +- Add focal as an option for base image. To build focal images specify "focal" as the `base-image` in charm configuration. Note, the focal image does not have yarn pre-installed. ### Bug Fixes -* None + +- None ### Performance Improvements -* None + +- None ### Other Changes -* None + +- None ## [#83 feat: s390x images](https://github.com/canonical/github-runner-image-builder-operator/pull/83) (2025-03-06) + > Add support for building s390x images. ### Upgrade Steps -* The architecture option has to be specified. + +- The architecture option has to be specified. ### Breaking Changes -* The charm expects the architecture to be specified in the configuration. + +- The charm expects the architecture to be specified in the configuration. ### New Features -* The charm is now able to build images for the `s390x` architecture. `s390x` is not officially supported -by GitHub, but a fork of the actions runner binary has been created, which is used in the image. Note -that s390x support is experimental and may be removed in the future. + +- The charm is now able to build images for the `s390x` architecture. 
`s390x` is not officially supported + by GitHub, but a fork of the actions runner binary has been created, which is used in the image. Note + that s390x support is experimental and may be removed in the future. ### Bug Fixes -* None + +- None ### Performance Improvements -* None + +- None ### Other Changes -* None + +- None ## [#88 Fix: move external script secret out of cloud-init](https://github.com/canonical/github-runner-image-builder-operator/pull/88) (2025-03-04) + > Move running the external script out of cloud-init and use SSH instead. ### Upgrade Steps -* Nothing in particular to consider. + +- Nothing in particular to consider. ### Breaking Changes -* None + +- None ### New Features -* None + +- None ### Bug Fixes -* cloud-init user data is preserved in the image and should not contain traces of the external script and secrets. + +- cloud-init user data is preserved in the image and should not contain traces of the external script and secrets. ### Performance Improvements -* None + +- None ### Other Changes -* None + +- None ## [#85 fix: Periodic rebuilding of images](https://github.com/canonical/github-runner-image-builder-operator/pull/85) (2025-02-24) -> Fix the periodic rebuilding of images. +> Fix the periodic rebuilding of images. ### Upgrade Steps -* Nothing in particular to consider. + +- Nothing in particular to consider. ### Breaking Changes -* None + +- None ### New Features -* None + +- None ### Bug Fixes -* Periodic image building using a cron job was not working. -* The upgrade charm hook did not re-initialize the builder, making the builder not work after an upgrade from revision 51. + +- Periodic image building using a cron job was not working. +- The upgrade charm hook did not re-initialize the builder, making the builder not work after an upgrade from revision 51. 
### Performance Improvements -* None + +- None ### Other Changes -* None -* +- None +- ## [#82 Remove Juju & MicroK8s](https://github.com/canonical/github-runner-image-builder-operator/pull/82) (2025-02-14) -> Drop Juju and MicroK8s preinstallation. +> Drop Juju and MicroK8s preinstallation. ### Upgrade Steps -* Nothing in particular to consider. + +- Nothing in particular to consider. ### Breaking Changes -* The charm no longer supports pre-installing different Juju and MicroK8s versions in the image. -The configuration options `dockerhub-cache`, `juju-channels` and `microk8s-channels` have been removed. + +- The charm no longer supports pre-installing different Juju and MicroK8s versions in the image. + The configuration options `dockerhub-cache`, `juju-channels` and `microk8s-channels` have been removed. ### New Features -* None + +- None ### Bug Fixes -* None + +- None ### Performance Improvements -* None + +- None ### Other Changes -* None + +- None ## [#79 Drop chroot mode](https://github.com/canonical/github-runner-image-builder-operator/pull/79) (2025-02-12) > Drop local image building (chroot) from the charm. - ### Upgrade Steps -* The `experimental-build-*` config options have been removed and replaced by `build-flavor`, `build-network`. -Please specify those options when upgrading the charm. + +- The `experimental-build-*` config options have been removed and replaced by `build-flavor`, `build-network`. + Please specify those options when upgrading the charm. ### Breaking Changes -* The charm no longer supports local building inside the charm unit using chroot. + +- The charm no longer supports local building inside the charm unit using chroot. ### New Features -* None + +- None ### Bug Fixes -* Fixed a bug where the application log level was not set correctly (due to lower case). + +- Fixed a bug where the application log level was not set correctly (due to lower case). 
### Performance Improvements -* None + +- None ### Other Changes -* Increased OpenStack server timeouts to 20 minutes in the application to allow for longer build/delete times. + +- Increased OpenStack server timeouts to 20 minutes in the application to allow for longer build/delete times. ## [#81 Drop arm charm support](https://github.com/canonical/github-runner-image-builder-operator/pull/81) (2025-02-07) > Drop ARM support from the charm. - ### Upgrade Steps -* Nothing in particular to consider. Upgrading works for amd64 only. + +- Nothing in particular to consider. Upgrading works for amd64 only. ### Breaking Changes -* No longer supports the `arm64` architecture for the charm (note that building ARM images is still supported, -the charm is agnostic about the build architecture as this is done in an external VM). + +- No longer supports the `arm64` architecture for the charm (note that building ARM images is still supported, + the charm is agnostic about the build architecture as this is done in an external VM). ### New Features -* None + +- None ### Bug Fixes -* None + +- None ### Performance Improvements -* None + +- None ### Other Changes -* None +- None ## [#65 chore: Include application repo](https://github.com/canonical/github-runner-image-builder-operator/pull/65) (2025-02-06) > Include application repository in the charm repository. ### Upgrade Steps -* Requires no redeployment (since previous revision). Upgrades from revisions only supporting -the chroot mode require redeployment. + +- Requires no redeployment (since previous revision). Upgrades from revisions only supporting + the chroot mode require redeployment. ### Breaking Changes -* No longer supports the `app-channel` configuration option. + +- No longer supports the `app-channel` configuration option. 
### New Features -* None + +- None ### Bug Fixes -* Fixed a bug where the base image name was hard-coded leading to issues when multiple builders -build images concurrently using the same build tenant. + +- Fixed a bug where the base image name was hard-coded leading to issues when multiple builders + build images concurrently using the same build tenant. ### Performance Improvements -* None + +- None ### Other Changes -* None + +- None ## [#27 feat: external cloud](https://github.com/canonical/github-runner-image-builder-operator/pull/27) (2024-09-13) > Builds GitHub runner images on OpenStack VMs. ### Upgrade Steps -* Requires enough capacity on the OpenStack project cloud to launch builder VMs. -* Requires GitHub Runner revision 249 and above. + +- Requires enough capacity on the OpenStack project cloud to launch builder VMs. +- Requires GitHub Runner revision 249 and above. ### Breaking Changes -* None + +- None ### New Features -* None + +- None ### Bug Fixes -* None + +- None ### Performance Improvements -* None + +- None ### Other Changes -* None + +- None diff --git a/docs/how-to/configure-base-image.md b/docs/how-to/configure-base-image.md index f7d55fae..7d5bc672 100644 --- a/docs/how-to/configure-base-image.md +++ b/docs/how-to/configure-base-image.md @@ -6,16 +6,18 @@ base, you can use the `base-image` configuration option. 
``` BASE_IMAGE=jammy OPENSTACK_AUTH_URL= -OPENSTACK_PASSWORD= OPENSTACK_PROJECT_DOMAIN_NAME= OPENSTACK_PROJECT_NAME= OPENSTACK_USER_DOMAIN_NAME= OPENSTACK_USERNAME= +juju add-secret openstack-password password= +OPENSTACK_PASSWORD_SECRET=$(juju show-secret openstack-password --format json | jq -r 'keys[0]') + juju deploy github-runner-image-builder \ --config base-image=$BASE_IMAGE --config openstack-auth-url=$OPENSTACK_AUTH_URL \ ---config openstack-password=$OPENSTACK_PASSWORD \ +--config openstack-password-secret=$OPENSTACK_PASSWORD_SECRET \ --config openstack-project-domain-name=$OPENSTACK_PROJECT_DOMAIN_NAME \ --config openstack-project-name=$OPENSTACK_PROJECT_NAME \ --config openstack-user-domain-name=$OPENSTACK_USER_DOMAIN_NAME \ @@ -23,4 +25,4 @@ juju deploy github-runner-image-builder \ ``` Currently, focal(20.04), jammy (22.04), and noble (24.04) are supported. -Note: Yarn is not pre-installed for focal images. \ No newline at end of file +Note: Yarn is not pre-installed for focal images. 
diff --git a/docs/how-to/configure-build-interval.md b/docs/how-to/configure-build-interval.md index 71c23869..05b43020 100644 --- a/docs/how-to/configure-build-interval.md +++ b/docs/how-to/configure-build-interval.md @@ -5,20 +5,22 @@ You can configure how often the image will be built by specifying the `build-int ``` BUILD_INTERVAL=3 OPENSTACK_AUTH_URL= -OPENSTACK_PASSWORD= OPENSTACK_PROJECT_DOMAIN_NAME= OPENSTACK_PROJECT_NAME= OPENSTACK_USER_DOMAIN_NAME= OPENSTACK_USERNAME= +juju add-secret openstack-password password= +OPENSTACK_PASSWORD_SECRET=$(juju show-secret openstack-password --format json | jq -r 'keys[0]') + juju deploy github-runner-image-builder \ --config build-interval=$BUILD_INTERVAL --config openstack-auth-url=$OPENSTACK_AUTH_URL \ ---config openstack-password=$OPENSTACK_PASSWORD \ +--config openstack-password-secret=$OPENSTACK_PASSWORD_SECRET \ --config openstack-project-domain-name=$OPENSTACK_PROJECT_DOMAIN_NAME \ --config openstack-project-name=$OPENSTACK_PROJECT_NAME \ --config openstack-user-domain-name=$OPENSTACK_USER_DOMAIN_NAME \ --config openstack-user-name=$OPENSTACK_USERNAME ``` -The example above would build images every three hours, from the latest version of dependent sources (cloud-images, apt, snap, etc). \ No newline at end of file +The example above would build images every three hours, from the latest version of dependent sources (cloud-images, apt, snap, etc). 
diff --git a/docs/how-to/configure-revision-history.md b/docs/how-to/configure-revision-history.md index cbe20a33..ac205992 100644 --- a/docs/how-to/configure-revision-history.md +++ b/docs/how-to/configure-revision-history.md @@ -6,20 +6,22 @@ You can limit how many revisions of the images are kept in OpenStack Glance by s ``` REVISION_HISTORY_LIMIT=2 OPENSTACK_AUTH_URL= -OPENSTACK_PASSWORD= OPENSTACK_PROJECT_DOMAIN_NAME= OPENSTACK_PROJECT_NAME= OPENSTACK_USER_DOMAIN_NAME= OPENSTACK_USERNAME= +juju add-secret openstack-password password= +OPENSTACK_PASSWORD_SECRET=$(juju show-secret openstack-password --format json | jq -r 'keys[0]') + juju deploy github-runner-image-builder \ --config revision-history-limit=$REVISION_HISTORY_LIMIT --config openstack-auth-url=$OPENSTACK_AUTH_URL \ ---config openstack-password=$OPENSTACK_PASSWORD \ +--config openstack-password-secret=$OPENSTACK_PASSWORD_SECRET \ --config openstack-project-domain-name=$OPENSTACK_PROJECT_DOMAIN_NAME \ --config openstack-project-name=$OPENSTACK_PROJECT_NAME \ --config openstack-user-domain-name=$OPENSTACK_USER_DOMAIN_NAME \ --config openstack-user-name=$OPENSTACK_USERNAME ``` -The example above would keep the two most recent revisions of the image before deletion. \ No newline at end of file +The example above would keep the two most recent revisions of the image before deletion. diff --git a/docs/how-to/pin-github-runner-version.md b/docs/how-to/pin-github-runner-version.md index 02f3a518..8ba4050e 100644 --- a/docs/how-to/pin-github-runner-version.md +++ b/docs/how-to/pin-github-runner-version.md @@ -1,21 +1,23 @@ # How to pin GitHub runner version -Depending on your needs, you can pin the GitHub runner version by specifying the `runner-version` +Depending on your needs, you can pin the GitHub runner version by specifying the `runner-version` configuration option. 
``` RUNNER_VERSION=1.2.3 OPENSTACK_AUTH_URL= -OPENSTACK_PASSWORD= OPENSTACK_PROJECT_DOMAIN_NAME= OPENSTACK_PROJECT_NAME= OPENSTACK_USER_DOMAIN_NAME= OPENSTACK_USERNAME= +juju add-secret openstack-password password= +OPENSTACK_PASSWORD_SECRET=$(juju show-secret openstack-password --format json | jq -r 'keys[0]') + juju deploy github-runner-image-builder \ --config runner-version=$RUNNER_VERSION --config openstack-auth-url=$OPENSTACK_AUTH_URL \ ---config openstack-password=$OPENSTACK_PASSWORD \ +--config openstack-password-secret=$OPENSTACK_PASSWORD_SECRET \ --config openstack-project-domain-name=$OPENSTACK_PROJECT_DOMAIN_NAME \ --config openstack-project-name=$OPENSTACK_PROJECT_NAME \ --config openstack-user-domain-name=$OPENSTACK_USER_DOMAIN_NAME \ @@ -23,4 +25,4 @@ juju deploy github-runner-image-builder \ ``` You can find out what versions are available on the actions-runner repository's -[releases page](https://github.com/actions/runner/releases). \ No newline at end of file +[releases page](https://github.com/actions/runner/releases). diff --git a/docs/tutorial/quick-start.md b/docs/tutorial/quick-start.md index 0b41073b..726272dd 100644 --- a/docs/tutorial/quick-start.md +++ b/docs/tutorial/quick-start.md @@ -10,36 +10,41 @@ This quick start guide will help you deploy the GitHub Runner Image Builder char ## Requirements - A working station, e.g., a laptop, with amd64 architecture. -- Juju 3 installed and bootstrapped to a LXD controller. You can accomplish this process by -using a Multipass VM as outlined in this guide: -[Set up your test environment](https://documentation.ubuntu.com/juju/3.6/howto/manage-your-juju-deployment/set-up-your-juju-deployment-local-testing-and-development/) +- Juju 3 installed and bootstrapped to a LXD controller. 
You can accomplish this process by + using a Multipass VM as outlined in this guide: + [Set up your test environment](https://documentation.ubuntu.com/juju/3.6/howto/manage-your-juju-deployment/set-up-your-juju-deployment-local-testing-and-development/) - A running instance of [OpenStack](https://microstack.run/docs/single-node). ## Steps ### Shell into the Multipass VM + > NOTE: If you're working locally, you don't need to do this step. To be able to work inside the Multipass VM first you need to log in with the following command: + ``` multipass shell my-juju-vm ``` - Deploy the [GitHub runner charm in OpenStack mode](https://charmhub.io/github-runner/docs/how-to-openstack-runner). -- Deploy the GitHub runner image builder charm. For information on OpenStack credentials, refer -to the official [OpenStack documentation](https://docs.openstack.org/python-openstackclient/pike/configuration/index.html). +- Deploy the GitHub runner image builder charm. For information on OpenStack credentials, refer + to the official [OpenStack documentation](https://docs.openstack.org/python-openstackclient/pike/configuration/index.html). 
``` OPENSTACK_AUTH_URL= -OPENSTACK_PASSWORD= OPENSTACK_PROJECT_DOMAIN_NAME= OPENSTACK_PROJECT_NAME= OPENSTACK_USER_DOMAIN_NAME= OPENSTACK_USERNAME= + +juju add-secret openstack-password password= +OPENSTACK_PASSWORD_SECRET=$(juju show-secret openstack-password --format json | jq -r 'keys[0]') + juju deploy github-runner-image-builder \ --config openstack-auth-url=$OPENSTACK_AUTH_URL \ ---config openstack-password=$OPENSTACK_PASSWORD \ +--config openstack-password-secret=$OPENSTACK_PASSWORD_SECRET \ --config openstack-project-domain-name=$OPENSTACK_PROJECT_DOMAIN_NAME \ --config openstack-project-name=$OPENSTACK_PROJECT_NAME \ --config openstack-user-domain-name=$OPENSTACK_USER_DOMAIN_NAME \ @@ -47,16 +52,19 @@ juju deploy github-runner-image-builder \ ``` - Verify that the image is being built via Juju logs: + ``` juju debug-log --include=github-runner-image-builder/0 ``` -- Verify that the image is successfully built. +- Verify that the image is successfully built. + ``` openstack image list | grep noble-x64 ``` -- Integrate with GitHub runners. +- Integrate with GitHub runners. + ``` juju integrate github-runner-image-builder github-runner ``` @@ -64,16 +72,19 @@ juju integrate github-runner-image-builder github-runner ## Cleanup - Remove the github-runner-image-builder charm + ``` juju remove-application github-runner-image-builder ``` - If you used Multipass, to remove the Multipass instance you created for this tutorial, use the following command. 
+ ``` multipass delete --purge my-juju-vm ``` - Remove the images built by the charm + ``` openstack image list -f json | jq -r '.[] | select(.Name | contains("jammy-x64")) | .ID' | xargs -r openstack image delete -``` \ No newline at end of file +``` diff --git a/src/state.py b/src/state.py index cd11b230..3de0a606 100644 --- a/src/state.py +++ b/src/state.py @@ -35,8 +35,6 @@ EXTERNAL_BUILD_NETWORK_CONFIG_NAME = "build-network" OPENSTACK_AUTH_URL_CONFIG_NAME = "openstack-auth-url" # Bandit thinks this is a hardcoded password -OPENSTACK_PASSWORD_CONFIG_NAME = "openstack-password" # nosec: hardcoded_password_string -# Bandit thinks this is a hardcoded password OPENSTACK_PASSWORD_SECRET_CONFIG_NAME = ( "openstack-password-secret" # nosec: hardcoded_password_string ) @@ -633,43 +631,39 @@ def _parse_openstack_clouds_config(charm: ops.CharmBase) -> OpenstackCloudsConfi """ auth_url = typing.cast(str, charm.config.get(OPENSTACK_AUTH_URL_CONFIG_NAME)) password_secret_id = typing.cast(str, charm.config.get(OPENSTACK_PASSWORD_SECRET_CONFIG_NAME)) - password = typing.cast(str, charm.config.get(OPENSTACK_PASSWORD_CONFIG_NAME)) project_domain = typing.cast(str, charm.config.get(OPENSTACK_PROJECT_DOMAIN_CONFIG_NAME)) project = typing.cast(str, charm.config.get(OPENSTACK_PROJECT_CONFIG_NAME)) user_domain = typing.cast(str, charm.config.get(OPENSTACK_USER_DOMAIN_CONFIG_NAME)) user = typing.cast(str, charm.config.get(OPENSTACK_USER_CONFIG_NAME)) - # Check if we have the required configs, password can come from either source if not all((auth_url, project_domain, project, user_domain, user)): raise InvalidCloudConfigError("Please supply all OpenStack configurations.") - # Prefer the secret-based password if provided - if password_secret_id: - if not password_secret_id.startswith("secret:"): - raise InvalidCloudConfigError( - f"Invalid value '{password_secret_id}' for openstack-password-secret. " - "Expected a Juju secret ID in the format 'secret:'." 
- ) - try: - secret = charm.model.get_secret(id=password_secret_id) - except ops.SecretNotFoundError as exc: - raise InvalidCloudConfigError( - f"OpenStack password secret not found: {password_secret_id}." - ) from exc - except ops.ModelError as exc: - raise InvalidCloudConfigError( - "Charm does not have access to the OpenStack password secret. " - "Please grant the charm read access to the secret." - ) from exc - secret_content = secret.get_content(refresh=True) - password = secret_content.get("password", "") - if not password: - raise InvalidCloudConfigError( - f"Secret {password_secret_id} does not contain a 'password' key." - ) - elif not password: + if not password_secret_id: + raise InvalidCloudConfigError( + "Please supply OpenStack password via openstack-password-secret." + ) + if not password_secret_id.startswith("secret:"): + raise InvalidCloudConfigError( + f"Invalid value '{password_secret_id}' for openstack-password-secret. " + "Expected a Juju secret ID in the format 'secret:'." + ) + try: + secret = charm.model.get_secret(id=password_secret_id) + except ops.SecretNotFoundError as exc: + raise InvalidCloudConfigError( + f"OpenStack password secret not found: {password_secret_id}." + ) from exc + except ops.ModelError as exc: + raise InvalidCloudConfigError( + "Charm does not have access to the OpenStack password secret. " + "Please grant the charm read access to the secret." + ) from exc + secret_content = secret.get_content(refresh=True) + password = secret_content.get("password", "") + if not password: raise InvalidCloudConfigError( - "Please supply OpenStack password via openstack-password or openstack-password-secret." + f"Secret {password_secret_id} does not contain a 'password' key." 
) clouds_config = OpenstackCloudsConfig( diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 975991fe..fa0b419a 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -33,7 +33,6 @@ EXTERNAL_BUILD_FLAVOR_CONFIG_NAME, EXTERNAL_BUILD_NETWORK_CONFIG_NAME, OPENSTACK_AUTH_URL_CONFIG_NAME, - OPENSTACK_PASSWORD_CONFIG_NAME, OPENSTACK_PASSWORD_SECRET_CONFIG_NAME, OPENSTACK_PROJECT_CONFIG_NAME, OPENSTACK_PROJECT_DOMAIN_CONFIG_NAME, @@ -430,15 +429,13 @@ def app_fixture( def _prepare_charmhub_app_config( - juju: jubilant.Juju, app_config: dict, openstack_password: str + juju: jubilant.Juju, app_config: dict ) -> tuple[str, dict, set[str]]: """Prepare the application config for charmhub deployment. Args: juju: The jubilant Juju instance. app_config: The base application configuration. - openstack_password: The plaintext OpenStack password, used as a fallback when the - charmhub revision does not yet expose openstack-password-secret. Returns: A tuple of (channel, prepared_config, config_options). @@ -458,37 +455,23 @@ def _prepare_charmhub_app_config( charmhub_config_options = set(charmhub_info["charm"]["config"]["Options"].keys()) charmhub_app_config = {k: v for k, v in app_config.items() if k in charmhub_config_options} - # We might need to test using the legacy config options. - legacy_config_prefix = "experimental-external-" - for opt in (EXTERNAL_BUILD_FLAVOR_CONFIG_NAME, EXTERNAL_BUILD_NETWORK_CONFIG_NAME): - if (legacy_opt := f"{legacy_config_prefix}{opt}") in charmhub_config_options: - charmhub_app_config[legacy_opt] = app_config[opt] - - # If the charmhub revision doesn't expose openstack-password-secret yet, fall back to the - # legacy openstack-password option so the charm has credentials during initial deployment. 
- if ( - OPENSTACK_PASSWORD_SECRET_CONFIG_NAME not in charmhub_config_options - and OPENSTACK_PASSWORD_CONFIG_NAME in charmhub_config_options - ): - charmhub_app_config[OPENSTACK_PASSWORD_CONFIG_NAME] = openstack_password return charmhub_channel, charmhub_app_config, charmhub_config_options @pytest.fixture(scope="module", name="app_on_charmhub") -def app_on_charmhub_fixture( # pylint: disable=too-many-arguments,too-many-positional-arguments +def app_on_charmhub_fixture( test_configs: TestConfigs, app_config: dict, base_machine_constraint: dict, openstack_password_secret: _Secret, - private_endpoint_configs: PrivateEndpointConfigs, ) -> Generator[str, None, None]: """Fixture for deploying the charm from charmhub.""" app_name = f"image-builder-charmhub-{test_configs.test_id}" # Normally we would use latest/stable, but upgrading # from stable is currently broken, and therefore we are using edge. Change this in the future. charmhub_channel, charmhub_app_config, charmhub_config_options = _prepare_charmhub_app_config( - test_configs.juju, app_config, private_endpoint_configs["password"] + test_configs.juju, app_config ) # Deploy without the secret-backed config so the charm doesn't try to read the secret diff --git a/tests/integration/data/charm/charmcraft.yaml b/tests/integration/data/charm/charmcraft.yaml index 12bbd2a5..442235a0 100644 --- a/tests/integration/data/charm/charmcraft.yaml +++ b/tests/integration/data/charm/charmcraft.yaml @@ -22,25 +22,25 @@ links: type: charm bases: - build-on: - - name: "ubuntu" - channel: "22.04" - architectures: - - amd64 + - name: "ubuntu" + channel: "22.04" + architectures: + - amd64 run-on: - - name: "ubuntu" - channel: "22.04" - architectures: - - amd64 + - name: "ubuntu" + channel: "22.04" + architectures: + - amd64 - build-on: - - name: "ubuntu" - channel: "22.04" - architectures: - - arm64 + - name: "ubuntu" + channel: "22.04" + architectures: + - arm64 run-on: - - name: "ubuntu" - channel: "22.04" - architectures: - - arm64 + 
- name: "ubuntu" + channel: "22.04" + architectures: + - arm64 config: options: diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 7f90fe30..caefa2a9 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -3,7 +3,6 @@ """Module for defining unit test fixtures.""" -import secrets from unittest.mock import MagicMock import pytest @@ -31,7 +30,6 @@ def harness_fixture(): harness.update_config( { state.OPENSTACK_AUTH_URL_CONFIG_NAME: "https://test-auth-url.com/", - state.OPENSTACK_PASSWORD_CONFIG_NAME: secrets.token_hex(16), state.OPENSTACK_PROJECT_DOMAIN_CONFIG_NAME: "test", state.OPENSTACK_PROJECT_CONFIG_NAME: "test", state.OPENSTACK_USER_DOMAIN_CONFIG_NAME: "test", diff --git a/tests/unit/factories.py b/tests/unit/factories.py index 2a760a20..141485dd 100644 --- a/tests/unit/factories.py +++ b/tests/unit/factories.py @@ -17,7 +17,6 @@ EXTERNAL_BUILD_FLAVOR_CONFIG_NAME, EXTERNAL_BUILD_NETWORK_CONFIG_NAME, OPENSTACK_AUTH_URL_CONFIG_NAME, - OPENSTACK_PASSWORD_CONFIG_NAME, OPENSTACK_PASSWORD_SECRET_CONFIG_NAME, OPENSTACK_PROJECT_CONFIG_NAME, OPENSTACK_PROJECT_DOMAIN_CONFIG_NAME, @@ -88,7 +87,6 @@ class Meta: # pylint: disable=too-few-public-methods EXTERNAL_BUILD_FLAVOR_CONFIG_NAME: "test-flavor", EXTERNAL_BUILD_NETWORK_CONFIG_NAME: "test-network", OPENSTACK_AUTH_URL_CONFIG_NAME: "http://testing-auth/keystone", - OPENSTACK_PASSWORD_CONFIG_NAME: "", OPENSTACK_PASSWORD_SECRET_CONFIG_NAME: "secret:test-secret-id", OPENSTACK_PROJECT_DOMAIN_CONFIG_NAME: "test-project-domain", OPENSTACK_PROJECT_CONFIG_NAME: "test-project-name", diff --git a/tests/unit/test_state.py b/tests/unit/test_state.py index 6d0572ec..30f499f5 100644 --- a/tests/unit/test_state.py +++ b/tests/unit/test_state.py @@ -7,7 +7,6 @@ # pylint:disable=protected-access import os -import secrets from unittest.mock import MagicMock import ops @@ -472,30 +471,13 @@ def test__parse_openstack_clouds_config_missing_password_key(): assert "does not contain a 'password' key" in str(exc) -def 
test__parse_openstack_clouds_config_legacy_password(): +def test__parse_openstack_clouds_config_no_password_secret(): """ - arrange: given a charm with the legacy openstack-password config (string). - act: when _parse_openstack_clouds_config is called. - assert: the clouds config is parsed correctly using the legacy password. - """ - charm = factories.MockCharmFactory() - test_password = secrets.token_hex(16) - charm.config[state.OPENSTACK_PASSWORD_CONFIG_NAME] = test_password - charm.config[state.OPENSTACK_PASSWORD_SECRET_CONFIG_NAME] = "" - - clouds_config = state._parse_openstack_clouds_config(charm) - - assert clouds_config.clouds[state.CLOUD_NAME].auth.password == test_password - - -def test__parse_openstack_clouds_config_no_password(): - """ - arrange: given a charm with neither password config set. + arrange: given a charm with no password secret config set. act: when _parse_openstack_clouds_config is called. assert: InvalidCloudConfigError is raised. """ charm = factories.MockCharmFactory() - charm.config[state.OPENSTACK_PASSWORD_CONFIG_NAME] = "" charm.config[state.OPENSTACK_PASSWORD_SECRET_CONFIG_NAME] = "" with pytest.raises(state.InvalidCloudConfigError) as exc: From b7b4815b4f78b0b19112513f777dd541d8a644e9 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 14:39:12 +0800 Subject: [PATCH 16/42] docs: fix vale lint errors - Replace 'via' with 'through' or 'using' (Canonical.025a) - Replace 'etc' with 'and so on' (Canonical.025a) - Fix 'snapstore' -> 'the Snap Store' in changelog (Canonical.000) - Fix 'github actions' -> 'GitHub Actions' in architecture doc (Canonical.000) - Fix Ubuntu version format to include LTS suffix (Canonical.003) - Add 'aproxy' and 'opentelemetry' to vale accept list (product names) Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .vale/styles/config/vocabularies/local/accept.txt | 2 ++ CONTRIBUTING.md | 2 +- docs/changelog.md | 2 +- docs/how-to/configure-build-interval.md | 2 +- 
docs/index.md | 2 +- docs/reference/charm-architecture.md | 8 ++++---- docs/tutorial/quick-start.md | 2 +- 7 files changed, 11 insertions(+), 9 deletions(-) diff --git a/.vale/styles/config/vocabularies/local/accept.txt b/.vale/styles/config/vocabularies/local/accept.txt index 7dcb3942..e651c36b 100644 --- a/.vale/styles/config/vocabularies/local/accept.txt +++ b/.vale/styles/config/vocabularies/local/accept.txt @@ -1,3 +1,5 @@ +aproxy chroot cron +opentelemetry pipx \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d96cfa47..28a1aaf4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,7 +27,7 @@ tox # runs 'format', 'lint', and 'unit' environments The integration tests (both of the charm and the app) -require options to be passed via the command line (see `tests/conftest.py`) and +require options to be passed through the command line (see `tests/conftest.py`) and environment variables `OPENSTACK_PASSWORD` to be able to deploy the charm and/or upload images to OpenStack. ## Build the charm diff --git a/docs/changelog.md b/docs/changelog.md index 592a98d4..bff71424 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -50,7 +50,7 @@ ### Bug Fixes -- The proxy configuration was not set for the snap install of aproxy during image building, causing the image building to fail if snapstore need to be accessed through a proxy. This is fixed. +- The proxy configuration was not set for the snap install of aproxy during image building, causing the image building to fail if the Snap Store needs to be accessed through a proxy. This is fixed. 
## [#144 feat(docs): Evolve and standardize the documentation workflow](https://github.com/canonical/github-runner-image-builder-operator/pull/144) (2025-08-22) diff --git a/docs/how-to/configure-build-interval.md b/docs/how-to/configure-build-interval.md index 05b43020..04267a94 100644 --- a/docs/how-to/configure-build-interval.md +++ b/docs/how-to/configure-build-interval.md @@ -23,4 +23,4 @@ juju deploy github-runner-image-builder \ --config openstack-user-name=$OPENSTACK_USERNAME ``` -The example above would build images every three hours, from the latest version of dependent sources (cloud-images, apt, snap, etc). +The example above would build images every three hours, from the latest version of dependent sources (cloud-images, apt, snap, and so on). diff --git a/docs/index.md b/docs/index.md index 5b3c0b7f..a0645ad4 100644 --- a/docs/index.md +++ b/docs/index.md @@ -21,7 +21,7 @@ SRE teams through Juju's clean interface. ## Contributing to this documentation -Documentation is an important part of this project, and we take the same open-source approach to the documentation as the code. As such, we welcome community contributions, suggestions and constructive feedback on our documentation. Our documentation is hosted on the [Charmhub forum](https://discourse.charmhub.io/t/github-runner-image-builder-documentation-overview) to enable easy collaboration. Please use the "Help us improve this documentation" links on each documentation page to either directly change something you see that's wrong, ask a question, or make a suggestion about a potential change via the comments section. +Documentation is an important part of this project, and we take the same open-source approach to the documentation as the code. As such, we welcome community contributions, suggestions and constructive feedback on our documentation. 
Our documentation is hosted on the [Charmhub forum](https://discourse.charmhub.io/t/github-runner-image-builder-documentation-overview) to enable easy collaboration. Please use the "Help us improve this documentation" links on each documentation page to either directly change something you see that's wrong, ask a question, or make a suggestion about a potential change through the comments section. If there's a particular area of documentation that you'd like to see that's missing, please [file a bug](https://github.com/canonical/github-runner-image-builder-operator/issues). diff --git a/docs/reference/charm-architecture.md b/docs/reference/charm-architecture.md index 8c9c0e2f..abe1dff1 100644 --- a/docs/reference/charm-architecture.md +++ b/docs/reference/charm-architecture.md @@ -45,13 +45,13 @@ The image-builder uses the [OpenStack SDK](https://docs.openstack.org/openstacks by a config option. Using an external OpenStack VM instead of the charm's machine allows for more features (using chroot has some limitations, e.g. for building snaps) and parallel image building. [cloud-init](https://cloud-init.io/) is used to install the necessary dependencies for spawning self-hosted runners -([github actions runner binary](https://github.com/actions/runner)) and tools for automatic proxy support ([aproxy](https://github.com/canonical/aproxy)). +([GitHub Actions runner binary](https://github.com/actions/runner)) and tools for automatic proxy support ([aproxy](https://github.com/canonical/aproxy)). There is also a custom script configuration combined with a secret that is run in the cloud-init script to allow further customization of the images. The image-builder repeatedly checks to see if the cloud-init script has finished successfully, then snapshots the VM, uploads the image to a specified OpenStack project -and deletes the VM. This specified OpenStack project is determined via the `image:github_runner_image_v0` integration with another charm (e.g. 
[GitHub Runner Charm](https://charmhub.io/github-runner)). +and deletes the VM. This specified OpenStack project is determined through the `image:github_runner_image_v0` integration with another charm (e.g. [GitHub Runner Charm](https://charmhub.io/github-runner)). The other charm can then use the image to create a VM instance with the required software preinstalled. It receives -the image ID from the Image Builder charm via the integration mentioned above. +the image ID from the Image Builder charm through the integration mentioned above. Depending on the configuration, the charm will trigger multiple image builds in parallel to speed up the process. This leads to multiple OpenStack VMs in the OpenStack cloud (and requires corresponding OpenStack quotas) and multiple @@ -66,7 +66,7 @@ and uploaded to OpenStack. The image-builder application is initialized by the charm before it can be used. Initialization includes -- Downloading and validating the base images (e.g. Ubuntu 22.04 or 24.04) +- Downloading and validating the base images (e.g. 
Ubuntu 22.04 LTS or Ubuntu 24.04 LTS) - Uploading the base images to OpenStack - Creating key pairs and security groups in OpenStack diff --git a/docs/tutorial/quick-start.md b/docs/tutorial/quick-start.md index 726272dd..f4c5e10f 100644 --- a/docs/tutorial/quick-start.md +++ b/docs/tutorial/quick-start.md @@ -51,7 +51,7 @@ juju deploy github-runner-image-builder \ --config openstack-user-name=$OPENSTACK_USERNAME ``` -- Verify that the image is being built via Juju logs: +- Verify that the image is being built using Juju logs: ``` juju debug-log --include=github-runner-image-builder/0 From 659d6accd0fc24e0d329cdcfc34cc2b5f451dfc3 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 15:10:22 +0800 Subject: [PATCH 17/42] test: use fix/pip-break-flag-workarounds integration test --- .github/workflows/integration_test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 26e5c991..22e3538c 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -12,7 +12,7 @@ concurrency: jobs: integration-tests: - uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main + uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@fix/pip-break-flag-workarounds secrets: inherit with: juju-channel: 3.6/stable From f17980f9f50526ea2866a809c73cb676499c2de1 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 15:14:51 +0800 Subject: [PATCH 18/42] ci: trigger ci From b30b9695e5899dff62182edd9298064da857ff89 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 15:31:14 +0800 Subject: [PATCH 19/42] revert: ci use main --- .github/workflows/integration_test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 22e3538c..26e5c991 100644 --- 
a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -12,7 +12,7 @@ concurrency: jobs: integration-tests: - uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@fix/pip-break-flag-workarounds + uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main secrets: inherit with: juju-channel: 3.6/stable From ac2638866e509b8c1649dcabb9e10379136f4972 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 15:55:35 +0800 Subject: [PATCH 20/42] test: restore 100% unit test coverage after openstack password secret migration - Add update_status() to image.Observer so block_if_invalid_config works - Fix test_block_on_image_relation_not_ready to mock BuilderConfig.from_charm - Add tests for ImageConfig.from_charm, CloudConfig.from_charm, BuilderConfig.from_charm, upload_cloud_ids property, _get_num_parallel_build - Add tests for _parse_openstack_clouds_config with upload auth configs - Add tests for _parse_openstack_clouds_auth_configs_from_relation with units (complete and incomplete data) - Add test for _parse_script_secrets with no secrets configured - Add test for _on_image_relation_joined with no upload_cloud_ids Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- src/image.py | 8 ++ tests/unit/test_charm.py | 13 ++- tests/unit/test_image.py | 22 ++++++ tests/unit/test_state.py | 166 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 208 insertions(+), 1 deletion(-) diff --git a/src/image.py b/src/image.py index ba82ccaa..14b25861 100644 --- a/src/image.py +++ b/src/image.py @@ -47,6 +47,14 @@ def __init__(self, charm: ops.CharmBase): charm.on[state.IMAGE_RELATION].relation_joined, self._on_image_relation_joined ) + def update_status(self, status: ops.StatusBase) -> None: + """Update the unit status. + + Args: + status: The desired status instance. 
+ """ + self.model.unit.status = status + @charm_utils.block_if_invalid_config(defer=False) def _on_image_relation_joined(self, event: ops.RelationJoinedEvent) -> None: """Handle the image relation joined event. diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 67117818..73ad528b 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -54,12 +54,23 @@ def mock_builder_fixture(monkeypatch: pytest.MonkeyPatch): pytest.param("_on_run", id="run event"), ], ) -def test_block_on_image_relation_not_ready(charm: GithubRunnerImageBuilderCharm, hook: str): +def test_block_on_image_relation_not_ready( + monkeypatch: pytest.MonkeyPatch, charm: GithubRunnerImageBuilderCharm, hook: str +): """ arrange: given hooks that should not run build when image relation is not yet ready. act: when the hook is called. assert: the charm falls into BlockedStatus. """ + monkeypatch.setattr( + state.BuilderConfig, + "from_charm", + MagicMock( + return_value=MagicMock( + proxy=None, cloud_config=MagicMock(upload_cloud_ids=[]) + ) + ), + ) getattr(charm, hook)(MagicMock()) assert charm.unit.status == ops.BlockedStatus(f"{state.IMAGE_RELATION} integration required.") diff --git a/tests/unit/test_image.py b/tests/unit/test_image.py index b072da07..c6dd46e9 100644 --- a/tests/unit/test_image.py +++ b/tests/unit/test_image.py @@ -9,6 +9,7 @@ import secrets from unittest.mock import MagicMock +import ops import pytest from ops.testing import Harness @@ -69,6 +70,27 @@ def test__on_image_relation_joined_no_image( assert all(f"Image not yet ready for {test_unit_name}." in log for log in caplog.messages) +def test__on_image_relation_joined_no_upload_cloud_ids( + monkeypatch: pytest.MonkeyPatch, +): + """ + arrange: given a BuilderConfig with no upload_cloud_ids (image relation not yet configured). + act: when _on_image_relation_joined hook is fired. + assert: unit status is set to BlockedStatus with image integration required message. 
+ """ + mock_config = MagicMock() + mock_config.cloud_config.upload_cloud_ids = [] + monkeypatch.setattr(state.BuilderConfig, "from_charm", MagicMock(return_value=mock_config)) + + mock_charm = MagicMock() + observer = image.Observer(mock_charm) + observer._on_image_relation_joined(MagicMock()) + + mock_charm.unit.status = observer.model.unit.status + assert isinstance(observer.model.unit.status, ops.BlockedStatus) + assert state.IMAGE_RELATION in str(observer.model.unit.status.message) + + def test__on_image_relation_joined( monkeypatch: pytest.MonkeyPatch, image_observer: image.Observer ): diff --git a/tests/unit/test_state.py b/tests/unit/test_state.py index 30f499f5..5cf980ad 100644 --- a/tests/unit/test_state.py +++ b/tests/unit/test_state.py @@ -706,3 +706,169 @@ def test__parse_script_secrets_from_config(secret: str, expected_secrets_map: di assert state._parse_script_secrets(charm=mock_charm) == expected_secrets_map assert state._parse_script_secrets(charm=mock_charm) == expected_secrets_map + + +def test__parse_script_secrets_no_secrets(): + """ + arrange: given a charm with no script secrets configured. + act: when _parse_script_secrets is called. + assert: empty dict is returned. + """ + mock_charm = MagicMock() + mock_charm.config = { + state.SCRIPT_SECRET_ID_CONFIG_NAME: "", + state.SCRIPT_SECRET_CONFIG_NAME: "", + } + + assert state._parse_script_secrets(charm=mock_charm) == {} + + +def test_image_config_from_charm(): + """ + arrange: given a mock charm with all required image config values. + act: when ImageConfig.from_charm is called. + assert: expected image config is returned. 
+ """ + charm = factories.MockCharmFactory() + charm.config[state.ARCHITECTURE_CONFIG_NAME] = "amd64" + charm.config[state.SCRIPT_SECRET_ID_CONFIG_NAME] = "" + + image_config = state.ImageConfig.from_charm(charm=charm) + + assert image_config.arch == state.Arch.X64 + assert state.BaseImage.JAMMY in image_config.bases + assert image_config.script_secrets == {} + + +def test_cloud_config_upload_cloud_ids(): + """ + arrange: given a CloudConfig with no upload clouds beyond the build cloud. + act: when upload_cloud_ids property is accessed. + assert: empty list is returned since no upload clouds are configured. + """ + cloud_config = state.CloudConfig( + openstack_clouds_config=state.OpenstackCloudsConfig( + clouds={state.CLOUD_NAME: state._CloudsConfig(auth=None)} + ), + external_build_config=state.ExternalBuildConfig(flavor="test", network="test"), + num_revisions=5, + ) + + assert cloud_config.upload_cloud_ids == [] + + +@pytest.mark.usefixtures("patch_juju_version_33") +def test_cloud_config_from_charm(): + """ + arrange: given a mock charm with all required cloud config values. + act: when CloudConfig.from_charm is called. + assert: expected cloud config is returned. + """ + charm = factories.MockCharmFactory() + charm.model.relations.get.return_value = [] + + cloud_config = state.CloudConfig.from_charm(charm=charm) + + assert cloud_config.cloud_name == state.CLOUD_NAME + assert cloud_config.num_revisions > 0 + + +@pytest.mark.usefixtures("patch_juju_version_33") +def test_builder_config_from_charm(): + """ + arrange: given a mock charm with all required builder config values. + act: when BuilderConfig.from_charm is called. + assert: expected builder config is returned. 
+ """ + charm = factories.MockCharmFactory() + charm.config[state.ARCHITECTURE_CONFIG_NAME] = "amd64" + charm.config[state.SCRIPT_SECRET_ID_CONFIG_NAME] = "" + charm.model.relations.get.return_value = [] + + builder_config = state.BuilderConfig.from_charm(charm=charm) + + assert builder_config.cloud_config.cloud_name == state.CLOUD_NAME + assert builder_config.image_config.arch == state.Arch.X64 + assert builder_config.app_config.parallel_build >= 1 + + +@pytest.mark.usefixtures("patch_juju_version_33") +def test__parse_openstack_clouds_config_with_upload_auths(monkeypatch: pytest.MonkeyPatch): + """ + arrange: given a charm where the image relation has units with cloud auth data. + act: when _parse_openstack_clouds_config is called. + assert: the upload cloud auth configs are merged into the clouds config. + """ + charm = factories.MockCharmFactory() + upload_auth = state.CloudsAuthConfig( + auth_url="http://upload-auth/keystone", + password="upload-pass", # nosec: B106:hardcoded_password_funcarg + project_domain_name="upload-domain", + project_name="upload-project", + user_domain_name="upload-user-domain", + username="upload-user", + ) + monkeypatch.setattr( + state, + "_parse_openstack_clouds_auth_configs_from_relation", + MagicMock(return_value={upload_auth}), + ) + + clouds_config = state._parse_openstack_clouds_config(charm=charm) + + assert upload_auth.get_id() in clouds_config.clouds + + +def test__parse_openstack_clouds_auth_configs_from_relation_with_units( + harness: Harness, charm: GithubRunnerImageBuilderCharm +): + """ + arrange: given an image relation with units that have cloud auth data. + act: when _parse_openstack_clouds_auth_configs_from_relation is called. + assert: the cloud auth configs are returned. 
+ """ + relation_id = harness.add_relation(state.IMAGE_RELATION, "github-runner") + harness.add_relation_unit(relation_id=relation_id, remote_unit_name="github-runner/0") + harness.update_relation_data( + relation_id=relation_id, + app_or_unit="github-runner/0", + key_values={ + "auth_url": "http://test-auth/keystone", + "password": "test-pass", # nosec: B106:hardcoded_password_funcarg + "project_domain_name": "test-domain", + "project_name": "test-project", + "user_domain_name": "test-user-domain", + "username": "test-user", + }, + ) + + result = state._parse_openstack_clouds_auth_configs_from_relation(charm=charm) + + assert len(result) == 1 + auth = next(iter(result)) + assert auth.auth_url == "http://test-auth/keystone" + assert auth.username == "test-user" + + +def test__parse_openstack_clouds_auth_configs_from_relation_incomplete_unit_data( + harness: Harness, + charm: GithubRunnerImageBuilderCharm, + caplog: pytest.LogCaptureFixture, +): + """ + arrange: given an image relation with a unit that has incomplete cloud auth data. + act: when _parse_openstack_clouds_auth_configs_from_relation is called. + assert: the unit is skipped and a warning is logged. 
+ """ + relation_id = harness.add_relation(state.IMAGE_RELATION, "github-runner") + harness.add_relation_unit(relation_id=relation_id, remote_unit_name="github-runner/0") + harness.update_relation_data( + relation_id=relation_id, + app_or_unit="github-runner/0", + key_values={"auth_url": "http://test-auth/keystone"}, + ) + + result = state._parse_openstack_clouds_auth_configs_from_relation(charm=charm) + + assert len(result) == 0 + assert any("Required field not yet set on" in msg for msg in caplog.messages) From 97631001281c350257d1ff4101f8961dfe6495d8 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Wed, 22 Apr 2026 16:02:47 +0800 Subject: [PATCH 21/42] fix: correct black formatting and bandit nosec tags in tests - Inline MagicMock call in test_charm.py to satisfy black formatting - Change B106 nosec to B105 for dict password string literal in test_state.py Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/unit/test_charm.py | 6 +----- tests/unit/test_state.py | 2 +- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py index 73ad528b..9b683012 100644 --- a/tests/unit/test_charm.py +++ b/tests/unit/test_charm.py @@ -65,11 +65,7 @@ def test_block_on_image_relation_not_ready( monkeypatch.setattr( state.BuilderConfig, "from_charm", - MagicMock( - return_value=MagicMock( - proxy=None, cloud_config=MagicMock(upload_cloud_ids=[]) - ) - ), + MagicMock(return_value=MagicMock(proxy=None, cloud_config=MagicMock(upload_cloud_ids=[]))), ) getattr(charm, hook)(MagicMock()) diff --git a/tests/unit/test_state.py b/tests/unit/test_state.py index 5cf980ad..3c635178 100644 --- a/tests/unit/test_state.py +++ b/tests/unit/test_state.py @@ -834,7 +834,7 @@ def test__parse_openstack_clouds_auth_configs_from_relation_with_units( app_or_unit="github-runner/0", key_values={ "auth_url": "http://test-auth/keystone", - "password": "test-pass", # nosec: B106:hardcoded_password_funcarg + "password": 
"test-pass", # nosec: B105:hardcoded_password_string "project_domain_name": "test-domain", "project_name": "test-project", "user_domain_name": "test-user-domain", From 473ff2e4a69f7f9acde76d1224005b8c4f0d84b0 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 23 Apr 2026 10:03:41 +0800 Subject: [PATCH 22/42] fix: restore openstack-password fallback for upgrade test When upgrading from a charmhub edge revision that predates the openstack-password-secret config option, the upgrade test was failing because the old charm had no credentials and the new charm (which requires openstack-password-secret) also couldn't find them. Restore the fallback in _prepare_charmhub_app_config to set the legacy openstack-password config when the charmhub version doesn't yet expose openstack-password-secret. The constant is kept local to the test code since state.py no longer needs the legacy option. Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/conftest.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index fa0b419a..f75b2bd3 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -55,6 +55,9 @@ TEST_CHARM_FILE = "./test_ubuntu-22.04-amd64.charm" +# Legacy config name kept here (not in state.py) for upgrade-compatibility testing only. +_OPENSTACK_LEGACY_PASSWORD_CONFIG_NAME = "openstack-password" # nosec: hardcoded_password_string + @dataclass class _Secret: @@ -429,13 +432,15 @@ def app_fixture( def _prepare_charmhub_app_config( - juju: jubilant.Juju, app_config: dict + juju: jubilant.Juju, app_config: dict, openstack_password: str ) -> tuple[str, dict, set[str]]: """Prepare the application config for charmhub deployment. Args: juju: The jubilant Juju instance. app_config: The base application configuration. 
+ openstack_password: The plaintext OpenStack password, used as a fallback when the + charmhub revision does not yet expose openstack-password-secret. Returns: A tuple of (channel, prepared_config, config_options). @@ -456,6 +461,14 @@ def _prepare_charmhub_app_config( charmhub_app_config = {k: v for k, v in app_config.items() if k in charmhub_config_options} + # If the charmhub revision doesn't expose openstack-password-secret yet, fall back to the + # legacy openstack-password option so the charm has credentials during initial deployment. + if ( + OPENSTACK_PASSWORD_SECRET_CONFIG_NAME not in charmhub_config_options + and _OPENSTACK_LEGACY_PASSWORD_CONFIG_NAME in charmhub_config_options + ): + charmhub_app_config[_OPENSTACK_LEGACY_PASSWORD_CONFIG_NAME] = openstack_password + return charmhub_channel, charmhub_app_config, charmhub_config_options @@ -465,13 +478,14 @@ def app_on_charmhub_fixture( app_config: dict, base_machine_constraint: dict, openstack_password_secret: _Secret, + private_endpoint_configs: PrivateEndpointConfigs, ) -> Generator[str, None, None]: """Fixture for deploying the charm from charmhub.""" app_name = f"image-builder-charmhub-{test_configs.test_id}" # Normally we would use latest/stable, but upgrading # from stable is currently broken, and therefore we are using edge. Change this in the future. charmhub_channel, charmhub_app_config, charmhub_config_options = _prepare_charmhub_app_config( - test_configs.juju, app_config + test_configs.juju, app_config, private_endpoint_configs["password"] ) # Deploy without the secret-backed config so the charm doesn't try to read the secret From ef9bffd25dc9575fca799badc65e71ae1f61e198 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 23 Apr 2026 10:06:57 +0800 Subject: [PATCH 23/42] Revert "fix: restore openstack-password fallback for upgrade test" This reverts commit 473ff2e4a69f7f9acde76d1224005b8c4f0d84b0. 
--- tests/integration/conftest.py | 18 ++---------------- 1 file changed, 2 insertions(+), 16 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index f75b2bd3..fa0b419a 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -55,9 +55,6 @@ TEST_CHARM_FILE = "./test_ubuntu-22.04-amd64.charm" -# Legacy config name kept here (not in state.py) for upgrade-compatibility testing only. -_OPENSTACK_LEGACY_PASSWORD_CONFIG_NAME = "openstack-password" # nosec: hardcoded_password_string - @dataclass class _Secret: @@ -432,15 +429,13 @@ def app_fixture( def _prepare_charmhub_app_config( - juju: jubilant.Juju, app_config: dict, openstack_password: str + juju: jubilant.Juju, app_config: dict ) -> tuple[str, dict, set[str]]: """Prepare the application config for charmhub deployment. Args: juju: The jubilant Juju instance. app_config: The base application configuration. - openstack_password: The plaintext OpenStack password, used as a fallback when the - charmhub revision does not yet expose openstack-password-secret. Returns: A tuple of (channel, prepared_config, config_options). @@ -461,14 +456,6 @@ def _prepare_charmhub_app_config( charmhub_app_config = {k: v for k, v in app_config.items() if k in charmhub_config_options} - # If the charmhub revision doesn't expose openstack-password-secret yet, fall back to the - # legacy openstack-password option so the charm has credentials during initial deployment. 
- if ( - OPENSTACK_PASSWORD_SECRET_CONFIG_NAME not in charmhub_config_options - and _OPENSTACK_LEGACY_PASSWORD_CONFIG_NAME in charmhub_config_options - ): - charmhub_app_config[_OPENSTACK_LEGACY_PASSWORD_CONFIG_NAME] = openstack_password - return charmhub_channel, charmhub_app_config, charmhub_config_options @@ -478,14 +465,13 @@ def app_on_charmhub_fixture( app_config: dict, base_machine_constraint: dict, openstack_password_secret: _Secret, - private_endpoint_configs: PrivateEndpointConfigs, ) -> Generator[str, None, None]: """Fixture for deploying the charm from charmhub.""" app_name = f"image-builder-charmhub-{test_configs.test_id}" # Normally we would use latest/stable, but upgrading # from stable is currently broken, and therefore we are using edge. Change this in the future. charmhub_channel, charmhub_app_config, charmhub_config_options = _prepare_charmhub_app_config( - test_configs.juju, app_config, private_endpoint_configs["password"] + test_configs.juju, app_config ) # Deploy without the secret-backed config so the charm doesn't try to read the secret From 6d5418e788b41317196a6a0d9377a4efc1e1db13 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Thu, 23 Apr 2026 17:01:30 +0800 Subject: [PATCH 24/42] fix: grant and set openstack-password-secret after charm refresh in upgrade test The app_fixture in test_upgrade.py was calling juju.refresh() without granting or setting the openstack-password-secret config. When upgrading from an older charmhub revision that didn't expose this config option, the refreshed charm would raise: InvalidCloudConfigError: Please supply OpenStack password via openstack-password-secret. Fix by adding openstack_password_secret to app_fixture's parameters and explicitly granting the secret and setting the config immediately after juju.refresh(), ensuring the new charm can read the OpenStack password. 
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_upgrade.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index e72d0e63..e9545e50 100644 --- a/tests/integration/test_upgrade.py +++ b/tests/integration/test_upgrade.py @@ -9,6 +9,8 @@ import jubilant import pytest +from state import OPENSTACK_PASSWORD_SECRET_CONFIG_NAME +from tests.integration.conftest import _Secret from tests.integration.helpers import wait_for, wait_for_images from tests.integration.types import OpenstackMeta, TestConfigs @@ -19,6 +21,7 @@ def app_fixture( app_on_charmhub: str, test_configs: TestConfigs, openstack_metadata: OpenstackMeta, + openstack_password_secret: _Secret, ) -> str: """Upgrade the charm from the local charm file.""" logging.info("Refreshing the charm from the local charm file.") @@ -30,6 +33,12 @@ def app_fixture( "build-network": openstack_metadata.network, }, ) + # The new charm requires openstack-password-secret; grant and set it now + # in case the charmhub version did not support this config option yet. 
+ juju.grant_secret(openstack_password_secret.name, app_on_charmhub) + juju.config( + app_on_charmhub, {OPENSTACK_PASSWORD_SECRET_CONFIG_NAME: openstack_password_secret.id} + ) status = juju.status() unit_name = next(iter(status.apps[app_on_charmhub].units)) From 23ea8e526fbb071e7c80c8766b79d45b9b511b45 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 24 Apr 2026 10:28:41 +0800 Subject: [PATCH 25/42] ci: debug --- .github/workflows/integration_test.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 26e5c991..31c5c768 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -24,6 +24,8 @@ jobs: builder-runner-label: X64 pre-run-script: | -c "./tests/integration/aproxy_prerouting_workaround.sh" + tmate-debug: true + tmate-timeout: 90 allure-report: if: ${{ !cancelled() && github.event_name == 'schedule' }} needs: From 648d6d849e960f6f859bbd09b708657584e831fc Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Fri, 24 Apr 2026 16:09:05 +0800 Subject: [PATCH 26/42] ci: add debug logging --- src/pipx.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/src/pipx.py b/src/pipx.py index 9eef91d4..7fec05c4 100644 --- a/src/pipx.py +++ b/src/pipx.py @@ -7,6 +7,7 @@ # Code is abstracting process interactions and is currently tested in integration tests. 
import logging +import os import subprocess # nosec from exceptions import PipXError @@ -50,23 +51,31 @@ def _pipx_cmd(*args: str) -> None: # pragma: no cover Raises: PipXError: If there was an error running the pipx command """ + cmd = ["/usr/bin/pipx", "--verbose", *args] + logger.info("Running pipx command: %s", cmd) + logger.info( + "Proxy env: HTTP_PROXY=%s, HTTPS_PROXY=%s, NO_PROXY=%s", + os.environ.get("HTTP_PROXY", ""), + os.environ.get("HTTPS_PROXY", ""), + os.environ.get("NO_PROXY", ""), + ) try: subprocess.run( # nosec: B603 - [ - "/usr/bin/pipx", - *args, - ], + cmd, timeout=5 * 60, check=True, user=UBUNTU_USER, + capture_output=True, + text=True, ) except subprocess.CalledProcessError as exc: logger.error( - "Pipx command failed, code: %s, out: %s, err: %s", + "Pipx command failed, code: %s, stdout: %s, stderr: %s", exc.returncode, exc.stdout, exc.stderr, ) raise PipXError from exc except subprocess.SubprocessError as exc: + logger.error("Pipx subprocess error: %s", exc) raise PipXError from exc From 016585aa9ecb316b3916f4abd06326e6d7198766 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sun, 26 Apr 2026 15:39:18 +0800 Subject: [PATCH 27/42] ci: longer tmate --- .github/workflows/integration_test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 31c5c768..5fbee199 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -25,7 +25,7 @@ jobs: pre-run-script: | -c "./tests/integration/aproxy_prerouting_workaround.sh" tmate-debug: true - tmate-timeout: 90 + tmate-timeout: 120 allure-report: if: ${{ !cancelled() && github.event_name == 'schedule' }} needs: From 9911bc2620f3317147ef49b8f3530fdca36ae72d Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sun, 26 Apr 2026 17:18:25 +0800 Subject: [PATCH 28/42] fix: pass os environ to subprocess --- src/pipx.py | 3 ++- 1 file changed, 2 insertions(+), 1 
deletion(-) diff --git a/src/pipx.py b/src/pipx.py index 7fec05c4..a65694ec 100644 --- a/src/pipx.py +++ b/src/pipx.py @@ -51,7 +51,7 @@ def _pipx_cmd(*args: str) -> None: # pragma: no cover Raises: PipXError: If there was an error running the pipx command """ - cmd = ["/usr/bin/pipx", "--verbose", *args] + cmd = ["/usr/bin/pipx", *args] logger.info("Running pipx command: %s", cmd) logger.info( "Proxy env: HTTP_PROXY=%s, HTTPS_PROXY=%s, NO_PROXY=%s", @@ -67,6 +67,7 @@ def _pipx_cmd(*args: str) -> None: # pragma: no cover user=UBUNTU_USER, capture_output=True, text=True, + env=os.environ, ) except subprocess.CalledProcessError as exc: logger.error( From badcd0292c04eef5b64e9ee2ee9ab3c08af907d9 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sun, 26 Apr 2026 18:52:00 +0800 Subject: [PATCH 29/42] test: use juju 4.0 --- .github/workflows/integration_test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration_test.yaml b/.github/workflows/integration_test.yaml index 5fbee199..41f3d7a5 100644 --- a/.github/workflows/integration_test.yaml +++ b/.github/workflows/integration_test.yaml @@ -15,7 +15,7 @@ jobs: uses: canonical/operator-workflows/.github/workflows/integration_test.yaml@main secrets: inherit with: - juju-channel: 3.6/stable + juju-channel: 4/stable provider: lxd modules: '["test_charm", "test_upgrade"]' self-hosted-runner: true From 5ec9484988f74cab2833dbe8a539cf3af5224fa8 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Sun, 26 Apr 2026 18:52:20 +0800 Subject: [PATCH 30/42] chore: remove os environ passing --- src/pipx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/pipx.py b/src/pipx.py index a65694ec..25b72694 100644 --- a/src/pipx.py +++ b/src/pipx.py @@ -67,7 +67,6 @@ def _pipx_cmd(*args: str) -> None: # pragma: no cover user=UBUNTU_USER, capture_output=True, text=True, - env=os.environ, ) except subprocess.CalledProcessError as exc: logger.error( From a3ebff051d8c34f115b317993f3089a104e83a61 Mon Sep 
17 00:00:00 2001 From: charlie4284 Date: Sun, 26 Apr 2026 23:31:39 +0800 Subject: [PATCH 31/42] fix: use sudo for juju log read and bash for logrotate stderr capture - test_upgrade.py: add sudo to cat /var/log/juju/unit-*.log since the ubuntu user doesn't have read permission on juju log files; without it juju.ssh() raises CLIError on every poll, propagating immediately out of wait_for() - test_charm.py: wrap logrotate --debug command in sudo bash -c so that the 2>&1 redirect is interpreted by a shell, ensuring stderr (where logrotate outputs debug info) is merged into stdout and captured by juju.ssh() Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- tests/integration/test_charm.py | 2 +- tests/integration/test_upgrade.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index c01cfadc..7106085a 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -191,7 +191,7 @@ def test_log_rotated(juju: jubilant.Juju, app: str): # Test that the configuration is loaded successfully using --debug flag logrotate_debug_output = juju.ssh( - unit_name, "sudo /usr/sbin/logrotate /etc/logrotate.conf --debug 2>&1" + unit_name, "sudo bash -c '/usr/sbin/logrotate /etc/logrotate.conf --debug 2>&1'" ) assert ( "rotating pattern: /var/log/github-runner-image-builder/info.log" in logrotate_debug_output diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index e9545e50..eaa6ae97 100644 --- a/tests/integration/test_upgrade.py +++ b/tests/integration/test_upgrade.py @@ -55,7 +55,7 @@ def is_upgrade_charm_event_emitted() -> bool: """ unit_name_without_slash = unit_name.replace("/", "-") juju_unit_log_file = f"/var/log/juju/unit-{unit_name_without_slash}.log" - stdout = juju.ssh(unit_name, f"cat {juju_unit_log_file}") + stdout = juju.ssh(unit_name, f"sudo cat {juju_unit_log_file}") return "Emitting Juju event upgrade_charm." 
in stdout wait_for(is_upgrade_charm_event_emitted, timeout=360, check_interval=60) From 2fb6cff5a73c054069e2abedd28bf446992ba31f Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 00:42:11 +0800 Subject: [PATCH 32/42] fix: ssh issues --- tests/integration/conftest.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index fa0b419a..f7aaf39e 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -92,6 +92,11 @@ def juju_fixture( """Juju instance with a temporary model for testing.""" keep_models = bool(request.config.getoption("--keep-models")) with jubilant.temp_model(keep=keep_models) as juju: + # 2026/04/27 - ssh configuration is currently corrupt. Delete it. + Path("~/.ssh/config").unlink(missing_ok=True) + # Add id_rsa.pub to Juju + juju.add_ssh_key(Path("~/.ssh/id_rsa.pub").read_text(encoding="utf-8")) + # add juju add-ssh-key "$(cat ~/.ssh/id_rsa.pub)" if proxy.http: logger.info("Setting model proxy: %s", proxy.http) juju.model_config( From ea579023861750c169a4ff527045f77191da3f3e Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 02:00:57 +0800 Subject: [PATCH 33/42] fix: ssh dir --- tests/integration/conftest.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index f7aaf39e..93985067 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -93,10 +93,10 @@ def juju_fixture( keep_models = bool(request.config.getoption("--keep-models")) with jubilant.temp_model(keep=keep_models) as juju: # 2026/04/27 - ssh configuration is currently corrupt. Delete it. 
- Path("~/.ssh/config").unlink(missing_ok=True) - # Add id_rsa.pub to Juju - juju.add_ssh_key(Path("~/.ssh/id_rsa.pub").read_text(encoding="utf-8")) - # add juju add-ssh-key "$(cat ~/.ssh/id_rsa.pub)" + ssh_dir = Path.home() / ".ssh" + (ssh_dir / "config").unlink(missing_ok=True) + # Add id_rsa.pub to Juju to allow juju ssh commands to work in tests. + juju.add_ssh_key((ssh_dir / "id_rsa.pub").read_text(encoding="utf-8")) if proxy.http: logger.info("Setting model proxy: %s", proxy.http) juju.model_config( From d3688891aacbdb2359be77ef87db70b53e4a4ef9 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 10:12:34 +0800 Subject: [PATCH 34/42] test: remove juju add key --- tests/integration/conftest.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 93985067..199787e8 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -95,8 +95,6 @@ def juju_fixture( # 2026/04/27 - ssh configuration is currently corrupt. Delete it. ssh_dir = Path.home() / ".ssh" (ssh_dir / "config").unlink(missing_ok=True) - # Add id_rsa.pub to Juju to allow juju ssh commands to work in tests. 
- juju.add_ssh_key((ssh_dir / "id_rsa.pub").read_text(encoding="utf-8")) if proxy.http: logger.info("Setting model proxy: %s", proxy.http) juju.model_config( From 23680bc2ed03007ae76524bcc3a613e4048f5664 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 10:17:47 +0800 Subject: [PATCH 35/42] test: juju ssh key fixture --- tests/integration/conftest.py | 10 ++++++++++ tests/integration/test_charm.py | 2 ++ tests/integration/test_upgrade.py | 1 + 3 files changed, 13 insertions(+) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 199787e8..54d431c3 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -626,3 +626,13 @@ def bare_image_id_fixture( ) assert image, "Bare image not found" return image.id + + +@pytest.fixture(scope="module", name="juju_ssh_key") +def juju_ssh_key_fixture(juju: jubilant.Juju) -> Generator[None, None, None]: + """Add the default ssh key to juju for use in tests.""" + ssh_dir = Path.home() / ".ssh" + public_key_path = ssh_dir / "id_rsa.pub" + if public_key_path.exists(): + juju.add_ssh_key(public_key_path.read_text(encoding="utf-8")) + yield diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index 7106085a..e962939f 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -120,6 +120,7 @@ def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R0913,R091 @pytest.mark.abort_on_fail +@pytest.mark.usefixtures("juju_ssh_key") def test_periodic_rebuilt( juju: jubilant.Juju, app: str, @@ -174,6 +175,7 @@ def _change_cronjob_to_minutes(juju: jubilant.Juju, unit_name: str, current_hour @pytest.mark.abort_on_fail +@pytest.mark.usefixtures("juju_ssh_key") def test_log_rotated(juju: jubilant.Juju, app: str): """ arrange: A deployed active charm and manually write something to the log file. 
diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index eaa6ae97..2a418fa0 100644 --- a/tests/integration/test_upgrade.py +++ b/tests/integration/test_upgrade.py @@ -16,6 +16,7 @@ @pytest.fixture(scope="module", name="app") +@pytest.mark.usefixtures("juju_ssh_key") def app_fixture( juju: jubilant.Juju, app_on_charmhub: str, From d09648ecf7803194f0a78906a8ad8976358d0eba Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 10:44:26 +0800 Subject: [PATCH 36/42] test: use juju ssh key fixture --- tests/integration/test_upgrade.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index 2a418fa0..3de9b995 100644 --- a/tests/integration/test_upgrade.py +++ b/tests/integration/test_upgrade.py @@ -16,13 +16,13 @@ @pytest.fixture(scope="module", name="app") -@pytest.mark.usefixtures("juju_ssh_key") def app_fixture( juju: jubilant.Juju, app_on_charmhub: str, test_configs: TestConfigs, openstack_metadata: OpenstackMeta, openstack_password_secret: _Secret, + juju_ssh_key, ) -> str: """Upgrade the charm from the local charm file.""" logging.info("Refreshing the charm from the local charm file.") From a1d3dca978817a462e5b07e093afa96b4d764e64 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 12:31:19 +0800 Subject: [PATCH 37/42] test: verify ssh key added --- tests/integration/conftest.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 54d431c3..1d3b6e2b 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -92,9 +92,6 @@ def juju_fixture( """Juju instance with a temporary model for testing.""" keep_models = bool(request.config.getoption("--keep-models")) with jubilant.temp_model(keep=keep_models) as juju: - # 2026/04/27 - ssh configuration is currently corrupt. Delete it. 
- ssh_dir = Path.home() / ".ssh" - (ssh_dir / "config").unlink(missing_ok=True) if proxy.http: logger.info("Setting model proxy: %s", proxy.http) juju.model_config( @@ -632,7 +629,26 @@ def bare_image_id_fixture( def juju_ssh_key_fixture(juju: jubilant.Juju) -> Generator[None, None, None]: """Add the default ssh key to juju for use in tests.""" ssh_dir = Path.home() / ".ssh" - public_key_path = ssh_dir / "id_rsa.pub" - if public_key_path.exists(): + # 2026/04/27 - ssh configuration is currently corrupt. Delete it. + logger.info("Cleaning up ssh config.") + (ssh_dir / "config").unlink(missing_ok=True) + + # 2026/04/27 - add ssh key to juju for ssh commands in tests. + if (public_key_path := ssh_dir / "id_rsa.pub").exists(): + import subprocess # nosec + + result = subprocess.run( + ["/usr/bin/ssh-keygen", "-lf", str(public_key_path)], + capture_output=True, + text=True, + check=False, + ) + logger.info("SSH key fingerprint: %s", result.stdout.strip()) + + logger.info("Adding ssh key to juju.") juju.add_ssh_key(public_key_path.read_text(encoding="utf-8")) + + listed_keys = juju.cli("list-ssh-keys") + logger.info("Juju SSH keys after add:\n%s", listed_keys) + yield From cfc82e900a4fba8515091eec1db32aa0dcafb075 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 13:36:23 +0800 Subject: [PATCH 38/42] fix: setup proxy environment on relation joined hook --- src/image.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/image.py b/src/image.py index 14b25861..293688c3 100644 --- a/src/image.py +++ b/src/image.py @@ -63,6 +63,7 @@ def _on_image_relation_joined(self, event: ops.RelationJoinedEvent) -> None: event: The event emitted when a relation is joined. 
""" build_config = state.BuilderConfig.from_charm(charm=self.charm) + self.charm._setup_proxy_environment(build_config.proxy) proxy = state.ProxyConfig.from_env() if not build_config.cloud_config.upload_cloud_ids: self.model.unit.status = ops.BlockedStatus( From 29060e54cf5b220eaf23fd7906e0ba165c588f80 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 14:52:31 +0800 Subject: [PATCH 39/42] ci: trigger ci From 5422cd8e6bfff89a1dfead7a27fd8be2b2dba523 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 16:41:23 +0800 Subject: [PATCH 40/42] test: eject juju ssh key modification --- tests/integration/conftest.py | 29 ----------------------------- tests/integration/test_charm.py | 2 -- tests/integration/test_upgrade.py | 1 - 3 files changed, 32 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 1d3b6e2b..fa0b419a 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -623,32 +623,3 @@ def bare_image_id_fixture( ) assert image, "Bare image not found" return image.id - - -@pytest.fixture(scope="module", name="juju_ssh_key") -def juju_ssh_key_fixture(juju: jubilant.Juju) -> Generator[None, None, None]: - """Add the default ssh key to juju for use in tests.""" - ssh_dir = Path.home() / ".ssh" - # 2026/04/27 - ssh configuration is currently corrupt. Delete it. - logger.info("Cleaning up ssh config.") - (ssh_dir / "config").unlink(missing_ok=True) - - # 2026/04/27 - add ssh key to juju for ssh commands in tests. 
- if (public_key_path := ssh_dir / "id_rsa.pub").exists(): - import subprocess # nosec - - result = subprocess.run( - ["/usr/bin/ssh-keygen", "-lf", str(public_key_path)], - capture_output=True, - text=True, - check=False, - ) - logger.info("SSH key fingerprint: %s", result.stdout.strip()) - - logger.info("Adding ssh key to juju.") - juju.add_ssh_key(public_key_path.read_text(encoding="utf-8")) - - listed_keys = juju.cli("list-ssh-keys") - logger.info("Juju SSH keys after add:\n%s", listed_keys) - - yield diff --git a/tests/integration/test_charm.py b/tests/integration/test_charm.py index e962939f..7106085a 100644 --- a/tests/integration/test_charm.py +++ b/tests/integration/test_charm.py @@ -120,7 +120,6 @@ def test_charm_another_app_does_not_rebuild_image( # pylint: disable=R0913,R091 @pytest.mark.abort_on_fail -@pytest.mark.usefixtures("juju_ssh_key") def test_periodic_rebuilt( juju: jubilant.Juju, app: str, @@ -175,7 +174,6 @@ def _change_cronjob_to_minutes(juju: jubilant.Juju, unit_name: str, current_hour @pytest.mark.abort_on_fail -@pytest.mark.usefixtures("juju_ssh_key") def test_log_rotated(juju: jubilant.Juju, app: str): """ arrange: A deployed active charm and manually write something to the log file. 
diff --git a/tests/integration/test_upgrade.py b/tests/integration/test_upgrade.py index 3de9b995..eaa6ae97 100644 --- a/tests/integration/test_upgrade.py +++ b/tests/integration/test_upgrade.py @@ -22,7 +22,6 @@ def app_fixture( test_configs: TestConfigs, openstack_metadata: OpenstackMeta, openstack_password_secret: _Secret, - juju_ssh_key, ) -> str: """Upgrade the charm from the local charm file.""" logging.info("Refreshing the charm from the local charm file.") From 13cfb7231a756c073ecaf510dce6db1929bf2e30 Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 17:30:22 +0800 Subject: [PATCH 41/42] test: applications keep models --- tests/integration/conftest.py | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index fa0b419a..811c6b5b 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -85,12 +85,17 @@ def proxy_fixture(pytestconfig: pytest.Config) -> ProxyConfig: return ProxyConfig(http=proxy, https=proxy, no_proxy=no_proxy) +@pytest.fixture(scope="module", name="keep_models") +def keep_models_fixture(pytestconfig: pytest.Config) -> bool: + """Whether to keep the testing models after tests complete.""" + return pytestconfig.getoption("--keep-models") + + @pytest.fixture(scope="module", name="juju") def juju_fixture( - proxy: ProxyConfig, request: pytest.FixtureRequest + proxy: ProxyConfig, keep_models: bool, request: pytest.FixtureRequest ) -> Generator[jubilant.Juju, None, None]: """Juju instance with a temporary model for testing.""" - keep_models = bool(request.config.getoption("--keep-models")) with jubilant.temp_model(keep=keep_models) as juju: if proxy.http: logger.info("Setting model proxy: %s", proxy.http) @@ -122,6 +127,7 @@ def test_charm_fixture( test_id: str, private_endpoint_configs: PrivateEndpointConfigs, openstack_password_secret: _Secret, + keep_models: bool, ) -> Generator[str, None, None]: 
"""The test charm that becomes active when valid relation data is given.""" app_name = f"test-{test_id}" @@ -129,8 +135,9 @@ def test_charm_fixture( yield app_name - juju.remove_application(app_name) - logger.info("Test charm application %s removed.", app_name) + if not keep_models: + juju.remove_application(app_name) + logger.info("Test charm application %s removed.", app_name) @pytest.fixture(scope="module", name="test_charm_2") @@ -139,6 +146,7 @@ def test_charm_2_fixture( test_id: str, private_endpoint_configs: PrivateEndpointConfigs, openstack_password_secret: _Secret, + keep_models: bool, ) -> Generator[str, None, None]: """A second test charm that becomes active when valid relation data is given.""" app_name = f"test2-{test_id}" @@ -146,9 +154,10 @@ def test_charm_2_fixture( yield app_name - logger.info("Cleaning up test charm.") - juju.remove_application(app_name) - logger.info("Test charm application %s removed.", app_name) + if not keep_models: + logger.info("Cleaning up test charm.") + juju.remove_application(app_name) + logger.info("Test charm application %s removed.", app_name) def _deploy_test_charm( @@ -396,6 +405,7 @@ def app_fixture( test_configs: TestConfigs, script_secret: _Secret, openstack_password_secret: _Secret, + keep_models: bool, ) -> Generator[str, None, None]: """The deployed application fixture.""" app_name = f"image-builder-operator-{test_configs.test_id}" @@ -425,7 +435,8 @@ def app_fixture( yield app_name - test_configs.juju.remove_application(app_name) + if not keep_models: + test_configs.juju.remove_application(app_name) def _prepare_charmhub_app_config( @@ -465,6 +476,7 @@ def app_on_charmhub_fixture( app_config: dict, base_machine_constraint: dict, openstack_password_secret: _Secret, + keep_models: bool, ) -> Generator[str, None, None]: """Fixture for deploying the charm from charmhub.""" app_name = f"image-builder-charmhub-{test_configs.test_id}" @@ -506,7 +518,8 @@ def app_on_charmhub_fixture( yield app_name - 
test_configs.juju.remove_application(app_name) + if not keep_models: + test_configs.juju.remove_application(app_name) @pytest.fixture(scope="module", name="ssh_key") From ec50c0ecce16618c8f7676c0b0b48a1645de85af Mon Sep 17 00:00:00 2001 From: charlie4284 Date: Mon, 27 Apr 2026 17:40:09 +0800 Subject: [PATCH 42/42] test: add host ssh key --- tests/integration/conftest.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 811c6b5b..171544a6 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -97,6 +97,10 @@ def juju_fixture( ) -> Generator[jubilant.Juju, None, None]: """Juju instance with a temporary model for testing.""" with jubilant.temp_model(keep=keep_models) as juju: + # 2026/04/27 - Add ssh-key from the host - there's a bug that leads to ssh failure. + ssh_pub_key_path = Path.home() / ".ssh" / "id_rsa.pub" + if ssh_pub_key_path.exists(): + juju.add_ssh_key(ssh_pub_key_path.read_text(encoding="utf-8")) if proxy.http: logger.info("Setting model proxy: %s", proxy.http) juju.model_config(