From 8eb96854f41965ba54a194aa50de603a65310091 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Apr 2026 01:43:23 +0000 Subject: [PATCH] chore: update charm libraries --- .../data_platform_libs/v0/data_interfaces.py | 41 ++- .../grafana_k8s/v0/grafana_dashboard.py | 16 +- .../prometheus_k8s/v0/prometheus_scrape.py | 317 +++++++++++++----- lib/charms/saml_integrator/v0/saml.py | 22 +- lib/charms/smtp_integrator/v0/smtp.py | 151 ++++++++- 5 files changed, 441 insertions(+), 106 deletions(-) diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py index aa798149..5be1d931 100644 --- a/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -453,7 +453,7 @@ def _on_subject_requested(self, event: SubjectRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 56 +LIBPATCH = 58 PYDEPS = ["ops>=2.0.0"] @@ -842,6 +842,11 @@ def _legacy_compat_find_secret_by_old_label(self) -> None: self._secret_meta = self._model.get_secret(label=label) except SecretNotFoundError: pass + except ModelError as e: + # Permission denied can be raised if the secret exists but is not yet granted to us. 
+ if "permission denied" in str(e): + return + raise else: if label != self.label: self.current_label = label @@ -876,6 +881,8 @@ def _legacy_migration_to_new_label_if_needed(self) -> None: except ModelError as err: if MODEL_ERRORS["not_leader"] not in str(err): raise + if "permission denied" not in str(err): + raise self.current_label = None ########################################################################## @@ -4268,6 +4275,14 @@ def _on_secret_changed_event(self, event: SecretChangedEvent): if relation.app == self.charm.app: logging.info("Secret changed event ignored for Secret Owner") + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + remote_unit = None for unit in relation.units: if unit.app != self.charm.app: @@ -5294,6 +5309,14 @@ def _on_secret_changed_event(self, event: SecretChangedEvent): ) return + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + if relation.app == self.charm.app: logging.info("Secret changed event ignored for Secret Owner") @@ -5556,6 +5579,14 @@ def _on_secret_changed_event(self, event: SecretChangedEvent): ) return + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + self.relation_data.relation_name, + ) + return + if relation.app == self.charm.app: logging.info("Secret changed event ignored for Secret Owner") @@ -5701,6 +5732,14 @@ def _on_secret_changed_event(self, event: SecretChangedEvent): if relation.app == self.charm.app: logging.info("Secret changed event ignored for Secret Owner") + if relation.name != self.relation_data.relation_name: + logger.debug( + "Ignoring secret-changed from endpoint %s (expected %s)", + relation.name, + 
self.relation_data.relation_name, + ) + return + remote_unit = None for unit in relation.units: if unit.app != self.charm.app: diff --git a/lib/charms/grafana_k8s/v0/grafana_dashboard.py b/lib/charms/grafana_k8s/v0/grafana_dashboard.py index 334ecd0d..9886fc2b 100644 --- a/lib/charms/grafana_k8s/v0/grafana_dashboard.py +++ b/lib/charms/grafana_k8s/v0/grafana_dashboard.py @@ -217,7 +217,7 @@ def __init__(self, *args): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 47 +LIBPATCH = 49 PYDEPS = ["cosl >= 0.0.50"] @@ -585,9 +585,19 @@ def _convert_dashboard_fields(cls, content: str, inject_dropdowns: bool = True) datasources[template_value["name"]] = template_value["query"].lower() # Put our own variables in the template + # We only want to inject our own dropdowns IFF they are NOT + # already in the template coming over relation data. + # We'll store all dropdowns in the template from the provider + # in a set. We'll add our own if they are not in this set. 
+ existing_names = { + item.get("name") + for item in dict_content["templating"]["list"] + } + for d in template_dropdowns: # type: ignore - if d not in dict_content["templating"]["list"]: + if d.get("name") not in existing_names: dict_content["templating"]["list"].insert(0, d) + existing_names.add(d.get("name")) dict_content = cls._replace_template_fields(dict_content, datasources, existing_templates) return json.dumps(dict_content) @@ -1003,7 +1013,7 @@ def _is_dashboard(p: Path) -> bool: dashboard_templates = {} - for path in filter(_is_dashboard, Path(dashboards_path).glob("*")): + for path in filter(_is_dashboard, Path(dashboards_path).glob("**/*")): try: dashboard_dict = json.loads(path.read_bytes()) except json.JSONDecodeError as e: diff --git a/lib/charms/prometheus_k8s/v0/prometheus_scrape.py b/lib/charms/prometheus_k8s/v0/prometheus_scrape.py index 18b3e185..ff52245c 100644 --- a/lib/charms/prometheus_k8s/v0/prometheus_scrape.py +++ b/lib/charms/prometheus_k8s/v0/prometheus_scrape.py @@ -335,7 +335,7 @@ def _on_scrape_targets_changed(self, event): import tempfile from collections import defaultdict from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union from urllib.parse import urlparse import yaml @@ -361,7 +361,7 @@ def _on_scrape_targets_changed(self, event): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 56 +LIBPATCH = 58 # Version 0.0.53 needed for cosl.rules.generic_alert_groups PYDEPS = ["cosl>=0.0.53"] @@ -399,6 +399,14 @@ def _on_scrape_targets_changed(self, event): DEFAULT_ALERT_RULES_RELATIVE_PATH = "./src/prometheus_alert_rules" +FallbackScrapeProtocol = Literal[ + "PrometheusProto", + "OpenMetricsText0.0.1", + "OpenMetricsText1.0.0", + "PrometheusText0.0.4", + "PrometheusText1.0.0", +] + class PrometheusConfig: """A namespace for utility 
functions for manipulating the prometheus config dict.""" @@ -461,22 +469,156 @@ def prefix_job_names(scrape_configs: List[dict], prefix: str) -> List[dict]: return modified_scrape_configs + @staticmethod + def _build_host_to_unit( + hosts: Dict[str, Tuple[str, str, str]], + topology: Optional[JujuTopology], + ) -> Dict[str, str]: + """Build a reverse lookup dict: {address: unit_name, fqdn: unit_name, ...}. + + Maps each known unit identifier (IP address and/or FQDN) to its unit name, + so that non-wildcard targets can be matched whether specified as IP or FQDN. + + Returns an empty dict when ``topology`` is None, since matching only serves + the purpose of injecting ``juju_unit`` labels. + + The set subtraction ``{addr, fqdn} - {""}`` drops empty strings (absent FQDN, + e.g. when external_url is set) and deduplicates when addr == fqdn (non-IP + bind address). + """ + if not topology: + return {} + return { + identifier: unit_name + for unit_name, (addr, _, fqdn) in hosts.items() + for identifier in {addr, fqdn} - {""} + } + + @staticmethod + def _classify_targets(targets: List[str]) -> Tuple[List[str], List[str]]: + """Split a list of targets into wildcard and non-wildcard targets. + + Returns: + A ``(wildcard_targets, non_wildcard_targets)`` tuple. + """ + wildcard_targets = [] + non_wildcard_targets = [] + wildcard_re = re.compile(r"\*(?:(:\d+))?") + for target in targets: + if wildcard_re.match(target): + wildcard_targets.append(target) + else: + non_wildcard_targets.append(target) + return wildcard_targets, non_wildcard_targets + + @staticmethod + def _match_non_wildcard_targets( + targets: List[str], + host_to_unit: Dict[str, str], + ) -> Tuple[Dict[str, List[str]], List[str]]: + """Match non-wildcard targets against known unit addresses. + + Parses the host portion of each target (handling IPv6 bracket notation) and + looks it up in ``host_to_unit``. 
+ + Returns: + A ``(matched_by_unit, unmatched_targets)`` tuple where ``matched_by_unit`` + maps each matched unit name to the list of targets belonging to it, and + ``unmatched_targets`` contains targets with no unit match. + """ + matched_by_unit: Dict[str, List[str]] = {} + unmatched_targets: List[str] = [] + for target in targets: + # urlparse correctly handles IPv6 (e.g. [::1]:9093), host:port, and + # bare hostnames — unlike a naive split(":")[0]. + parsed = urlparse(f"//{target}") + target_host = parsed.hostname or target.split(":", 1)[0] + matched_unit = host_to_unit.get(target_host) + if matched_unit: + matched_by_unit.setdefault(matched_unit, []).append(target) + else: + unmatched_targets.append(target) + return matched_by_unit, unmatched_targets + + @staticmethod + def _build_per_unit_job( + job: dict, + static_config: dict, + targets: List[str], + unit_name: str, + unit_path: str, + topology: Optional[JujuTopology], + ) -> dict: + """Build a single per-unit scrape job with topology labels and relabeling rules. + + Used for both wildcard and matched non-wildcard targets to avoid duplication. + + Args: + job: the original scrape job dict to base the new job on. + static_config: the original static_config dict to copy labels from. + targets: the resolved target addresses for this unit. + unit_name: the Juju unit name (e.g. "alertmanager/0"). + unit_path: path prefix to prepend to the metrics path (from external URL, may be ""). + topology: optional topology for adding Juju labels. + + Returns: + A new scrape job dict for this unit. 
+ """ + unit_num = unit_name.split("/")[-1] + new_static = static_config.copy() + new_static["targets"] = targets + new_job = job.copy() + new_job["job_name"] = new_job.get("job_name", "unnamed-job") + "-" + unit_num + new_job["metrics_path"] = unit_path + (new_job.get("metrics_path") or "/metrics") + if topology: + new_static["labels"] = { + **topology.label_matcher_dict, + "juju_unit": unit_name, + **new_static.get("labels", {}), + } + # Instance relabeling for topology should be last in order. + new_job["relabel_configs"] = new_job.get("relabel_configs", []) + [ + PrometheusConfig.topology_relabel_config_wildcard + ] + new_job["static_configs"] = [new_static] + return new_job + @staticmethod def expand_wildcard_targets_into_individual_jobs( scrape_jobs: List[dict], - hosts: Dict[str, Tuple[str, str]], + hosts: Dict[str, Tuple[str, str, str]], topology: Optional[JujuTopology] = None, ) -> List[dict]: """Extract wildcard hosts from the given scrape_configs list into separate jobs. + For wildcard targets (e.g. "*:9093"), one job per unit is created. When + ``topology`` is provided, the ``juju_unit`` label is injected into each + per-unit job; without ``topology`` the per-unit jobs are created but no + topology labels are added. + + For non-wildcard targets (fully qualified hostnames/IPs), the host portion of + each target is matched against the known unit addresses in ``hosts``. Targets + whose address matches a known unit are expanded into a per-unit job (with + ``juju_unit`` when ``topology`` is provided), mirroring the wildcard behaviour. + Targets with no match (e.g. external services) are kept in a single job without + ``juju_unit``, preserving the previous behaviour. + Args: scrape_jobs: list of scrape jobs. - hosts: a dictionary mapping host names to host address for - all units of the relation for which this job configuration - must be constructed. 
+ hosts: a dictionary mapping unit names to ``(address, path, fqdn)`` tuples for + all units of the relation for which this job configuration must be + constructed. topology: optional arg for adding topology labels to scrape targets. + When ``None``, wildcard targets are still expanded into per-unit jobs but + no ``juju_unit`` or topology labels are added. Non-wildcard target matching + is skipped entirely (all non-wildcard targets are kept in a single job), + since matching only serves the purpose of injecting ``juju_unit`` labels. """ - # hosts = self._relation_hosts(relation) + # Build a reverse lookup: {address: unit_name, fqdn: unit_name, ...} + # so that non-wildcard targets can be matched whether specified as IP or FQDN. + # The set subtraction {addr, fqdn} - {""} drops empty strings (absent FQDN) + # and deduplicates when addr == fqdn (non-IP bind address). + host_to_unit = PrometheusConfig._build_host_to_unit(hosts, topology) modified_scrape_jobs = [] for job in scrape_jobs: @@ -484,84 +626,66 @@ def expand_wildcard_targets_into_individual_jobs( if not static_configs: continue - # When a single unit specified more than one wildcard target, then they are expanded - # into a static_config per target - non_wildcard_static_configs = [] + # Accumulates non-wildcard targets that could not be matched to any known unit. + # These are kept in a single job with topology-only labels (no juju_unit): + # fully-qualified targets that predate this feature are unaffected. + unmatched_static_configs = [] for static_config in static_configs: targets = static_config.get("targets") if not targets: continue - # All non-wildcard targets remain in the same static_config - non_wildcard_targets = [] - - # All wildcard targets are extracted to a job per unit. If multiple wildcard - # targets are specified, they remain in the same static_config (per unit). 
- wildcard_targets = [] - - for target in targets: - match = re.compile(r"\*(?:(:\d+))?").match(target) - if match: - # This is a wildcard target. - # Need to expand into separate jobs and remove it from this job here - wildcard_targets.append(target) - else: - # This is not a wildcard target. Copy it over into its own static_config. - non_wildcard_targets.append(target) + wildcard_targets, non_wildcard_targets = PrometheusConfig._classify_targets( + targets + ) - # All non-wildcard targets remain in the same static_config + # Non-wildcard targets: try to match each target's host against known unit + # addresses. Matched targets get a per-unit job with juju_unit; unmatched + # targets get topology-only labels with no per-unit expansion. if non_wildcard_targets: - non_wildcard_static_config = static_config.copy() - non_wildcard_static_config["targets"] = non_wildcard_targets - - if topology: - # When non-wildcard targets (aka fully qualified hostnames) are specified, - # there is no reliable way to determine the name (Juju topology unit name) - # for such a target. Therefore labeling with Juju topology, excluding the - # unit name. 
- non_wildcard_static_config["labels"] = { - **topology.label_matcher_dict, - **non_wildcard_static_config.get("labels", {}), - } - - non_wildcard_static_configs.append(non_wildcard_static_config) - - # Extract wildcard targets into individual jobs - if wildcard_targets: - for unit_name, (unit_hostname, unit_path) in hosts.items(): - modified_job = job.copy() - modified_job["static_configs"] = [static_config.copy()] - modified_static_config = modified_job["static_configs"][0] - modified_static_config["targets"] = [ - target.replace("*", unit_hostname) for target in wildcard_targets - ] - - unit_num = unit_name.split("/")[-1] - job_name = modified_job.get("job_name", "unnamed-job") + "-" + unit_num - modified_job["job_name"] = job_name - modified_job["metrics_path"] = unit_path + ( - job.get("metrics_path") or "/metrics" + matched_by_unit, unmatched_targets = ( + PrometheusConfig._match_non_wildcard_targets( + non_wildcard_targets, host_to_unit ) + ) + # Unmatched targets: no unit mapping found — kept with topology-only + # labels and no per-unit expansion (juju_unit is not added). + if unmatched_targets: + unmatched_static_config = static_config.copy() + unmatched_static_config["targets"] = unmatched_targets if topology: - # Add topology labels - modified_static_config["labels"] = { + unmatched_static_config["labels"] = { **topology.label_matcher_dict, - **{"juju_unit": unit_name}, - **modified_static_config.get("labels", {}), + **unmatched_static_config.get("labels", {}), } + unmatched_static_configs.append(unmatched_static_config) + + # Matched targets: one per-unit job with juju_unit label. + for unit_name, unit_targets_list in matched_by_unit.items(): + _, unit_path, _ = hosts.get(unit_name, ("", "", "")) + modified_scrape_jobs.append( + PrometheusConfig._build_per_unit_job( + job, static_config, unit_targets_list, unit_name, unit_path, topology + ) + ) - # Instance relabeling for topology should be last in order. 
- modified_job["relabel_configs"] = modified_job.get( - "relabel_configs", [] - ) + [PrometheusConfig.topology_relabel_config_wildcard] - - modified_scrape_jobs.append(modified_job) + # Wildcard targets: one per-unit job per host, replacing "*" with the unit address. + if wildcard_targets: + for unit_name, (unit_hostname, unit_path, _unit_fqdn) in hosts.items(): + resolved_targets = [ + target.replace("*", unit_hostname) for target in wildcard_targets + ] + modified_scrape_jobs.append( + PrometheusConfig._build_per_unit_job( + job, static_config, resolved_targets, unit_name, unit_path, topology + ) + ) - if non_wildcard_static_configs: + if unmatched_static_configs: modified_job = job.copy() - modified_job["static_configs"] = non_wildcard_static_configs + modified_job["static_configs"] = unmatched_static_configs modified_job["metrics_path"] = modified_job.get("metrics_path") or "/metrics" if topology: @@ -823,7 +947,12 @@ class MetricsEndpointConsumer(Object): on = MonitoringEvents() # pyright: ignore - def __init__(self, charm: CharmBase, relation_name: str = DEFAULT_RELATION_NAME): + def __init__( + self, + charm: CharmBase, + relation_name: str = DEFAULT_RELATION_NAME, + fallback_scrape_protocol: Optional[FallbackScrapeProtocol] = None, + ): """A Prometheus based Monitoring service. Args: @@ -834,6 +963,17 @@ def __init__(self, charm: CharmBase, relation_name: str = DEFAULT_RELATION_NAME) It is strongly advised not to change the default, so that people deploying your charm will have a consistent experience with all other charms that consume metrics endpoints. + fallback_scrape_protocol: an optional fallback protocol to use when the + Content-Type header of a scrape response is missing or invalid. Supported + values: "PrometheusProto", "OpenMetricsText0.0.1", "OpenMetricsText1.0.0", + "PrometheusText0.0.4", "PrometheusText1.0.0". Ref: + https://prometheus.io/docs/prometheus/latest/configuration/configuration/#scrape_config. 
+ This had to be added after we bumped to Prometheus workload major version 3. Starting in major 3, + Prometheus no longer defaults to the Prometheus text format (PrometheusText0.0.4) + when the Content-Type header is missing or invalid, and instead fails the scrape with an error. + This parameter should only be used by MetricsEndpointConsumers that use Prometheus 3 and above, as setting + this key in the scrape configs of Prometheus 2 will result in the error: + "field fallback_scrape_protocol not found in type config.ScrapeConfig". Raises: RelationNotFoundError: If there is no relation in the charm's metadata.yaml @@ -852,6 +992,7 @@ def __init__(self, charm: CharmBase, relation_name: str = DEFAULT_RELATION_NAME) super().__init__(charm, relation_name) self._charm = charm self._relation_name = relation_name + self._fallback_scrape_protocol = fallback_scrape_protocol self._tool = CosTool(self._charm) events = self._charm.on[relation_name] self.framework.observe(events.relation_changed, self._on_metrics_provider_relation_changed) @@ -1145,10 +1286,25 @@ def _static_scrape_config(self, relation) -> list: # For https scrape targets we still do not render a `tls_config` section because certs # are expected to be made available by the charm via the `update-ca-certificates` mechanism. + + if self._fallback_scrape_protocol: + for job in scrape_configs: + job["fallback_scrape_protocol"] = self._fallback_scrape_protocol + return scrape_configs - def _relation_hosts(self, relation: Relation) -> Dict[str, Tuple[str, str]]: - """Returns a mapping from unit names to (address, path) tuples, for the given relation.""" + def _relation_hosts(self, relation: Relation) -> Dict[str, Tuple[str, str, str]]: + """Returns a mapping from unit names to (address, path, fqdn) tuples. + + Args: + relation: the relation to read unit data from. + + Returns: + A dict mapping each unit name to a ``(address, path, fqdn)`` tuple. 
The + ``fqdn`` element may be an empty string when the FQDN is not known. When + present, it may either be distinct from, or equal to ``address``. For + example, when the unit address itself is already a hostname. + """ hosts = {} for unit in relation.units: if not (unit_databag := relation.data.get(unit)): @@ -1161,11 +1317,12 @@ def _relation_hosts(self, relation: Relation) -> Dict[str, Tuple[str, str]]: unit_address = unit_databag.get("prometheus_scrape_unit_address") or unit_databag.get( "prometheus_scrape_host" ) + unit_fqdn = unit_databag.get("prometheus_scrape_unit_fqdn", "") if not (unit_name and unit_address): continue - hosts.update({unit_name: (unit_address, unit_path)}) + hosts.update({unit_name: (unit_address, unit_path, unit_fqdn)}) return hosts @@ -1540,18 +1697,22 @@ def _set_unit_ip(self, _=None): parsed = urlparse(self.external_url) unit_address = parsed.hostname path = parsed.path + unit_fqdn = "" elif self._is_valid_unit_address(unit_ip): unit_address = unit_ip + unit_fqdn = socket.getfqdn() path = "" else: unit_address = socket.getfqdn() + unit_fqdn = unit_address path = "" - relation.data[self._charm.unit]["prometheus_scrape_unit_address"] = unit_address - relation.data[self._charm.unit]["prometheus_scrape_unit_path"] = path - relation.data[self._charm.unit]["prometheus_scrape_unit_name"] = str( - self._charm.model.unit.name - ) + relation.data[self._charm.unit].update({ + "prometheus_scrape_unit_address": unit_address, + "prometheus_scrape_unit_path": path, + "prometheus_scrape_unit_name": str(self._charm.model.unit.name), + "prometheus_scrape_unit_fqdn": unit_fqdn, + }) def _is_valid_unit_address(self, address: str) -> bool: """Validate a unit address. diff --git a/lib/charms/saml_integrator/v0/saml.py b/lib/charms/saml_integrator/v0/saml.py index 8fd1610e..28b808c8 100644 --- a/lib/charms/saml_integrator/v0/saml.py +++ b/lib/charms/saml_integrator/v0/saml.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2024 Canonical Ltd. 
+# Copyright 2025 Canonical Ltd. # Licensed under the Apache2.0. See LICENSE file in charm source for details. """Library to manage the relation data for the SAML Integrator charm. @@ -68,15 +68,15 @@ class method `from_relation_data`. # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 10 +LIBPATCH = 11 # pylint: disable=wrong-import-position +# ruff: noqa: E402 import re import typing import ops -from pydantic import AnyHttpUrl, BaseModel, Field -from pydantic.tools import parse_obj_as +from pydantic import AnyHttpUrl, BaseModel, Field, TypeAdapter DEFAULT_RELATION_NAME = "saml" @@ -92,9 +92,9 @@ class SamlEndpoint(BaseModel): """ name: str = Field(..., min_length=1) - url: typing.Optional[AnyHttpUrl] + url: typing.Optional[AnyHttpUrl] = None binding: str = Field(..., min_length=1) - response_url: typing.Optional[AnyHttpUrl] + response_url: typing.Optional[AnyHttpUrl] = None def to_relation_data(self) -> typing.Dict[str, str]: """Convert an instance of SamlEndpoint to the relation representation. 
@@ -139,13 +139,13 @@ def from_relation_data(cls, relation_data: typing.Dict[str, str]) -> "SamlEndpoi return cls( name=name, url=( - parse_obj_as(AnyHttpUrl, relation_data[f"{prefix}url"]) + TypeAdapter(AnyHttpUrl).validate_python(relation_data[f"{prefix}url"]) if relation_data[f"{prefix}url"] else None ), binding=relation_data[f"{prefix}binding"], response_url=( - parse_obj_as(AnyHttpUrl, relation_data[f"{prefix}response_url"]) + TypeAdapter(AnyHttpUrl).validate_python(relation_data[f"{prefix}response_url"]) if f"{prefix}response_url" in relation_data else None ), @@ -208,7 +208,7 @@ def from_relation_data(cls, relation_data: ops.RelationDataContent) -> "SamlRela return cls( entity_id=relation_data.get("entity_id"), # type: ignore metadata_url=( - parse_obj_as(AnyHttpUrl, relation_data.get("metadata_url")) + TypeAdapter(AnyHttpUrl).validate_python(relation_data.get("metadata_url")) if relation_data.get("metadata_url") else None ), # type: ignore @@ -231,7 +231,7 @@ class SamlDataAvailableEvent(ops.RelationEvent): @property def saml_relation_data(self) -> SamlRelationData: """Get a SamlRelationData for the relation data.""" - assert self.relation.app + assert self.relation.app # noqa: S101 return SamlRelationData.from_relation_data(self.relation.data[self.relation.app]) @property @@ -294,7 +294,7 @@ def _on_relation_changed(self, event: ops.RelationChangedEvent) -> None: Args: event: event triggering this handler. 
""" - assert event.relation.app + assert event.relation.app # noqa: S101 if event.relation.data[event.relation.app]: self.on.saml_data_available.emit(event.relation, app=event.app, unit=event.unit) diff --git a/lib/charms/smtp_integrator/v0/smtp.py b/lib/charms/smtp_integrator/v0/smtp.py index 9641b070..8ab7f285 100644 --- a/lib/charms/smtp_integrator/v0/smtp.py +++ b/lib/charms/smtp_integrator/v0/smtp.py @@ -68,27 +68,53 @@ def _on_config_changed(self, _) -> None: # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 19 +LIBPATCH = 21 -PYDEPS = ["pydantic>=2"] +PYDEPS = ["pydantic>=1.10,<3", "email-validator>=2"] # pylint: disable=wrong-import-position import itertools +import json import logging import typing from ast import literal_eval from enum import Enum -from typing import Dict, Optional +from typing import Any, Callable, Dict, List, Optional, TypeVar, cast import ops -from pydantic import BaseModel, Field, ValidationError +from pydantic import BaseModel, EmailStr, Field, ValidationError logger = logging.getLogger(__name__) +_F = TypeVar("_F", bound=Callable[..., Any]) + +try: + # Pydantic v2 + from pydantic import field_validator as _pyd_field_validator + + _PYDANTIC_V2 = True +except ImportError: + _pyd_field_validator = None # type: ignore[assignment] + _PYDANTIC_V2 = False + +# Pydantic v1 field validation decorator (v2 uses field_validator) +from pydantic import validator as _pyd_validator # type: ignore[attr-defined] + DEFAULT_RELATION_NAME = "smtp" LEGACY_RELATION_NAME = "smtp-legacy" +def recipients_validator() -> Callable[[_F], _F]: + """Return the correct recipients validator decorator for pydantic v1/v2. + + Returns: + A decorator to validate/normalize the recipients field before EmailStr validation. 
+ """ + if _PYDANTIC_V2: + return cast(Any, _pyd_field_validator)("recipients", mode="before") + return cast(Any, _pyd_validator)("recipients", pre=True) + + class SmtpError(Exception): """Common ancestor for Smtp related exceptions.""" @@ -138,6 +164,8 @@ class SmtpRelationData(BaseModel): transport_security: The security protocol to use for the outgoing SMTP relay. domain: The domain used by the emails sent from SMTP relay. skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. + smtp_sender: Optional sender email address for outgoing notifications. + recipients: List of recipient email addresses for notifications. """ host: str = Field(..., min_length=1) @@ -149,6 +177,14 @@ class SmtpRelationData(BaseModel): transport_security: TransportSecurity domain: Optional[str] = None skip_ssl_verify: Optional[bool] = False + smtp_sender: Optional[EmailStr] = None + recipients: List[EmailStr] = Field(default_factory=list) + + @recipients_validator() + @classmethod + def _recipients_str_to_list(cls, value: Any) -> Any: + """Convert recipients input to list[str] before EmailStr validation.""" + return parse_recipients(value) def to_relation_data(self) -> Dict[str, str]: """Convert an instance of SmtpRelationData to the relation representation. @@ -174,6 +210,14 @@ def to_relation_data(self) -> Dict[str, str]: logger.warning("password field exists along with password_id field, removing.") del result["password"] result["password_id"] = self.password_id + + if self.smtp_sender: + result["smtp_sender"] = str(self.smtp_sender) + + if self.recipients: + recipients = list(self.recipients) + result["recipients"] = json.dumps([str(r) for r in recipients]) + return result @@ -190,6 +234,8 @@ class SmtpDataAvailableEvent(ops.RelationEvent): transport_security: The security protocol to use for the outgoing SMTP relay. domain: The domain used by the emails sent from SMTP relay. 
skip_ssl_verify: Specifies if certificate trust verification is skipped in the SMTP relay. + smtp_sender: Optional sender email address for outgoing notifications. + recipients: List of recipient email addresses for notifications. """ @property @@ -248,6 +294,27 @@ def skip_ssl_verify(self) -> bool: typing.cast(str, self.relation.data[self.relation.app].get("skip_ssl_verify")) ) + @property + def smtp_sender(self) -> Optional[str]: + """Fetch the SMTP sender from the relation. + + Returns: + smtp_sender: Optional sender email address for outgoing notifications. + """ + assert self.relation.app + return self.relation.data[self.relation.app].get("smtp_sender") + + @property + def recipients(self) -> List[str]: + """Fetch the SMTP recipients from the relation. + + Returns: + recipients: list of recipient email addresses for notifications. + """ + assert self.relation.app + raw = self.relation.data[self.relation.app].get("recipients") + return parse_recipients(raw) + class SmtpRequiresEvents(ops.CharmEvents): """SMTP events. @@ -307,24 +374,27 @@ def get_relation_data_from_relation( SecretError: if the secret can't be read. 
""" assert relation.app - relation_data = relation.data[relation.app] - if not relation_data: + raw_relation_data = relation.data[relation.app] + if not raw_relation_data: return None - password = relation_data.get("password") - if password is None and relation_data.get("password_id"): + data: Dict[str, Any] = dict(raw_relation_data) + + password = data.get("password") + if password is None and data.get("password_id"): try: password = ( - self.model.get_secret(id=relation_data.get("password_id")) + self.model.get_secret(id=data["password_id"]) .get_content(refresh=True) .get("password") ) except ops.model.ModelError as exc: - raise SecretError( - f"Could not consume secret {relation_data.get('password_id')}" - ) from exc + raise SecretError(f"Could not consume secret {data.get('password_id')}") from exc + + # normalize recipients + data["recipients"] = parse_recipients(data.get("recipients")) - return SmtpRelationData(**{**relation_data, "password": password}) # type: ignore + return SmtpRelationData(**{**data, "password": password}) def _is_relation_data_valid(self, relation: ops.Relation) -> bool: """Validate the relation data. @@ -431,3 +501,58 @@ def update_relation_data(self, relation: ops.Relation, smtp_data: SmtpRelationDa logger.info("update data in relation id:%s", relation.id) relation_data.clear() relation_data.update(new_data) + + +def parse_recipients(raw: Any) -> list[str]: + """Normalize SMTP recipient input into a list of email strings. + + The function produces a normalized list[str] so that downstream validation (EmailStr) + can be applied consistently. + + Args: + raw: Recipient input as received from relation data, charm config, + May be None, str, or list. + + Accepted input forms: + - None or empty string + - list of stripped string values + - JSON list string + - Comma-separated string + - Single address string + + Returns: + A list of recipient strings. The email correctness is validated later by EmailStr. 
+
+    Raises:
+        TypeError: If raw is not None, str or list.
+        ValueError: If a JSON-encoded value does not decode to a list.
+    """
+    if raw is None:
+        return []
+
+    if isinstance(raw, list):
+        return [str(x).strip() for x in raw if str(x).strip()]
+
+    if not isinstance(raw, str):
+        raise TypeError("recipients must be a string, list, or None")
+
+    s = raw.strip()
+    if not s:
+        return []
+
+    # JSON list string
+    if s.startswith("["):
+        loaded = json.loads(s)
+        if not isinstance(loaded, list):
+            raise ValueError("recipients JSON must decode to a list")
+        return [str(x).strip() for x in loaded if str(x).strip()]
+
+    # JSON without brackets: '"a@x.com", "b@y.com"'
+    if '"' in s and "," in s:
+        loaded = json.loads(f"[{s}]")
+        if not isinstance(loaded, list):
+            raise ValueError("recipients must decode to a list")
+        return [str(x).strip() for x in loaded if str(x).strip()]
+
+    # comma-separated or single
+    return [p.strip() for p in s.split(",") if p.strip()]