From 2a27f385df71078ae0899866c52050c48bdae8e3 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 19 Aug 2025 12:44:17 -0400 Subject: [PATCH 001/131] [minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher. --- plugins/module_utils/constants.py | 14 + plugins/module_utils/nd.py | 79 ++--- plugins/module_utils/nd_config_collection.py | 295 ++++++++++++++++++ plugins/module_utils/nd_network_resources.py | 202 ++++++++++++ plugins/module_utils/utils.py | 32 ++ plugins/modules/nd_local_user.py | 269 ++++++++++++++++ .../targets/nd_local_user/tasks/main.yml | 134 ++++++++ 7 files changed, 974 insertions(+), 51 deletions(-) create mode 100644 plugins/module_utils/nd_config_collection.py create mode 100644 plugins/module_utils/nd_network_resources.py create mode 100644 plugins/module_utils/utils.py create mode 100644 plugins/modules/nd_local_user.py create mode 100644 tests/integration/targets/nd_local_user/tasks/main.yml diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 10de9edf..cbba61b3 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -157,6 +157,11 @@ "restart", "delete", "update", + "merged", + "replaced", + "overridden", + "deleted", + "gathered", ) INTERFACE_FLOW_RULES_TYPES_MAPPING = {"port_channel": "PORTCHANNEL", "physical": "PHYSICAL", "l3out_sub_interface": "L3_SUBIF", "l3out_svi": "SVI"} @@ -170,3 +175,12 @@ ND_SETUP_NODE_DEPLOYMENT_TYPE = {"physical": "cimc", "virtual": "vnode"} BACKUP_TYPE = {"config_only": "config-only", None: "config-only", "": "config-only", "full": "full"} + +USER_ROLES_MAPPING = { + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", +} diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 03ffc85f..5f528bb8 100644 --- a/plugins/module_utils/nd.py +++ 
b/plugins/module_utils/nd.py @@ -18,7 +18,6 @@ from ansible.module_utils.basic import json from ansible.module_utils.basic import env_fallback from ansible.module_utils.six import PY3 -from ansible.module_utils.six.moves import filterfalse from ansible.module_utils.six.moves.urllib.parse import urlencode from ansible.module_utils._text import to_native, to_text from ansible.module_utils.connection import Connection @@ -73,53 +72,27 @@ def cmp(a, b): def issubset(subset, superset): - """Recurse through nested dictionary and compare entries""" + """Recurse through a nested dictionary and check if it is a subset of another.""" - # Both objects are the same object - if subset is superset: - return True - - # Both objects are identical - if subset == superset: - return True - - # Both objects have a different type - if isinstance(subset) is not isinstance(superset): + if type(subset) is not type(superset): return False + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + for key, value in subset.items(): - # Ignore empty values if value is None: - return True + continue - # Item from subset is missing from superset if key not in superset: return False - # Item has different types in subset and superset - if isinstance(superset.get(key)) is not isinstance(value): - return False + superset_value = superset.get(key) - # Compare if item values are subset - if isinstance(value, dict): - if not issubset(superset.get(key), value): - return False - elif isinstance(value, list): - try: - # NOTE: Fails for lists of dicts - if not set(value) <= set(superset.get(key)): - return False - except TypeError: - # Fall back to exact comparison for lists of dicts - diff = list(filterfalse(lambda i: i in value, superset.get(key))) + list(filterfalse(lambda j: j in superset.get(key), value)) - if diff: - return False - elif isinstance(value, set): - if not value <= superset.get(key): - return False - 
else: - if not value == superset: - return False + if not issubset(value, superset_value): + return False return True @@ -212,7 +185,7 @@ def __init__(self, module): self.previous = dict() self.proposed = dict() self.sent = dict() - self.stdout = None + self.stdout = "" # debug output self.has_modified = False @@ -266,8 +239,8 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: - if data: + if data is not None: info = self.connection.send_request(method, uri, json.dumps(data)) else: info = self.connection.send_request(method, uri) self.result["data"] = data @@ -324,6 +297,8 @@ def request( self.fail_json(msg="ND Error: {0}".format(self.error.get("message")), data=data, info=info) self.error = payload if "code" in payload: + if self.status == 404 and ignore_not_found_error: + return {} self.fail_json(msg="ND Error {code}: {message}".format(**payload), data=data, info=info, payload=payload) elif "messages" in payload and len(payload.get("messages")) > 0: self.fail_json(msg="ND Error {code} ({severity}): {message}".format(**payload["messages"][0]), data=data, info=info, payload=payload) @@ -520,30 +495,27 @@ def get_diff(self, unwanted=None): if not self.existing and self.sent: return True - existing = self.existing - sent = self.sent + existing = deepcopy(self.existing) + sent = deepcopy(self.sent) for key in unwanted: if isinstance(key, str): if key in existing: - try: - del existing[key] - except KeyError: - pass - try: - del sent[key] - except KeyError: - pass + del existing[key] + if key in sent: + del sent[key] elif isinstance(key, list): key_path, last = key[:-1], key[-1] try: existing_parent = reduce(dict.get, key_path, existing) - del existing_parent[last] + if 
existing_parent is not None: + del existing_parent[last] except KeyError: pass try: sent_parent = reduce(dict.get, key_path, sent) - del sent_parent[last] + if sent_parent is not None: + del sent_parent[last] except KeyError: pass return not issubset(sent, existing) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py new file mode 100644 index 00000000..1cf86756 --- /dev/null +++ b/plugins/module_utils/nd_config_collection.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import sys +from copy import deepcopy +from functools import reduce + +# Python 2 and 3 compatibility (To be removed in the future) +if sys.version_info[0] >= 3: + from collections.abc import MutableMapping + iteritems = lambda d: d.items() +else: + from collections import MutableMapping + iteritems = lambda d: d.iteritems() + +# NOTE: Single-Index Hybrid Collection for ND Network Resource Module +class NDConfigCollection(MutableMapping): + + def __init__(self, identifier_keys, data=None, use_composite_keys=False): + self.identifier_keys = identifier_keys + self.use_composite_keys = use_composite_keys + + # Dual Storage + self._list = [] + self._map = {} + + if data: + for item in data: + self.add(item) + + # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") + def _get_identifier_value(self, config): + """Generates the internal map key based on the selected mode.""" + if self.use_composite_keys: + # Mode: Composite (Tuple of ALL keys) + values = [] + for key in self.identifier_keys: + val = config.get(key) + if val is None: + return None # Missing a required part + values.append(val) + return tuple(values) + else: + # Mode: Priority (First available key) + for key in 
self.identifier_keys: + if key in config: + return config[key] + return None + + # Magic Methods + def __getitem__(self, key): + return self._map[key] + + def __setitem__(self, key, value): + if key in self._map: + old_ref = self._map[key] + try: + idx = self._list.index(old_ref) + self._list[idx] = value + self._map[key] = value + except ValueError: + pass + else: + # Add new + self._list.append(value) + self._map[key] = value + + def __delitem__(self, key): + if key in self._map: + obj_ref = self._map[key] + del self._map[key] + self._list.remove(obj_ref) + else: + raise KeyError(key) + + def __iter__(self): + return iter(self._map) + + def __len__(self): + return len(self._list) + + def __eq__(self, other): + if isinstance(other, NDConfigCollection): + return self._list == other._list + elif isinstance(other, list): + return self._list == other + elif isinstance(other, dict): + return self._map == other + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return str(self._list) + + # Helper Methods + def _filter_dict(self, data, ignore_keys): + return {k: v for k, v in iteritems(data) if k not in ignore_keys} + + def _issubset(self, subset, superset): + if type(subset) is not type(superset): + return False + + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + + for key, value in iteritems(subset): + if value is None: + continue + + if key not in superset: + return False + + superset_value = superset.get(key) + + if not self._issubset(value, superset_value): + return False + return True + + def _remove_unwanted_keys(self, data, unwanted_keys): + for key in unwanted_keys: + if isinstance(key, str): + if key in data: + del data[key] + elif isinstance(key, list) and len(key) > 0: + key_path, last = key[:-1], key[-1] + try: + parent = reduce(lambda d, k: d.get(k) if isinstance(d, dict) else None, key_path, data) + if 
isinstance(parent, dict) and last in parent: + del parent[last] + except (KeyError, TypeError): + pass + return data + + # Core Operations + def to_list(self): + return self._list + + def to_dict(self): + return self._map + + def copy(self): + return NDConfigCollection(self.identifier_keys, deepcopy(self._list), self.use_composite_keys) + + def add(self, config): + ident = self._get_identifier_value(config) + if ident is None: + mode = "Composite" if self.use_composite_keys else "Priority" + raise ValueError("[{0} Mode] Config missing required keys: {1}".format(mode, self.identifier_keys)) + + if ident in self._map: + self.__setitem__(ident, config) + else: + self._list.append(config) + self._map[ident] = config + + def merge(self, new_config): + ident = self._get_identifier_value(new_config) + if ident and ident in self._map: + self._map[ident].update(new_config) + else: + self.add(new_config) + + def replace(self, new_config): + ident = self._get_identifier_value(new_config) + if ident: + self[ident] = new_config + else: + self.add(new_config) + + def remove(self, identifiers): + # Try Map Removal + try: + target_key = self._get_identifier_value(identifiers) + if target_key and target_key in self._map: + self.__delitem__(target_key) + return + except Exception: + pass + + # Fallback: Linear Removal + to_remove = [] + for config in self._list: + match = True + for k, v in iteritems(identifiers): + if config.get(k) != v: + match = False + break + if match: + to_remove.append(self._get_identifier_value(config)) + + for ident in to_remove: + if ident in self._map: + self.__delitem__(ident) + + def get_by_key(self, key, default=None): + return self._map.get(key, default) + + def get_by_idenfiers(self, identifiers, default=None): + # Try Map Lookup + target_key = self._get_identifier_value(identifiers) + if target_key and target_key in self._map: + return self._map[target_key] + + # Fallback: Linear Lookup + valid_search_keys = [k for k in identifiers if k in 
self.identifier_keys] + if not valid_search_keys: + return default + + for config in self._list: + match = True + for k in valid_search_keys: + if config.get(k) != identifiers[k]: + match = False + break + if match: + return config + return default + + # Diff logic + def get_diff_config(self, new_config, unwanted_keys=None): + unwanted_keys = unwanted_keys or [] + + ident = self._get_identifier_value(new_config) + + if not ident or ident not in self._map: + return "new" + + existing = deepcopy(self._map[ident]) + sent = deepcopy(new_config) + + self._remove_unwanted_keys(existing, unwanted_keys) + self._remove_unwanted_keys(sent, unwanted_keys) + + is_subset = self._issubset(sent, existing) + + if is_subset: + return "no_diff" + else: + return "changed" + + def get_diff_collection(self, new_collection, unwanted_keys=None): + if not isinstance(new_collection, NDConfigCollection): + raise TypeError("Argument must be an NDConfigCollection") + + if len(self) != len(new_collection): + return True + + for item in new_collection.to_list(): + if self.get_diff_config(item, unwanted_keys) != "no_diff": + return True + + for ident in self._map: + if ident not in new_collection._map: + return True + + return False + + def get_diff_identifiers(self, new_collection): + current_identifiers = set(self._map.keys()) + other_identifiers = set(new_collection._map.keys()) + + return list(current_identifiers - other_identifiers) + + # Sanitize Operations + def sanitize(self, keys_to_remove=None, values_to_remove=None, remove_none_values=False): + keys_to_remove = keys_to_remove or [] + values_to_remove = values_to_remove or [] + + def recursive_clean(obj): + if isinstance(obj, dict): + keys = list(obj.keys()) + for k in keys: + v = obj[k] + if k in keys_to_remove or v in values_to_remove or (remove_none_values and v is None): + del obj[k] + continue + if isinstance(v, (dict, list)): + recursive_clean(v) + elif isinstance(obj, list): + for item in obj: + 
recursive_clean(item) + + for item in self._list: + recursive_clean(item) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py new file mode 100644 index 00000000..b73b24e7 --- /dev/null +++ b/plugins/module_utils/nd_network_resources.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from copy import deepcopy +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED + +# TODO: Make further enhancement to logs and outputs +# NOTE: ONLY works for new API endpoints introduced in ND v4.1.0 and later +class NDNetworkResourceModule(NDModule): + + def __init__(self, module, path, identifier_keys, use_composite_keys=False, actions_overwrite_map=None): + super().__init__(module) + + # Initial variables + self.path = path + self.actions_overwrite_map = actions_overwrite_map or {} + self.identifier_keys = identifier_keys + self.use_composite_keys = use_composite_keys + + # Initial data + self.init_all_data = self._query_all() + + # Info ouput + self.existing = NDConfigCollection(identifier_keys, data=self.init_all_data) + self.previous = NDConfigCollection(identifier_keys) + self.proposed = NDConfigCollection(identifier_keys) + self.sent = NDConfigCollection(identifier_keys) + + # Debug output + self.nd_logs = [] + + # Helper variables + self.current_identifier = "" + self.existing_config = {} + self.proposed_config = {} + + # Actions Operations + def actions_overwrite(action): + def decorator(func): + def wrapper(self, *args, **kwargs): + overwrite_action = 
self.actions_overwrite_map.get(action) + if callable(overwrite_action): + return overwrite_action(self) + else: + return func(self, *args, **kwargs) + return wrapper + return decorator + + @actions_overwrite("create") + def _create(self): + if not self.module.check_mode: + return self.request(path=self.path, method="POST", data=self.proposed_config) + + @actions_overwrite("update") + def _update(self): + if not self.module.check_mode: + object_path = "{0}/{1}".format(self.path, self.current_identifier) + return self.request(path=object_path, method="PUT", data=self.proposed_config) + + @actions_overwrite("delete") + def _delete(self): + if not self.module.check_mode: + object_path = "{0}/{1}".format(self.path, self.current_identifier) + self.request(path=object_path, method="DELETE") + + @actions_overwrite("query_all") + def _query_all(self): + return self.query_obj(self.path) + + def format_log(self, identifier, status, after_data, sent_payload_data=None): + item_result = { + "identifier": identifier, + "status": status, + "before": self.existing_config, + "after": deepcopy(after_data) if after_data is not None else self.existing_config, + "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {}, + } + + if not self.module.check_mode and self.url is not None: + item_result.update( + { + "method": self.method, + "response": self.response, + "status": self.status, + "url": self.url, + } + ) + + self.nd_logs.append(item_result) + + # Logs and Outputs formating Operations + def add_logs_and_ouputs(self): + if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + if self.params.get("output_level") in ("debug", "info"): + self.result["previous"] = self.previous.to_list() + if not self.has_modified and self.previous.get_diff_collection(self.existing): + self.result["changed"] = True + if self.stdout: + self.result["stdout"] = self.stdout + + if self.params.get("output_level") == "debug": + self.result["nd_logs"] = 
self.nd_logs + if self.url is not None: + self.result["httpapi_logs"] = self.httpapi_logs + + if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + self.result["sent"] = self.sent.to_list() + self.result["proposed"] = self.proposed.to_list() + + self.result["current"] = self.existing.to_list() + + # Manage State Operations + def manage_state(self, state, new_configs, unwanted_keys=None, override_exceptions=None): + unwanted_keys = unwanted_keys or [] + override_exceptions = override_exceptions or [] + + self.proposed = NDConfigCollection(self.identifier_keys, data=new_configs) + self.proposed.sanitize() + self.previous = self.existing.copy() + + if state in ["merged", "replaced", "overridden"]: + for identifier, config in self.proposed.items(): + + diff_config_info = self.existing.get_diff_config(config, unwanted_keys) + self.current_identifier = identifier + self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) + self.proposed_config = config + request_response = None + sent_payload = None + status = "no_change" + + if diff_config_info != "no_diff": + if state == "merged": + self.existing.merge(config) + self.proposed_config = self.existing[identifier] + else: + self.existing.replace(config) + + if diff_config_info == "changed": + request_response = self._update() + status = "updated" + else: + request_response = self._create() + status = "created" + + if not self.module.check_mode: + self.sent.add(self.proposed_config) + sent_payload = self.proposed_config + else: + request_response = self.proposed_config + + self.format_log(identifier, status, request_response, sent_payload) + + + if state == "overridden": + diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + for identifier in diff_identifiers: + if identifier not in override_exceptions: + self.current_identifier = identifier + self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) + self._delete() + del self.existing[identifier] + 
self.format_log(identifier, "deleted", after_data={}) + + + elif state == "deleted": + for identifier, config in self.proposed.items(): + if identifier in self.existing.keys(): + self.current_identifier = identifier + self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) + self.proposed_config = config + self._delete() + del self.existing[identifier] + self.format_log(identifier, "deleted", after_data={}) + + # Outputs Operations + def fail_json(self, msg, **kwargs): + self.add_logs_and_ouputs() + + self.result.update(**kwargs) + self.module.fail_json(msg=msg, **self.result) + + def exit_json(self, **kwargs): + self.add_logs_and_ouputs() + + if self.module._diff and self.result.get("changed") is True: + self.result["diff"] = dict( + before=self.previous.to_list(), + after=self.existing.to_list(), + ) + + self.result.update(**kwargs) + self.module.exit_json(**self.result) diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py new file mode 100644 index 00000000..5bf0a0f0 --- /dev/null +++ b/plugins/module_utils/utils.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from copy import deepcopy + + +def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): + if keys is None: + keys = [] + if values is None: + values = [] + + result = deepcopy(dict_to_sanitize) + for k, v in dict_to_sanitize.items(): + if k in keys: + del result[k] + elif v in values or (v is None and remove_none_values): + del result[k] + elif isinstance(v, dict) and recursive: + result[k] = sanitize_dict(v, keys, values) + elif isinstance(v, list) and recursive: + for index, item in enumerate(v): + if isinstance(item, dict): + result[k][index] = sanitize_dict(item, keys, values) + return 
result \ No newline at end of file diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py new file mode 100644 index 00000000..552df3b7 --- /dev/null +++ b/plugins/modules/nd_local_user.py @@ -0,0 +1,269 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"} + +DOCUMENTATION = r""" +--- +module: nd_local_user +version_added: "1.4.0" +short_description: Manage local users on Cisco Nexus Dashboard +description: +- Manage local users on Cisco Nexus Dashboard (ND). +- It supports creating, updating, querying, and deleting local users. +author: +- Gaspard Micol (@gmicol) +options: + config: + description: + - The list of the local users to configure. + type: list + elements: dict + suboptions: + email: + description: + - The email address of the local user. + type: str + login_id: + description: + - The login ID of the local user. + - The O(config.login_id) must be defined when creating, updating or deleting a local user. + type: str + required: true + first_name: + description: + - The first name of the local user. + type: str + last_name: + description: + - The last name of the local user. + type: str + user_password: + description: + - The password of the local user. + - Password must have a minimum of 8 characters to a maximum of 64 characters. + - Password must have three of the following; one number, one lower case character, one upper case character, one special character. + - The O(config.user_password) must be defined when creating a new local_user. + type: str + reuse_limitation: + description: + - The number of different passwords a user must use before they can reuse a previous one. 
+ - It defaults to C(0) when unset during creation. + type: int + time_interval_limitation: + description: + - The minimum time period that must pass before a previous password can be reused. + - It defaults to C(0) when unset during creation. + type: int + security_domains: + description: + - The list of Security Domains and Roles for the local user. + - At least, one Security Domain must be defined when creating a new local user. + type: list + elements: dict + suboptions: + name: + description: + - The name of the Security Domain to which the local user is given access. + type: str + required: true + aliases: [ security_domain_name, domain_name ] + roles: + description: + - The Permission Roles of the local user within the Security Domain. + type: list + elements: str + choices: [ fabric_admin, observer, super_admin, support_engineer, approver, designer ] + aliases: [ domains ] + remote_id_claim: + description: + - The remote ID claim of the local user. + type: str + remote_user_authorization: + description: + - To enable/disable the Remote User Authorization of the local user. + - Remote User Authorization is used for signing into Nexus Dashboard when using identity providers that cannot provide authorization claims. + Once this attribute is enabled, the local user ID cannot be used to directly login to Nexus Dashboard. + - It defaults to C(false) when unset during creation. + type: bool + state: + description: + - The desired state of the network resources on the Cisco Nexus Dashboard. + - Use O(state=merged) to create new resources and updates existing ones as defined in your configuration. + Resources on ND that are not specified in the configuration will be left unchanged. + - Use O(state=replaced) to replace the resources specified in the configuration. + - Use O(state=overridden) to enforce the configuration as the single source of truth. + The resources on ND will be modified to exactly match the configuration. 
+ Any resource existing on ND but not present in the configuration will be deleted. Use with extra caution. + - Use O(state=deleted) to remove the resources specified in the configuration from the Cisco Nexus Dashboard. + type: str + default: merged + choices: [ merged, replaced, overridden, deleted ] +extends_documentation_fragment: +- cisco.nd.modules +- cisco.nd.check_mode +notes: +- This module is only supported on Nexus Dashboard having version 4.1.0 or higher. +- This module is not idempotent when creating or updating a local user object when O(config.user_password) is used. +""" + +EXAMPLES = r""" +- name: Create a new local user + cisco.nd.nd_local_user: + config: + - email: user@example.com + login_id: local_user + first_name: User first name + last_name: User last name + user_password: localUserPassword1% + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + name: all + roles: + - observer + - support_engineer + remote_id_claim: remote_user + remote_user_authorization: true + state: merged + register: result + +- name: Create local user with minimal configuration + cisco.nd.nd_local_user: + config: + - login_id: local_user_min + user_password: localUserMinuser_password + security_domain: all + state: merged + +- name: Update local user + cisco.nd.nd_local_user: + config: + - email: udpateduser@example.com + login_id: local_user + first_name: Updated user first name + last_name: Updated user last name + user_password: updatedLocalUserPassword1% + reuse_limitation: 25 + time_interval_limitation: 15 + security_domains: + - name: all + roles: super_admin + - name: ansible_domain + roles: observer + roles: super_admin + remote_id_claim: "" + remote_user_authorization: false + state: replaced + +- name: Delete a local user + cisco.nd.nd_local_user: + config: + - login_id: local_user + state: deleted +""" + +RETURN = r""" +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd 
import nd_argument_spec, NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resources import NDNetworkResourceModule +from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING + + +# Actions overwrite functions +def quey_all_local_users(nd): + return nd.query_obj(nd.path).get("localusers") + + +def main(): + argument_spec = nd_argument_spec() + argument_spec.update( + config=dict( + type="list", + elements="dict", + options=dict( + email=dict(type="str"), + login_id=dict(type="str", required=True), + first_name=dict(type="str"), + last_name=dict(type="str"), + user_password=dict(type="str", no_log=True), + reuse_limitation=dict(type="int"), + time_interval_limitation=dict(type="int"), + security_domains=dict( + type="list", + elements="dict", + options=dict( + name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), + roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + ), + aliases=["domains"], + ), + remote_id_claim=dict(type="str"), + remote_user_authorization=dict(type="bool"), + ), + ), + override_exceptions=dict(type="list", elements="str"), + state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), + ) + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + ) + + path = "/api/v1/infra/aaa/localUsers" + identifier_keys = ["loginID"] + actions_overwrite_map = {"query_all": quey_all_local_users} + + nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) + + state = nd.params.get("state") + config = nd.params.get("config") + override_exceptions = nd.params.get("override_exceptions") + new_config = [] + for object in config: + payload = { + "email": object.get("email"), + "firstName": object.get("first_name"), + "lastName": object.get("last_name"), + "loginID": object.get("login_id"), + "password": object.get("user_password"), + 
"remoteIDClaim": object.get("remote_id_claim"), + "xLaunch": object.get("remote_user_authorization"), + } + + if object.get("security_domains"): + payload["rbac"] = { + "domains": { + security_domain.get("name"): { + "roles": ( + [USER_ROLES_MAPPING.get(role) for role in security_domain["roles"]] if isinstance(security_domain.get("roles"), list) else [] + ) + } + for security_domain in object["security_domains"] + }, + } + if object.get("reuse_limitation") or object.get("time_interval_limitation"): + payload["passwordPolicy"] = { + "reuseLimitation": object.get("reuse_limitation"), + "timeIntervalLimitation": object.get("time_interval_limitation"), + } + new_config.append(payload) + + nd.manage_state(state=state, new_configs=new_config, unwanted_keys=[["passwordPolicy", "passwordChangeTime"], ["userID"]], override_exceptions=override_exceptions) + + nd.exit_json() + + +if __name__ == "__main__": + main() diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml new file mode 100644 index 00000000..77e55cd1 --- /dev/null +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -0,0 +1,134 @@ +# Test code for the ND modules +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +- name: Test that we have a Nexus Dashboard host, username and password + ansible.builtin.fail: + msg: 'Please define the following variables: ansible_host, ansible_user and ansible_password.' 
+ when: ansible_host is not defined or ansible_user is not defined or ansible_password is not defined + +- name: Set vars + ansible.builtin.set_fact: + nd_info: &nd_info + output_level: '{{ api_key_output_level | default("debug") }}' + +- name: Ensure local users do not exist before test starts + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: ansible_local_user + - login_id: ansible_local_user_2 + state: deleted + +# CREATE +- name: Create local users with full and minimum configuration (check mode) + cisco.nd.nd_local_user: &create_local_user + <<: *nd_info + config: + - email: ansibleuser@example.com + login_id: ansible_local_user + first_name: Ansible first name + last_name: Ansible last name + user_password: ansibleLocalUserPassword1%Test + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + - name: all + roles: + - observer + - support_engineer + remote_id_claim: ansible_remote_user + remote_user_authorization: true + - login_id: ansible_local_user_2 + user_password: ansibleLocalUser2Password1%Test + security_domains: + - name: all + state: merged + check_mode: true + register: cm_create_local_user + +- name: Create local users with full and minimum configuration (normal mode) + cisco.nd.nd_local_user: + <<: *create_local_user + register: nm_create_local_user + +# UPDATE +- name: Update all ansible_local_user's attributes (check mode) + cisco.nd.nd_local_user: &update_first_local_user + <<: *nd_info + config: + - email: updatedansibleuser@example.com + login_id: ansible_local_user + first_name: Updated Ansible first name + last_name: Updated Ansible last name + user_password: updatedAnsibleLocalUserPassword1% + reuse_limitation: 25 + time_interval_limitation: 15 + security_domains: + - name: all + roles: super_admin + remote_id_claim: "" + remote_user_authorization: false + state: replaced + check_mode: true + register: cm_update_local_user + +- name: Update local user (normal mode) + cisco.nd.nd_local_user: + <<: 
*update_first_local_user + register: nm_update_local_user + +- name: Update all ansible_local_user_2's attributes except password + cisco.nd.nd_local_user: &update_second_local_user + <<: *nd_info + config: + - email: secondansibleuser@example.com + login_id: ansible_local_user_2 + first_name: Second Ansible first name + last_name: Second Ansible last name + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + - name: all + roles: fabric_admin + remote_id_claim: ansible_remote_user_2 + remote_user_authorization: true + state: merged + register: nm_update_local_user_2 + +- name: Update all ansible_local_user_2's attributes except password again (idempotency) + cisco.nd.nd_local_user: + <<: *update_second_local_user + register: nm_update_local_user_2_again + + +# DELETE +- name: Delete local user by name (check mode) + cisco.nd.nd_local_user: &delete_local_user + <<: *nd_info + config: + - login_id: ansible_local_user + state: deleted + check_mode: true + register: cm_delete_local_user + +- name: Delete local user by name (normal mode) + cisco.nd.nd_local_user: + <<: *delete_local_user + register: nm_delete_local_user + +- name: Delete local user again (idempotency test) + cisco.nd.nd_local_user: + <<: *delete_local_user + register: nm_delete_local_user_again + + +# CLEAN UP +- name: Ensure local users do not exist + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: ansible_local_user + - login_id: ansible_local_user_2 + state: deleted From a04235c090afab857a9045eb83c035945c106d23 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 15 Jan 2026 11:47:32 -0500 Subject: [PATCH 002/131] [ignore] First Pydantic implementation: Add Pydantic Models for nd_local_user. 
--- .../module_utils/models/local_user_model.py | 142 ++++++++++++++++++ plugins/module_utils/nd_config_collection.py | 1 + plugins/module_utils/nd_network_resources.py | 2 + plugins/modules/nd_local_user.py | 5 +- 4 files changed, 148 insertions(+), 2 deletions(-) create mode 100644 plugins/module_utils/models/local_user_model.py diff --git a/plugins/module_utils/models/local_user_model.py b/plugins/module_utils/models/local_user_model.py new file mode 100644 index 00000000..f8de1f46 --- /dev/null +++ b/plugins/module_utils/models/local_user_model.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json +from typing import List, Dict, Any, Optional +from pydantic import BaseModel, ConfigDict, Field, field_validator + +# TODO: Add Field validation methods +# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel +# TODO: Maybe define our own baseModel +# TODO: Look at ansible aliases +from pydantic import BaseModel, Field, ConfigDict +from typing import List, Dict, Any, Optional + +class SecurityDomainModel(BaseModel): + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + name: str = Field(alias="name") + roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") + + +class LocalUserModel(BaseModel): + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + email: str = Field(default="", alias="email") + login_id: str = Field(alias="loginID") + first_name: str = Field(default="", alias="firstName") + last_name: str = Field(default="", alias="lastName") + user_password: str = Field(alias="password") + 
reuse_limitation: int = Field(default=0, alias="reuseLimitation") + time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") + security_domains: List[SecurityDomainModel] = Field(default_factory=list, alias="domains") + remote_id_claim: str = Field(default="", alias="remoteIDClaim") + remote_user_authorization: bool = Field(default=False, alias="xLaunch") + + def to_api_payload(self, user_roles_mapping: Dict[str, str] = None) -> Dict[str, Any]: + """Convert the model to the specific API payload format required.""" + if user_roles_mapping is None: + user_roles_mapping = {} + + base_data = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + + payload = { + "email": base_data.get("email"), + "firstName": base_data.get("firstName"), + "lastName": base_data.get("lastName"), + "loginID": base_data.get("loginID"), + "password": base_data.get("password"), + "remoteIDClaim": base_data.get("remoteIDClaim"), + "xLaunch": base_data.get("xLaunch"), + } + + if self.security_domains: + payload["rbac"] = { + "domains": { + domain.name: { + "roles": [ + user_roles_mapping.get(role, role) for role in domain.roles + ] + } + for domain in self.security_domains + } + } + + if self.reuse_limitation or self.time_interval_limitation: + payload["passwordPolicy"] = { + "reuseLimitation": self.reuse_limitation, + "timeIntervalLimitation": self.time_interval_limitation, + } + + return payload + + @classmethod + def from_api_payload( + cls, + payload: Dict[str, Any], + reverse_user_roles_mapping: Optional[Dict[str, str]] = None + ) -> 'LocalUserModel': + + if reverse_user_roles_mapping is None: + reverse_user_roles_mapping = {} + + user_data = { + "email": payload.get("email", ""), + "loginID": payload.get("loginID", ""), + "firstName": payload.get("firstName", ""), + "lastName": payload.get("lastName", ""), + "password": payload.get("password", ""), + "remoteIDClaim": payload.get("remoteIDClaim", ""), + "xLaunch": 
payload.get("xLaunch", False), + } + + password_policy = payload.get("passwordPolicy", {}) + user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) + user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) + + domains_data = [] + rbac = payload.get("rbac", {}) + if rbac and "domains" in rbac: + for domain_name, domain_config in rbac["domains"].items(): + # Map API roles back to internal roles + api_roles = domain_config.get("roles", []) + internal_roles = [ + reverse_user_roles_mapping.get(role, role) for role in api_roles + ] + + domain_data = { + "name": domain_name, + "roles": internal_roles + } + domains_data.append(domain_data) + + user_data["domains"] = domains_data + + return cls(**user_data) + + # @classmethod + # def from_api_payload_json( + # cls, + # json_payload: str, + # reverse_user_roles_mapping: Optional[Dict[str, str]] = None + # ) -> 'LocalUserModel': + + # payload = json.loads(json_payload) + # return cls.from_api_payload(payload, reverse_user_roles_mapping) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1cf86756..8f0058bb 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -20,6 +20,7 @@ from collections import MutableMapping iteritems = lambda d: d.iteritems() +# TODO: Adapt to Pydantic Models # NOTE: Single-Index Hybrid Collection for ND Network Resource Module class NDConfigCollection(MutableMapping): diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index b73b24e7..3b549da1 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -14,6 +14,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED # TODO: Make further enhancement to logs and outputs +# TODO: Adapt to Pydantic Models # NOTE: ONLY works 
for new API endpoints introduced in ND v4.1.0 and later class NDNetworkResourceModule(NDModule): @@ -98,6 +99,7 @@ def format_log(self, identifier, status, after_data, sent_payload_data=None): self.nd_logs.append(item_result) # Logs and Outputs formating Operations + # TODO: Move it to different file def add_logs_and_ouputs(self): if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if self.params.get("output_level") in ("debug", "info"): diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 552df3b7..4a5f1ad2 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -181,10 +181,11 @@ # Actions overwrite functions -def quey_all_local_users(nd): +def query_all_local_users(nd): return nd.query_obj(nd.path).get("localusers") +# TODO: Adapt to Pydantic Model def main(): argument_spec = nd_argument_spec() argument_spec.update( @@ -223,7 +224,7 @@ def main(): path = "/api/v1/infra/aaa/localUsers" identifier_keys = ["loginID"] - actions_overwrite_map = {"query_all": quey_all_local_users} + actions_overwrite_map = {"query_all": query_all_local_users} nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) From 7089148fbd458f5a9eb81189627bf9cb4562e560 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 20 Jan 2026 13:17:35 -0500 Subject: [PATCH 003/131] [ignore] Second Pydantic Implementation: Create a NDBaseModel to be inherited from future class models. Modify class models for local_user. 
--- plugins/module_utils/models/base.py | 57 +++++++ plugins/module_utils/models/local_user.py | 116 ++++++++++++++ .../module_utils/models/local_user_model.py | 142 ------------------ 3 files changed, 173 insertions(+), 142 deletions(-) create mode 100644 plugins/module_utils/models/base.py create mode 100644 plugins/module_utils/models/local_user.py delete mode 100644 plugins/module_utils/models/local_user_model.py diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py new file mode 100644 index 00000000..e7301d14 --- /dev/null +++ b/plugins/module_utils/models/base.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from abc import ABC, abstractmethod +from pydantic import BaseModel, ConfigDict +from typing import List, Dict, Any, Optional, ClassVar + + +class NDBaseModel(BaseModel, ABC): + + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + # TODO: find ways to redifine these var in every + identifiers: ClassVar[List[str]] = [] + use_composite_identifiers: ClassVar[bool] = False + + @abstractmethod + def to_payload(self) -> Dict[str, Any]: + pass + + @classmethod + @abstractmethod + def from_response(cls, response: Dict[str, Any]) -> 'NDBaseModel': + pass + + # TODO: Modify to make it more generic and Pydantic + # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") + def get_identifier_value(self) -> Any: + """Generates the internal map key based on the selected mode.""" + # if self.use_composite_keys: + # # Mode: Composite (Tuple of ALL keys) + # values = [] + # for key in self.identifier_keys: + # val = config.get(key) + # if val is None: + # return None # Missing a required part + # 
values.append(val) + # return tuple(values) + # else: + # # Mode: Priority (First available key) + # for key in self.identifier_keys: + # if key in config: + # return config[key] + # return None + pass diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py new file mode 100644 index 00000000..7877a5a5 --- /dev/null +++ b/plugins/module_utils/models/local_user.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from pydantic import Field, field_validator +from types import MappingProxyType +from typing import List, Dict, Any, Optional, ClassVar + +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel + +# TODO: Add Field validation methods +# TODO: define our own Field class for string versioning, ansible aliases +# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel +# TODO: Surclass BaseModel -> Priority +# TODO: Look at ansible aliases + +# TODO: use constants.py file in the future +user_roles_mapping = MappingProxyType({}) + + +class LocalUserSecurityDomainModel(NDBaseModel): + + name: str = Field(alias="name") + roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") + + def to_payload(self) -> Dict[str, Any]: + return { + self.name: { + "roles": [ + user_roles_mapping.get(role, role) for role in self.roles + ] + } + } + + @classmethod + def from_response(cls, name: str, domain_config: List[str]) -> 'NDBaseModel': + internal_roles = [user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + + domain_data = { + "name": name, + "roles": internal_roles + } + + return cls(**domain_data) + + +class LocalUserModel(NDBaseModel): + + # TODO: Define a way to generate it (look at NDBaseModel 
comments) + identifiers: ClassVar[List[str]] = ["login_id"] + + # TODO: Use Optinal to remove default values (get them from API response instead) + email: str = Field(default="", alias="email") + login_id: str = Field(alias="loginID") + first_name: str = Field(default="", alias="firstName") + last_name: str = Field(default="", alias="lastName") + user_password: str = Field(alias="password") + reuse_limitation: int = Field(default=0, alias="reuseLimitation") + time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") + security_domains: List[LocalUserSecurityDomainModel] = Field(default_factory=list, alias="domains") + remote_id_claim: str = Field(default="", alias="remoteIDClaim") + remote_user_authorization: bool = Field(default=False, alias="xLaunch") + + def to_payload(self) -> Dict[str, Any]: + """Convert the model to the specific API payload format required.""" + + payload = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + + if self.security_domains: + payload["rbac"] = {"domains": {}} + for domain in self.security_domains: + payload["rbac"]["domains"].update(domain.to_api_payload()) + + if self.reuse_limitation or self.time_interval_limitation: + payload["passwordPolicy"] = { + "reuseLimitation": self.reuse_limitation, + "timeIntervalLimitation": self.time_interval_limitation, + } + + return payload + + @classmethod + def from_response(cls, payload: Dict[str, Any]) -> 'LocalUserModel': + + if reverse_user_roles_mapping is None: + reverse_user_roles_mapping = {} + + user_data = { + "email": payload.get("email"), + "loginID": payload.get("loginID"), + "firstName": payload.get("firstName"), + "lastName": payload.get("lastName"), + "password": payload.get("password"), + "remoteIDClaim": payload.get("remoteIDClaim"), + "xLaunch": payload.get("xLaunch"), + } + + password_policy = payload.get("passwordPolicy", {}) + user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) + 
user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) + + domains_data = [] + rbac = payload.get("rbac", {}) + if rbac and "domains" in rbac: + for domain_name, domain_config in rbac["domains"].items(): + domains_data.append(LocalUserSecurityDomainModel.from_api_response(domain_name, domain_config)) + + user_data["domains"] = domains_data + + return cls(**user_data) diff --git a/plugins/module_utils/models/local_user_model.py b/plugins/module_utils/models/local_user_model.py deleted file mode 100644 index f8de1f46..00000000 --- a/plugins/module_utils/models/local_user_model.py +++ /dev/null @@ -1,142 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2025, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -import json -from typing import List, Dict, Any, Optional -from pydantic import BaseModel, ConfigDict, Field, field_validator - -# TODO: Add Field validation methods -# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel -# TODO: Maybe define our own baseModel -# TODO: Look at ansible aliases -from pydantic import BaseModel, Field, ConfigDict -from typing import List, Dict, Any, Optional - -class SecurityDomainModel(BaseModel): - model_config = ConfigDict( - str_strip_whitespace=True, - use_enum_values=True, - validate_assignment=True, - populate_by_name=True, - ) - - name: str = Field(alias="name") - roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") - - -class LocalUserModel(BaseModel): - model_config = ConfigDict( - str_strip_whitespace=True, - use_enum_values=True, - validate_assignment=True, - populate_by_name=True, - ) - - email: str = Field(default="", alias="email") - login_id: str = Field(alias="loginID") - first_name: str = Field(default="", alias="firstName") - last_name: str = Field(default="", 
alias="lastName") - user_password: str = Field(alias="password") - reuse_limitation: int = Field(default=0, alias="reuseLimitation") - time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") - security_domains: List[SecurityDomainModel] = Field(default_factory=list, alias="domains") - remote_id_claim: str = Field(default="", alias="remoteIDClaim") - remote_user_authorization: bool = Field(default=False, alias="xLaunch") - - def to_api_payload(self, user_roles_mapping: Dict[str, str] = None) -> Dict[str, Any]: - """Convert the model to the specific API payload format required.""" - if user_roles_mapping is None: - user_roles_mapping = {} - - base_data = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) - - payload = { - "email": base_data.get("email"), - "firstName": base_data.get("firstName"), - "lastName": base_data.get("lastName"), - "loginID": base_data.get("loginID"), - "password": base_data.get("password"), - "remoteIDClaim": base_data.get("remoteIDClaim"), - "xLaunch": base_data.get("xLaunch"), - } - - if self.security_domains: - payload["rbac"] = { - "domains": { - domain.name: { - "roles": [ - user_roles_mapping.get(role, role) for role in domain.roles - ] - } - for domain in self.security_domains - } - } - - if self.reuse_limitation or self.time_interval_limitation: - payload["passwordPolicy"] = { - "reuseLimitation": self.reuse_limitation, - "timeIntervalLimitation": self.time_interval_limitation, - } - - return payload - - @classmethod - def from_api_payload( - cls, - payload: Dict[str, Any], - reverse_user_roles_mapping: Optional[Dict[str, str]] = None - ) -> 'LocalUserModel': - - if reverse_user_roles_mapping is None: - reverse_user_roles_mapping = {} - - user_data = { - "email": payload.get("email", ""), - "loginID": payload.get("loginID", ""), - "firstName": payload.get("firstName", ""), - "lastName": payload.get("lastName", ""), - "password": payload.get("password", ""), - 
"remoteIDClaim": payload.get("remoteIDClaim", ""), - "xLaunch": payload.get("xLaunch", False), - } - - password_policy = payload.get("passwordPolicy", {}) - user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) - user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) - - domains_data = [] - rbac = payload.get("rbac", {}) - if rbac and "domains" in rbac: - for domain_name, domain_config in rbac["domains"].items(): - # Map API roles back to internal roles - api_roles = domain_config.get("roles", []) - internal_roles = [ - reverse_user_roles_mapping.get(role, role) for role in api_roles - ] - - domain_data = { - "name": domain_name, - "roles": internal_roles - } - domains_data.append(domain_data) - - user_data["domains"] = domains_data - - return cls(**user_data) - - # @classmethod - # def from_api_payload_json( - # cls, - # json_payload: str, - # reverse_user_roles_mapping: Optional[Dict[str, str]] = None - # ) -> 'LocalUserModel': - - # payload = json.loads(json_payload) - # return cls.from_api_payload(payload, reverse_user_roles_mapping) From fe944d043b386413bdef482eae3520c8a5667f9b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 22 Jan 2026 01:04:05 -0500 Subject: [PATCH 004/131] [ignore] Pydantic Models: Modify and Clean both local_user.py and base.py based on comments. Add a get method and get_identifier_value function to NDBaseModel. 
--- plugins/module_utils/models/base.py | 43 ++++++------ plugins/module_utils/models/local_user.py | 82 ++++++++++------------- 2 files changed, 57 insertions(+), 68 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index e7301d14..bdd1b9c2 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -11,6 +11,7 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import List, Dict, Any, Optional, ClassVar +from typing_extensions import Self class NDBaseModel(BaseModel, ABC): @@ -22,7 +23,7 @@ class NDBaseModel(BaseModel, ABC): populate_by_name=True, ) - # TODO: find ways to redifine these var in every + # TODO: find ways to redifine these var in every future NDBaseModels identifiers: ClassVar[List[str]] = [] use_composite_identifiers: ClassVar[bool] = False @@ -32,26 +33,28 @@ def to_payload(self) -> Dict[str, Any]: @classmethod @abstractmethod - def from_response(cls, response: Dict[str, Any]) -> 'NDBaseModel': + def from_response(cls, response: Dict[str, Any]) -> Self: pass - # TODO: Modify to make it more generic and Pydantic + def get(self, field: str, default: Any = None) -> Any: + """Custom get method to mimic dictionary behavior.""" + return getattr(self, field, default) + + # TODO: Modify to make it more generic and Pydantic | might change and be moved in different Generic Class/Model # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") def get_identifier_value(self) -> Any: - """Generates the internal map key based on the selected mode.""" - # if self.use_composite_keys: - # # Mode: Composite (Tuple of ALL keys) - # values = [] - # for key in self.identifier_keys: - # val = config.get(key) - # if val is None: - # return None # Missing a required part - # values.append(val) - # return tuple(values) - # else: - # # Mode: Priority (First available key) - # for key in self.identifier_keys: - # if key in config: 
- # return config[key] - # return None - pass + """Generates the internal map key based on the selected mode.""" + if self.use_composite_identifiers: + # Mode: Composite (Tuple of ALL keys) + values = [] + for identifier in self.identifiers: + value = self.get(identifier) + if value is None: + return None # Missing a required part | Add Error Handling method here + values.append(value) + return tuple(values) + else: + # Mode: Priority (First available key) + for identifier in self.identifiers: + return self.get(identifier) + return None diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 7877a5a5..28cea27c 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,9 +8,10 @@ __metaclass__ = type -from pydantic import Field, field_validator +from pydantic import Field, field_validator, SecretStr from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar +from typing_extensions import Self from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel @@ -20,7 +21,7 @@ # TODO: Surclass BaseModel -> Priority # TODO: Look at ansible aliases -# TODO: use constants.py file in the future +# TODO: To be moved in constants.py file user_roles_mapping = MappingProxyType({}) @@ -39,15 +40,11 @@ def to_payload(self) -> Dict[str, Any]: } @classmethod - def from_response(cls, name: str, domain_config: List[str]) -> 'NDBaseModel': - internal_roles = [user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] - - domain_data = { - "name": name, - "roles": internal_roles - } - - return cls(**domain_data) + def from_response(cls, name: str, domain_config: List[str]) -> Self: + return cls( + name=name, + roles=[user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + ) class LocalUserModel(NDBaseModel): @@ -55,17 +52,17 @@ class LocalUserModel(NDBaseModel): # TODO: Define a way to 
generate it (look at NDBaseModel comments) identifiers: ClassVar[List[str]] = ["login_id"] - # TODO: Use Optinal to remove default values (get them from API response instead) - email: str = Field(default="", alias="email") + email: Optional[str] = Field(alias="email") login_id: str = Field(alias="loginID") - first_name: str = Field(default="", alias="firstName") - last_name: str = Field(default="", alias="lastName") - user_password: str = Field(alias="password") - reuse_limitation: int = Field(default=0, alias="reuseLimitation") - time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") - security_domains: List[LocalUserSecurityDomainModel] = Field(default_factory=list, alias="domains") - remote_id_claim: str = Field(default="", alias="remoteIDClaim") - remote_user_authorization: bool = Field(default=False, alias="xLaunch") + first_name: Optional[str] = Field(default="", alias="firstName") + last_name: Optional[str] = Field(default="", alias="lastName") + # TODO: Check secrets manipulation when tracking changes while maintaining security + user_password: Optional[SecretStr] = Field(alias="password") + reuse_limitation: Optional[int] = Field(default=0, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=0, alias="timeIntervalLimitation") + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(alias="domains") + remote_id_claim: Optional[str] = Field(default="", alias="remoteIDClaim") + remote_user_authorization: Optional[bool] = Field(default=False, alias="xLaunch") def to_payload(self) -> Dict[str, Any]: """Convert the model to the specific API payload format required.""" @@ -86,31 +83,20 @@ def to_payload(self) -> Dict[str, Any]: return payload @classmethod - def from_response(cls, payload: Dict[str, Any]) -> 'LocalUserModel': + def from_response(cls, response: Dict[str, Any]) -> Self: - if reverse_user_roles_mapping is None: - reverse_user_roles_mapping = {} - - user_data = { - "email": 
payload.get("email"), - "loginID": payload.get("loginID"), - "firstName": payload.get("firstName"), - "lastName": payload.get("lastName"), - "password": payload.get("password"), - "remoteIDClaim": payload.get("remoteIDClaim"), - "xLaunch": payload.get("xLaunch"), - } - - password_policy = payload.get("passwordPolicy", {}) - user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) - user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) - - domains_data = [] - rbac = payload.get("rbac", {}) - if rbac and "domains" in rbac: - for domain_name, domain_config in rbac["domains"].items(): - domains_data.append(LocalUserSecurityDomainModel.from_api_response(domain_name, domain_config)) - - user_data["domains"] = domains_data - - return cls(**user_data) + return cls( + email=response.get("email"), + login_id=response.get("loginID"), + first_name=response.get("firstName"), + last_name=response.get("lastName"), + user_password=response.get("password"), + reuse_limitation=response.get("passwordPolicy", {}).get("reuseLimitation"), + time_interval_limitation=response.get("passwordPolicy", {}).get("timeIntervalLimitation"), + security_domains=[ + LocalUserSecurityDomainModel.from_response(name, domain_config) + for name, domain_config in response.get("rbac", {}).get("domains", {}).items() + ], + remote_id_claim=response.get("remoteIDClaim"), + remote_user_authorization=response.get("xLaunch"), + ) From c4a4da25ffd126002c0db2e637daca46087a908b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 00:56:49 -0500 Subject: [PATCH 005/131] [ignore] Pydantic ND base models and local_user models: Final proposition of core design adding new methods which will be used in NDConfigCollection and NDNetworkResourceModule classes as well as basic error handling and simple docstrings. 
--- plugins/module_utils/models/base.py | 124 ++++++++++++++---- plugins/module_utils/models/local_user.py | 146 ++++++++++++++-------- 2 files changed, 192 insertions(+), 78 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index bdd1b9c2..a7eabf17 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -10,51 +10,127 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import List, Dict, Any, Optional, ClassVar +from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal from typing_extensions import Self class NDBaseModel(BaseModel, ABC): - + """ + Base model for all Nexus Dashboard API objects. + + Supports three identifier strategies: + - single: One unique required field (e.g., ["login_id"]) + - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) + - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) + """ + model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, + extra='ignore' ) - - # TODO: find ways to redifine these var in every future NDBaseModels + + # Subclasses MUST define these identifiers: ClassVar[List[str]] = [] - use_composite_identifiers: ClassVar[bool] = False - + identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + + # Optional: fields to exclude from diffs (e.g., passwords) + exclude_from_diff: ClassVar[List[str]] = [] + @abstractmethod def to_payload(self) -> Dict[str, Any]: + """ + Convert model to API payload format. 
+ """ pass @classmethod @abstractmethod def from_response(cls, response: Dict[str, Any]) -> Self: + """ + Create model instance from API response. + """ pass - def get(self, field: str, default: Any = None) -> Any: - """Custom get method to mimic dictionary behavior.""" - return getattr(self, field, default) - - # TODO: Modify to make it more generic and Pydantic | might change and be moved in different Generic Class/Model - # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") - def get_identifier_value(self) -> Any: - """Generates the internal map key based on the selected mode.""" - if self.use_composite_identifiers: - # Mode: Composite (Tuple of ALL keys) + def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: + """ + Extract identifier value(s) from this instance: + - single identifier: Returns field value. + - composite identifiers: Returns tuple of all field values. + - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. + """ + if not self.identifiers: + raise ValueError(f"{self.__class__.__name__} has no identifiers defined") + + if self.identifier_strategy == "single": + value = getattr(self, self.identifiers[0], None) + if value is None: + raise ValueError( + f"Single identifier field '{self.identifiers[0]}' is None" + ) + return value + + elif self.identifier_strategy == "composite": values = [] - for identifier in self.identifiers: - value = self.get(identifier) + missing = [] + + for field in self.identifiers: + value = getattr(self, field, None) if value is None: - return None # Missing a required part | Add Error Handling method here + missing.append(field) values.append(value) + + # NOTE: might not be needed in the future with field_validator + if missing: + raise ValueError( + f"Composite identifier fields {missing} are None. 
"
+                    f"All required: {self.identifiers}"
+                )
+            
+            return tuple(values)
+        
+        elif self.identifier_strategy == "hierarchical":
+            for field in self.identifiers:
+                value = getattr(self, field, None)
+                if value is not None:
+                    return (field, value)
+            
+            raise ValueError(
+                f"No non-None value in hierarchical fields {self.identifiers}"
+            )
+        
         else:
-            # Mode: Priority (First available key)
-            for identifier in self.identifiers:
-                return self.get(identifier)
-            return None
+            raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}")
+    
+    def to_diff_dict(self) -> Dict[str, Any]:
+        """
+        Export for diff comparison (excludes sensitive fields).
+        """
+        return self.model_dump(
+            by_alias=True,
+            exclude_none=True,
+            exclude=set(self.exclude_from_diff)
+        )
+
+# NOTE: Maybe make it a separate BaseModel
+class NDNestedModel(NDBaseModel):
+    """
+    Base for nested models without identifiers.
+    """
+
+    identifiers: ClassVar[List[str]] = []
+
+    def to_payload(self) -> Dict[str, Any]:
+        """
+        Convert model to API payload format.
+        """
+        return self.model_dump(by_alias=True, exclude_none=True)
+
+    @classmethod
+    def from_response(cls, response: Dict[str, Any]) -> Self:
+        """
+        Create model instance from API response. 
+ """ + return cls.model_validate(response) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 28cea27c..b7069126 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -8,95 +8,133 @@ __metaclass__ = type -from pydantic import Field, field_validator, SecretStr +from pydantic import Field, SecretStr from types import MappingProxyType -from typing import List, Dict, Any, Optional, ClassVar +from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self -from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel +from models.base import NDBaseModel, NDNestedModel -# TODO: Add Field validation methods -# TODO: define our own Field class for string versioning, ansible aliases -# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel -# TODO: Surclass BaseModel -> Priority -# TODO: Look at ansible aliases +# TODO: Move it to constants.py and import it +USER_ROLES_MAPPING = MappingProxyType({ + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", +}) -# TODO: To be moved in constants.py file -user_roles_mapping = MappingProxyType({}) +class LocalUserSecurityDomainModel(NDNestedModel): + """Security domain configuration for local user (nested model).""" -class LocalUserSecurityDomainModel(NDBaseModel): - - name: str = Field(alias="name") - roles: list[str] = Field(default_factory=lambda: 
["observer"], alias="roles") - + # Fields + name: str + roles: Optional[List[str]] = None + def to_payload(self) -> Dict[str, Any]: - return { + + return { self.name: { "roles": [ - user_roles_mapping.get(role, role) for role in self.roles + USER_ROLES_MAPPING.get(role, role) + for role in (self.roles or []) ] } } - + @classmethod - def from_response(cls, name: str, domain_config: List[str]) -> Self: + def from_response(cls, name: str, domain_config: Dict[str, Any]) -> Self: + + # NOTE: Maybe create a function from it to be moved to utils.py and to be imported + reverse_mapping = {value: key for key, value in USER_ROLES_MAPPING.items()} + return cls( name=name, - roles=[user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + roles=[ + reverse_mapping.get(role, role) + for role in domain_config.get("roles", []) + ] ) class LocalUserModel(NDBaseModel): + """ + Local user configuration. - # TODO: Define a way to generate it (look at NDBaseModel comments) + Identifier: login_id (single field) + """ + + # Identifier configuration identifiers: ClassVar[List[str]] = ["login_id"] - - email: Optional[str] = Field(alias="email") - login_id: str = Field(alias="loginID") - first_name: Optional[str] = Field(default="", alias="firstName") - last_name: Optional[str] = Field(default="", alias="lastName") - # TODO: Check secrets manipulation when tracking changes while maintaining security - user_password: Optional[SecretStr] = Field(alias="password") - reuse_limitation: Optional[int] = Field(default=0, alias="reuseLimitation") - time_interval_limitation: Optional[int] = Field(default=0, alias="timeIntervalLimitation") - security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(alias="domains") - remote_id_claim: Optional[str] = Field(default="", alias="remoteIDClaim") - remote_user_authorization: Optional[bool] = Field(default=False, alias="xLaunch") - + identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + 
exclude_from_diff: ClassVar[List[str]] = ["user_password"] + + # Fields + login_id: str = Field(..., alias="loginID") + email: Optional[str] = None + first_name: Optional[str] = Field(default=None, alias="firstName") + last_name: Optional[str] = Field(default=None, alias="lastName") + user_password: Optional[SecretStr] = Field(default=None, alias="password") + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="domains") + remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") + remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") + def to_payload(self) -> Dict[str, Any]: - """Convert the model to the specific API payload format required.""" + payload = self.model_dump( + by_alias=True, + exclude={ + 'domains', + 'security_domains', + 'reuseLimitation', + 'reuse_limitation', + 'timeIntervalLimitation', + 'time_interval_limitation' + }, + exclude_none=True + ) - payload = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + if self.user_password: + payload["password"] = self.user_password.get_secret_value() if self.security_domains: payload["rbac"] = {"domains": {}} for domain in self.security_domains: - payload["rbac"]["domains"].update(domain.to_api_payload()) - - if self.reuse_limitation or self.time_interval_limitation: - payload["passwordPolicy"] = { - "reuseLimitation": self.reuse_limitation, - "timeIntervalLimitation": self.time_interval_limitation, - } - + payload["rbac"]["domains"].update(domain.to_payload()) + + if self.reuse_limitation is not None or self.time_interval_limitation is not None: + payload["passwordPolicy"] = {} + if self.reuse_limitation is not None: + payload["passwordPolicy"]["reuseLimitation"] = self.reuse_limitation + if 
self.time_interval_limitation is not None: + payload["passwordPolicy"]["timeIntervalLimitation"] = self.time_interval_limitation + return payload - + @classmethod def from_response(cls, response: Dict[str, Any]) -> Self: + password_policy = response.get("passwordPolicy", {}) + rbac = response.get("rbac", {}) + domains = rbac.get("domains", {}) + + security_domains = [ + LocalUserSecurityDomainModel.from_response(name, config) + for name, config in domains.items() + ] if domains else None return cls( - email=response.get("email"), login_id=response.get("loginID"), + email=response.get("email"), first_name=response.get("firstName"), last_name=response.get("lastName"), user_password=response.get("password"), - reuse_limitation=response.get("passwordPolicy", {}).get("reuseLimitation"), - time_interval_limitation=response.get("passwordPolicy", {}).get("timeIntervalLimitation"), - security_domains=[ - LocalUserSecurityDomainModel.from_response(name, domain_config) - for name, domain_config in response.get("rbac", {}).get("domains", {}).items() - ], + reuse_limitation=password_policy.get("reuseLimitation"), + time_interval_limitation=password_policy.get("timeIntervalLimitation"), + security_domains=security_domains, remote_id_claim=response.get("remoteIDClaim"), - remote_user_authorization=response.get("xLaunch"), + remote_user_authorization=response.get("xLaunch") ) From d518bf39ed4dc29cc03044e0d3b0ce041c115805 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 13:09:33 -0500 Subject: [PATCH 006/131] [ignore] Pydantic ND Config Collection: Final proposition of core design changing existing methods and adding new ones which will be used in NDNetworkResourceModule class as well as basic error handling and simple docstrings. 
--- plugins/module_utils/nd_config_collection.py | 515 ++++++++++--------- 1 file changed, 266 insertions(+), 249 deletions(-) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 8f0058bb..2f256d30 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -8,289 +8,306 @@ __metaclass__ = type -import sys +from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable from copy import deepcopy -from functools import reduce -# Python 2 and 3 compatibility (To be removed in the future) -if sys.version_info[0] >= 3: - from collections.abc import MutableMapping - iteritems = lambda d: d.items() -else: - from collections import MutableMapping - iteritems = lambda d: d.iteritems() +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from models.base import NDBaseModel -# TODO: Adapt to Pydantic Models -# NOTE: Single-Index Hybrid Collection for ND Network Resource Module -class NDConfigCollection(MutableMapping): +# Type aliases +# NOTE: Maybe add more type aliases in the future if needed +ModelType = TypeVar('ModelType', bound=NDBaseModel) +IdentifierKey = Union[str, int, Tuple[Any, ...]] - def __init__(self, identifier_keys, data=None, use_composite_keys=False): - self.identifier_keys = identifier_keys - self.use_composite_keys = use_composite_keys - - # Dual Storage - self._list = [] - self._map = {} + +class NDConfigCollection(Generic[ModelType]): + """ + Nexus Dashboard configuration collection for NDBaseModel instances. 
+ """ + + def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType]] = None): + """ + Initialize collection. + """ + self._model_class = model_class - if data: - for item in data: + # Dual storage + self._items: List[ModelType] = [] + self._index: Dict[IdentifierKey, int] = {} + + if items: + for item in items: self.add(item) - # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") - def _get_identifier_value(self, config): - """Generates the internal map key based on the selected mode.""" - if self.use_composite_keys: - # Mode: Composite (Tuple of ALL keys) - values = [] - for key in self.identifier_keys: - val = config.get(key) - if val is None: - return None # Missing a required part - values.append(val) - return tuple(values) - else: - # Mode: Priority (First available key) - for key in self.identifier_keys: - if key in config: - return config[key] - return None - - # Magic Methods - def __getitem__(self, key): - return self._map[key] - - def __setitem__(self, key, value): - if key in self._map: - old_ref = self._map[key] - try: - idx = self._list.index(old_ref) - self._list[idx] = value - self._map[key] = value - except ValueError: - pass - else: - # Add new - self._list.append(value) - self._map[key] = value - - def __delitem__(self, key): - if key in self._map: - obj_ref = self._map[key] - del self._map[key] - self._list.remove(obj_ref) + def _extract_key(self, item: ModelType) -> IdentifierKey: + """ + Extract identifier key from item. + """ + try: + return item.get_identifier_value() + except Exception as e: + raise ValueError(f"Failed to extract identifier: {e}") from e + + def _rebuild_index(self) -> None: + """Rebuild index from scratch (O(n) operation).""" + self._index.clear() + for index, item in enumerate(self._items): + key = self._extract_key(item) + self._index[key] = index + + # Core CRUD Operations + + def add(self, item: ModelType) -> IdentifierKey: + """ + Add item to collection (O(1) operation). 
+ """ + if not isinstance(item, self._model_class): + raise TypeError( + f"Item must be instance of {self._model_class.__name__}, " + f"got {type(item).__name__}" + ) + + key = self._extract_key(item) + + if key in self._index: + raise ValueError( + f"Item with identifier {key} already exists. Use replace() to update" + ) + + position = len(self._items) + self._items.append(item) + self._index[key] = position + + return key + + def get(self, key: IdentifierKey) -> Optional[ModelType]: + """ + Get item by identifier key (O(1) operation). + """ + index = self._index.get(key) + return self._items[index] if index is not None else None + + def replace(self, item: ModelType) -> bool: + """ + Replace existing item with same identifier (O(1) operation). + """ + if not isinstance(item, self._model_class): + raise TypeError( + f"Item must be instance of {self._model_class.__name__}, " + f"got {type(item).__name__}" + ) + + key = self._extract_key(item) + index = self._index.get(key) + + if index is None: + return False + + self._items[index] = item + return True + + def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[ModelType, ModelType], ModelType]] = None) -> ModelType: + """ + Merge item with existing, or add if not present. 
+ """ + key = self._extract_key(item) + existing = self.get(key) + + if existing is None: + self.add(item) + return item + + # Custom or default merge + if custom_merge_function: + merged = custom_merge_function(existing, item) else: - raise KeyError(key) - - def __iter__(self): - return iter(self._map) - - def __len__(self): - return len(self._list) + # Default merge + existing_data = existing.model_dump() + new_data = item.model_dump(exclude_unset=True) + merged_data = self._deep_merge(existing_data, new_data) + merged = self._model_class.model_validate(merged_data) + + self.replace(merged) + return merged - def __eq__(self, other): - if isinstance(other, NDConfigCollection): - return self._list == other._list - elif isinstance(other, list): - return self._list == other - elif isinstance(other, dict): - return self._map == other - return False + def _deep_merge(self, base: Dict, update: Dict) -> Dict: + """Recursively merge dictionaries.""" + result = base.copy() + + for key, value in update.items(): + if value is None: + continue + + if key in result and isinstance(result[key], dict) and isinstance(value, dict): + result[key] = self._deep_merge(result[key], value) + else: + result[key] = value + + return result + + def delete(self, key: IdentifierKey) -> bool: + """ + Delete item by identifier (O(n) operation due to index rebuild) + """ + index = self._index.get(key) + + if index is None: + return False + + del self._items[index] + self._rebuild_index() + + return True + + # Diff Operations + + def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: + """ + Compare single item against collection. 
+ """ + try: + key = self._extract_key(new_item) + except ValueError: + return "new" + + existing = self.get(key) + + if existing is None: + return "new" - def __ne__(self, other): - return not self.__eq__(other) + existing_data = existing.to_diff_dict() + new_data = new_item.to_diff_dict() + + if unwanted_keys: + existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) + new_data = self._remove_unwanted_keys(new_data, unwanted_keys) - def __repr__(self): - return str(self._list) + is_subset = self._issubset(new_data, existing_data) + + return "no_diff" if is_subset else "changed" + + def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> bool: + """ + Check if two collections differ. + """ + if not isinstance(other, NDConfigCollection): + raise TypeError("Argument must be NDConfigCollection") + + if len(self) != len(other): + return True - # Helper Methods - def _filter_dict(self, data, ignore_keys): - return {k: v for k, v in iteritems(data) if k not in ignore_keys} + for item in other: + if self.get_diff_config(item, unwanted_keys) != "no_diff": + return True - def _issubset(self, subset, superset): + for key in self.keys(): + if other.get(key) is None: + return True + + return False + + def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: + """ + Get identifiers in self but not in other. 
+ """ + current_keys = set(self.keys()) + other_keys = set(other.keys()) + return list(current_keys - other_keys) + + def _issubset(self, subset: Any, superset: Any) -> bool: + """Check if subset is contained in superset.""" if type(subset) is not type(superset): return False - + if not isinstance(subset, dict): if isinstance(subset, list): return all(item in superset for item in subset) return subset == superset - - for key, value in iteritems(subset): + + for key, value in subset.items(): if value is None: continue - + if key not in superset: return False - - superset_value = superset.get(key) - - if not self._issubset(value, superset_value): + + if not self._issubset(value, superset[key]): return False + return True - def _remove_unwanted_keys(self, data, unwanted_keys): + def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: + """Remove unwanted keys from dict (supports nested paths).""" + data = deepcopy(data) + for key in unwanted_keys: if isinstance(key, str): if key in data: del data[key] + elif isinstance(key, list) and len(key) > 0: - key_path, last = key[:-1], key[-1] try: - parent = reduce(lambda d, k: d.get(k) if isinstance(d, dict) else None, key_path, data) - if isinstance(parent, dict) and last in parent: - del parent[last] - except (KeyError, TypeError): + parent = data + for k in key[:-1]: + if isinstance(parent, dict) and k in parent: + parent = parent[k] + else: + break + else: + if isinstance(parent, dict) and key[-1] in parent: + del parent[key[-1]] + except (KeyError, TypeError, IndexError): pass + return data - - # Core Operations - def to_list(self): - return self._list + + # Collection Operations - def to_dict(self): - return self._map - - def copy(self): - return NDConfigCollection(self.identifier_keys, deepcopy(self._list), self.use_composite_keys) - - def add(self, config): - ident = self._get_identifier_value(config) - if ident is None: - mode = "Composite" if self.use_composite_keys else 
"Priority" - raise ValueError("[{0} Mode] Config missing required keys: {1}".format(mode, self.identifier_keys)) - - if ident in self._map: - self.__setitem__(ident, config) - else: - self._list.append(config) - self._map[ident] = config - - def merge(self, new_config): - ident = self._get_identifier_value(new_config) - if ident and ident in self._map: - self._map[ident].update(new_config) - else: - self.add(new_config) - - def replace(self, new_config): - ident = self._get_identifier_value(new_config) - if ident: - self[ident] = new_config - else: - self.add(new_config) - - def remove(self, identifiers): - # Try Map Removal - try: - target_key = self._get_identifier_value(identifiers) - if target_key and target_key in self._map: - self.__delitem__(target_key) - return - except Exception: - pass - - # Fallback: Linear Removal - to_remove = [] - for config in self._list: - match = True - for k, v in iteritems(identifiers): - if config.get(k) != v: - match = False - break - if match: - to_remove.append(self._get_identifier_value(config)) - - for ident in to_remove: - if ident in self._map: - self.__delitem__(ident) - - def get_by_key(self, key, default=None): - return self._map.get(key, default) - - def get_by_idenfiers(self, identifiers, default=None): - # Try Map Lookup - target_key = self._get_identifier_value(identifiers) - if target_key and target_key in self._map: - return self._map[target_key] - - # Fallback: Linear Lookup - valid_search_keys = [k for k in identifiers if k in self.identifier_keys] - if not valid_search_keys: - return default - - for config in self._list: - match = True - for k in valid_search_keys: - if config.get(k) != identifiers[k]: - match = False - break - if match: - return config - return default - - # Diff logic - def get_diff_config(self, new_config, unwanted_keys=None): - unwanted_keys = unwanted_keys or [] - - ident = self._get_identifier_value(new_config) - - if not ident or ident not in self._map: - return "new" - - existing = 
deepcopy(self._map[ident]) - sent = deepcopy(new_config) - - self._remove_unwanted_keys(existing, unwanted_keys) - self._remove_unwanted_keys(sent, unwanted_keys) - - is_subset = self._issubset(sent, existing) - - if is_subset: - return "no_diff" - else: - return "changed" - - def get_diff_collection(self, new_collection, unwanted_keys=None): - if not isinstance(new_collection, NDConfigCollection): - raise TypeError("Argument must be an NDConfigCollection") - - if len(self) != len(new_collection): - return True - - for item in new_collection.to_list(): - if self.get_diff_config(item, unwanted_keys) != "no_diff": - return True - - for ident in self._map: - if ident not in new_collection._map: - return True - - return False - - def get_diff_identifiers(self, new_collection): - current_identifiers = set(self.config_collection.keys()) - other_identifiers = set(new_collection.config_collection.keys()) - - return list(current_identifiers - other_identifiers) + def __len__(self) -> int: + """Return number of items.""" + return len(self._items) + + def __iter__(self): + """Iterate over items.""" + return iter(self._items) - # Sanitize Operations - def sanitize(self, keys_to_remove=None, values_to_remove=None, remove_none_values=False): - keys_to_remove = keys_to_remove or [] - values_to_remove = values_to_remove or [] + def keys(self) -> List[IdentifierKey]: + """Get all identifier keys.""" + return list(self._index.keys()) - def recursive_clean(obj): - if isinstance(obj, dict): - keys = list(obj.keys()) - for k in keys: - v = obj[k] - if k in keys_to_remove or v in values_to_remove or (remove_none_values and v is None): - del obj[k] - continue - if isinstance(v, (dict, list)): - recursive_clean(v) - elif isinstance(obj, list): - for item in obj: - recursive_clean(item) + def copy(self) -> "NDConfigCollection[ModelType]": + """Create deep copy of collection.""" + return NDConfigCollection( + model_class=self._model_class, + items=deepcopy(self._items) + ) - for item in 
self._list:
-            recursive_clean(item)
+    
+    # Serialization
+    
+    def to_list(self, **kwargs) -> List[Dict]:
+        """
+        Export as list of dicts (with aliases).
+        """
+        return [item.model_dump(by_alias=True, exclude_none=True, **kwargs) for item in self._items]
+    
+    def to_payload_list(self) -> List[Dict[str, Any]]:
+        """
+        Export as list of API payloads.
+        """
+        return [item.to_payload() for item in self._items]
+    
+    @classmethod
+    def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]":
+        """
+        Create collection from list of dicts.
+        """
+        items = [model_class.model_validate(item_data) for item_data in data]
+        return cls(model_class=model_class, items=items)
+    
+    @classmethod
+    def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]":
+        """
+        Create collection from API response.
+        """
+        items = [model_class.from_response(item_data) for item_data in response_data]
+        return cls(model_class=model_class, items=items)

From b35fa8d394829ce234236729c8505fc683a1022c Mon Sep 17 00:00:00 2001
From: Gaspard Micol
Date: Fri, 23 Jan 2026 13:51:54 -0500
Subject: [PATCH 007/131] [ignore] Pydantic Base ND Network Resource Module:
 Final proposition of core design changing existing methods and adding new
 ones which will be used in future as a base for ND network resource modules
 as well as basic error handling and simple docstrings.

--- plugins/module_utils/nd_network_resources.py | 561 ++++++++++++++----- 1 file changed, 411 insertions(+), 150 deletions(-) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index 3b549da1..ab7df9e2 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -9,196 +9,457 @@ __metaclass__ = type from copy import deepcopy -from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from typing import Optional, List, Dict, Any, Callable, Literal +from pydantic import ValidationError -# TODO: Make further enhancement to logs and outputs -# TODO: Adapt to Pydantic Models -# NOTE: ONLY works for new API endpoints introduced in ND v4.1.0 and later -class NDNetworkResourceModule(NDModule): +# TODO: To be replaced with: +# from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +# from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +# from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from nd import NDModule +from nd_config_collection import NDConfigCollection +from models.base import NDBaseModel +from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED - def __init__(self, module, path, identifier_keys, use_composite_keys=False, actions_overwrite_map=None): - super().__init__(module) - # Initial variables +class NDNetworkResourceModule(NDModule): + """ + Generic Network Resource Module for Nexus Dashboard. 
+ """ + + def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_overwrite_map: Optional[Dict[str, Callable]] = None): + """ + Initialize the Network Resource Module. + """ + super().__init__(module) + + # Configuration self.path = path + self.model_class = model_class self.actions_overwrite_map = actions_overwrite_map or {} - self.identifier_keys = identifier_keys - self.use_composite_keys = use_composite_keys - - # Initial data - self.init_all_data = self._query_all() - - # Info ouput - self.existing = NDConfigCollection(identifier_keys, data=self.init_all_data) - self.previous = NDConfigCollection(identifier_keys) - self.proposed = NDConfigCollection(identifier_keys) - self.sent = NDConfigCollection(identifier_keys) - - # Debug output - self.nd_logs = [] - - # Helper variables - self.current_identifier = "" - self.existing_config = {} - self.proposed_config = {} - - # Actions Operations - def actions_overwrite(action): + + # Initialize collections + try: + init_all_data = self._query_all() + + self.existing = NDConfigCollection.from_api_response( + response_data=init_all_data, + model_class=model_class + ) + self.previous = NDConfigCollection(model_class=model_class) + self.proposed = NDConfigCollection(model_class=model_class) + self.sent = NDConfigCollection(model_class=model_class) + + except Exception as e: + self.fail_json( + msg=f"Initialization failed: {str(e)}", + error=str(e) + ) + + # Operation tracking + self.nd_logs: List[Dict[str, Any]] = [] + + # Current operation context + self.current_identifier = None + self.existing_config: Dict[str, Any] = {} + self.proposed_config: Dict[str, Any] = {} + + # Action Decorator + + @staticmethod + def actions_overwrite(action: str): + """ + Decorator to allow overriding default action operations. 
+ """ def decorator(func): def wrapper(self, *args, **kwargs): overwrite_action = self.actions_overwrite_map.get(action) if callable(overwrite_action): - return overwrite_action(self) + return overwrite_action(self, *args, **kwargs) else: return func(self, *args, **kwargs) return wrapper return decorator - + + # Action Operations + @actions_overwrite("create") - def _create(self): - if not self.module.check_mode: + def _create(self) -> Optional[Dict[str, Any]]: + """ + Create a new configuration object. + """ + if self.module.check_mode: + return self.proposed_config + + try: return self.request(path=self.path, method="POST", data=self.proposed_config) - + except Exception as e: + raise Exception(f"Create failed for {self.current_identifier}: {e}") from e + @actions_overwrite("update") - def _update(self): - if not self.module.check_mode: - object_path = "{0}/{1}".format(self.path, self.current_identifier) + def _update(self) -> Optional[Dict[str, Any]]: + """ + Update an existing configuration object. 
+ """ + if self.module.check_mode: + return self.proposed_config + + try: + object_path = f"{self.path}/{self.current_identifier}" return self.request(path=object_path, method="PUT", data=self.proposed_config) - + except Exception as e: + raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + @actions_overwrite("delete") - def _delete(self): - if not self.module.check_mode: - object_path = "{0}/{1}".format(self.path, self.current_identifier) + def _delete(self) -> None: + """Delete a configuration object.""" + if self.module.check_mode: + return + + try: + object_path = f"{self.path}/{self.current_identifier}" self.request(path=object_path, method="DELETE") + except Exception as e: + raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e @actions_overwrite("query_all") - def _query_all(self): - return self.query_obj(self.path) - - def format_log(self, identifier, status, after_data, sent_payload_data=None): - item_result = { + def _query_all(self) -> List[Dict[str, Any]]: + """ + Query all configuration objects from device. + """ + try: + result = self.query_obj(self.path) + return result or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e + + # Logging + + def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: + """ + Create and append a log entry. 
+ """ + log_entry = { "identifier": identifier, "status": status, - "before": self.existing_config, + "before": deepcopy(self.existing_config), "after": deepcopy(after_data) if after_data is not None else self.existing_config, - "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {}, + "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {} } - + + # Add HTTP details if not in check mode if not self.module.check_mode and self.url is not None: - item_result.update( - { - "method": self.method, - "response": self.response, - "status": self.status, - "url": self.url, - } + log_entry.update({ + "method": self.method, + "response": self.response, + "status": self.status, + "url": self.url + }) + + self.nd_logs.append(log_entry) + + # State Management + + def manage_state( + self, state: Literal["merged", "replaced", "overridden", "deleted"], new_configs: List[Dict[str, Any]], unwanted_keys: Optional[List] = None, override_exceptions: Optional[List] = None) -> None: + """ + Manage state according to desired configuration. 
+ """ + unwanted_keys = unwanted_keys or [] + override_exceptions = override_exceptions or [] + + # Parse and validate configs + try: + parsed_items = [] + for config in new_configs: + try: + # Parse config into model + item = self.model_class.model_validate(config) + parsed_items.append(item) + except ValidationError as e: + self.fail_json( + msg=f"Invalid configuration: {e}", + config=config, + validation_errors=e.errors() + ) + return + + # Create proposed collection + self.proposed = NDConfigCollection( + model_class=self.model_class, + items=parsed_items ) + + # Save previous state + self.previous = self.existing.copy() - self.nd_logs.append(item_result) - - # Logs and Outputs formating Operations - # TODO: Move it to different file - def add_logs_and_ouputs(self): - if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - if self.params.get("output_level") in ("debug", "info"): + except Exception as e: + self.fail_json( + msg=f"Failed to prepare configurations: {e}", + error=str(e) + ) + return + + # Execute state operations + if state in ["merged", "replaced", "overridden"]: + self._manage_create_update_state(state, unwanted_keys) + + if state == "overridden": + self._manage_override_deletions(override_exceptions) + + elif state == "deleted": + self._manage_delete_state() + + else: + self.fail_json(msg=f"Invalid state: {state}") + + def _manage_create_update_state(self,state: Literal["merged", "replaced", "overridden"], unwanted_keys: List) -> None: + """ + Handle merged/replaced/overridden states. 
+ """ + for proposed_item in self.proposed: + try: + # Extract identifier + identifier = proposed_item.get_identifier_value() + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + self.existing_config = ( + existing_item.model_dump(by_alias=True, exclude_none=True) + if existing_item + else {} + ) + + # Determine diff status + diff_status = self.existing.get_diff_config( + proposed_item, + unwanted_keys=unwanted_keys + ) + + # No changes needed + if diff_status == "no_diff": + self.format_log( + identifier=identifier, + status="no_change", + after_data=self.existing_config + ) + continue + + # Prepare final config based on state + if state == "merged" and existing_item: + # Merge with existing + merged_item = self.existing.merge(proposed_item) + final_item = merged_item + else: + # Replace or create + if existing_item: + self.existing.replace(proposed_item) + else: + self.existing.add(proposed_item) + final_item = proposed_item + + # Convert to API payload + self.proposed_config = final_item.to_payload() + + # Execute API operation + if diff_status == "changed": + response = self._update() + operation_status = "updated" + else: + response = self._create() + operation_status = "created" + + # Track sent payload + if not self.module.check_mode: + self.sent.add(final_item) + sent_payload = self.proposed_config + else: + sent_payload = None + + # Log operation + self.format_log( + identifier=identifier, + status=operation_status, + after_data=( + response if not self.module.check_mode + else final_item.model_dump(by_alias=True, exclude_none=True) + ), + sent_payload_data=sent_payload + ) + + except Exception as e: + error_msg = f"Failed to process {identifier}: {e}" + + self.format_log( + identifier=identifier, + status="no_change", + after_data=self.existing_config + ) + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + def 
_manage_override_deletions(self, override_exceptions: List) -> None: + """ + Delete items not in proposed config (for overridden state). + """ + diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + + for identifier in diff_identifiers: + if identifier in override_exceptions: + continue + + try: + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + if not existing_item: + continue + + self.existing_config = existing_item.model_dump( + by_alias=True, + exclude_none=True + ) + + # Execute delete + self._delete() + + # Remove from collection + self.existing.delete(identifier) + + # Log deletion + self.format_log( + identifier=identifier, + status="deleted", + after_data={} + ) + + except Exception as e: + error_msg = f"Failed to delete {identifier}: {e}" + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + def _manage_delete_state(self) -> None: + """Handle deleted state.""" + for proposed_item in self.proposed: + try: + identifier = proposed_item.get_identifier_value() + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + if not existing_item: + # Already deleted or doesn't exist + self.format_log( + identifier=identifier, + status="no_change", + after_data={} + ) + continue + + self.existing_config = existing_item.model_dump( + by_alias=True, + exclude_none=True + ) + + # Execute delete + self._delete() + + # Remove from collection + self.existing.delete(identifier) + + # Log deletion + self.format_log( + identifier=identifier, + status="deleted", + after_data={} + ) + + except Exception as e: + error_msg = f"Failed to delete {identifier}: {e}" + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + # Output Formatting + + def add_logs_and_outputs(self) -> None: + """Add logs 
and outputs to module result based on output_level.""" + output_level = self.params.get("output_level", "normal") + state = self.params.get("state") + + # Add previous state for certain states and output levels + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + if output_level in ("debug", "info"): self.result["previous"] = self.previous.to_list() + + # Check if there were changes if not self.has_modified and self.previous.get_diff_collection(self.existing): self.result["changed"] = True + + # Add stdout if present if self.stdout: self.result["stdout"] = self.stdout - - if self.params.get("output_level") == "debug": + + # Add debug information + if output_level == "debug": self.result["nd_logs"] = self.nd_logs + if self.url is not None: self.result["httpapi_logs"] = self.httpapi_logs - - if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - self.result["sent"] = self.sent.to_list() - self.result["proposed"] = self.proposed.to_list() - - self.result["current"] = self.existing.to_list() - - # Manage State Operations - def manage_state(self, state, new_configs, unwanted_keys=None, override_exceptions=None): - unwanted_keys = unwanted_keys or [] - override_exceptions = override_exceptions or [] - - self.proposed = NDConfigCollection(self.identifier_keys, data=new_configs) - self.proposed.sanitize() - self.previous = self.existing.copy() - - if state in ["merged", "replaced", "overidden"]: - for identifier, config in self.proposed.items(): - - diff_config_info = self.existing.get_diff_config(config, unwanted_keys) - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self.proposed_config = config - request_response = None - sent_payload = None - status = "no_change" - - if diff_config_info != "no_diff": - if state == "merged": - self.existing.merge(config) - self.proposed_config = self.existing[identifier] - else: - self.existing.replace(config) - - if diff_config_info == 
"changed": - request_response = self._update() - status = "updated" - else: - request_response = self._create() - status= "created" - - if not self.module.check_mode: - self.sent.add(self.proposed_config) - sent_payload = self.proposed_config - else: - request_response = self.proposed_config - - self.format_log(identifier, status, request_response, sent_payload) - - if state == "overidden": - diff_identifiers = self.previous.get_diff_identifiers(self.proposed) - for identifier in diff_identifiers: - if identifier not in override_exceptions: - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self._delete() - del self.existing[identifier] - self.format_log(identifier, "deleted", after_data={}) + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + self.result["sent"] = self.sent.to_payload_list() + self.result["proposed"] = self.proposed.to_list() - - elif state == "deleted": - for identifier, config in self.proposed.items(): - if identifier in self.existing.keys(): - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self.proposed_config = config - self._delete() - del self.existing[identifier] - self.format_log(identifier, "deleted", after_data={}) - - # Outputs Operations - def fail_json(self, msg, **kwargs): - self.add_logs_and_ouputs() - + # Always include current state + self.result["current"] = self.existing.to_list() + + # Module Exit Methods + + def fail_json(self, msg: str, **kwargs) -> None: + """ + Exit module with failure. + """ + self.add_logs_and_outputs() self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result) - - def exit_json(self, **kwargs): - self.add_logs_and_ouputs() - + + def exit_json(self, **kwargs) -> None: + """ + Exit module successfully. 
+ """ + self.add_logs_and_outputs() + + # Add diff if module supports it if self.module._diff and self.result.get("changed") is True: - self.result["diff"] = dict( - before=self.previous.to_list(), - after=self.existing.to_list(), - ) - + try: + # Use diff-safe dicts (excludes sensitive fields) + before = [item.to_diff_dict() for item in self.previous] + after = [item.to_diff_dict() for item in self.existing] + + self.result["diff"] = dict( + before=before, + after=after + ) + except Exception: + pass # Don't fail on diff generation + self.result.update(**kwargs) self.module.exit_json(**self.result) From dc9649e7dc6b98959826e15f2e2c71f86eedefdf Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 14:37:44 -0500 Subject: [PATCH 008/131] [ignore] Modify nd_local_user based on Pydantic implementation and changes added to NDNetworkResourceModule. --- plugins/modules/nd_local_user.py | 91 +++++++++++++++----------------- 1 file changed, 43 insertions(+), 48 deletions(-) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 4a5f1ad2..3dcaf1a4 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -175,23 +175,34 @@ """ from ansible.module_utils.basic import AnsibleModule -from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec, NDModule -from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resources import NDNetworkResourceModule -from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +# TODO: To be replaced with: +# from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +# from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule +# from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +# from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +from 
module_utils.nd import nd_argument_spec
+from module_utils.nd_network_resources import NDNetworkResourceModule
+from module_utils.models.local_user import LocalUserModel
+from module_utils.constants import USER_ROLES_MAPPING
 
-# Actions overwrite functions
-def query_all_local_users(nd):
-    return nd.query_obj(nd.path).get("localusers")
+# NOTE: Maybe Add the overwrite action in the LocalUserModel
+def query_all_local_users(nd_module):
+    """
+    Custom query_all action to extract 'localusers' from response.
+    """
+    response = nd_module.query_obj(nd_module.path)
+    return response.get("localusers", [])
 
 
-# TODO: Adapt to Pydantic Model
+# NOTE: Maybe Add More aliases like in the LocalUserModel / Revisit the argument_spec
 def main():
     argument_spec = nd_argument_spec()
     argument_spec.update(
         config=dict(
             type="list",
             elements="dict",
+            required=True,
             options=dict(
                 email=dict(type="str"),
                 login_id=dict(type="str", required=True),
@@ -221,49 +232,33 @@ def main():
         argument_spec=argument_spec,
         supports_check_mode=True,
     )
-
-    path = "/api/v1/infra/aaa/localUsers"
-    identifier_keys = ["loginID"]
-    actions_overwrite_map = {"query_all": query_all_local_users}
-
-    nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map)
-
-    state = nd.params.get("state")
-    config = nd.params.get("config")
-    override_exceptions = nd.params.get("override_exceptions")
-    new_config = []
-    for object in config:
-        payload = {
-            "email": object.get("email"),
-            "firstName": object.get("first_name"),
-            "lastName": object.get("last_name"),
-            "loginID": object.get("login_id"),
-            "password": object.get("user_password"),
-            "remoteIDClaim": object.get("remote_id_claim"),
-            "xLaunch": object.get("remote_user_authorization"),
-        }
-
-        if object.get("security_domains"):
-            payload["rbac"] = {
-                "domains": {
-                    security_domain.get("name"): {
-                        "roles": (
-                            [USER_ROLES_MAPPING.get(role) for role in security_domain["roles"]] if isinstance(security_domain.get("roles"), list)
else [] - ) - } - for security_domain in object["security_domains"] - }, - } - if object.get("reuse_limitation") or object.get("time_interval_limitation"): - payload["passwordPolicy"] = { - "reuseLimitation": object.get("reuse_limitation"), - "timeIntervalLimitation": object.get("time_interval_limitation"), + + try: + # Create NDNetworkResourceModule with LocalUserModel + nd_module = NDNetworkResourceModule( + module=module, + path="/api/v1/infra/aaa/localUsers", + model_class=LocalUserModel, + actions_overwrite_map={ + "query_all": query_all_local_users } - new_config.append(payload) - - nd.manage_state(state=state, new_configs=new_config, unwanted_keys=[["passwordPolicy", "passwordChangeTime"], ["userID"]], override_exceptions=override_exceptions) + ) + + # Manage state + nd_module.manage_state( + state=module.params["state"], + new_configs=module.params["config"], + unwanted_keys=[ + ["passwordPolicy", "passwordChangeTime"], # Nested path + ["userID"] # Simple key + ], + override_exceptions=module.params.get("override_exceptions") + ) - nd.exit_json() + nd_module.exit_json() + + except Exception as e: + module.fail_json(msg=f"Module execution failed: {str(e)}") if __name__ == "__main__": From dead72222ba9b04d6cf8bb589ddbe14108e544a0 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 17 Feb 2026 13:46:10 -0500 Subject: [PATCH 009/131] [ignore] Add api_endpoints for configuring endpoints and orchestrators for orchestrating crud api operations with model instances and endpoints. 
--- plugins/module_utils/api_endpoints/base.py | 178 ++++++++++++++++++ plugins/module_utils/api_endpoints/enums.py | 46 +++++ .../module_utils/api_endpoints/local_user.py | 178 ++++++++++++++++++ plugins/module_utils/api_endpoints/mixins.py | 25 +++ plugins/module_utils/orchestrators/base.py | 79 ++++++++ .../module_utils/orchestrators/local_user.py | 42 +++++ 6 files changed, 548 insertions(+) create mode 100644 plugins/module_utils/api_endpoints/base.py create mode 100644 plugins/module_utils/api_endpoints/enums.py create mode 100644 plugins/module_utils/api_endpoints/local_user.py create mode 100644 plugins/module_utils/api_endpoints/mixins.py create mode 100644 plugins/module_utils/orchestrators/base.py create mode 100644 plugins/module_utils/orchestrators/local_user.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py new file mode 100644 index 00000000..1a9cd768 --- /dev/null +++ b/plugins/module_utils/api_endpoints/base.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from abc import ABC, abstractmethod +from pydantic import BaseModel, ConfigDict +from typing import Final, Union, Tuple, Any + +IdentifierKey = Union[str, int, Tuple[Any, ...], None] + +class NDBaseSmartEndpoint(BaseModel, ABC): + + # TODO: maybe to be modified in the future + model_config = ConfigDict(validate_assignment=True) + + base_path: str + + @abstractmethod + @property + def path(self) -> str: + pass + + @abstractmethod + @property + def verb(self) -> str: + pass + + # TODO: Maybe to be modifed to be more Pydantic + # TODO: Maybe change function's name + # NOTE: function to set mixins fields from identifiers + @abstractmethod + def 
set_identifiers(self, identifier: IdentifierKey = None): + pass + + +class NDBasePath: + """ + # Summary + + Centralized API Base Paths + + ## Description + + Provides centralized base path definitions for all ND API endpoints. + This allows API path changes to be managed in a single location. + + ## Usage + + ```python + # Get a complete base path + path = BasePath.control_fabrics("MyFabric", "config-deploy") + # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/MyFabric/config-deploy + + # Build custom paths + path = BasePath.v1("custom", "endpoint") + # Returns: /appcenter/cisco/ndfc/api/v1/custom/endpoint + ``` + + ## Design Notes + + - All base paths are defined as class constants for easy modification + - Helper methods compose paths from base constants + - Use these methods in Pydantic endpoint models to ensure consistency + - If NDFC changes base API paths, only this class needs updating + """ + + # Root API paths + NDFC_API: Final = "/appcenter/cisco/ndfc/api" + ND_INFRA_API: Final = "/api/v1/infra" + ONEMANAGE: Final = "/onemanage" + LOGIN: Final = "/login" + + @classmethod + def api(cls, *segments: str) -> str: + """ + # Summary + + Build path from NDFC API root. + + ## Parameters + + - segments: Path segments to append + + ## Returns + + - Complete path string + + ## Example + + ```python + path = BasePath.api("custom", "endpoint") + # Returns: /appcenter/cisco/ndfc/api/custom/endpoint + ``` + """ + if not segments: + return cls.NDFC_API + return f"{cls.NDFC_API}/{'/'.join(segments)}" + + @classmethod + def v1(cls, *segments: str) -> str: + """ + # Summary + + Build v1 API path. 
+ + ## Parameters + + - segments: Path segments to append after v1 + + ## Returns + + - Complete v1 API path + + ## Example + + ```python + path = BasePath.v1("lan-fabric", "rest") + # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest + ``` + """ + return cls.api("v1", *segments) + + @classmethod + def nd_infra(cls, *segments: str) -> str: + """ + # Summary + + Build ND infra API path. + + ## Parameters + + - segments: Path segments to append after /api/v1/infra + + ## Returns + + - Complete ND infra API path + + ## Example + + ```python + path = BasePath.nd_infra("aaa", "localUsers") + # Returns: /api/v1/infra/aaa/localUsers + ``` + """ + if not segments: + return cls.ND_INFRA_API + return f"{cls.ND_INFRA_API}/{'/'.join(segments)}" + + @classmethod + def nd_infra_aaa(cls, *segments: str) -> str: + """ + # Summary + + Build ND infra AAA API path. + + ## Parameters + + - segments: Path segments to append after aaa (e.g., "localUsers") + + ## Returns + + - Complete ND infra AAA path + + ## Example + + ```python + path = BasePath.nd_infra_aaa("localUsers") + # Returns: /api/v1/infra/aaa/localUsers + ``` + """ + return cls.nd_infra("aaa", *segments) diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/api_endpoints/enums.py new file mode 100644 index 00000000..afb4dd5c --- /dev/null +++ b/plugins/module_utils/api_endpoints/enums.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Enums used in api_endpoints. +""" +from enum import Enum + + +class VerbEnum(str, Enum): + """ + # Summary + + Enum for HTTP verb values used in endpoints. + + ## Members + + - GET: Represents the HTTP GET method. + - POST: Represents the HTTP POST method. + - PUT: Represents the HTTP PUT method. + - DELETE: Represents the HTTP DELETE method. 
+ """ + + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + + +class BooleanStringEnum(str, Enum): + """ + # Summary + + Enum for boolean string values used in query parameters. + + ## Members + + - TRUE: Represents the string "true". + - FALSE: Represents the string "false". + """ + + TRUE = "true" + FALSE = "false" \ No newline at end of file diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py new file mode 100644 index 00000000..de493e40 --- /dev/null +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +ND Infra AAA LocalUsers endpoint models. + +This module contains endpoint definitions for LocalUsers-related operations +in the ND Infra AAA API. +""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type # pylint: disable=invalid-name + +from typing import Literal, Union, Tuple, Any, Final +from mixins import LoginIdMixin +from enums import VerbEnum +from base import NDBaseSmartEndpoint, NDBasePath +from pydantic import Field + +IdentifierKey = Union[str, int, Tuple[Any, ...], None] + +class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): + """ + Base class for ND Infra AAA Local Users endpoints. + + Provides common functionality for all HTTP methods on the + /api/v1/infra/aaa/localUsers endpoint. + """ + + base_path: Final = NDBasePath.nd_infra_aaa("localUsers") + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path. 
+ + ## Returns + + - Complete endpoint path string, optionally including login_id + """ + if self.login_id is not None: + return NDBasePath.nd_infra_aaa("localUsers", self.login_id) + return self.base_path + + def set_identifiers(self, identifier: IdentifierKey = None): + self.login_id = identifier + + +class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users GET Endpoint + + ## Description + + Endpoint to retrieve local users from the ND Infra AAA service. + Optionally retrieve a specific local user by login_id. + + ## Path + + - /api/v1/infra/aaa/localUsers + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - GET + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersGet"] = Field( + default="EpApiV1InfraAaaLocalUsersGet", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.GET + + +class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users POST Endpoint + + ## Description + + Endpoint to create a local user in the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers + + ## Verb + + - POST + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( + default="EpApiV1InfraAaaLocalUsersPost", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.POST + + +class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users PUT Endpoint + + ## Description + + Endpoint to update a local user in the ND Infra AAA service. 
+ + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - PUT + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( + default="EpApiV1InfraAaaLocalUsersPut", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.PUT + + +class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users DELETE Endpoint + + ## Description + + Endpoint to delete a local user from the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - DELETE + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersDelete"] = Field( + default="EpApiV1InfraAaaLocalUsersDelete", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.DELETE diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py new file mode 100644 index 00000000..8ff3218f --- /dev/null +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Reusable mixin classes for endpoint models. + +This module provides mixin classes that can be composed to add common +fields to endpoint models without duplication. 
+""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type # pylint: disable=invalid-name + +from typing import TYPE_CHECKING, Optional +from pydantic import BaseModel, Field + + +class LoginIdMixin(BaseModel): + """Mixin for endpoints that require login_id parameter.""" + + login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py new file mode 100644 index 00000000..120ea475 --- /dev/null +++ b/plugins/module_utils/orchestrators/base.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ..models.base import NDBaseModel +from ..nd import NDModule +from ..api_endpoints.base import NDBaseSmartEndpoint +from typing import Dict, List, Any, Union, ClassVar, Type +from pydantic import BaseModel + + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] + + +# TODO: Revisit naming them "Orchestrator" +class NDBaseOrchestrator(BaseModel): + + model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] + + # NOTE: if not defined by subclasses, return an error as they are required + post_endpoint: NDBaseSmartEndpoint + put_endpoint: NDBaseSmartEndpoint + delete_endpoint: NDBaseSmartEndpoint + get_endpoint: NDBaseSmartEndpoint + + # NOTE: Module Field is always required + # TODO: Replace it with future sender + module: NDModule + + # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") + # TODO: Explore how to make them even more general + def create(self, model_instance: NDBaseModel) -> ResponseType: + if self.module.check_mode: + return model_instance.model_dump() + + try: + return self.module.request(path=self.post_endpoint.base_path, method=self.post_endpoint.verb, data=model_instance.model_dump()) + except Exception as e: + raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e + + def update(self, model_instance: NDBaseModel) -> ResponseType: + if self.module.check_mode: + return model_instance.model_dump() + + try: + self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.model_dump()) + except Exception as e: + raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + + def delete(self, model_instance: NDBaseModel) -> ResponseType: + if self.module.check_mode: + return model_instance.model_dump() + + try: + self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=self.delete_endpoint.path, method=self.delete_endpoint.verb) + except Exception as e: + raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e + + def query_one(self, model_instance: NDBaseModel) -> ResponseType: + try: + self.get_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=self.get_endpoint.path, method=self.get_endpoint.verb) + except Exception as e: + raise Exception(f"Query failed for {self.current_identifier}: {e}") from e + + def query_all(self) -> ResponseType: + try: + result = self.module.query_obj(self.get_endpoint.path) + return result or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/local_user.py 
b/plugins/module_utils/orchestrators/local_user.py new file mode 100644 index 00000000..b156512c --- /dev/null +++ b/plugins/module_utils/orchestrators/local_user.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from .base import NDBaseOrchestrator +from ..models.local_user import LocalUserModel +from typing import Dict, List, Any, Union, Type +from ..api_endpoints.local_user import ( + EpApiV1InfraAaaLocalUsersPost, + EpApiV1InfraAaaLocalUsersPut, + EpApiV1InfraAaaLocalUsersDelete, + EpApiV1InfraAaaLocalUsersGet, +) + + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] + +class LocalUserOrchestrator(NDBaseOrchestrator): + + model_class = Type[LocalUserModel] + + post_endpoint = EpApiV1InfraAaaLocalUsersPost() + put_endpoint = EpApiV1InfraAaaLocalUsersPut() + delete_endpoint = EpApiV1InfraAaaLocalUsersDelete() + get_endpoint = EpApiV1InfraAaaLocalUsersGet() + + def query_all(self): + """ + Custom query_all action to extract 'localusers' from response. + """ + try: + result = self.module.query_obj(self.get_endpoint.base_path) + return result.get("localusers", []) or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e + \ No newline at end of file From 4b65c259fa575a3a1b4e49fd44413774e861ea48 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 18 Feb 2026 01:23:37 -0500 Subject: [PATCH 010/131] [ignore] Modifiy models/local_user to take full advantage of Pydantic built-in functionalities. Slightly modify models/base.py to enforce identifiers definitions in NDBaseModel subclasses. Added multiple notes to assert next steps. 
--- plugins/module_utils/models/base.py | 48 ++++- plugins/module_utils/models/local_user.py | 216 ++++++++++++++-------- 2 files changed, 183 insertions(+), 81 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index a7eabf17..5a64c7a9 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -10,10 +10,11 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal +from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional from typing_extensions import Self +# TODO: Revisit identifiers strategy (low priority) class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. @@ -22,8 +23,9 @@ class NDBaseModel(BaseModel, ABC): - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) + - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ - + # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, @@ -31,14 +33,38 @@ class NDBaseModel(BaseModel, ABC): populate_by_name=True, extra='ignore' ) - - # Subclasses MUST define these - identifiers: ClassVar[List[str]] = [] - identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + + # TODO: Revisit identifiers strategy (low priority) + identifiers: ClassVar[Optional[List[str]]] = None + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = None # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] + + # TODO: Revisit it with identifiers strategy (low priority) + def __init_subclass__(cls, **kwargs): + """ + Enforce 
configuration for identifiers definition. + """ + super().__init_subclass__(**kwargs) + + # Skip enforcement for nested models + # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) + if cls.__name__ in ['NDNestedModel']: + return + + if not hasattr(cls, "identifiers") or cls.identifiers is None: + raise ValueError( + f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." + f"Example: `identifiers: ClassVar[Optional[List[str]]] = ['login_id']`" + ) + if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: + raise ValueError( + f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." + f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'none']]] = 'single'`" + ) + # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) @abstractmethod def to_payload(self) -> Dict[str, Any]: """ @@ -54,6 +80,8 @@ def from_response(cls, response: Dict[str, Any]) -> Self: """ pass + # TODO: Revisit this function when revisiting identifier strategy (low priority) + # TODO: Add condition when there is no identifiers (high priority) def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: """ Extract identifier value(s) from this instance: @@ -82,7 +110,7 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: missing.append(field) values.append(value) - # NOTE: might not be needed in the future with field_validator + # NOTE: might be redefined with Pydantic (low priority) if missing: raise ValueError( f"Composite identifier fields {missing} are None. " @@ -104,6 +132,7 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") + def to_diff_dict(self) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). 
@@ -114,12 +143,13 @@ def to_diff_dict(self) -> Dict[str, Any]: exclude=set(self.exclude_from_diff) ) -# NOTE: Maybe make it a seperate BaseModel +# TODO: Make it a seperated BaseModel (low priority) class NDNestedModel(NDBaseModel): """ Base for nested models without identifiers. """ + # TODO: Configuration Fields to be clearly defined here (low priority) identifiers: ClassVar[List[str]] = [] def to_payload(self) -> Dict[str, Any]: @@ -133,4 +163,4 @@ def from_response(cls, response: Dict[str, Any]) -> Self: """ Create model instance from API response. """ - return cls.model_validate(response) + return cls.model_validate(response, by_alias=True) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index b7069126..4be05991 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,15 +8,15 @@ __metaclass__ = type -from pydantic import Field, SecretStr +from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel -from models.base import NDBaseModel, NDNestedModel +from .base import NDBaseModel, NDNestedModel -# TODO: Move it to constants.py and import it +# TODO: Move it to constants.py and make a reverse class Map for this USER_ROLES_MAPPING = MappingProxyType({ "fabric_admin": "fabric-admin", "observer": "observer", @@ -31,11 +31,13 @@ class LocalUserSecurityDomainModel(NDNestedModel): """Security domain configuration for local user (nested model).""" # Fields - name: str - roles: Optional[List[str]] = None - - def to_payload(self) -> Dict[str, Any]: + name: str = Field(..., alias="name", exclude=True) + roles: Optional[List[str]] = 
Field(default=None, alias="roles", exclude=True) + + # -- Serialization (Model instance -> API payload) -- + @model_serializer() + def serialize_model(self) -> Dict: return { self.name: { "roles": [ @@ -44,22 +46,12 @@ def to_payload(self) -> Dict[str, Any]: ] } } - - @classmethod - def from_response(cls, name: str, domain_config: Dict[str, Any]) -> Self: - # NOTE: Maybe create a function from it to be moved to utils.py and to be imported - reverse_mapping = {value: key for key, value in USER_ROLES_MAPPING.items()} - - return cls( - name=name, - roles=[ - reverse_mapping.get(role, role) - for role in domain_config.get("roles", []) - ] - ) + # -- Deserialization (API response / Ansible payload -> Model instance) -- + # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed +# TODO: Add field validation (e.g. me, le, choices, etc...) (medium priority) class LocalUserModel(NDBaseModel): """ Local user configuration. @@ -68,73 +60,153 @@ class LocalUserModel(NDBaseModel): """ # Identifier configuration - identifiers: ClassVar[List[str]] = ["login_id"] - identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + # TODO: Revisit this identifiers strategy (low priority) + identifiers: ClassVar[Optional[List[str]]] = ["login_id"] + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "single" + + # Keys management configurations + # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] + unwanted_keys: ClassVar[List[List[str]]]= [ + ["passwordPolicy", "passwordChangeTime"], # Nested path + ["userID"] # Simple key + ] # Fields + # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec login_id: str = Field(..., alias="loginID") - email: Optional[str] = None + email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") user_password: Optional[SecretStr] = Field(default=None, alias="password") - reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") - time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") - security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="domains") + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation", exclude=True) + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation", exclude=True) + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="rbac") remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") - + + # -- Serialization (Model instance -> API payload) -- + + @computed_field(alias="passwordPolicy") + @property + def password_policy(self) -> Optional[Dict[str, int]]: + """Computed nested structure for API payload.""" + if self.reuse_limitation is None and self.time_interval_limitation is None: + return None + + policy = {} + if self.reuse_limitation is not None: + policy["reuseLimitation"] = self.reuse_limitation + if self.time_interval_limitation is not None: + policy["timeIntervalLimitation"] = self.time_interval_limitation + return policy + + @field_serializer("user_password") + def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: + return value.get_secret_value() if value else None + + + @field_serializer("security_domains") + def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> 
Optional[Dict]: + # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) + if not value: + return None + + domains_dict = {} + for domain in value: + domains_dict.update(domain.to_payload()) + + return { + "domains": domains_dict + } + + def to_payload(self) -> Dict[str, Any]: - payload = self.model_dump( - by_alias=True, - exclude={ - 'domains', - 'security_domains', - 'reuseLimitation', - 'reuse_limitation', - 'timeIntervalLimitation', - 'time_interval_limitation' - }, - exclude_none=True - ) + return self.model_dump(by_alias=True, exclude_none=True) - if self.user_password: - payload["password"] = self.user_password.get_secret_value() + # -- Deserialization (API response / Ansible payload -> Model instance) -- - if self.security_domains: - payload["rbac"] = {"domains": {}} - for domain in self.security_domains: - payload["rbac"]["domains"].update(domain.to_payload()) + @model_validator(mode="before") + @classmethod + def deserialize_password_policy(cls, data: Any) -> Any: + if not isinstance(data, dict): + return data - if self.reuse_limitation is not None or self.time_interval_limitation is not None: - payload["passwordPolicy"] = {} - if self.reuse_limitation is not None: - payload["passwordPolicy"]["reuseLimitation"] = self.reuse_limitation - if self.time_interval_limitation is not None: - payload["passwordPolicy"]["timeIntervalLimitation"] = self.time_interval_limitation + password_policy = data.get("passwordPolicy") - return payload - + if password_policy and isinstance(password_policy, dict): + if "reuseLimitation" in password_policy: + data["reuse_limitation"] = password_policy["reuseLimitation"] + if "timeIntervalLimitation" in password_policy: + data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] + + # Remove the nested structure from data to avoid conflicts + # (since it's a computed field, not a real field) + data.pop("passwordPolicy", None) + + return data + + @field_validator("security_domains", 
mode="before") @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - password_policy = response.get("passwordPolicy", {}) - rbac = response.get("rbac", {}) - domains = rbac.get("domains", {}) + def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: + if value is None: + return None + + # If already in list format (Ansible module representation), return as-is + if isinstance(value, list): + return value + + # If in the nested dict format (API representation) + if isinstance(value, dict) and "domains" in value: + domains_dict = value["domains"] + domains_list = [] + + for domain_name, domain_data in domains_dict.items(): + domains_list.append({ + "name": domain_name, + "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])] + }) + + return domains_list - security_domains = [ - LocalUserSecurityDomainModel.from_response(name, config) - for name, config in domains.items() - ] if domains else None + return value + + # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) + @classmethod + def from_response(cls, response: Dict[str, Any]) -> Self: + return cls.model_validate(response, by_alias=True) - return cls( - login_id=response.get("loginID"), - email=response.get("email"), - first_name=response.get("firstName"), - last_name=response.get("lastName"), - user_password=response.get("password"), - reuse_limitation=password_policy.get("reuseLimitation"), - time_interval_limitation=password_policy.get("timeIntervalLimitation"), - security_domains=security_domains, - remote_id_claim=response.get("remoteIDClaim"), - remote_user_authorization=response.get("xLaunch") + + # -- Extra -- + + # TODO: to generate from Fields (low priority) + def get_argument_spec(self): + return dict( + config=dict( + type="list", + elements="dict", + required=True, + options=dict( + email=dict(type="str"), + login_id=dict(type="str", required=True), + first_name=dict(type="str"), + 
last_name=dict(type="str"), + user_password=dict(type="str", no_log=True), + reuse_limitation=dict(type="int"), + time_interval_limitation=dict(type="int"), + security_domains=dict( + type="list", + elements="dict", + options=dict( + name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), + roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + ), + aliases=["domains"], + ), + remote_id_claim=dict(type="str"), + remote_user_authorization=dict(type="bool"), + ), + ), + override_exceptions=dict(type="list", elements="str"), + state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), ) From 229af5a80e6ac966c4cdcf3f7574713aeead6960 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 24 Feb 2026 12:57:37 -0500 Subject: [PATCH 011/131] [ignore] Adapt the Network Resource Module architecture for ND to smart endpoints and Pydantic models modification (works for merge and replace states). Add comments for next steps. 
--- plugins/module_utils/api_endpoints/base.py | 5 +- .../module_utils/api_endpoints/local_user.py | 1 + plugins/module_utils/models/base.py | 25 ++- plugins/module_utils/models/local_user.py | 12 +- plugins/module_utils/nd_config_collection.py | 76 ++------ plugins/module_utils/nd_network_resources.py | 163 ++++++------------ plugins/module_utils/orchestrators/base.py | 27 +-- .../module_utils/orchestrators/local_user.py | 12 +- plugins/module_utils/utils.py | 26 ++- plugins/modules/nd_local_user.py | 63 +------ 10 files changed, 159 insertions(+), 251 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 1a9cd768..747c3283 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -15,11 +15,14 @@ IdentifierKey = Union[str, int, Tuple[Any, ...], None] +# TODO: Rename it to APIEndpoint +# NOTE: This is a very minimalist endpoint package -> needs to be enhanced class NDBaseSmartEndpoint(BaseModel, ABC): # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) + # TODO: to remove base_path: str @abstractmethod @@ -34,7 +37,7 @@ def verb(self) -> str: # TODO: Maybe to be modifed to be more Pydantic # TODO: Maybe change function's name - # NOTE: function to set mixins fields from identifiers + # NOTE: function to set endpoints attribute fields from identifiers @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index de493e40..61f52ad8 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -31,6 +31,7 @@ class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): /api/v1/infra/aaa/localUsers endpoint. 
""" + # TODO: Remove it base_path: Final = NDBasePath.nd_infra_aaa("localUsers") @property diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 5a64c7a9..db7fd9ae 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -40,6 +40,7 @@ class NDBaseModel(BaseModel, ABC): # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] + unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) def __init_subclass__(cls, **kwargs): @@ -65,8 +66,9 @@ def __init_subclass__(cls, **kwargs): ) # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) + # NOTE: Should we use keyword arguments? @abstractmethod - def to_payload(self) -> Dict[str, Any]: + def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. """ @@ -74,7 +76,7 @@ def to_payload(self) -> Dict[str, Any]: @classmethod @abstractmethod - def from_response(cls, response: Dict[str, Any]) -> Self: + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: """ Create model instance from API response. """ @@ -142,6 +144,25 @@ def to_diff_dict(self) -> Dict[str, Any]: exclude_none=True, exclude=set(self.exclude_from_diff) ) + + # NOTE: initialize and return a deep copy of the instance? + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... 
+ def merge(self, other_model: "NDBaseModel") -> Self: + if not isinstance(other_model, type(self)): + # TODO: Change error message + return TypeError("models are not of the same type.") + + for field, value in other_model: + if value is None: + continue + + current_value = getattr(self, field) + if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): + setattr(self, field, current_value.merge(value)) + + else: + setattr(self, field, value) + return self # TODO: Make it a seperated BaseModel (low priority) class NDNestedModel(NDBaseModel): diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 4be05991..ea511097 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -67,14 +67,14 @@ class LocalUserModel(NDBaseModel): # Keys management configurations # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List[List[str]]]= [ + unwanted_keys: ClassVar[List]= [ ["passwordPolicy", "passwordChangeTime"], # Nested path ["userID"] # Simple key ] # Fields # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec - login_id: str = Field(..., alias="loginID") + login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") @@ -121,8 +121,8 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) } - def to_payload(self) -> Dict[str, Any]: - return self.model_dump(by_alias=True, exclude_none=True) + def to_payload(self, **kwargs) -> Dict[str, Any]: + return self.model_dump(by_alias=True, exclude_none=True, **kwargs) # -- Deserialization (API response / Ansible payload -> Model instance) -- @@ -173,8 +173,8 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - return cls.model_validate(response, by_alias=True) + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(response, by_alias=True, **kwargs) # -- Extra -- diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 2f256d30..a25287aa 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -12,24 +12,26 @@ from copy import deepcopy # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from models.base import NDBaseModel +from .models.base import NDBaseModel +from .utils import issubset # Type aliases # NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) +# TODO: Defined the same acros multiple files -> maybe move to constants.py IdentifierKey = Union[str, int, Tuple[Any, ...]] - +# TODO:Might make it a Pydantic RootModel 
(low priority but medium impact on NDNetworkResourceModule) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType]] = None): + def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): """ Initialize collection. """ - self._model_class = model_class + self._model_class: ModelType = model_class # Dual storage self._items: List[ModelType] = [] @@ -39,6 +41,7 @@ def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType] for item in items: self.add(item) + # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ Extract identifier key from item. @@ -48,6 +51,7 @@ def _extract_key(self, item: ModelType) -> IdentifierKey: except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e + # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" self._index.clear() @@ -105,8 +109,8 @@ def replace(self, item: ModelType) -> bool: self._items[index] = item return True - - def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[ModelType, ModelType], ModelType]] = None) -> ModelType: + + def merge(self, item: ModelType) -> ModelType: """ Merge item with existing, or add if not present. 
""" @@ -116,35 +120,11 @@ def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[Model if existing is None: self.add(item) return item - - # Custom or default merge - if custom_merge_function: - merged = custom_merge_function(existing, item) else: - # Default merge - existing_data = existing.model_dump() - new_data = item.model_dump(exclude_unset=True) - merged_data = self._deep_merge(existing_data, new_data) - merged = self._model_class.model_validate(merged_data) - + merged = existing.merge(item) self.replace(merged) return merged - - def _deep_merge(self, base: Dict, update: Dict) -> Dict: - """Recursively merge dictionaries.""" - result = base.copy() - - for key, value in update.items(): - if value is None: - continue - - if key in result and isinstance(result[key], dict) and isinstance(value, dict): - result[key] = self._deep_merge(result[key], value) - else: - result[key] = value - - return result - + def delete(self, key: IdentifierKey) -> bool: """ Delete item by identifier (O(n) operation due to index rebuild) @@ -161,6 +141,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations + # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
@@ -182,7 +163,7 @@ def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Unio existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) new_data = self._remove_unwanted_keys(new_data, unwanted_keys) - is_subset = self._issubset(new_data, existing_data) + is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" @@ -214,28 +195,7 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I other_keys = set(other.keys()) return list(current_keys - other_keys) - def _issubset(self, subset: Any, superset: Any) -> bool: - """Check if subset is contained in superset.""" - if type(subset) is not type(superset): - return False - - if not isinstance(subset, dict): - if isinstance(subset, list): - return all(item in superset for item in subset) - return subset == superset - - for key, value in subset.items(): - if value is None: - continue - - if key not in superset: - return False - - if not self._issubset(value, superset[key]): - return False - - return True - + # TODO: Maybe not necessary def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) @@ -282,8 +242,8 @@ def copy(self) -> "NDConfigCollection[ModelType]": items=deepcopy(self._items) ) - # Serialization - + # Collection Serialization + def to_list(self, **kwargs) -> List[Dict]: """ Export as list of dicts (with aliases). @@ -301,7 +261,7 @@ def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigC """ Create collection from list of dicts. 
""" - items = [model_class.model_validate(item_data) for item_data in data] + items = [model_class.model_validate(item_data, by_name=True) for item_data in data] return cls(model_class=model_class, items=items) @classmethod diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index ab7df9e2..d52fb9de 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -9,8 +9,9 @@ __metaclass__ = type from copy import deepcopy -from typing import Optional, List, Dict, Any, Callable, Literal +from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError +from ansible.module_utils.basic import AnsibleModule # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule @@ -20,36 +21,48 @@ from nd import NDModule from nd_config_collection import NDConfigCollection from models.base import NDBaseModel +from .orchestrators.base import NDBaseOrchestrator from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED - +# TODO: replace path and verbs with smart Endpoint (Top priority) +# TODO: Rename it (low priority) +# TODO: Revisit Deserialization in every method (high priority) class NDNetworkResourceModule(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_overwrite_map: Optional[Dict[str, Callable]] = None): + def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ + # TODO: Revisit Module initialization and configuration (medium priority). e.g., use instead: + # nd_module = NDModule() super().__init__(module) # Configuration - self.path = path + # TODO: make sure `model_class` is the same as the one in `model_orchestrator`. 
if not, error out (high priority) self.model_class = model_class - self.actions_overwrite_map = actions_overwrite_map or {} + self.model_orchestrator = model_orchestrator(module=module) + # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) + self.state = self.params["state"] + self.ansible_config = self.params["config"] + # Initialize collections + # TODO: Revisit collections initialization especially `init_all_data` (medium priority) + # TODO: Revisit class variables `previous`, `existing`, etc... (medium priority) + self.nd_config_collection = NDConfigCollection[model_class] try: - init_all_data = self._query_all() + init_all_data = self.model_orchestrator.query_all() - self.existing = NDConfigCollection.from_api_response( + self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, model_class=model_class ) - self.previous = NDConfigCollection(model_class=model_class) - self.proposed = NDConfigCollection(model_class=model_class) - self.sent = NDConfigCollection(model_class=model_class) + self.previous = self.nd_config_collection(model_class=model_class) + self.proposed = self.nd_config_collection(model_class=model_class) + self.sent = self.nd_config_collection(model_class=model_class) except Exception as e: self.fail_json( @@ -59,83 +72,10 @@ def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_ov # Operation tracking self.nd_logs: List[Dict[str, Any]] = [] - - # Current operation context - self.current_identifier = None - self.existing_config: Dict[str, Any] = {} - self.proposed_config: Dict[str, Any] = {} - - # Action Decorator - - @staticmethod - def actions_overwrite(action: str): - """ - Decorator to allow overriding default action operations. 
- """ - def decorator(func): - def wrapper(self, *args, **kwargs): - overwrite_action = self.actions_overwrite_map.get(action) - if callable(overwrite_action): - return overwrite_action(self, *args, **kwargs) - else: - return func(self, *args, **kwargs) - return wrapper - return decorator - - # Action Operations - - @actions_overwrite("create") - def _create(self) -> Optional[Dict[str, Any]]: - """ - Create a new configuration object. - """ - if self.module.check_mode: - return self.proposed_config - - try: - return self.request(path=self.path, method="POST", data=self.proposed_config) - except Exception as e: - raise Exception(f"Create failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("update") - def _update(self) -> Optional[Dict[str, Any]]: - """ - Update an existing configuration object. - """ - if self.module.check_mode: - return self.proposed_config - - try: - object_path = f"{self.path}/{self.current_identifier}" - return self.request(path=object_path, method="PUT", data=self.proposed_config) - except Exception as e: - raise Exception(f"Update failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("delete") - def _delete(self) -> None: - """Delete a configuration object.""" - if self.module.check_mode: - return - - try: - object_path = f"{self.path}/{self.current_identifier}" - self.request(path=object_path, method="DELETE") - except Exception as e: - raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("query_all") - def _query_all(self) -> List[Dict[str, Any]]: - """ - Query all configuration objects from device. 
- """ - try: - result = self.query_obj(self.path) - return result or [] - except Exception as e: - raise Exception(f"Query all failed: {e}") from e - + # Logging - + # NOTE: format log placeholder + # TODO: use a proper logger (low priority) def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: """ Create and append a log entry. @@ -159,20 +99,20 @@ def format_log(self, identifier, status: Literal["created", "updated", "deleted" self.nd_logs.append(log_entry) - # State Management - - def manage_state( - self, state: Literal["merged", "replaced", "overridden", "deleted"], new_configs: List[Dict[str, Any]], unwanted_keys: Optional[List] = None, override_exceptions: Optional[List] = None) -> None: + # State Management (core function) + # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) + def manage_state(self) -> None: """ Manage state according to desired configuration. 
""" unwanted_keys = unwanted_keys or [] - override_exceptions = override_exceptions or [] # Parse and validate configs + # TODO: move it to init() (top priority) + # TODO: Modify it if NDConfigCollection becomes a Pydantic RootModel (low priority) try: parsed_items = [] - for config in new_configs: + for config in self.ansible_config: try: # Parse config into model item = self.model_class.model_validate(config) @@ -186,7 +126,7 @@ def manage_state( return # Create proposed collection - self.proposed = NDConfigCollection( + self.proposed = self.nd_config_collection( model_class=self.model_class, items=parsed_items ) @@ -202,27 +142,29 @@ def manage_state( return # Execute state operations - if state in ["merged", "replaced", "overridden"]: - self._manage_create_update_state(state, unwanted_keys) + if self.state in ["merged", "replaced", "overridden"]: + self._manage_create_update_state() - if state == "overridden": - self._manage_override_deletions(override_exceptions) + if self.state == "overridden": + self._manage_override_deletions() - elif state == "deleted": + elif self.state == "deleted": self._manage_delete_state() + # TODO: not needed with Ansible `argument_spec` validation. Keep it for now but needs to be removed (low priority) else: - self.fail_json(msg=f"Invalid state: {state}") + self.fail_json(msg=f"Invalid state: {self.state}") - def _manage_create_update_state(self,state: Literal["merged", "replaced", "overridden"], unwanted_keys: List) -> None: + + def _manage_create_update_state(self) -> None: """ Handle merged/replaced/overridden states. 
""" for proposed_item in self.proposed: try: # Extract identifier + # TODO: Remove self.current_identifier, get it directly into the action functions identifier = proposed_item.get_identifier_value() - self.current_identifier = identifier existing_item = self.existing.get(identifier) self.existing_config = ( @@ -232,10 +174,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr ) # Determine diff status - diff_status = self.existing.get_diff_config( - proposed_item, - unwanted_keys=unwanted_keys - ) + diff_status = self.existing.get_diff_config(proposed_item) # No changes needed if diff_status == "no_diff": @@ -247,7 +186,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr continue # Prepare final config based on state - if state == "merged" and existing_item: + if self.state == "merged" and existing_item: # Merge with existing merged_item = self.existing.merge(proposed_item) final_item = merged_item @@ -264,16 +203,16 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr # Execute API operation if diff_status == "changed": - response = self._update() + response = self.model_orchestrator.update(final_item) operation_status = "updated" else: - response = self._create() + response = self.model_orchestrator.create(final_item) operation_status = "created" # Track sent payload if not self.module.check_mode: self.sent.add(final_item) - sent_payload = self.proposed_config + sent_payload = final_item else: sent_payload = None @@ -297,7 +236,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr after_data=self.existing_config ) - if not self.module.params.get("ignore_errors", False): + if not self.params.get("ignore_errors", False): self.fail_json( msg=error_msg, identifier=str(identifier), @@ -305,6 +244,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr ) return + # TODO: Refactor with orchestrator (Top priority) 
def _manage_override_deletions(self, override_exceptions: List) -> None: """ Delete items not in proposed config (for overridden state). @@ -351,6 +291,7 @@ def _manage_override_deletions(self, override_exceptions: List) -> None: ) return + # TODO: Refactor with orchestrator (Top priority) def _manage_delete_state(self) -> None: """Handle deleted state.""" for proposed_item in self.proposed: @@ -398,7 +339,7 @@ def _manage_delete_state(self) -> None: return # Output Formatting - + # TODO: move to separate Class (results) -> align it with rest_send PR def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.params.get("output_level", "normal") diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 120ea475..e2d9fa75 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -24,39 +24,43 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - post_endpoint: NDBaseSmartEndpoint - put_endpoint: NDBaseSmartEndpoint - delete_endpoint: NDBaseSmartEndpoint - get_endpoint: NDBaseSmartEndpoint + # TODO: change name from http method to crud (e.g. post -> create) + post_endpoint: Type[NDBaseSmartEndpoint] + put_endpoint: Type[NDBaseSmartEndpoint] + delete_endpoint: Type[NDBaseSmartEndpoint] + get_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender module: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore how to make them even more general + # TODO: Explore new ways to make them even more general + # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: - return self.module.request(path=self.post_endpoint.base_path, method=self.post_endpoint.verb, data=model_instance.model_dump()) + api_endpoint = self.post_endpoint() + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e + # TODO: Make the same changes as create() with local api_endpoint variable def update(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.model_dump()) + return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Update failed for {self.current_identifier}: {e}") from e def delete(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) @@ -71,7 +75,8 @@ def query_one(self, model_instance: NDBaseModel) -> ResponseType: except Exception as e: raise Exception(f"Query failed for {self.current_identifier}: {e}") from e - def query_all(self) -> ResponseType: + # TODO: Revisit the straegy around the query_all (see local_user's case) + def query_all(self, model_instance: 
NDBaseModel, **kwargs) -> ResponseType: try: result = self.module.query_obj(self.get_endpoint.path) return result or [] diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index b156512c..3810fa83 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -9,8 +9,10 @@ __metaclass__ = type from .base import NDBaseOrchestrator +from ..models.base import NDBaseModel from ..models.local_user import LocalUserModel from typing import Dict, List, Any, Union, Type +from ..api_endpoints.base import NDBaseSmartEndpoint from ..api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, @@ -23,12 +25,12 @@ class LocalUserOrchestrator(NDBaseOrchestrator): - model_class = Type[LocalUserModel] + model_class: Type[NDBaseModel] = LocalUserModel - post_endpoint = EpApiV1InfraAaaLocalUsersPost() - put_endpoint = EpApiV1InfraAaaLocalUsersPut() - delete_endpoint = EpApiV1InfraAaaLocalUsersDelete() - get_endpoint = EpApiV1InfraAaaLocalUsersGet() + post_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost + put_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete + get_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 5bf0a0f0..72ccbcd7 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -9,6 +9,7 @@ __metaclass__ = type from copy import deepcopy +from typing import Any def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): @@ -29,4 +30,27 @@ def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remo for index, item in enumerate(v): if isinstance(item, dict): result[k][index] = sanitize_dict(item, keys, 
values) - return result \ No newline at end of file + return result + + +def issubset(subset: Any, superset: Any) -> bool: + """Check if subset is contained in superset.""" + if type(subset) is not type(superset): + return False + + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + + for key, value in subset.items(): + if value is None: + continue + + if key not in superset: + return False + + if not issubset(value, superset[key]): + return False + + return True diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 3dcaf1a4..901549fb 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -180,53 +180,15 @@ # from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel # from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING -from module_utils.nd import nd_argument_spec -from module_utils.nd_network_resources import NDNetworkResourceModule -from module_utils.models.local_user import LocalUserModel -from module_utils.constants import USER_ROLES_MAPPING +from ..module_utils.nd import nd_argument_spec +from ..module_utils.nd_network_resources import NDNetworkResourceModule +from ..module_utils.models.local_user import LocalUserModel +from ..module_utils.orchestrators.local_user import LocalUserOrchestrator -# NOTE: Maybe Add the overwrite action in the LocalUserModel -def query_all_local_users(nd_module): - """ - Custom query_all action to extract 'localusers' from response. 
- """ - response = nd_module.query_obj(nd_module.path) - return response.get("localusers", []) - - -# NOTE: Maybe Add More aliases like in the LocalUserModel / Revisit the argmument_spec def main(): argument_spec = nd_argument_spec() - argument_spec.update( - config=dict( - type="list", - elements="dict", - required=True, - options=dict( - email=dict(type="str"), - login_id=dict(type="str", required=True), - first_name=dict(type="str"), - last_name=dict(type="str"), - user_password=dict(type="str", no_log=True), - reuse_limitation=dict(type="int"), - time_interval_limitation=dict(type="int"), - security_domains=dict( - type="list", - elements="dict", - options=dict( - name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), - ), - aliases=["domains"], - ), - remote_id_claim=dict(type="str"), - remote_user_authorization=dict(type="bool"), - ), - ), - override_exceptions=dict(type="list", elements="str"), - state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), - ) + argument_spec.update(LocalUserModel.get_argument_spec()) module = AnsibleModule( argument_spec=argument_spec, @@ -237,23 +199,12 @@ def main(): # Create NDNetworkResourceModule with LocalUserModel nd_module = NDNetworkResourceModule( module=module, - path="/api/v1/infra/aaa/localUsers", model_class=LocalUserModel, - actions_overwrite_map={ - "query_all": query_all_local_users - } + model_orchestrator=LocalUserOrchestrator, ) # Manage state - nd_module.manage_state( - state=module.params["state"], - new_configs=module.params["config"], - unwanted_keys=[ - ["passwordPolicy", "passwordChangeTime"], # Nested path - ["userID"] # Simple key - ], - override_exceptions=module.params.get("override_exceptions") - ) + nd_module.manage_state() nd_module.exit_json() From b04a82bd123e566f5790a73c33e3ef7fc6d77ea4 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 25 
Feb 2026 08:24:28 -0500 Subject: [PATCH 012/131] [ignore] Default to none and update condition for regarding in models/base.py. --- plugins/module_utils/models/base.py | 8 +++++--- plugins/module_utils/models/local_user.py | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index db7fd9ae..4ddeacd0 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,6 +15,7 @@ # TODO: Revisit identifiers strategy (low priority) +# TODO: add kwargs to every sub method class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. @@ -26,6 +27,7 @@ class NDBaseModel(BaseModel, ABC): - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ # TODO: revisit initial Model Configurations (low priority) + # TODO: enable extra model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, @@ -36,7 +38,7 @@ class NDBaseModel(BaseModel, ABC): # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = None + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "none" # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] @@ -51,7 +53,7 @@ def __init_subclass__(cls, **kwargs): # Skip enforcement for nested models # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) - if cls.__name__ in ['NDNestedModel']: + if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return if not hasattr(cls, "identifiers") or cls.identifiers is None: @@ -146,7 +148,7 @@ def to_diff_dict(self) -> Dict[str, Any]: ) # NOTE: initialize and return a deep copy of the instance? 
- # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel") -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index ea511097..77307d07 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -74,6 +74,7 @@ class LocalUserModel(NDBaseModel): # Fields # NOTE: `alias` are NOT the ansible aliases. they are the equivalent attribute's names from the API spec + # TODO: use extra for generating argument_spec (low priority) login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") From 48b62be0d33dfd494bf1b94cc974596130ddf208 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:38:50 -0500 Subject: [PATCH 013/131] [ignore] Add choice for when no identifier is needed. 
Add quick comments and changes to models/local_user.py and api_endpoints/base.py --- plugins/module_utils/api_endpoints/base.py | 6 ++--- plugins/module_utils/models/base.py | 29 +++++++++++----------- plugins/module_utils/models/local_user.py | 6 ++--- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 747c3283..90ef5c87 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -35,9 +35,9 @@ def path(self) -> str: def verb(self) -> str: pass - # TODO: Maybe to be modifed to be more Pydantic - # TODO: Maybe change function's name - # NOTE: function to set endpoints attribute fields from identifiers + # TODO: Maybe to be modifed to be more Pydantic (low priority) + # TODO: Maybe change function's name (low priority) + # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 4ddeacd0..159acb93 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,7 +15,6 @@ # TODO: Revisit identifiers strategy (low priority) -# TODO: add kwargs to every sub method class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
@@ -24,24 +23,24 @@ class NDBaseModel(BaseModel, ABC): - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) + - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ # TODO: revisit initial Model Configurations (low priority) - # TODO: enable extra model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, - extra='ignore' + extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "none" + # TODO: Rvisit no identifiers strategy naming (`singleton`) (low priority) + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" # Optional: fields to exclude from diffs (e.g., passwords) - exclude_from_diff: ClassVar[List[str]] = [] + exclude_from_diff: ClassVar[List] = [] unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) @@ -52,7 +51,7 @@ def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) # Skip enforcement for nested models - # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) + # TODO: Remove if `NDNestedModel` is a separated BaseModel (low conditional priority) if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -64,11 +63,10 @@ def __init_subclass__(cls, **kwargs): if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: raise 
ValueError( f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." - f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'none']]] = 'single'`" + f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" ) # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) - # NOTE: Should we use keyword arguments? @abstractmethod def to_payload(self, **kwargs) -> Dict[str, Any]: """ @@ -85,16 +83,15 @@ def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: pass # TODO: Revisit this function when revisiting identifier strategy (low priority) - # TODO: Add condition when there is no identifiers (high priority) - def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: + def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ Extract identifier value(s) from this instance: - single identifier: Returns field value. - composite identifiers: Returns tuple of all field values. - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. 
""" - if not self.identifiers: - raise ValueError(f"{self.__class__.__name__} has no identifiers defined") + if not self.identifiers and self.identifier_strategy != "singleton": + raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") if self.identifier_strategy == "single": value = getattr(self, self.identifiers[0], None) @@ -133,6 +130,10 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: f"No non-None value in hierarchical fields {self.identifiers}" ) + # TODO: Revisit condition when there is no identifiers (low priority) + elif self.identifier_strategy == "singleton": + return self.identifier_strategy + else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") @@ -166,7 +167,7 @@ def merge(self, other_model: "NDBaseModel") -> Self: setattr(self, field, value) return self -# TODO: Make it a seperated BaseModel (low priority) +# TODO: Make it a seperated BaseModel? (low conditional priority) class NDNestedModel(NDBaseModel): """ Base for nested models without identifiers. diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 77307d07..ed09666d 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -16,7 +16,7 @@ # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel from .base import NDBaseModel, NDNestedModel -# TODO: Move it to constants.py and make a reverse class Map for this +# TODO: Move it to constants.py and make a reverse class Map for this (low priority) USER_ROLES_MAPPING = MappingProxyType({ "fabric_admin": "fabric-admin", "observer": "observer", @@ -51,7 +51,7 @@ def serialize_model(self) -> Dict: # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed -# TODO: Add field validation (e.g. me, le, choices, etc...) 
(medium priority) +# TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) class LocalUserModel(NDBaseModel): """ Local user configuration. @@ -62,7 +62,7 @@ class LocalUserModel(NDBaseModel): # Identifier configuration # TODO: Revisit this identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = ["login_id"] - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "single" + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" # Keys management configurations # TODO: Revisit these configurations (low priority) From e2101f97683e37260bf943c899a711a2751d4775 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:42:12 -0500 Subject: [PATCH 014/131] [ignore] Complete orchestrators/base.py by making simple CRUD operations methods that work for single_identifier strategy (meant to be overridden if needed). --- plugins/module_utils/orchestrators/base.py | 48 ++++++++++--------- .../module_utils/orchestrators/local_user.py | 9 ++-- 2 files changed, 29 insertions(+), 28 deletions(-) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index e2d9fa75..611f39a6 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -24,61 +24,63 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - # TODO: change name from http method to crud (e.g. 
post -> create) - post_endpoint: Type[NDBaseSmartEndpoint] - put_endpoint: Type[NDBaseSmartEndpoint] + create_endpoint: Type[NDBaseSmartEndpoint] + update_endpoint: Type[NDBaseSmartEndpoint] delete_endpoint: Type[NDBaseSmartEndpoint] - get_endpoint: Type[NDBaseSmartEndpoint] + query_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required - # TODO: Replace it with future sender + # TODO: Replace it with future sender (low priority) module: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. "api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore new ways to make them even more general + # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) # TODO: Revisit Deserialization - def create(self, model_instance: NDBaseModel) -> ResponseType: + def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - api_endpoint = self.post_endpoint() + api_endpoint = self.create_endpoint() return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e - # TODO: Make the same changes as create() with local api_endpoint variable - def update(self, model_instance: NDBaseModel) -> ResponseType: + def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.to_payload()) + api_endpoint = self.update_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, 
data=model_instance.to_payload()) except Exception as e: - raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e - def delete(self, model_instance: NDBaseModel) -> ResponseType: + def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.delete_endpoint.path, method=self.delete_endpoint.verb) + api_endpoint = self.delete_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: - raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e + raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e - def query_one(self, model_instance: NDBaseModel) -> ResponseType: + def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - self.get_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.get_endpoint.path, method=self.get_endpoint.verb) + api_endpoint = self.query_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + self.query_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: - raise Exception(f"Query failed for {self.current_identifier}: {e}") from e + raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - result = 
self.module.query_obj(self.get_endpoint.path) + result = self.module.query_obj(self.query_endpoint.path) return result or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e \ No newline at end of file + raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 3810fa83..caacc5aa 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -27,18 +27,17 @@ class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - post_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost - put_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut + create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - get_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.module.query_obj(self.get_endpoint.base_path) + result = self.module.query_obj(self.query_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e - \ No newline at end of file From b41b5b8ce7f9d9b6582158087b2e69f975210ba6 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:44:23 -0500 Subject: [PATCH 015/131] [ignore] Fix and in nd_config_collections.py. Move to utils.py. 
--- plugins/module_utils/nd_config_collection.py | 42 +++----------------- plugins/module_utils/utils.py | 29 +++++++++++++- 2 files changed, 34 insertions(+), 37 deletions(-) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index a25287aa..fa6662c9 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -18,10 +18,10 @@ # Type aliases # NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) -# TODO: Defined the same acros multiple files -> maybe move to constants.py +# TODO: Defined the same acros multiple files -> maybe move to constants.py (low priority) IdentifierKey = Union[str, int, Tuple[Any, ...]] -# TODO:Might make it a Pydantic RootModel (low priority but medium impact on NDNetworkResourceModule) +# TODO: Make it a Pydantic RootModel? (low conditional priority but medium impact on NDNetworkResourceModule) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. @@ -59,7 +59,7 @@ def _rebuild_index(self) -> None: key = self._extract_key(item) self._index[key] = index - # Core CRUD Operations + # Core Operations def add(self, item: ModelType) -> IdentifierKey: """ @@ -142,7 +142,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) - def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: + def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
""" @@ -158,16 +158,12 @@ def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Unio existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() - - if unwanted_keys: - existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) - new_data = self._remove_unwanted_keys(new_data, unwanted_keys) is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" - def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> bool: + def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: """ Check if two collections differ. """ @@ -178,7 +174,7 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_k return True for item in other: - if self.get_diff_config(item, unwanted_keys) != "no_diff": + if self.get_diff_config(item) != "no_diff": return True for key in self.keys(): @@ -195,32 +191,6 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I other_keys = set(other.keys()) return list(current_keys - other_keys) - # TODO: Maybe not necessary - def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: - """Remove unwanted keys from dict (supports nested paths).""" - data = deepcopy(data) - - for key in unwanted_keys: - if isinstance(key, str): - if key in data: - del data[key] - - elif isinstance(key, list) and len(key) > 0: - try: - parent = data - for k in key[:-1]: - if isinstance(parent, dict) and k in parent: - parent = parent[k] - else: - break - else: - if isinstance(parent, dict) and key[-1] in parent: - del parent[key[-1]] - except (KeyError, TypeError, IndexError): - pass - - return data - # Collection Operations def __len__(self) -> int: diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 72ccbcd7..a7c1d3dc 100644 --- a/plugins/module_utils/utils.py +++ 
b/plugins/module_utils/utils.py @@ -9,7 +9,7 @@ __metaclass__ = type from copy import deepcopy -from typing import Any +from typing import Any, Dict, List, Union def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): @@ -54,3 +54,30 @@ def issubset(subset: Any, superset: Any) -> bool: return False return True + + +# TODO: Might not necessary with Pydantic validation and serialization built-in methods +def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: + """Remove unwanted keys from dict (supports nested paths).""" + data = deepcopy(data) + + for key in unwanted_keys: + if isinstance(key, str): + if key in data: + del data[key] + + elif isinstance(key, list) and len(key) > 0: + try: + parent = data + for k in key[:-1]: + if isinstance(parent, dict) and k in parent: + parent = parent[k] + else: + break + else: + if isinstance(parent, dict) and key[-1] in parent: + del parent[key[-1]] + except (KeyError, TypeError, IndexError): + pass + + return data From 9cdfa661ef894b911739e6400bf7ffe8ad9f7f28 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 14:01:24 -0500 Subject: [PATCH 016/131] [ignore] Rename NDNetworkResourceModule to NDStateMachine. Add file for NDNestedModel. Add types.file. Various Renaming and small Modifications across the repo. WIP. 
--- plugins/module_utils/api_endpoints/base.py | 2 +- .../module_utils/api_endpoints/local_user.py | 3 +- plugins/module_utils/constants.py | 21 ++++--- plugins/module_utils/models/base.py | 58 +++++++------------ plugins/module_utils/models/local_user.py | 28 ++++----- plugins/module_utils/models/nested.py | 22 +++++++ plugins/module_utils/nd.py | 5 -- plugins/module_utils/nd_config_collection.py | 28 +++++---- ...twork_resources.py => nd_state_machine.py} | 23 ++++---- plugins/module_utils/orchestrators/base.py | 8 +-- .../module_utils/orchestrators/local_user.py | 5 +- plugins/module_utils/types.py | 14 +++++ plugins/modules/nd_local_user.py | 7 +-- 13 files changed, 115 insertions(+), 109 deletions(-) create mode 100644 plugins/module_utils/models/nested.py rename plugins/module_utils/{nd_network_resources.py => nd_state_machine.py} (95%) create mode 100644 plugins/module_utils/types.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 90ef5c87..0355a1de 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -12,8 +12,8 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final, Union, Tuple, Any +from ..types import IdentifierKey -IdentifierKey = Union[str, int, Tuple[Any, ...], None] # TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 61f52ad8..666782ab 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -20,8 +20,7 @@ from enums import VerbEnum from base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field - -IdentifierKey = Union[str, int, Tuple[Any, ...], None] +from ..types import IdentifierKey class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, 
NDBaseSmartEndpoint): """ diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index cbba61b3..7bb7e95d 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -9,6 +9,18 @@ __metaclass__ = type +from typing import Dict +from types import MappingProxyType +from copy import deepcopy + +class NDConstantMapping(Dict): + + def __init__(self, data: Dict): + new_dict = deepcopy(data) + for k,v in data.items(): + new_dict[v] = k + return MappingProxyType(new_dict) + OBJECT_TYPES = { "tenant": "OST_TENANT", "vrf": "OST_VRF", @@ -175,12 +187,3 @@ ND_SETUP_NODE_DEPLOYMENT_TYPE = {"physical": "cimc", "virtual": "vnode"} BACKUP_TYPE = {"config_only": "config-only", None: "config-only", "": "config-only", "full": "full"} - -USER_ROLES_MAPPING = { - "fabric_admin": "fabric-admin", - "observer": "observer", - "super_admin": "super-admin", - "support_engineer": "support-engineer", - "approver": "approver", - "designer": "designer", -} diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 159acb93..ca672fd5 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,6 +15,7 @@ # TODO: Revisit identifiers strategy (low priority) +# NOTE: what about List of NestedModels? -> make it a separate Sub Model class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
@@ -36,11 +37,12 @@ class NDBaseModel(BaseModel, ABC): # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - # TODO: Rvisit no identifiers strategy naming (`singleton`) (low priority) + # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List] = [] + # TODO: To be removed in the future (see local_user model) unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) @@ -51,7 +53,6 @@ def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) # Skip enforcement for nested models - # TODO: Remove if `NDNestedModel` is a separated BaseModel (low conditional priority) if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -65,22 +66,26 @@ def __init_subclass__(cls, **kwargs): f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" ) - - # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) - @abstractmethod + def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. """ - pass + return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - @classmethod - @abstractmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + def to_config(self, **kwargs) -> Dict[str, Any]: """ - Create model instance from API response. + Convert model to Ansible config format. 
""" - pass + return self.model_dump(by_name=True, exclude_none=True, **kwargs) + + @classmethod + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(response, by_alias=True, **kwargs) + + @classmethod + def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(ansible_config, by_name=True, **kwargs) # TODO: Revisit this function when revisiting identifier strategy (low priority) def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: @@ -132,25 +137,26 @@ def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: # TODO: Revisit condition when there is no identifiers (low priority) elif self.identifier_strategy == "singleton": - return self.identifier_strategy + return None else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") - def to_diff_dict(self) -> Dict[str, Any]: + def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ return self.model_dump( by_alias=True, exclude_none=True, - exclude=set(self.exclude_from_diff) + exclude=set(self.exclude_from_diff), + **kwargs ) # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace - def merge(self, other_model: "NDBaseModel") -> Self: + def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") @@ -166,25 +172,3 @@ def merge(self, other_model: "NDBaseModel") -> Self: else: setattr(self, field, value) return self - -# TODO: Make it a seperated BaseModel? (low conditional priority) -class NDNestedModel(NDBaseModel): - """ - Base for nested models without identifiers. 
- """ - - # TODO: Configuration Fields to be clearly defined here (low priority) - identifiers: ClassVar[List[str]] = [] - - def to_payload(self) -> Dict[str, Any]: - """ - Convert model to API payload format. - """ - return self.model_dump(by_alias=True, exclude_none=True) - - @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - """ - Create model instance from API response. - """ - return cls.model_validate(response, by_alias=True) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index ed09666d..dba35aee 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -13,11 +13,14 @@ from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel -from .base import NDBaseModel, NDNestedModel - -# TODO: Move it to constants.py and make a reverse class Map for this (low priority) -USER_ROLES_MAPPING = MappingProxyType({ +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from .base import NDBaseModel +from .nested import NDNestedModel +from ..constants import NDConstantMapping + +# Constant defined here as it is only used in this model +USER_ROLES_MAPPING = NDConstantMapping({ "fabric_admin": "fabric-admin", "observer": "observer", "super_admin": "super-admin", @@ -31,7 +34,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): """Security domain configuration for local user (nested model).""" # Fields - name: str = Field(..., alias="name", exclude=True) + name: str = Field(alias="name", exclude=True) roles: Optional[List[str]] = Field(default=None, alias="roles", exclude=True) # -- Serialization (Model 
instance -> API payload) -- @@ -47,8 +50,7 @@ def serialize_model(self) -> Dict: } } - # -- Deserialization (API response / Ansible payload -> Model instance) -- - # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed + # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity # TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) @@ -121,10 +123,6 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) "domains": domains_dict } - - def to_payload(self, **kwargs) -> Dict[str, Any]: - return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - # -- Deserialization (API response / Ansible payload -> Model instance) -- @model_validator(mode="before") @@ -172,12 +170,6 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: return value - # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) - @classmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: - return cls.model_validate(response, by_alias=True, **kwargs) - - # -- Extra -- # TODO: to generate from Fields (low priority) diff --git a/plugins/module_utils/models/nested.py b/plugins/module_utils/models/nested.py new file mode 100644 index 00000000..f2560819 --- /dev/null +++ b/plugins/module_utils/models/nested.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import List, ClassVar +from .base import NDBaseModel + + +class NDNestedModel(NDBaseModel): + """ + Base for nested models without identifiers. 
+ """ + + # NOTE: model_config, ClassVar, and Fields can be overwritten here if needed + + identifiers: ClassVar[List[str]] = [] diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 5f528bb8..07af68e5 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -239,13 +239,8 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: -<<<<<<< HEAD if data: info = self.connection.send_request(method, uri, json.dumps(data)) -======= - if data is not None: - info = conn.send_request(method, uri, json.dumps(data)) ->>>>>>> 7c967c3 ([minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher.) else: info = self.connection.send_request(method, uri) self.result["data"] = data diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index fa6662c9..364519b8 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -14,14 +14,12 @@ # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from .models.base import NDBaseModel from .utils import issubset +from .types import IdentifierKey # Type aliases -# NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) -# TODO: Defined the same acros multiple files -> maybe move to constants.py (low priority) -IdentifierKey = Union[str, int, Tuple[Any, ...]] -# TODO: Make it a Pydantic RootModel? (low conditional priority but medium impact on NDNetworkResourceModule) + class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. 
@@ -156,9 +154,9 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" + # TODO: make a diff class level method for NDBaseModel existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() - is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" @@ -214,30 +212,30 @@ def copy(self) -> "NDConfigCollection[ModelType]": # Collection Serialization - def to_list(self, **kwargs) -> List[Dict]: + def to_ansible_config(self, **kwargs) -> List[Dict]: """ - Export as list of dicts (with aliases). + Export as an Ansible config. """ - return [item.model_dump(by_alias=True, exclude_none=True, **kwargs) for item in self._items] + return [item.to_config(**kwargs) for item in self._items] - def to_payload_list(self) -> List[Dict[str, Any]]: + def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: """ Export as list of API payloads. """ - return [item.to_payload() for item in self._items] + return [item.to_payload(**kwargs) for item in self._items] @classmethod - def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ - Create collection from list of dicts. + Create collection from Ansible config. """ - items = [model_class.model_validate(item_data, by_name=True) for item_data in data] + items = [model_class.from_config(item_data, **kwargs) for item_data in data] return cls(model_class=model_class, items=items) @classmethod - def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ Create collection from API response. 
""" - items = [model_class.from_response(item_data) for item_data in response_data] + items = [model_class.from_response(item_data, **kwargs) for item_data in response_data] return cls(model_class=model_class, items=items) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_state_machine.py similarity index 95% rename from plugins/module_utils/nd_network_resources.py rename to plugins/module_utils/nd_state_machine.py index d52fb9de..5306bfe8 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_state_machine.py @@ -24,26 +24,24 @@ from .orchestrators.base import NDBaseOrchestrator from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: replace path and verbs with smart Endpoint (Top priority) -# TODO: Rename it (low priority) + # TODO: Revisit Deserialization in every method (high priority) -class NDNetworkResourceModule(NDModule): +class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_orchestrator: Type[NDBaseOrchestrator]): + def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ # TODO: Revisit Module initialization and configuration (medium priority). e.g., use instead: # nd_module = NDModule() super().__init__(module) - + # Configuration - # TODO: make sure `model_class` is the same as the one in `model_orchestrator`. 
if not, error out (high priority) - self.model_class = model_class self.model_orchestrator = model_orchestrator(module=module) + self.model_class = self.model_orchestrator.model_class # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) self.state = self.params["state"] self.ansible_config = self.params["config"] @@ -52,17 +50,17 @@ def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_ # Initialize collections # TODO: Revisit collections initialization especially `init_all_data` (medium priority) # TODO: Revisit class variables `previous`, `existing`, etc... (medium priority) - self.nd_config_collection = NDConfigCollection[model_class] + self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, - model_class=model_class + model_class=self.model_class ) - self.previous = self.nd_config_collection(model_class=model_class) - self.proposed = self.nd_config_collection(model_class=model_class) - self.sent = self.nd_config_collection(model_class=model_class) + self.previous = self.nd_config_collection(model_class=self.model_class) + self.proposed = self.nd_config_collection(model_class=self.model_class) + self.sent = self.nd_config_collection(model_class=self.model_class) except Exception as e: self.fail_json( @@ -340,6 +338,7 @@ def _manage_delete_state(self) -> None: # Output Formatting # TODO: move to separate Class (results) -> align it with rest_send PR + # TODO: return a defined ordered list of config (for integration test) def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.params.get("output_level", "normal") diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 611f39a6..db72b740 100644 --- 
a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -27,7 +27,8 @@ class NDBaseOrchestrator(BaseModel): create_endpoint: Type[NDBaseSmartEndpoint] update_endpoint: Type[NDBaseSmartEndpoint] delete_endpoint: Type[NDBaseSmartEndpoint] - query_endpoint: Type[NDBaseSmartEndpoint] + query_one_endpoint: Type[NDBaseSmartEndpoint] + query_all_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) @@ -70,9 +71,8 @@ def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - api_endpoint = self.query_endpoint() + api_endpoint = self.query_one_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - self.query_endpoint.set_identifiers(model_instance.get_identifier_value()) return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e @@ -80,7 +80,7 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - result = self.module.query_obj(self.query_endpoint.path) + result = self.module.query_obj(self.query_all_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index caacc5aa..ef2aa36a 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -30,14 +30,15 @@ class LocalUserOrchestrator(NDBaseOrchestrator): create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost update_endpoint: 
Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - query_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.module.query_obj(self.query_endpoint.base_path) + result = self.module.query_obj(self.query_all_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/types.py b/plugins/module_utils/types.py new file mode 100644 index 00000000..124aedd5 --- /dev/null +++ b/plugins/module_utils/types.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Any, Union, Tuple + + +IdentifierKey = Union[str, int, Tuple[Any, ...]] diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 901549fb..67fb3e80 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -181,7 +181,7 @@ # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel # from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING from ..module_utils.nd import nd_argument_spec -from ..module_utils.nd_network_resources import NDNetworkResourceModule +from ..module_utils.nd_state_machine import NDStateMachine from ..module_utils.models.local_user import LocalUserModel from ..module_utils.orchestrators.local_user import LocalUserOrchestrator @@ -194,12 +194,11 @@ def 
main(): argument_spec=argument_spec, supports_check_mode=True, ) - + try: # Create NDNetworkResourceModule with LocalUserModel - nd_module = NDNetworkResourceModule( + nd_module = NDStateMachine( module=module, - model_class=LocalUserModel, model_orchestrator=LocalUserOrchestrator, ) From df99ff3d086459f6310a0f17c94748176552ebec Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 14:09:18 -0500 Subject: [PATCH 017/131] [ignore] Make a small change to NDModule request function. --- plugins/module_utils/nd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 07af68e5..42b1b118 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -239,7 +239,7 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: - if data: + if data is not None: info = self.connection.send_request(method, uri, json.dumps(data)) else: info = self.connection.send_request(method, uri) From 96dde6cd8decb2e0a7ddf424cc3ac2cd3b5c00e2 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Mon, 2 Mar 2026 17:59:17 -0500 Subject: [PATCH 018/131] [ignore] Modify nd_state_machine to work with orchestrators/models/api_endpoints. Adapt api_endpoints, models, orchestrators accordingly. Integration Tests passing for nd_local_user module. Still WIP. 
--- plugins/module_utils/api_endpoints/base.py | 6 +- .../module_utils/api_endpoints/local_user.py | 6 +- plugins/module_utils/constants.py | 9 +- plugins/module_utils/models/base.py | 3 +- plugins/module_utils/models/local_user.py | 5 +- plugins/module_utils/nd_state_machine.py | 237 ++++++++---------- plugins/module_utils/orchestrators/base.py | 34 ++- .../module_utils/orchestrators/local_user.py | 2 +- plugins/modules/nd_local_user.py | 4 +- requirements.txt | 3 +- .../network-integration.requirements.txt | 3 +- 11 files changed, 140 insertions(+), 172 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 0355a1de..832476ed 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -25,13 +25,13 @@ class NDBaseSmartEndpoint(BaseModel, ABC): # TODO: to remove base_path: str - @abstractmethod @property + @abstractmethod def path(self) -> str: pass - - @abstractmethod + @property + @abstractmethod def verb(self) -> str: pass diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 666782ab..cae1326b 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -16,9 +16,9 @@ __metaclass__ = type # pylint: disable=invalid-name from typing import Literal, Union, Tuple, Any, Final -from mixins import LoginIdMixin -from enums import VerbEnum -from base import NDBaseSmartEndpoint, NDBasePath +from .mixins import LoginIdMixin +from .enums import VerbEnum +from .base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field from ..types import IdentifierKey diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 7bb7e95d..784a7f51 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -16,10 +16,13 @@ class NDConstantMapping(Dict): def __init__(self, data: Dict): - 
new_dict = deepcopy(data) + self.new_dict = deepcopy(data) for k,v in data.items(): - new_dict[v] = k - return MappingProxyType(new_dict) + self.new_dict[v] = k + self.new_dict = MappingProxyType(self.new_dict) + + def get_dict(self): + return self.new_dict OBJECT_TYPES = { "tenant": "OST_TENANT", diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index ca672fd5..7b569a58 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -32,6 +32,7 @@ class NDBaseModel(BaseModel, ABC): use_enum_values=True, validate_assignment=True, populate_by_name=True, + arbitrary_types_allowed=True, extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) @@ -77,7 +78,7 @@ def to_config(self, **kwargs) -> Dict[str, Any]: """ Convert model to Ansible config format. """ - return self.model_dump(by_name=True, exclude_none=True, **kwargs) + return self.model_dump(by_alias=False, exclude_none=True, **kwargs) @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index dba35aee..713d6040 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -27,7 +27,7 @@ "support_engineer": "support-engineer", "approver": "approver", "designer": "designer", -}) +}).get_dict() class LocalUserSecurityDomainModel(NDNestedModel): @@ -173,7 +173,8 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # -- Extra -- # TODO: to generate from Fields (low priority) - def get_argument_spec(self): + @classmethod + def get_argument_spec(cls) -> Dict: return dict( config=dict( type="list", diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 5306bfe8..5b1f770c 100644 --- a/plugins/module_utils/nd_state_machine.py +++ 
b/plugins/module_utils/nd_state_machine.py @@ -16,16 +16,16 @@ # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule # from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -# from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +# from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey # from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -from nd import NDModule -from nd_config_collection import NDConfigCollection -from models.base import NDBaseModel +from .nd import NDModule +from .nd_config_collection import NDConfigCollection from .orchestrators.base import NDBaseOrchestrator -from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from .types import IdentifierKey +from .constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: Revisit Deserialization in every method (high priority) class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. @@ -35,16 +35,21 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ Initialize the Network Resource Module. """ - # TODO: Revisit Module initialization and configuration (medium priority). 
e.g., use instead: + # TODO: Revisit Module initialization and configuration # nd_module = NDModule() - super().__init__(module) + self.module = module + self.nd_module = NDModule(module) + + # Operation tracking + self.nd_logs: List[Dict[str, Any]] = [] + self.result: Dict[str, Any] = {"changed": False} # Configuration - self.model_orchestrator = model_orchestrator(module=module) + self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) - self.state = self.params["state"] - self.ansible_config = self.params["config"] + self.state = self.module.params["state"] + self.ansible_config = self.module.params.get("config", []) # Initialize collections @@ -53,46 +58,64 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() - + self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, model_class=self.model_class ) - self.previous = self.nd_config_collection(model_class=self.model_class) + # Save previous state + self.previous = self.existing.copy() self.proposed = self.nd_config_collection(model_class=self.model_class) self.sent = self.nd_config_collection(model_class=self.model_class) - + + for config in self.ansible_config: + try: + # Parse config into model + item = self.model_class.from_config(config) + self.proposed.add(item) + except ValidationError as e: + self.fail_json( + msg=f"Invalid configuration: {e}", + config=config, + validation_errors=e.errors() + ) + return + except Exception as e: self.fail_json( msg=f"Initialization failed: {str(e)}", error=str(e) ) - - # Operation tracking - self.nd_logs: List[Dict[str, Any]] = [] # Logging # NOTE: format log placeholder # TODO: use a proper logger (low priority) - 
def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: + def format_log( + self, + identifier: IdentifierKey, + operation_status: Literal["no_change", "created", "updated", "deleted"], + before: Optional[Dict[str, Any]] = None, + after: Optional[Dict[str, Any]] = None, + payload: Optional[Dict[str, Any]] = None, + ) -> None: """ Create and append a log entry. """ log_entry = { "identifier": identifier, - "status": status, - "before": deepcopy(self.existing_config), - "after": deepcopy(after_data) if after_data is not None else self.existing_config, - "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {} + "operation_status": operation_status, + "before": before, + "after": after, + "payload": payload, } # Add HTTP details if not in check mode - if not self.module.check_mode and self.url is not None: + if not self.module.check_mode and self.nd_module.url is not None: log_entry.update({ - "method": self.method, - "response": self.response, - "status": self.status, - "url": self.url + "method": self.nd_module.method, + "response": self.nd_module.response, + "status": self.nd_module.status, + "url": self.nd_module.url }) self.nd_logs.append(log_entry) @@ -103,42 +126,6 @@ def manage_state(self) -> None: """ Manage state according to desired configuration. 
""" - unwanted_keys = unwanted_keys or [] - - # Parse and validate configs - # TODO: move it to init() (top priority) - # TODO: Modify it if NDConfigCollection becomes a Pydantic RootModel (low priority) - try: - parsed_items = [] - for config in self.ansible_config: - try: - # Parse config into model - item = self.model_class.model_validate(config) - parsed_items.append(item) - except ValidationError as e: - self.fail_json( - msg=f"Invalid configuration: {e}", - config=config, - validation_errors=e.errors() - ) - return - - # Create proposed collection - self.proposed = self.nd_config_collection( - model_class=self.model_class, - items=parsed_items - ) - - # Save previous state - self.previous = self.existing.copy() - - except Exception as e: - self.fail_json( - msg=f"Failed to prepare configurations: {e}", - error=str(e) - ) - return - # Execute state operations if self.state in ["merged", "replaced", "overridden"]: self._manage_create_update_state() @@ -159,18 +146,10 @@ def _manage_create_update_state(self) -> None: Handle merged/replaced/overridden states. 
""" for proposed_item in self.proposed: + # Extract identifier + identifier = proposed_item.get_identifier_value() + existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: - # Extract identifier - # TODO: Remove self.current_identifier, get it directly into the action functions - identifier = proposed_item.get_identifier_value() - - existing_item = self.existing.get(identifier) - self.existing_config = ( - existing_item.model_dump(by_alias=True, exclude_none=True) - if existing_item - else {} - ) - # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) @@ -178,51 +157,44 @@ def _manage_create_update_state(self) -> None: if diff_status == "no_diff": self.format_log( identifier=identifier, - status="no_change", - after_data=self.existing_config + operation_status="no_change", + before=existing_config, + after=existing_config, ) continue # Prepare final config based on state - if self.state == "merged" and existing_item: + if self.state == "merged": # Merge with existing merged_item = self.existing.merge(proposed_item) final_item = merged_item else: # Replace or create - if existing_item: + if diff_status == "changed": self.existing.replace(proposed_item) else: self.existing.add(proposed_item) final_item = proposed_item - - # Convert to API payload - self.proposed_config = final_item.to_payload() - + # Execute API operation if diff_status == "changed": - response = self.model_orchestrator.update(final_item) + if not self.module.check_mode: + response = self.model_orchestrator.update(final_item) + self.sent.add(final_item) operation_status = "updated" - else: - response = self.model_orchestrator.create(final_item) + elif diff_status == "new": + if not self.module.check_mode: + response = self.model_orchestrator.create(final_item) + self.sent.add(final_item) operation_status = "created" - # Track sent payload - if not self.module.check_mode: - self.sent.add(final_item) - sent_payload = 
final_item - else: - sent_payload = None - # Log operation self.format_log( identifier=identifier, - status=operation_status, - after_data=( - response if not self.module.check_mode - else final_item.model_dump(by_alias=True, exclude_none=True) - ), - sent_payload_data=sent_payload + operation_status=operation_status, + before=existing_config, + after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), + payload=final_item.to_payload(), ) except Exception as e: @@ -230,11 +202,12 @@ def _manage_create_update_state(self) -> None: self.format_log( identifier=identifier, - status="no_change", - after_data=self.existing_config + operation_status="no_change", + before=existing_config, + after=existing_config, ) - if not self.params.get("ignore_errors", False): + if not self.module.params.get("ignore_errors", False): self.fail_json( msg=error_msg, identifier=str(identifier), @@ -243,30 +216,21 @@ def _manage_create_update_state(self) -> None: return # TODO: Refactor with orchestrator (Top priority) - def _manage_override_deletions(self, override_exceptions: List) -> None: + def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" diff_identifiers = self.previous.get_diff_identifiers(self.proposed) for identifier in diff_identifiers: - if identifier in override_exceptions: - continue - try: - self.current_identifier = identifier - existing_item = self.existing.get(identifier) if not existing_item: continue - - self.existing_config = existing_item.model_dump( - by_alias=True, - exclude_none=True - ) - + # Execute delete - self._delete() + if not self.module.check_mode: + response = self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) @@ -274,8 +238,10 @@ def _manage_override_deletions(self, override_exceptions: List) -> None: # Log deletion self.format_log( identifier=identifier, - status="deleted", - after_data={} + operation_status="deleted", + before=existing_item.to_config(), + after={}, + ) except Exception as e: @@ -295,25 +261,21 @@ def _manage_delete_state(self) -> None: for proposed_item in self.proposed: try: identifier = proposed_item.get_identifier_value() - self.current_identifier = identifier existing_item = self.existing.get(identifier) if not existing_item: # Already deleted or doesn't exist self.format_log( identifier=identifier, - status="no_change", - after_data={} + operation_status="no_change", + before={}, + after={}, ) continue - self.existing_config = existing_item.model_dump( - by_alias=True, - exclude_none=True - ) - # Execute delete - self._delete() + if not self.module.check_mode: + response = self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) @@ -321,8 +283,9 @@ def _manage_delete_state(self) -> None: # Log deletion self.format_log( identifier=identifier, - status="deleted", - after_data={} + operation_status="deleted", + before=existing_item.to_config(), + after={}, ) except Exception as e: @@ -341,35 +304,35 @@ def _manage_delete_state(self) -> None: # TODO: return a defined ordered list of config (for integration test) def add_logs_and_outputs(self) -> 
None: """Add logs and outputs to module result based on output_level.""" - output_level = self.params.get("output_level", "normal") - state = self.params.get("state") + output_level = self.module.params.get("output_level", "normal") + state = self.module.params.get("state") # Add previous state for certain states and output levels if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if output_level in ("debug", "info"): - self.result["previous"] = self.previous.to_list() + self.result["previous"] = self.previous.to_ansible_config() # Check if there were changes - if not self.has_modified and self.previous.get_diff_collection(self.existing): + if self.previous.get_diff_collection(self.existing): self.result["changed"] = True # Add stdout if present - if self.stdout: - self.result["stdout"] = self.stdout + if self.nd_module.stdout: + self.result["stdout"] = self.nd_module.stdout # Add debug information if output_level == "debug": self.result["nd_logs"] = self.nd_logs - if self.url is not None: - self.result["httpapi_logs"] = self.httpapi_logs + if self.nd_module.url is not None: + self.result["httpapi_logs"] = self.nd_module.httpapi_logs if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent.to_payload_list() - self.result["proposed"] = self.proposed.to_list() + self.result["proposed"] = self.proposed.to_ansible_config() # Always include current state - self.result["current"] = self.existing.to_list() + self.result["current"] = self.existing.to_ansible_config() # Module Exit Methods diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index db72b740..924ea4b0 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -11,8 +11,8 @@ from ..models.base import NDBaseModel from ..nd import NDModule from ..api_endpoints.base import NDBaseSmartEndpoint -from typing import Dict, List, Any, Union, ClassVar, Type -from pydantic import BaseModel 
+from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from pydantic import BaseModel, ConfigDict ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] @@ -21,6 +21,13 @@ # TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): + model_config = ConfigDict( + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + arbitrary_types_allowed=True, + ) + model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required @@ -32,40 +39,31 @@ class NDBaseOrchestrator(BaseModel): # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) - module: NDModule + sender: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. "api/v1/infra/aaa/LocalUsers/{loginID}") # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.create_endpoint() - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.update_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb, 
data=model_instance.to_payload()) except Exception as e: raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.delete_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e @@ -73,14 +71,14 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.query_one_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e # TODO: Revisit the straegy around the query_all (see local_user's case) - def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: + def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: - result = self.module.query_obj(self.query_all_endpoint.path) + result = self.sender.query_obj(self.query_all_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index ef2aa36a..46a4ea07 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -38,7 +38,7 @@ def query_all(self): Custom query_all action to extract 'localusers' from 
response. """ try: - result = self.module.query_obj(self.query_all_endpoint.base_path) + result = self.sender.query_obj(self.query_all_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 67fb3e80..b6acee72 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -177,9 +177,9 @@ from ansible.module_utils.basic import AnsibleModule # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec -# from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule +# from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -# from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator from ..module_utils.nd import nd_argument_spec from ..module_utils.nd_state_machine import NDStateMachine from ..module_utils.models.local_user import LocalUserModel diff --git a/requirements.txt b/requirements.txt index 514632d1..98907e9a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ requests_toolbelt jsonpath-ng -lxml \ No newline at end of file +lxml +pydantic==2.12.5 \ No newline at end of file diff --git a/tests/integration/network-integration.requirements.txt b/tests/integration/network-integration.requirements.txt index 514632d1..98907e9a 100644 --- a/tests/integration/network-integration.requirements.txt +++ b/tests/integration/network-integration.requirements.txt @@ -1,3 +1,4 @@ requests_toolbelt jsonpath-ng -lxml \ No newline at end of file +lxml +pydantic==2.12.5 \ No newline at end of file 
From 7afd933fabf0014643d6d600c86fc6614c7f1033 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 11:46:31 -0500 Subject: [PATCH 019/131] [ignore] Add proper path dependencies and Ran black formatting. --- plugins/module_utils/api_endpoints/base.py | 5 +- plugins/module_utils/api_endpoints/enums.py | 2 +- .../module_utils/api_endpoints/local_user.py | 13 +- plugins/module_utils/api_endpoints/mixins.py | 2 +- plugins/module_utils/constants.py | 7 +- plugins/module_utils/models/base.py | 60 +++--- plugins/module_utils/models/local_user.py | 75 +++----- plugins/module_utils/models/nested.py | 2 +- plugins/module_utils/nd_config_collection.py | 94 +++++----- plugins/module_utils/nd_state_machine.py | 171 +++++++----------- plugins/module_utils/orchestrators/base.py | 9 +- .../module_utils/orchestrators/local_user.py | 12 +- plugins/module_utils/utils.py | 16 +- plugins/modules/nd_api_key.py | 1 - plugins/modules/nd_local_user.py | 25 +-- 15 files changed, 204 insertions(+), 290 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 832476ed..954c1f6a 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -12,13 +12,12 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final, Union, Tuple, Any -from ..types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey # TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced class NDBaseSmartEndpoint(BaseModel, ABC): - # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) @@ -29,7 +28,7 @@ class NDBaseSmartEndpoint(BaseModel, ABC): @abstractmethod def path(self) -> str: pass - + @property @abstractmethod def verb(self) -> str: diff --git a/plugins/module_utils/api_endpoints/enums.py 
b/plugins/module_utils/api_endpoints/enums.py index afb4dd5c..ced62ba7 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/api_endpoints/enums.py @@ -43,4 +43,4 @@ class BooleanStringEnum(str, Enum): """ TRUE = "true" - FALSE = "false" \ No newline at end of file + FALSE = "false" diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index cae1326b..72639495 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -16,11 +16,12 @@ __metaclass__ = type # pylint: disable=invalid-name from typing import Literal, Union, Tuple, Any, Final -from .mixins import LoginIdMixin -from .enums import VerbEnum -from .base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field -from ..types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey + class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): """ @@ -105,7 +106,7 @@ class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): """ class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( - default="EpApiV1InfraAaaLocalUsersPost", + default="EpApiV1InfraAaaLocalUsersPost", description="Class name for backward compatibility", frozen=True, ) @@ -136,7 +137,7 @@ class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): """ class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( - default="EpApiV1InfraAaaLocalUsersPut", + default="EpApiV1InfraAaaLocalUsersPut", description="Class name for backward compatibility", frozen=True, ) diff --git 
a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py index 8ff3218f..9516c9ce 100644 --- a/plugins/module_utils/api_endpoints/mixins.py +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -22,4 +22,4 @@ class LoginIdMixin(BaseModel): """Mixin for endpoints that require login_id parameter.""" - login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") \ No newline at end of file + login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 784a7f51..afa0a2b0 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -13,17 +13,18 @@ from types import MappingProxyType from copy import deepcopy -class NDConstantMapping(Dict): +class NDConstantMapping(Dict): def __init__(self, data: Dict): self.new_dict = deepcopy(data) - for k,v in data.items(): + for k, v in data.items(): self.new_dict[v] = k self.new_dict = MappingProxyType(self.new_dict) - + def get_dict(self): return self.new_dict + OBJECT_TYPES = { "tenant": "OST_TENANT", "vrf": "OST_VRF", diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 7b569a58..94fb9cc5 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -19,13 +19,14 @@ class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
- + Supports three identifier strategies: - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ + # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, @@ -33,14 +34,14 @@ class NDBaseModel(BaseModel, ABC): validate_assignment=True, populate_by_name=True, arbitrary_types_allowed=True, - extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs + extra="allow", # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" - + # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List] = [] # TODO: To be removed in the future (see local_user model) @@ -52,7 +53,7 @@ def __init_subclass__(cls, **kwargs): Enforce configuration for identifiers definition. """ super().__init_subclass__(**kwargs) - + # Skip enforcement for nested models if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -73,7 +74,7 @@ def to_payload(self, **kwargs) -> Dict[str, Any]: Convert model to API payload format. """ return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - + def to_config(self, **kwargs) -> Dict[str, Any]: """ Convert model to Ansible config format. 
@@ -83,11 +84,11 @@ def to_config(self, **kwargs) -> Dict[str, Any]: @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: return cls.model_validate(response, by_alias=True, **kwargs) - + @classmethod def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: return cls.model_validate(ansible_config, by_name=True, **kwargs) - + # TODO: Revisit this function when revisiting identifier strategy (low priority) def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ @@ -98,74 +99,61 @@ def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ if not self.identifiers and self.identifier_strategy != "singleton": raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") - + if self.identifier_strategy == "single": value = getattr(self, self.identifiers[0], None) if value is None: - raise ValueError( - f"Single identifier field '{self.identifiers[0]}' is None" - ) + raise ValueError(f"Single identifier field '{self.identifiers[0]}' is None") return value - + elif self.identifier_strategy == "composite": values = [] missing = [] - + for field in self.identifiers: value = getattr(self, field, None) if value is None: missing.append(field) values.append(value) - + # NOTE: might be redefined with Pydantic (low priority) if missing: - raise ValueError( - f"Composite identifier fields {missing} are None. " - f"All required: {self.identifiers}" - ) - + raise ValueError(f"Composite identifier fields {missing} are None. 
" f"All required: {self.identifiers}") + return tuple(values) - + elif self.identifier_strategy == "hierarchical": for field in self.identifiers: value = getattr(self, field, None) if value is not None: return (field, value) - - raise ValueError( - f"No non-None value in hierarchical fields {self.identifiers}" - ) - + + raise ValueError(f"No non-None value in hierarchical fields {self.identifiers}") + # TODO: Revisit condition when there is no identifiers (low priority) elif self.identifier_strategy == "singleton": return None - + else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") - def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ - return self.model_dump( - by_alias=True, - exclude_none=True, - exclude=set(self.exclude_from_diff), - **kwargs - ) - + return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) + # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... 
-> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") - + for field, value in other_model: if value is None: continue - + current_value = getattr(self, field) if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): setattr(self, field, current_value.merge(value)) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 713d6040..e759a6fb 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,26 +8,25 @@ __metaclass__ = type -from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self - -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel -from .base import NDBaseModel -from .nested import NDNestedModel -from ..constants import NDConstantMapping +from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping # Constant defined here as it is only used in this model -USER_ROLES_MAPPING = NDConstantMapping({ - "fabric_admin": "fabric-admin", - "observer": "observer", - "super_admin": "super-admin", - "support_engineer": "support-engineer", - "approver": "approver", - 
"designer": "designer", -}).get_dict() +USER_ROLES_MAPPING = NDConstantMapping( + { + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", + } +).get_dict() class LocalUserSecurityDomainModel(NDNestedModel): @@ -41,14 +40,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): @model_serializer() def serialize_model(self) -> Dict: - return { - self.name: { - "roles": [ - USER_ROLES_MAPPING.get(role, role) - for role in (self.roles or []) - ] - } - } + return {self.name: {"roles": [USER_ROLES_MAPPING.get(role, role) for role in (self.roles or [])]}} # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity @@ -60,7 +52,7 @@ class LocalUserModel(NDBaseModel): Identifier: login_id (single field) """ - + # Identifier configuration # TODO: Revisit this identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = ["login_id"] @@ -69,11 +61,8 @@ class LocalUserModel(NDBaseModel): # Keys management configurations # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List]= [ - ["passwordPolicy", "passwordChangeTime"], # Nested path - ["userID"] # Simple key - ] - + unwanted_keys: ClassVar[List] = [["passwordPolicy", "passwordChangeTime"], ["userID"]] # Nested path # Simple key + # Fields # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec # TODO: use extra for generating argument_spec (low priority) @@ -96,7 +85,7 @@ def password_policy(self) -> Optional[Dict[str, int]]: """Computed nested structure for API payload.""" if self.reuse_limitation is None and self.time_interval_limitation is None: return None - + policy = {} if self.reuse_limitation is not None: policy["reuseLimitation"] = self.reuse_limitation @@ -108,7 +97,6 @@ def password_policy(self) -> Optional[Dict[str, int]]: def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: return value.get_secret_value() if value else None - @field_serializer("security_domains") def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> Optional[Dict]: # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) @@ -119,9 +107,7 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) for domain in value: domains_dict.update(domain.to_payload()) - return { - "domains": domains_dict - } + return {"domains": domains_dict} # -- Deserialization (API response / Ansible payload -> Model instance) -- @@ -132,17 +118,17 @@ def deserialize_password_policy(cls, data: Any) -> Any: return data password_policy = data.get("passwordPolicy") - + if password_policy and isinstance(password_policy, dict): if "reuseLimitation" in password_policy: data["reuse_limitation"] = password_policy["reuseLimitation"] if "timeIntervalLimitation" in password_policy: data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] - + # Remove the nested structure from data to avoid conflicts # (since it's a computed field, not a real field) data.pop("passwordPolicy", None) - + return data @field_validator("security_domains", mode="before") @@ -150,24 +136,21 @@ def deserialize_password_policy(cls, data: Any) -> Any: def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: if value is None: return None - + # If already 
in list format (Ansible module representation), return as-is if isinstance(value, list): return value - + # If in the nested dict format (API representation) if isinstance(value, dict) and "domains" in value: domains_dict = value["domains"] domains_list = [] - + for domain_name, domain_data in domains_dict.items(): - domains_list.append({ - "name": domain_name, - "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])] - }) - + domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])]}) + return domains_list - + return value # -- Extra -- diff --git a/plugins/module_utils/models/nested.py b/plugins/module_utils/models/nested.py index f2560819..0573e5f8 100644 --- a/plugins/module_utils/models/nested.py +++ b/plugins/module_utils/models/nested.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import List, ClassVar -from .base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel class NDNestedModel(NDBaseModel): diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 364519b8..1aa0e2ec 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -10,27 +10,26 @@ from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable from copy import deepcopy +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from .models.base import NDBaseModel -from .utils import issubset -from .types import IdentifierKey # Type aliases -ModelType = TypeVar('ModelType', bound=NDBaseModel) 
+ModelType = TypeVar("ModelType", bound=NDBaseModel) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - + def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): """ Initialize collection. """ self._model_class: ModelType = model_class - + # Dual storage self._items: List[ModelType] = [] self._index: Dict[IdentifierKey, int] = {} @@ -38,7 +37,7 @@ def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = No if items: for item in items: self.add(item) - + # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ @@ -48,7 +47,7 @@ def _extract_key(self, item: ModelType) -> IdentifierKey: return item.get_identifier_value() except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e - + # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" @@ -56,55 +55,47 @@ def _rebuild_index(self) -> None: for index, item in enumerate(self._items): key = self._extract_key(item) self._index[key] = index - + # Core Operations - + def add(self, item: ModelType) -> IdentifierKey: """ Add item to collection (O(1) operation). """ if not isinstance(item, self._model_class): - raise TypeError( - f"Item must be instance of {self._model_class.__name__}, " - f"got {type(item).__name__}" - ) - + raise TypeError(f"Item must be instance of {self._model_class.__name__}, " f"got {type(item).__name__}") + key = self._extract_key(item) - + if key in self._index: - raise ValueError( - f"Item with identifier {key} already exists. Use replace() to update" - ) - + raise ValueError(f"Item with identifier {key} already exists. 
Use replace() to update") + position = len(self._items) self._items.append(item) self._index[key] = position - + return key - + def get(self, key: IdentifierKey) -> Optional[ModelType]: """ Get item by identifier key (O(1) operation). """ index = self._index.get(key) return self._items[index] if index is not None else None - + def replace(self, item: ModelType) -> bool: """ Replace existing item with same identifier (O(1) operation). """ if not isinstance(item, self._model_class): - raise TypeError( - f"Item must be instance of {self._model_class.__name__}, " - f"got {type(item).__name__}" - ) - + raise TypeError(f"Item must be instance of {self._model_class.__name__}, " f"got {type(item).__name__}") + key = self._extract_key(item) index = self._index.get(key) - + if index is None: return False - + self._items[index] = item return True @@ -114,7 +105,7 @@ def merge(self, item: ModelType) -> ModelType: """ key = self._extract_key(item) existing = self.get(key) - + if existing is None: self.add(item) return item @@ -128,17 +119,17 @@ def delete(self, key: IdentifierKey) -> bool: Delete item by identifier (O(n) operation due to index rebuild) """ index = self._index.get(key) - + if index is None: return False - + del self._items[index] self._rebuild_index() - + return True - + # Diff Operations - + # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) 
def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: """ @@ -148,9 +139,9 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha key = self._extract_key(new_item) except ValueError: return "new" - + existing = self.get(key) - + if existing is None: return "new" @@ -158,16 +149,16 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() is_subset = issubset(new_data, existing_data) - + return "no_diff" if is_subset else "changed" - + def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: """ Check if two collections differ. """ if not isinstance(other, NDConfigCollection): raise TypeError("Argument must be NDConfigCollection") - + if len(self) != len(other): return True @@ -178,9 +169,9 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: for key in self.keys(): if other.get(key) is None: return True - + return False - + def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: """ Get identifiers in self but not in other. @@ -190,11 +181,11 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I return list(current_keys - other_keys) # Collection Operations - + def __len__(self) -> int: """Return number of items.""" return len(self._items) - + def __iter__(self): """Iterate over items.""" return iter(self._items) @@ -205,10 +196,7 @@ def keys(self) -> List[IdentifierKey]: def copy(self) -> "NDConfigCollection[ModelType]": """Create deep copy of collection.""" - return NDConfigCollection( - model_class=self._model_class, - items=deepcopy(self._items) - ) + return NDConfigCollection(model_class=self._model_class, items=deepcopy(self._items)) # Collection Serialization @@ -217,13 +205,13 @@ def to_ansible_config(self, **kwargs) -> List[Dict]: Export as an Ansible config. 
""" return [item.to_config(**kwargs) for item in self._items] - + def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: """ Export as list of API payloads. """ return [item.to_payload(**kwargs) for item in self._items] - + @classmethod def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ @@ -231,7 +219,7 @@ def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **k """ items = [model_class.from_config(item_data, **kwargs) for item_data in data] return cls(model_class=model_class, items=items) - + @classmethod def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 5b1f770c..be5849d4 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -12,31 +12,25 @@ from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError from ansible.module_utils.basic import AnsibleModule - -# TODO: To be replaced with: -# from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -# from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator -# from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -from .nd import NDModule -from .nd_config_collection import NDConfigCollection -from .orchestrators.base import NDBaseOrchestrator -from .types import IdentifierKey -from .constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from 
ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +# TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) +# TODO: class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - + def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ - # TODO: Revisit Module initialization and configuration - # nd_module = NDModule() + # TODO: Revisit Module initialization and configuration with rest_send self.module = module self.nd_module = NDModule(module) @@ -51,18 +45,13 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.state = self.module.params["state"] self.ansible_config = self.module.params.get("config", []) - # Initialize collections - # TODO: Revisit collections initialization especially `init_all_data` (medium priority) # TODO: Revisit class variables `previous`, `existing`, etc... 
(medium priority) self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() - self.existing = self.nd_config_collection.from_api_response( - response_data=init_all_data, - model_class=self.model_class - ) + self.existing = self.nd_config_collection.from_api_response(response_data=init_all_data, model_class=self.model_class) # Save previous state self.previous = self.existing.copy() self.proposed = self.nd_config_collection(model_class=self.model_class) @@ -74,30 +63,23 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest item = self.model_class.from_config(config) self.proposed.add(item) except ValidationError as e: - self.fail_json( - msg=f"Invalid configuration: {e}", - config=config, - validation_errors=e.errors() - ) + self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) return except Exception as e: - self.fail_json( - msg=f"Initialization failed: {str(e)}", - error=str(e) - ) + self.fail_json(msg=f"Initialization failed: {str(e)}", error=str(e)) # Logging # NOTE: format log placeholder # TODO: use a proper logger (low priority) def format_log( - self, - identifier: IdentifierKey, - operation_status: Literal["no_change", "created", "updated", "deleted"], - before: Optional[Dict[str, Any]] = None, - after: Optional[Dict[str, Any]] = None, - payload: Optional[Dict[str, Any]] = None, - ) -> None: + self, + identifier: IdentifierKey, + operation_status: Literal["no_change", "created", "updated", "deleted"], + before: Optional[Dict[str, Any]] = None, + after: Optional[Dict[str, Any]] = None, + payload: Optional[Dict[str, Any]] = None, + ) -> None: """ Create and append a log entry. 
""" @@ -108,18 +90,15 @@ def format_log( "after": after, "payload": payload, } - + # Add HTTP details if not in check mode if not self.module.check_mode and self.nd_module.url is not None: - log_entry.update({ - "method": self.nd_module.method, - "response": self.nd_module.response, - "status": self.nd_module.status, - "url": self.nd_module.url - }) - + log_entry.update( + {"method": self.nd_module.method, "response": self.nd_module.response, "status": self.nd_module.status, "url": self.nd_module.url} + ) + self.nd_logs.append(log_entry) - + # State Management (core function) # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) def manage_state(self) -> None: @@ -129,17 +108,17 @@ def manage_state(self) -> None: # Execute state operations if self.state in ["merged", "replaced", "overridden"]: self._manage_create_update_state() - + if self.state == "overridden": self._manage_override_deletions() - + elif self.state == "deleted": self._manage_delete_state() - + # TODO: not needed with Ansible `argument_spec` validation. 
Keep it for now but needs to be removed (low priority) + # TODO: boil down an Exception instead of using `fail_json` method else: self.fail_json(msg=f"Invalid state: {self.state}") - def _manage_create_update_state(self) -> None: """ @@ -152,7 +131,7 @@ def _manage_create_update_state(self) -> None: try: # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) - + # No changes needed if diff_status == "no_diff": self.format_log( @@ -162,7 +141,7 @@ def _manage_create_update_state(self) -> None: after=existing_config, ) continue - + # Prepare final config based on state if self.state == "merged": # Merge with existing @@ -187,7 +166,7 @@ def _manage_create_update_state(self) -> None: response = self.model_orchestrator.create(final_item) self.sent.add(final_item) operation_status = "created" - + # Log operation self.format_log( identifier=identifier, @@ -196,32 +175,27 @@ def _manage_create_update_state(self) -> None: after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), payload=final_item.to_payload(), ) - + except Exception as e: error_msg = f"Failed to process {identifier}: {e}" - + self.format_log( identifier=identifier, operation_status="no_change", before=existing_config, after=existing_config, ) - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - - # TODO: Refactor with orchestrator (Top priority) + def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" diff_identifiers = self.previous.get_diff_identifiers(self.proposed) - + for identifier in diff_identifiers: try: existing_item = self.existing.get(identifier) @@ -231,37 +205,31 @@ def _manage_override_deletions(self) -> None: # Execute delete if not self.module.check_mode: response = self.model_orchestrator.delete(existing_item) - + # Remove from collection self.existing.delete(identifier) - + # Log deletion self.format_log( identifier=identifier, operation_status="deleted", before=existing_item.to_config(), after={}, - ) - + except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - - # TODO: Refactor with orchestrator (Top priority) + def _manage_delete_state(self) -> None: """Handle deleted state.""" for proposed_item in self.proposed: try: identifier = proposed_item.get_identifier_value() - + existing_item = self.existing.get(identifier) if not existing_item: # Already deleted or doesn't exist @@ -272,14 +240,14 @@ def _manage_delete_state(self) -> None: after={}, ) continue - + # Execute delete if not self.module.check_mode: response = self.model_orchestrator.delete(existing_item) - + # Remove from collection self.existing.delete(identifier) - + # Log deletion self.format_log( identifier=identifier, @@ -287,18 +255,14 @@ def _manage_delete_state(self) -> None: before=existing_item.to_config(), after={}, ) - + except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - + # Output Formatting # TODO: move to separate Class (results) -> align it with rest_send PR # TODO: return a defined 
ordered list of config (for integration test) @@ -306,36 +270,36 @@ def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.module.params.get("output_level", "normal") state = self.module.params.get("state") - + # Add previous state for certain states and output levels if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if output_level in ("debug", "info"): self.result["previous"] = self.previous.to_ansible_config() - + # Check if there were changes if self.previous.get_diff_collection(self.existing): self.result["changed"] = True - + # Add stdout if present if self.nd_module.stdout: self.result["stdout"] = self.nd_module.stdout - + # Add debug information if output_level == "debug": self.result["nd_logs"] = self.nd_logs - + if self.nd_module.url is not None: self.result["httpapi_logs"] = self.nd_module.httpapi_logs - + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent.to_payload_list() self.result["proposed"] = self.proposed.to_ansible_config() - + # Always include current state self.result["current"] = self.existing.to_ansible_config() - + # Module Exit Methods - + def fail_json(self, msg: str, **kwargs) -> None: """ Exit module with failure. @@ -343,26 +307,23 @@ def fail_json(self, msg: str, **kwargs) -> None: self.add_logs_and_outputs() self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result) - + def exit_json(self, **kwargs) -> None: """ Exit module successfully. 
""" self.add_logs_and_outputs() - + # Add diff if module supports it if self.module._diff and self.result.get("changed") is True: try: # Use diff-safe dicts (excludes sensitive fields) before = [item.to_diff_dict() for item in self.previous] after = [item.to_diff_dict() for item in self.existing] - - self.result["diff"] = dict( - before=before, - after=after - ) + + self.result["diff"] = dict(before=before, after=after) except Exception: pass # Don't fail on diff generation - + self.result.update(**kwargs) self.module.exit_json(**self.result) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 924ea4b0..f9a63fa1 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,11 +8,11 @@ __metaclass__ = type -from ..models.base import NDBaseModel -from ..nd import NDModule -from ..api_endpoints.base import NDBaseSmartEndpoint -from typing import Dict, List, Any, Union, ClassVar, Type, Optional from pydantic import BaseModel, ConfigDict +from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] @@ -20,7 +20,6 @@ # TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): - model_config = ConfigDict( use_enum_values=True, validate_assignment=True, diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 46a4ea07..04f7707f 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,12 +8,12 @@ __metaclass__ = type -from .base import NDBaseOrchestrator -from ..models.base import NDBaseModel 
-from ..models.local_user import LocalUserModel from typing import Dict, List, Any, Union, Type -from ..api_endpoints.base import NDBaseSmartEndpoint -from ..api_endpoints.local_user import ( +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, EpApiV1InfraAaaLocalUsersDelete, @@ -23,8 +23,8 @@ ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] -class LocalUserOrchestrator(NDBaseOrchestrator): +class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index a7c1d3dc..0bf7cfc8 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -37,22 +37,22 @@ def issubset(subset: Any, superset: Any) -> bool: """Check if subset is contained in superset.""" if type(subset) is not type(superset): return False - + if not isinstance(subset, dict): if isinstance(subset, list): return all(item in superset for item in subset) return subset == superset - + for key, value in subset.items(): if value is None: continue - + if key not in superset: return False - + if not issubset(value, superset[key]): return False - + return True @@ -60,12 +60,12 @@ def issubset(subset: Any, superset: Any) -> bool: def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) - + for key in 
unwanted_keys: if isinstance(key, str): if key in data: del data[key] - + elif isinstance(key, list) and len(key) > 0: try: parent = data @@ -79,5 +79,5 @@ def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) del parent[key[-1]] except (KeyError, TypeError, IndexError): pass - + return data diff --git a/plugins/modules/nd_api_key.py b/plugins/modules/nd_api_key.py index c00428a9..1a3e4823 100644 --- a/plugins/modules/nd_api_key.py +++ b/plugins/modules/nd_api_key.py @@ -146,7 +146,6 @@ def main(): nd.existing = nd.previous = nd.query_objs(path, key="apiKeys") if state == "present": - if len(api_key_name) > 32 or len(api_key_name) < 1: nd.fail_json("A length of 1 to 32 characters is allowed.") elif re.search(r"[^a-zA-Z0-9_.-]", api_key_name): diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index b6acee72..a6972c07 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -175,15 +175,10 @@ """ from ansible.module_utils.basic import AnsibleModule -# TODO: To be replaced with: -# from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec -# from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine -# from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator -from ..module_utils.nd import nd_argument_spec -from ..module_utils.nd_state_machine import NDStateMachine -from ..module_utils.models.local_user import LocalUserModel -from ..module_utils.orchestrators.local_user import LocalUserOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from 
ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator def main(): @@ -196,17 +191,17 @@ def main(): ) try: - # Create NDNetworkResourceModule with LocalUserModel - nd_module = NDStateMachine( + # Initialize StateMachine + nd_state_machine = NDStateMachine( module=module, model_orchestrator=LocalUserOrchestrator, ) - + # Manage state - nd_module.manage_state() + nd_state_machine.manage_state() + + nd_state_machine.exit_json() - nd_module.exit_json() - except Exception as e: module.fail_json(msg=f"Module execution failed: {str(e)}") From 08b011b5b8e4a855e75eb21ab518bf9c0c22acfd Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:02:02 -0500 Subject: [PATCH 020/131] [ignore] Clean code for sanity purposes (except Pydantic import checks. --- plugins/module_utils/api_endpoints/base.py | 2 +- plugins/module_utils/api_endpoints/enums.py | 5 +++++ plugins/module_utils/api_endpoints/local_user.py | 4 ++-- plugins/module_utils/api_endpoints/mixins.py | 2 +- plugins/module_utils/models/base.py | 5 +++-- plugins/module_utils/models/local_user.py | 2 -- plugins/module_utils/nd_config_collection.py | 3 +-- plugins/module_utils/nd_state_machine.py | 1 - plugins/module_utils/orchestrators/base.py | 6 ++---- plugins/module_utils/orchestrators/local_user.py | 8 +++----- plugins/module_utils/orchestrators/types.py | 13 +++++++++++++ plugins/module_utils/types.py | 1 - 12 files changed, 31 insertions(+), 21 deletions(-) create mode 100644 plugins/module_utils/orchestrators/types.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 954c1f6a..8428ffe8 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -11,7 +11,7 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import Final, Union, Tuple, Any +from typing import Final from 
ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/api_endpoints/enums.py index ced62ba7..18a7f5eb 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/api_endpoints/enums.py @@ -7,6 +7,11 @@ """ Enums used in api_endpoints. """ + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + from enum import Enum diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 72639495..890b38e7 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -13,9 +13,9 @@ from __future__ import absolute_import, division, print_function -__metaclass__ = type # pylint: disable=invalid-name +__metaclass__ = type -from typing import Literal, Union, Tuple, Any, Final +from typing import Literal, Final from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py index 9516c9ce..56cdcfc5 100644 --- a/plugins/module_utils/api_endpoints/mixins.py +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -15,7 +15,7 @@ __metaclass__ = type # pylint: disable=invalid-name -from typing import TYPE_CHECKING, Optional +from typing import Optional from pydantic import BaseModel, Field diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 94fb9cc5..8cdcc765 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from abc import ABC, abstractmethod 
+from abc import ABC from pydantic import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional from typing_extensions import Self @@ -144,7 +144,8 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) # NOTE: initialize and return a deep copy of the instance? - # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? + # -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index e759a6fb..fe2f2bb5 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,9 +8,7 @@ __metaclass__ = type -from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal -from typing_extensions import Self from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1aa0e2ec..5fd9886d 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -8,13 +8,12 @@ __metaclass__ = type -from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable +from typing import TypeVar, 
Generic, Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - # Type aliases ModelType = TypeVar("ModelType", bound=NDBaseModel) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index be5849d4..923f0b69 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,7 +8,6 @@ __metaclass__ = type -from copy import deepcopy from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError from ansible.module_utils.basic import AnsibleModule diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index f9a63fa1..4df0797d 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -9,13 +9,11 @@ __metaclass__ = type from pydantic import BaseModel, ConfigDict -from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint - - -ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType # TODO: Revisit naming them "Orchestrator" diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 04f7707f..d30b29f8 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,11 +8,12 @@ 
__metaclass__ = type -from typing import Dict, List, Any, Union, Type +from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, @@ -21,9 +22,6 @@ ) -ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] - - class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel @@ -33,7 +31,7 @@ class LocalUserOrchestrator(NDBaseOrchestrator): query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet - def query_all(self): + def query_all(self) -> ResponseType: """ Custom query_all action to extract 'localusers' from response. 
""" diff --git a/plugins/module_utils/orchestrators/types.py b/plugins/module_utils/orchestrators/types.py new file mode 100644 index 00000000..b721c65b --- /dev/null +++ b/plugins/module_utils/orchestrators/types.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Any, Union, List, Dict + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] diff --git a/plugins/module_utils/types.py b/plugins/module_utils/types.py index 124aedd5..3111a095 100644 --- a/plugins/module_utils/types.py +++ b/plugins/module_utils/types.py @@ -10,5 +10,4 @@ from typing import Any, Union, Tuple - IdentifierKey = Union[str, int, Tuple[Any, ...]] From 8d72e067612f54e866659862be27880a3cb202dc Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:18:35 -0500 Subject: [PATCH 021/131] [ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues. 
--- .../{api_endpoints => endpoints}/base.py | 2 +- .../{api_endpoints => endpoints}/enums.py | 2 +- .../{api_endpoints => endpoints}/mixins.py | 2 +- .../v1/infra_aaa_local_users.py} | 32 +++++++++---------- plugins/module_utils/nd_state_machine.py | 1 + plugins/module_utils/orchestrators/base.py | 4 +-- .../module_utils/orchestrators/local_user.py | 24 +++++++------- 7 files changed, 34 insertions(+), 33 deletions(-) rename plugins/module_utils/{api_endpoints => endpoints}/base.py (97%) rename plugins/module_utils/{api_endpoints => endpoints}/enums.py (97%) rename plugins/module_utils/{api_endpoints => endpoints}/mixins.py (93%) rename plugins/module_utils/{api_endpoints/local_user.py => endpoints/v1/infra_aaa_local_users.py} (74%) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/endpoints/base.py similarity index 97% rename from plugins/module_utils/api_endpoints/base.py rename to plugins/module_utils/endpoints/base.py index 8428ffe8..d188b16a 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -12,7 +12,7 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import IdentifierKey # TODO: Rename it to APIEndpoint diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/endpoints/enums.py similarity index 97% rename from plugins/module_utils/api_endpoints/enums.py rename to plugins/module_utils/endpoints/enums.py index 18a7f5eb..802b8fe8 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/endpoints/enums.py @@ -5,7 +5,7 @@ # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ -Enums used in api_endpoints. +Enums used in endpoints. 
""" from __future__ import absolute_import, division, print_function diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/endpoints/mixins.py similarity index 93% rename from plugins/module_utils/api_endpoints/mixins.py rename to plugins/module_utils/endpoints/mixins.py index 56cdcfc5..36c83d1b 100644 --- a/plugins/module_utils/api_endpoints/mixins.py +++ b/plugins/module_utils/endpoints/mixins.py @@ -13,7 +13,7 @@ from __future__ import absolute_import, division, print_function -__metaclass__ = type # pylint: disable=invalid-name +__metaclass__ = type from typing import Optional from pydantic import BaseModel, Field diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py similarity index 74% rename from plugins/module_utils/api_endpoints/local_user.py rename to plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 890b38e7..1e1d7823 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -16,14 +16,14 @@ __metaclass__ = type from typing import Literal, Final -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): +class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): 
""" Base class for ND Infra AAA Local Users endpoints. @@ -53,7 +53,7 @@ def set_identifiers(self, identifier: IdentifierKey = None): self.login_id = identifier -class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersGet(_V1InfraAaaLocalUsersBase): """ # Summary @@ -74,8 +74,8 @@ class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): - GET """ - class_name: Literal["EpApiV1InfraAaaLocalUsersGet"] = Field( - default="EpApiV1InfraAaaLocalUsersGet", + class_name: Literal["V1InfraAaaLocalUsersGet"] = Field( + default="V1InfraAaaLocalUsersGet", description="Class name for backward compatibility", frozen=True, ) @@ -86,7 +86,7 @@ def verb(self) -> VerbEnum: return VerbEnum.GET -class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersPost(_V1InfraAaaLocalUsersBase): """ # Summary @@ -105,8 +105,8 @@ class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): - POST """ - class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( - default="EpApiV1InfraAaaLocalUsersPost", + class_name: Literal["V1InfraAaaLocalUsersPost"] = Field( + default="V1InfraAaaLocalUsersPost", description="Class name for backward compatibility", frozen=True, ) @@ -117,7 +117,7 @@ def verb(self) -> VerbEnum: return VerbEnum.POST -class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersPut(_V1InfraAaaLocalUsersBase): """ # Summary @@ -136,8 +136,8 @@ class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): - PUT """ - class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( - default="EpApiV1InfraAaaLocalUsersPut", + class_name: Literal["V1InfraAaaLocalUsersPut"] = Field( + default="V1InfraAaaLocalUsersPut", description="Class name for backward compatibility", frozen=True, ) @@ -148,7 +148,7 @@ def verb(self) -> VerbEnum: return VerbEnum.PUT -class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): +class 
V1InfraAaaLocalUsersDelete(_V1InfraAaaLocalUsersBase): """ # Summary @@ -167,8 +167,8 @@ class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): - DELETE """ - class_name: Literal["EpApiV1InfraAaaLocalUsersDelete"] = Field( - default="EpApiV1InfraAaaLocalUsersDelete", + class_name: Literal["V1InfraAaaLocalUsersDelete"] = Field( + default="V1InfraAaaLocalUsersDelete", description="Class name for backward compatibility", frozen=True, ) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 923f0b69..ae0a67ce 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -125,6 +125,7 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier + response = {} identifier = proposed_item.get_identifier_value() existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 4df0797d..8c84de8e 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,8 +12,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType # TODO: Revisit naming them "Orchestrator" diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index d30b29f8..bea4a486 
100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -12,24 +12,24 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( - EpApiV1InfraAaaLocalUsersPost, - EpApiV1InfraAaaLocalUsersPut, - EpApiV1InfraAaaLocalUsersDelete, - EpApiV1InfraAaaLocalUsersGet, +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( + V1InfraAaaLocalUsersPost, + V1InfraAaaLocalUsersPut, + V1InfraAaaLocalUsersDelete, + V1InfraAaaLocalUsersGet, ) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseSmartEndpoint] = 
V1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ From 88c0bdfa3b13b58027c8402c4e378322de677de7 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 15:06:04 -0500 Subject: [PATCH 022/131] [ignore] Remove NDModule inheritence from NDStateMachine. Add first iteration of (Mock Pydantic objects/methods) to pass sanity checks for Pydantic importation. --- plugins/module_utils/nd_state_machine.py | 6 +- plugins/module_utils/pydantic_compat.py | 200 +++++++++++++++++++++++ 2 files changed, 203 insertions(+), 3 deletions(-) create mode 100644 plugins/module_utils/pydantic_compat.py diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index ae0a67ce..e68010fb 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -19,8 +19,8 @@ # TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) -# TODO: -class NDStateMachine(NDModule): +# TODO: Remove inheritence from NDModule (Top Priority) +class NDStateMachine: """ Generic Network Resource Module for Nexus Dashboard. 
""" @@ -31,7 +31,7 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ # TODO: Revisit Module initialization and configuration with rest_send self.module = module - self.nd_module = NDModule(module) + self.nd_module = NDModule(self.module) # Operation tracking self.nd_logs: List[Dict[str, Any]] = [] diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py new file mode 100644 index 00000000..f1d90fe3 --- /dev/null +++ b/plugins/module_utils/pydantic_compat.py @@ -0,0 +1,200 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +# pylint: disable=too-few-public-methods +""" +Pydantic compatibility layer. + +This module provides a single location for Pydantic imports with fallback +implementations when Pydantic is not available. This ensures consistent +behavior across all modules and follows the DRY principle. 
+""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import traceback +from typing import TYPE_CHECKING, Any, Callable, Union + +if TYPE_CHECKING: + # Type checkers always see the real Pydantic types + from pydantic import ( + AfterValidator, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + PydanticExperimentalWarning, + StrictBool, + ValidationError, + field_serializer, + model_serializer, + field_validator, + model_validator, + validator, + ) +else: + # Runtime: try to import, with fallback + try: + from pydantic import ( + AfterValidator, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + PydanticExperimentalWarning, + StrictBool, + ValidationError, + field_serializer, + model_serializer, + field_validator, + model_validator, + validator, + ) + except ImportError: + HAS_PYDANTIC = False # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR: Union[str, None] = traceback.format_exc() # pylint: disable=invalid-name + + # Fallback: Minimal BaseModel replacement + class BaseModel: + """Fallback BaseModel when pydantic is not available.""" + + model_config = {"validate_assignment": False, "use_enum_values": False} + + def __init__(self, **kwargs): + """Accept keyword arguments and set them as attributes.""" + for key, value in kwargs.items(): + setattr(self, key, value) + + def model_dump(self, exclude_none: bool = False, exclude_defaults: bool = False) -> dict: # pylint: disable=unused-argument + """Return a dictionary of field names and values. 
+ + Args: + exclude_none: If True, exclude fields with None values + exclude_defaults: Accepted for API compatibility but not implemented in fallback + """ + result = {} + for key, value in self.__dict__.items(): + if exclude_none and value is None: + continue + result[key] = value + return result + + # Fallback: ConfigDict that does nothing + def ConfigDict(**kwargs) -> dict: # pylint: disable=unused-argument,invalid-name + """Pydantic ConfigDict fallback when pydantic is not available.""" + return kwargs + + # Fallback: Field that does nothing + def Field(**kwargs) -> Any: # pylint: disable=unused-argument,invalid-name + """Pydantic Field fallback when pydantic is not available.""" + if "default_factory" in kwargs: + return kwargs["default_factory"]() + return kwargs.get("default") + + # Fallback: field_serializer decorator that does nothing + def field_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic field_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: model_serializer decorator that does nothing + def model_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: field_validator decorator that does nothing + def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name + """Pydantic field_validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: AfterValidator that returns the function unchanged + def AfterValidator(func): # pylint: disable=invalid-name + """Pydantic AfterValidator fallback when pydantic is not available.""" + return func + + # Fallback: BeforeValidator that returns the function unchanged + def BeforeValidator(func): # pylint: disable=invalid-name + """Pydantic 
BeforeValidator fallback when pydantic is not available.""" + return func + + # Fallback: PydanticExperimentalWarning + PydanticExperimentalWarning = Warning + + # Fallback: StrictBool + StrictBool = bool + + # Fallback: ValidationError + class ValidationError(Exception): + """ + Pydantic ValidationError fallback when pydantic is not available. + """ + + def __init__(self, message="A custom error occurred."): + self.message = message + super().__init__(self.message) + + def __str__(self): + return f"ValidationError: {self.message}" + + # Fallback: model_validator decorator that does nothing + def model_validator(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: validator decorator that does nothing + def validator(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + else: + HAS_PYDANTIC = True # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name + +# Set HAS_PYDANTIC for when TYPE_CHECKING is True +if TYPE_CHECKING: + HAS_PYDANTIC = True # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name + +__all__ = [ + "AfterValidator", + "BaseModel", + "BeforeValidator", + "ConfigDict", + "Field", + "HAS_PYDANTIC", + "PYDANTIC_IMPORT_ERROR", + "PydanticExperimentalWarning", + "StrictBool", + "ValidationError", + "field_serializer", + "model_serializer", + "field_validator", + "model_validator", + "validator", +] From 25b621a7c8e688cbfce176549c89ea273b8d8d7e Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 17:19:47 -0500 Subject: [PATCH 023/131] [ignore] Rename NDBaseSmartEndpoint to NDBaseEndpoint. Fix importation issues. 
--- plugins/module_utils/endpoints/base.py | 4 ++-- .../endpoints/v1/infra_aaa_local_users.py | 4 ++-- plugins/module_utils/orchestrators/base.py | 14 +++++++------- plugins/module_utils/orchestrators/local_user.py | 14 +++++++------- plugins/modules/nd_local_user.py | 5 ++++- 5 files changed, 22 insertions(+), 19 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index d188b16a..c097dcf1 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -12,12 +12,12 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey # TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced -class NDBaseSmartEndpoint(BaseModel, ABC): +class NDBaseEndpoint(BaseModel, ABC): # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 1e1d7823..0008b188 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -18,12 +18,12 @@ from typing import Literal, Final from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath from pydantic import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -class 
_V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): +class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): """ Base class for ND Infra AAA Local Users endpoints. diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 8c84de8e..b0e34b61 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,8 +12,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType # TODO: Revisit naming them "Orchestrator" @@ -28,11 +28,11 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - create_endpoint: Type[NDBaseSmartEndpoint] - update_endpoint: Type[NDBaseSmartEndpoint] - delete_endpoint: Type[NDBaseSmartEndpoint] - query_one_endpoint: Type[NDBaseSmartEndpoint] - query_all_endpoint: Type[NDBaseSmartEndpoint] + create_endpoint: Type[NDBaseEndpoint] + update_endpoint: Type[NDBaseEndpoint] + delete_endpoint: Type[NDBaseEndpoint] + query_one_endpoint: Type[NDBaseEndpoint] + query_all_endpoint: Type[NDBaseEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index bea4a486..5e52a00b 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ 
b/plugins/module_utils/orchestrators/local_user.py @@ -12,8 +12,8 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( V1InfraAaaLocalUsersPost, V1InfraAaaLocalUsersPut, @@ -25,11 +25,11 @@ class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet + create_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index a6972c07..6f296065 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -198,8 +198,11 @@ def main(): ) # Manage state + # TODO: return 
module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) nd_state_machine.manage_state() - + nd_state_machine.exit_json() except Exception as e: From 6e81ee0f97a0ebaf84b1a811d28ba27c32f909ed Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 4 Mar 2026 11:12:27 -0500 Subject: [PATCH 024/131] [ignore] Replace all pydantic imports with pydantic_compat. Fix sanity issues. --- plugins/module_utils/constants.py | 4 ++++ plugins/module_utils/endpoints/base.py | 2 +- plugins/module_utils/endpoints/mixins.py | 2 +- .../endpoints/v1/infra_aaa_local_users.py | 2 +- plugins/module_utils/models/base.py | 9 ++++----- plugins/module_utils/models/local_user.py | 19 ++++++++++++------ plugins/module_utils/nd_state_machine.py | 2 +- plugins/module_utils/orchestrators/base.py | 2 +- plugins/module_utils/pydantic_compat.py | 20 ++++++++++++++++++- plugins/modules/nd_local_user.py | 3 ++- tests/config.yml | 3 +++ 11 files changed, 50 insertions(+), 18 deletions(-) create mode 100644 tests/config.yml diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index afa0a2b0..563041a0 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -16,6 +16,7 @@ class NDConstantMapping(Dict): def __init__(self, data: Dict): + self.data = data self.new_dict = deepcopy(data) for k, v in data.items(): self.new_dict[v] = k @@ -24,6 +25,9 @@ def __init__(self, data: Dict): def get_dict(self): return self.new_dict + def get_original_data(self): + return list(self.data.keys()) + OBJECT_TYPES = { "tenant": "OST_TENANT", diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index c097dcf1..c258ea07 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -10,7 +10,7 @@ __metaclass__ = type from abc import ABC, abstractmethod -from pydantic import BaseModel, ConfigDict +from 
ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing import Final from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey diff --git a/plugins/module_utils/endpoints/mixins.py b/plugins/module_utils/endpoints/mixins.py index 36c83d1b..28ece4c2 100644 --- a/plugins/module_utils/endpoints/mixins.py +++ b/plugins/module_utils/endpoints/mixins.py @@ -16,7 +16,7 @@ __metaclass__ = type from typing import Optional -from pydantic import BaseModel, Field +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, Field class LoginIdMixin(BaseModel): diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 0008b188..d1013e24 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -19,7 +19,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath -from pydantic import Field +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 8cdcc765..67ce5de0 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -9,9 +9,8 @@ __metaclass__ = type from abc import ABC -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional -from typing_extensions import Self # TODO: Revisit identifiers 
strategy (low priority) @@ -82,11 +81,11 @@ def to_config(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=False, exclude_none=True, **kwargs) @classmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + def from_response(cls, response: Dict[str, Any], **kwargs) -> "NDBaseModel": return cls.model_validate(response, by_alias=True, **kwargs) @classmethod - def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: + def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> "NDBaseModel": return cls.model_validate(ansible_config, by_name=True, **kwargs) # TODO: Revisit this function when revisiting identifier strategy (low priority) @@ -146,7 +145,7 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? # -> similar to NDCOnfigCollection... -> add argument to make it optional either replace - def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: + def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index fe2f2bb5..0575c1be 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -9,7 +9,15 @@ __metaclass__ = type from typing import List, Dict, Any, Optional, ClassVar, Literal -from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ( + Field, + SecretStr, + model_serializer, + field_serializer, + field_validator, + model_validator, + computed_field, +) from ansible_collections.cisco.nd.plugins.module_utils.models.base import 
NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping @@ -24,7 +32,7 @@ "approver": "approver", "designer": "designer", } -).get_dict() +) class LocalUserSecurityDomainModel(NDNestedModel): @@ -38,7 +46,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): @model_serializer() def serialize_model(self) -> Dict: - return {self.name: {"roles": [USER_ROLES_MAPPING.get(role, role) for role in (self.roles or [])]}} + return {self.name: {"roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in (self.roles or [])]}} # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity @@ -145,7 +153,7 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: domains_list = [] for domain_name, domain_data in domains_dict.items(): - domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])]}) + domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in domain_data.get("roles", [])]}) return domains_list @@ -174,7 +182,7 @@ def get_argument_spec(cls) -> Dict: elements="dict", options=dict( name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + roles=dict(type="list", elements="str", choices=USER_ROLES_MAPPING.get_original_data()), ), aliases=["domains"], ), @@ -182,6 +190,5 @@ def get_argument_spec(cls) -> Dict: remote_user_authorization=dict(type="bool"), ), ), - override_exceptions=dict(type="list", elements="str"), state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), ) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index e68010fb..81d6a966 100644 --- 
a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import Optional, List, Dict, Any, Literal, Type -from pydantic import ValidationError +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index b0e34b61..1a3b1921 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index f1d90fe3..e8924cd2 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -32,12 +32,14 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, model_serializer, field_validator, model_validator, validator, + computed_field, ) else: # Runtime: try to import, with fallback @@ -50,12 +52,14 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, model_serializer, field_validator, model_validator, validator, + computed_field, ) except ImportError: HAS_PYDANTIC = False # pylint: disable=invalid-name @@ -106,7 +110,7 @@ def decorator(func): return func return decorator - + # Fallback: 
model_serializer decorator that does nothing def model_serializer(*args, **kwargs): # pylint: disable=unused-argument """Pydantic model_serializer fallback when pydantic is not available.""" @@ -125,6 +129,15 @@ def decorator(func): return decorator + # Fallback: computed_field decorator that does nothing + def computed_field(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic computed_field fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + # Fallback: AfterValidator that returns the function unchanged def AfterValidator(func): # pylint: disable=invalid-name """Pydantic AfterValidator fallback when pydantic is not available.""" @@ -141,6 +154,9 @@ def BeforeValidator(func): # pylint: disable=invalid-name # Fallback: StrictBool StrictBool = bool + # Fallback: SecretStr + SecretStr = str + # Fallback: ValidationError class ValidationError(Exception): """ @@ -191,10 +207,12 @@ def decorator(func): "PYDANTIC_IMPORT_ERROR", "PydanticExperimentalWarning", "StrictBool", + "SecretStr", "ValidationError", "field_serializer", "model_serializer", "field_validator", "model_validator", "validator", + "computed_field", ] diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 6f296065..65f2e464 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -27,6 +27,7 @@ - The list of the local users to configure. 
type: list elements: dict + required: True suboptions: email: description: @@ -202,7 +203,7 @@ def main(): # output = nd_state_machine.manage_state() # module.exit_json(**output) nd_state_machine.manage_state() - + nd_state_machine.exit_json() except Exception as e: diff --git a/tests/config.yml b/tests/config.yml new file mode 100644 index 00000000..7cf024ab --- /dev/null +++ b/tests/config.yml @@ -0,0 +1,3 @@ +modules: + # Limit Python version to control node Python versions + python_requires: controller From 520625b73f65718b58881a228a7a3b569889a43b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 6 Mar 2026 13:35:38 -0500 Subject: [PATCH 025/131] [ignore] Add NDOutput class. Modify NDStateMachine and nd_local_user accordingly --- plugins/module_utils/endpoints/base.py | 2 - plugins/module_utils/models/base.py | 7 +- plugins/module_utils/models/local_user.py | 3 +- plugins/module_utils/nd_config_collection.py | 3 +- plugins/module_utils/nd_output.py | 70 +++++++ plugins/module_utils/nd_state_machine.py | 186 +++---------------- plugins/module_utils/orchestrators/base.py | 3 - plugins/module_utils/utils.py | 2 +- plugins/modules/nd_local_user.py | 12 +- tests/integration/inventory.networking | 10 +- 10 files changed, 112 insertions(+), 186 deletions(-) create mode 100644 plugins/module_utils/nd_output.py diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index c258ea07..5400516f 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -15,10 +15,8 @@ from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced class NDBaseEndpoint(BaseModel, ABC): - # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) # TODO: to remove diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py 
index 67ce5de0..14c04945 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -143,12 +143,11 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) # NOTE: initialize and return a deep copy of the instance? - # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? - # -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": if not isinstance(other_model, type(self)): - # TODO: Change error message - return TypeError("models are not of the same type.") + return TypeError( + f"NDBaseModel.merge method requires models of the same type. self of type {type(self)} and other_model of type {type(other_model)}" + ) for field, value in other_model: if value is None: diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 0575c1be..e2e7faf8 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -71,7 +71,6 @@ class LocalUserModel(NDBaseModel): # Fields # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec - # TODO: use extra for generating argument_spec (low priority) login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") @@ -161,7 +160,7 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # -- Extra -- - # TODO: to generate from Fields (low priority) + # TODO: to generate from Fields: use extra for generating argument_spec (low priority) @classmethod def get_argument_spec(cls) -> Dict: return dict( diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 5fd9886d..1f751822 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -37,7 +37,6 @@ def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = No for item in items: self.add(item) - # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ Extract identifier key from item. 
@@ -144,7 +143,7 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" - # TODO: make a diff class level method for NDBaseModel + # TODO: make a diff class level method for NDBaseModel (high priority) existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() is_subset = issubset(new_data, existing_data) diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py new file mode 100644 index 00000000..027592df --- /dev/null +++ b/plugins/module_utils/nd_output.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Dict, Any, Optional, List, Union +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection + + +class NDOutput: + def __init__(self, module: AnsibleModule): + self._output_level: str = module.params.get("output_level", "normal") + self._changed: bool = False + self._before: Union[NDConfigCollection, List] = [] + self._after: Union[NDConfigCollection, List] = [] + self._diff: Union[NDConfigCollection, List] = [] + self._proposed: Union[NDConfigCollection, List] = [] + self._logs: List = [] + self._extra: Dict[str, Any] = {} + + def format(self, **kwargs): + if isinstance(self._before, NDConfigCollection) and isinstance(self._after, NDConfigCollection) and self._before.get_diff_collection(self._after): + self._changed = True + + output = { + "output_level": self._output_level, + "changed": self._changed, + "after": self._after.to_ansible_config() if isinstance(self._after, NDConfigCollection) else self._after, + "before": self._before.to_ansible_config() if isinstance(self._before, NDConfigCollection) else 
self._before, + "diff": self._diff.to_ansible_config() if isinstance(self._diff, NDConfigCollection) else self._diff, + } + + if self._output_level in ("debug", "info"): + output["proposed"] = self._proposed.to_ansible_config() if isinstance(self._proposed, NDConfigCollection) else self._proposed + if self._output_level == "debug": + output["logs"] = "Not yet implemented" + + if self._extra: + output.update(self._extra) + + output.update(**kwargs) + + return output + + def assign( + self, + after: Optional[NDConfigCollection] = None, + before: Optional[NDConfigCollection] = None, + diff: Optional[NDConfigCollection] = None, + proposed: Optional[NDConfigCollection] = None, + logs: Optional[List] = None, + **kwargs + ) -> None: + if isinstance(after, NDConfigCollection): + self._after = after + if isinstance(before, NDConfigCollection): + self._before = before + if isinstance(diff, NDConfigCollection): + self._diff = diff + if isinstance(proposed, NDConfigCollection): + self._proposed = proposed + if isinstance(logs, List): + self._logs = logs + self._extra.update(**kwargs) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 81d6a966..4146926e 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,18 +8,15 @@ __metaclass__ = type -from typing import Optional, List, Dict, Any, Literal, Type +from typing import List, Dict, Any, Type from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator -from 
ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) -# TODO: Remove inheritence from NDModule (Top Priority) class NDStateMachine: """ Generic Network Resource Module for Nexus Dashboard. @@ -34,29 +31,27 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_module = NDModule(self.module) # Operation tracking - self.nd_logs: List[Dict[str, Any]] = [] - self.result: Dict[str, Any] = {"changed": False} + self.output = NDOutput(self.module) # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class - # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) + # TODO: Revisit these class variables when udpating Module intialization and configuration (low priority) self.state = self.module.params["state"] - self.ansible_config = self.module.params.get("config", []) # Initialize collections - # TODO: Revisit class variables `previous`, `existing`, etc... 
(medium priority) self.nd_config_collection = NDConfigCollection[self.model_class] try: - init_all_data = self.model_orchestrator.query_all() - - self.existing = self.nd_config_collection.from_api_response(response_data=init_all_data, model_class=self.model_class) - # Save previous state - self.previous = self.existing.copy() - self.proposed = self.nd_config_collection(model_class=self.model_class) + response_data = self.model_orchestrator.query_all() + # State of configuration objects in ND before change execution + self.before = self.nd_config_collection.from_api_response(response_data=response_data, model_class=self.model_class) + # State of current configuration objects in ND during change execution + self.existing = self.before.copy() + # Ongoing collection of configuration objects that were changed self.sent = self.nd_config_collection(model_class=self.model_class) - - for config in self.ansible_config: + # Collection of configuration objects given by user + self.proposed = self.nd_config_collection(model_class=self.model_class) + for config in self.module.params.get("config", []): try: # Parse config into model item = self.model_class.from_config(config) @@ -64,42 +59,11 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest except ValidationError as e: self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) return - + self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) except Exception as e: - self.fail_json(msg=f"Initialization failed: {str(e)}", error=str(e)) - - # Logging - # NOTE: format log placeholder - # TODO: use a proper logger (low priority) - def format_log( - self, - identifier: IdentifierKey, - operation_status: Literal["no_change", "created", "updated", "deleted"], - before: Optional[Dict[str, Any]] = None, - after: Optional[Dict[str, Any]] = None, - payload: Optional[Dict[str, Any]] = None, - ) -> None: - """ - Create and append a log entry. 
- """ - log_entry = { - "identifier": identifier, - "operation_status": operation_status, - "before": before, - "after": after, - "payload": payload, - } - - # Add HTTP details if not in check mode - if not self.module.check_mode and self.nd_module.url is not None: - log_entry.update( - {"method": self.nd_module.method, "response": self.nd_module.response, "status": self.nd_module.status, "url": self.nd_module.url} - ) - - self.nd_logs.append(log_entry) + self.fail_json(msg=f"NDStateMachine initialization failed: {str(e)}", error=str(e)) # State Management (core function) - # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) def manage_state(self) -> None: """ Manage state according to desired configuration. @@ -114,7 +78,6 @@ def manage_state(self) -> None: elif self.state == "deleted": self._manage_delete_state() - # TODO: not needed with Ansible `argument_spec` validation. Keep it for now but needs to be removed (low priority) # TODO: boil down an Exception instead of using `fail_json` method else: self.fail_json(msg=f"Invalid state: {self.state}") @@ -125,28 +88,19 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier - response = {} identifier = proposed_item.get_identifier_value() - existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) # No changes needed if diff_status == "no_diff": - self.format_log( - identifier=identifier, - operation_status="no_change", - before=existing_config, - after=existing_config, - ) continue # Prepare final config based on state if self.state == "merged": # Merge with existing - merged_item = self.existing.merge(proposed_item) - final_item = merged_item + final_item = self.existing.merge(proposed_item) else: # Replace or create if diff_status == "changed": @@ -158,34 +112,18 @@ def 
_manage_create_update_state(self) -> None: # Execute API operation if diff_status == "changed": if not self.module.check_mode: - response = self.model_orchestrator.update(final_item) + self.model_orchestrator.update(final_item) self.sent.add(final_item) - operation_status = "updated" elif diff_status == "new": if not self.module.check_mode: - response = self.model_orchestrator.create(final_item) + self.model_orchestrator.create(final_item) self.sent.add(final_item) - operation_status = "created" # Log operation - self.format_log( - identifier=identifier, - operation_status=operation_status, - before=existing_config, - after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), - payload=final_item.to_payload(), - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to process {identifier}: {e}" - - self.format_log( - identifier=identifier, - operation_status="no_change", - before=existing_config, - after=existing_config, - ) - if not self.module.params.get("ignore_errors", False): self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return @@ -194,7 +132,7 @@ def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" - diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + diff_identifiers = self.before.get_diff_identifiers(self.proposed) for identifier in diff_identifiers: try: @@ -204,18 +142,13 @@ def _manage_override_deletions(self) -> None: # Execute delete if not self.module.check_mode: - response = self.model_orchestrator.delete(existing_item) + self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) # Log deletion - self.format_log( - identifier=identifier, - operation_status="deleted", - before=existing_item.to_config(), - after={}, - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" @@ -232,29 +165,17 @@ def _manage_delete_state(self) -> None: existing_item = self.existing.get(identifier) if not existing_item: - # Already deleted or doesn't exist - self.format_log( - identifier=identifier, - operation_status="no_change", - before={}, - after={}, - ) continue # Execute delete if not self.module.check_mode: - response = self.model_orchestrator.delete(existing_item) + self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) # Log deletion - self.format_log( - identifier=identifier, - operation_status="deleted", - before=existing_item.to_config(), - after={}, - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" @@ -263,67 +184,10 @@ def _manage_delete_state(self) -> None: self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - # Output Formatting - # TODO: move to separate Class (results) -> align it with rest_send PR - # TODO: return a defined ordered list of config (for integration test) - def add_logs_and_outputs(self) -> None: - """Add logs and outputs to module result based on output_level.""" - output_level = self.module.params.get("output_level", "normal") - state = self.module.params.get("state") 
- - # Add previous state for certain states and output levels - if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - if output_level in ("debug", "info"): - self.result["previous"] = self.previous.to_ansible_config() - - # Check if there were changes - if self.previous.get_diff_collection(self.existing): - self.result["changed"] = True - - # Add stdout if present - if self.nd_module.stdout: - self.result["stdout"] = self.nd_module.stdout - - # Add debug information - if output_level == "debug": - self.result["nd_logs"] = self.nd_logs - - if self.nd_module.url is not None: - self.result["httpapi_logs"] = self.nd_module.httpapi_logs - - if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - self.result["sent"] = self.sent.to_payload_list() - self.result["proposed"] = self.proposed.to_ansible_config() - - # Always include current state - self.result["current"] = self.existing.to_ansible_config() - # Module Exit Methods def fail_json(self, msg: str, **kwargs) -> None: """ Exit module with failure. """ - self.add_logs_and_outputs() - self.result.update(**kwargs) - self.module.fail_json(msg=msg, **self.result) - - def exit_json(self, **kwargs) -> None: - """ - Exit module successfully. 
- """ - self.add_logs_and_outputs() - - # Add diff if module supports it - if self.module._diff and self.result.get("changed") is True: - try: - # Use diff-safe dicts (excludes sensitive fields) - before = [item.to_diff_dict() for item in self.previous] - after = [item.to_diff_dict() for item in self.existing] - - self.result["diff"] = dict(before=before, after=after) - except Exception: - pass # Don't fail on diff generation - - self.result.update(**kwargs) - self.module.exit_json(**self.result) + self.module.fail_json(msg=msg) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 1a3b1921..1a8b4f10 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -16,7 +16,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -# TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): model_config = ConfigDict( use_enum_values=True, @@ -40,7 +39,6 @@ class NDBaseOrchestrator(BaseModel): # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) - # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.create_endpoint() @@ -72,7 +70,6 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e - # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: result = self.sender.query_obj(self.query_all_endpoint.path) diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 0bf7cfc8..e09bd499 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -56,7 +56,7 @@ def issubset(subset: Any, superset: Any) -> bool: return True -# TODO: Might not necessary with Pydantic validation and serialization built-in methods +# TODO: Might not necessary with Pydantic validation and serialization built-in methods (see models/local_user) def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 65f2e464..d1d871fe 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -128,10 +128,10 @@ reuse_limitation: 20 time_interval_limitation: 10 security_domains: - name: all - roles: - - observer - - support_engineer + - name: all + roles: + - observer + - support_engineer remote_id_claim: remote_user remote_user_authorization: true state: merged @@ -204,10 +204,10 @@ def main(): # module.exit_json(**output) nd_state_machine.manage_state() - nd_state_machine.exit_json() + 
module.exit_json(**nd_state_machine.output.format()) except Exception as e: - module.fail_json(msg=f"Module execution failed: {str(e)}") + module.fail_json(msg=f"Module execution failed: {str(e)}", **nd_state_machine.output.format()) if __name__ == "__main__": diff --git a/tests/integration/inventory.networking b/tests/integration/inventory.networking index 6b37d8f3..2aa818d7 100644 --- a/tests/integration/inventory.networking +++ b/tests/integration/inventory.networking @@ -1,15 +1,15 @@ [nd] -nd ansible_host= +nd-test ansible_host=10.48.161.120 [nd:vars] ansible_connection=ansible.netcommon.httpapi -ansible_python_interpreter=/usr/bin/python3.9 +ansible_python_interpreter=/usr/bin/python3.12 ansible_network_os=cisco.nd.nd ansible_httpapi_validate_certs=False ansible_httpapi_use_ssl=True ansible_httpapi_use_proxy=True -ansible_user=ansible_github_ci -ansible_password= +ansible_user=admin +ansible_password=C1sco123 insights_group= site_name= site_host= @@ -28,4 +28,4 @@ external_management_service_ip= external_data_service_ip= data_ip= data_gateway= -service_package_host=173.36.219.254 +service_package_host=173.36.219.254 From 36d57592896995dd7f513cdc0cb5769da0b76faa Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 10 Mar 2026 13:36:50 -0400 Subject: [PATCH 026/131] [ignore] Update NDOutput class. Remove all fail_json dependencies in NDStateMachineand add custom Exception for it in common/exceptions dir. Set json mode for to_diff_dict method in NDBaseModel. 
--- plugins/module_utils/common/exceptions.py | 17 +++++++++++++ plugins/module_utils/models/base.py | 4 +-- plugins/module_utils/nd_output.py | 7 +++-- plugins/module_utils/nd_state_machine.py | 31 +++++++---------------- 4 files changed, 31 insertions(+), 28 deletions(-) create mode 100644 plugins/module_utils/common/exceptions.py diff --git a/plugins/module_utils/common/exceptions.py b/plugins/module_utils/common/exceptions.py new file mode 100644 index 00000000..f0ae4400 --- /dev/null +++ b/plugins/module_utils/common/exceptions.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +class NDStateMachineError(Exception): + """ + Raised when NDStateMachine is failing. + """ + + pass diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 14c04945..30e5de5e 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -72,7 +72,7 @@ def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. """ - return self.model_dump(by_alias=True, exclude_none=True, **kwargs) + return self.model_dump(by_alias=True, exclude_none=True, mode="json", **kwargs) def to_config(self, **kwargs) -> Dict[str, Any]: """ @@ -140,7 +140,7 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ - return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) + return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), mode="json", **kwargs) # NOTE: initialize and return a deep copy of the instance? 
def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py index 027592df..dbfc2cd2 100644 --- a/plugins/module_utils/nd_output.py +++ b/plugins/module_utils/nd_output.py @@ -9,13 +9,12 @@ __metaclass__ = type from typing import Dict, Any, Optional, List, Union -from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection class NDOutput: - def __init__(self, module: AnsibleModule): - self._output_level: str = module.params.get("output_level", "normal") + def __init__(self, output_level: str): + self._output_level: str = output_level self._changed: bool = False self._before: Union[NDConfigCollection, List] = [] self._after: Union[NDConfigCollection, List] = [] @@ -24,7 +23,7 @@ def __init__(self, module: AnsibleModule): self._logs: List = [] self._extra: Dict[str, Any] = {} - def format(self, **kwargs): + def format(self, **kwargs) -> Dict[str, Any]: if isinstance(self._before, NDConfigCollection) and isinstance(self._after, NDConfigCollection) and self._before.get_diff_collection(self._after): self._changed = True diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 4146926e..68901986 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,13 +8,14 @@ __metaclass__ = type -from typing import List, Dict, Any, Type +from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from 
ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError class NDStateMachine: @@ -31,7 +32,7 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_module = NDModule(self.module) # Operation tracking - self.output = NDOutput(self.module) + self.output = NDOutput(output_level=module.params.get("output_level", "normal")) # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) @@ -57,11 +58,11 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest item = self.model_class.from_config(config) self.proposed.add(item) except ValidationError as e: - self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) + raise NDStateMachineError(f"Invalid configuration. for config {config}: {str(e)}") return self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) except Exception as e: - self.fail_json(msg=f"NDStateMachine initialization failed: {str(e)}", error=str(e)) + raise NDStateMachineError(f"Initialization failed: {str(e)}") # State Management (core function) def manage_state(self) -> None: @@ -78,9 +79,8 @@ def manage_state(self) -> None: elif self.state == "deleted": self._manage_delete_state() - # TODO: boil down an Exception instead of using `fail_json` method else: - self.fail_json(msg=f"Invalid state: {self.state}") + raise NDStateMachineError(f"Invalid state: {self.state}") def _manage_create_update_state(self) -> None: """ @@ -125,8 +125,7 @@ def _manage_create_update_state(self) -> None: except Exception as e: error_msg = f"Failed to process {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return + raise NDStateMachineError(error_msg) def _manage_override_deletions(self) -> 
None: """ @@ -152,10 +151,8 @@ def _manage_override_deletions(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return + raise NDStateMachineError(error_msg) def _manage_delete_state(self) -> None: """Handle deleted state.""" @@ -179,15 +176,5 @@ def _manage_delete_state(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return - - # Module Exit Methods - - def fail_json(self, msg: str, **kwargs) -> None: - """ - Exit module with failure. - """ - self.module.fail_json(msg=msg) + raise NDStateMachineError(error_msg) From 25f746ad890a5fd93a7014b8d5e28ad38bc9b67a Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 19 Aug 2025 12:44:17 -0400 Subject: [PATCH 027/131] [minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher. 
--- plugins/module_utils/constants.py | 14 + plugins/module_utils/nd.py | 79 ++--- plugins/module_utils/nd_config_collection.py | 295 ++++++++++++++++++ plugins/module_utils/nd_network_resources.py | 202 ++++++++++++ plugins/module_utils/utils.py | 32 ++ plugins/modules/nd_local_user.py | 269 ++++++++++++++++ .../targets/nd_local_user/tasks/main.yml | 134 ++++++++ 7 files changed, 974 insertions(+), 51 deletions(-) create mode 100644 plugins/module_utils/nd_config_collection.py create mode 100644 plugins/module_utils/nd_network_resources.py create mode 100644 plugins/module_utils/utils.py create mode 100644 plugins/modules/nd_local_user.py create mode 100644 tests/integration/targets/nd_local_user/tasks/main.yml diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 10de9edf..cbba61b3 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -157,6 +157,11 @@ "restart", "delete", "update", + "merged", + "replaced", + "overridden", + "deleted", + "gathered", ) INTERFACE_FLOW_RULES_TYPES_MAPPING = {"port_channel": "PORTCHANNEL", "physical": "PHYSICAL", "l3out_sub_interface": "L3_SUBIF", "l3out_svi": "SVI"} @@ -170,3 +175,12 @@ ND_SETUP_NODE_DEPLOYMENT_TYPE = {"physical": "cimc", "virtual": "vnode"} BACKUP_TYPE = {"config_only": "config-only", None: "config-only", "": "config-only", "full": "full"} + +USER_ROLES_MAPPING = { + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", +} diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 03ffc85f..5f528bb8 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -18,7 +18,6 @@ from ansible.module_utils.basic import json from ansible.module_utils.basic import env_fallback from ansible.module_utils.six import PY3 -from ansible.module_utils.six.moves import filterfalse from 
def issubset(subset, superset):
    """Recurse through a nested dictionary and check if it is a subset of another."""
    # Values of different concrete types can never satisfy the subset relation.
    if type(subset) is not type(superset):
        return False

    if isinstance(subset, dict):
        # Every non-None entry must exist in the superset and itself be a subset;
        # None values are treated as "not specified" and skipped.
        return all(
            key in superset and issubset(value, superset[key])
            for key, value in subset.items()
            if value is not None
        )

    if isinstance(subset, list):
        # Lists compare by membership only (order and multiplicity are ignored).
        return all(entry in superset for entry in subset)

    # Scalars and any other type fall back to plain equality.
    return subset == superset
self.stdout = None + self.stdout = "" # debug output self.has_modified = False @@ -266,8 +239,13 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: +<<<<<<< HEAD if data: info = self.connection.send_request(method, uri, json.dumps(data)) +======= + if data is not None: + info = conn.send_request(method, uri, json.dumps(data)) +>>>>>>> 7c967c3 ([minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher.) else: info = self.connection.send_request(method, uri) self.result["data"] = data @@ -324,6 +302,8 @@ def request( self.fail_json(msg="ND Error: {0}".format(self.error.get("message")), data=data, info=info) self.error = payload if "code" in payload: + if self.status == 404 and ignore_not_found_error: + return {} self.fail_json(msg="ND Error {code}: {message}".format(**payload), data=data, info=info, payload=payload) elif "messages" in payload and len(payload.get("messages")) > 0: self.fail_json(msg="ND Error {code} ({severity}): {message}".format(**payload["messages"][0]), data=data, info=info, payload=payload) @@ -520,30 +500,27 @@ def get_diff(self, unwanted=None): if not self.existing and self.sent: return True - existing = self.existing - sent = self.sent + existing = deepcopy(self.existing) + sent = deepcopy(self.sent) for key in unwanted: if isinstance(key, str): if key in existing: - try: - del existing[key] - except KeyError: - pass - try: - del sent[key] - except KeyError: - pass + del existing[key] + if key in sent: + del sent[key] elif isinstance(key, list): key_path, last = key[:-1], key[-1] try: existing_parent = reduce(dict.get, key_path, existing) - del existing_parent[last] + if existing_parent is not None: + del existing_parent[last] except KeyError: pass try: sent_parent = reduce(dict.get, key_path, sent) - del sent_parent[last] + if sent_parent is not None: + del sent_parent[last] except KeyError: pass return not 
# NOTE: Single-Index Hybrid Collection for ND Network Resource Module
class NDConfigCollection(MutableMapping):
    """Ordered collection of configuration dicts indexed by identifier key(s).

    Keeps dual storage: an insertion-ordered list of config dicts (``_list``)
    and a lookup map (``_map``) keyed by the identifier value(s).  Two keying
    modes exist: composite (tuple built from ALL ``identifier_keys``) and
    priority (value of the first ``identifier_keys`` entry present in a config).

    :param identifier_keys: Keys used to derive each config's identifier.
    :param data: Optional iterable of config dicts to preload.
    :param use_composite_keys: Select composite-tuple keying instead of priority.
    """

    def __init__(self, identifier_keys, data=None, use_composite_keys=False):
        self.identifier_keys = identifier_keys
        self.use_composite_keys = use_composite_keys

        # Dual storage: ordered list of configs + identifier -> config map.
        self._list = []
        self._map = {}

        if data:
            for item in data:
                self.add(item)

    # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl")
    def _get_identifier_value(self, config):
        """Generate the internal map key for config based on the selected mode."""
        if self.use_composite_keys:
            # Composite mode: tuple of ALL identifier keys; None if any part is missing.
            values = []
            for key in self.identifier_keys:
                val = config.get(key)
                if val is None:
                    return None  # Missing a required part
                values.append(val)
            return tuple(values)
        else:
            # Priority mode: first identifier key present in the config wins.
            for key in self.identifier_keys:
                if key in config:
                    return config[key]
            return None

    # Magic Methods
    def __getitem__(self, key):
        return self._map[key]

    def __setitem__(self, key, value):
        if key in self._map:
            # Replace in place so the original list position is preserved.
            old_ref = self._map[key]
            try:
                idx = self._list.index(old_ref)
                self._list[idx] = value
                self._map[key] = value
            except ValueError:
                pass
        else:
            # Add new entry to both storages.
            self._list.append(value)
            self._map[key] = value

    def __delitem__(self, key):
        if key in self._map:
            obj_ref = self._map[key]
            del self._map[key]
            self._list.remove(obj_ref)
        else:
            raise KeyError(key)

    def __iter__(self):
        return iter(self._map)

    def __len__(self):
        return len(self._list)

    def __eq__(self, other):
        # Comparable against another collection, a plain list, or a plain dict.
        if isinstance(other, NDConfigCollection):
            return self._list == other._list
        elif isinstance(other, list):
            return self._list == other
        elif isinstance(other, dict):
            return self._map == other
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return str(self._list)

    # Helper Methods
    def _filter_dict(self, data, ignore_keys):
        """Return a shallow copy of data without the keys listed in ignore_keys."""
        return {k: v for k, v in data.items() if k not in ignore_keys}

    def _issubset(self, subset, superset):
        """Recursively check that subset is contained in superset (None values ignored)."""
        if type(subset) is not type(superset):
            return False

        if not isinstance(subset, dict):
            if isinstance(subset, list):
                # Lists compare by membership only (order/multiplicity ignored).
                return all(item in superset for item in subset)
            return subset == superset

        for key, value in subset.items():
            if value is None:
                continue

            if key not in superset:
                return False

            if not self._issubset(value, superset.get(key)):
                return False
        return True

    def _remove_unwanted_keys(self, data, unwanted_keys):
        """Delete plain keys (str) or nested key paths (list) from data, in place."""
        for key in unwanted_keys:
            if isinstance(key, str):
                if key in data:
                    del data[key]
            elif isinstance(key, list) and len(key) > 0:
                key_path, last = key[:-1], key[-1]
                try:
                    # Walk the path defensively; non-dict intermediates yield None.
                    parent = reduce(lambda d, k: d.get(k) if isinstance(d, dict) else None, key_path, data)
                    if isinstance(parent, dict) and last in parent:
                        del parent[last]
                except (KeyError, TypeError):
                    pass
        return data

    # Core Operations
    def to_list(self):
        return self._list

    def to_dict(self):
        return self._map

    def copy(self):
        """Return a deep copy of the collection (same keys/mode, copied configs)."""
        return NDConfigCollection(self.identifier_keys, deepcopy(self._list), self.use_composite_keys)

    def add(self, config):
        """Add a config, replacing any existing config with the same identifier."""
        ident = self._get_identifier_value(config)
        if ident is None:
            mode = "Composite" if self.use_composite_keys else "Priority"
            raise ValueError("[{0} Mode] Config missing required keys: {1}".format(mode, self.identifier_keys))

        if ident in self._map:
            self.__setitem__(ident, config)
        else:
            self._list.append(config)
            self._map[ident] = config

    def merge(self, new_config):
        """Shallow-update an existing config in place, or add it when unknown."""
        ident = self._get_identifier_value(new_config)
        if ident and ident in self._map:
            self._map[ident].update(new_config)
        else:
            self.add(new_config)

    def replace(self, new_config):
        """Fully replace an existing config, or add it when unknown."""
        ident = self._get_identifier_value(new_config)
        if ident:
            self[ident] = new_config
        else:
            self.add(new_config)

    def remove(self, identifiers):
        """Remove config(s) matching identifiers (dict of key/value pairs)."""
        # Fast path: direct map removal by derived identifier.
        try:
            target_key = self._get_identifier_value(identifiers)
            if target_key and target_key in self._map:
                self.__delitem__(target_key)
                return
        except Exception:
            pass

        # Fallback: linear scan matching every given key/value pair.
        to_remove = []
        for config in self._list:
            match = True
            for k, v in identifiers.items():
                if config.get(k) != v:
                    match = False
                    break
            if match:
                to_remove.append(self._get_identifier_value(config))

        for ident in to_remove:
            if ident in self._map:
                self.__delitem__(ident)

    def get_by_key(self, key, default=None):
        return self._map.get(key, default)

    def get_by_identifiers(self, identifiers, default=None):
        """Look up a config by identifier dict; falls back to a linear scan."""
        # Fast path: map lookup by derived identifier.
        target_key = self._get_identifier_value(identifiers)
        if target_key and target_key in self._map:
            return self._map[target_key]

        # Fallback: linear scan over configs, matching only identifier keys.
        valid_search_keys = [k for k in identifiers if k in self.identifier_keys]
        if not valid_search_keys:
            return default

        for config in self._list:
            match = True
            for k in valid_search_keys:
                if config.get(k) != identifiers[k]:
                    match = False
                    break
            if match:
                return config
        return default

    # Backward-compatible alias for the original (misspelled) method name.
    get_by_idenfiers = get_by_identifiers

    # Diff logic
    def get_diff_config(self, new_config, unwanted_keys=None):
        """Classify new_config against the stored one: "new", "changed" or "no_diff"."""
        unwanted_keys = unwanted_keys or []

        ident = self._get_identifier_value(new_config)

        if not ident or ident not in self._map:
            return "new"

        # Compare deep copies so unwanted-key stripping never mutates stored data.
        existing = deepcopy(self._map[ident])
        sent = deepcopy(new_config)

        self._remove_unwanted_keys(existing, unwanted_keys)
        self._remove_unwanted_keys(sent, unwanted_keys)

        if self._issubset(sent, existing):
            return "no_diff"
        return "changed"

    def get_diff_collection(self, new_collection, unwanted_keys=None):
        """Return True when new_collection differs from this collection."""
        if not isinstance(new_collection, NDConfigCollection):
            raise TypeError("Argument must be an NDConfigCollection")

        if len(self) != len(new_collection):
            return True

        for item in new_collection.to_list():
            if self.get_diff_config(item, unwanted_keys) != "no_diff":
                return True

        for ident in self._map:
            if ident not in new_collection._map:
                return True

        return False

    def get_diff_identifiers(self, new_collection):
        """Return identifiers present here but absent from new_collection.

        Bug fix: previously dereferenced a nonexistent ``config_collection``
        attribute, raising AttributeError whenever called; the identifier
        storage is ``_map``.
        """
        current_identifiers = set(self._map.keys())
        other_identifiers = set(new_collection._map.keys())

        return list(current_identifiers - other_identifiers)

    # Sanitize Operations
    def sanitize(self, keys_to_remove=None, values_to_remove=None, remove_none_values=False):
        """Recursively strip unwanted keys/values (and optionally None values) in place."""
        keys_to_remove = keys_to_remove or []
        values_to_remove = values_to_remove or []

        def recursive_clean(obj):
            if isinstance(obj, dict):
                # Snapshot keys: we delete from obj while iterating.
                keys = list(obj.keys())
                for k in keys:
                    v = obj[k]
                    if k in keys_to_remove or v in values_to_remove or (remove_none_values and v is None):
                        del obj[k]
                        continue
                    if isinstance(v, (dict, list)):
                        recursive_clean(v)
            elif isinstance(obj, list):
                for item in obj:
                    recursive_clean(item)

        for item in self._list:
            recursive_clean(item)
b/plugins/module_utils/nd_network_resources.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from copy import deepcopy +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED + +# TODO: Make further enhancement to logs and outputs +# NOTE: ONLY works for new API endpoints introduced in ND v4.1.0 and later +class NDNetworkResourceModule(NDModule): + + def __init__(self, module, path, identifier_keys, use_composite_keys=False, actions_overwrite_map=None): + super().__init__(module) + + # Initial variables + self.path = path + self.actions_overwrite_map = actions_overwrite_map or {} + self.identifier_keys = identifier_keys + self.use_composite_keys = use_composite_keys + + # Initial data + self.init_all_data = self._query_all() + + # Info ouput + self.existing = NDConfigCollection(identifier_keys, data=self.init_all_data) + self.previous = NDConfigCollection(identifier_keys) + self.proposed = NDConfigCollection(identifier_keys) + self.sent = NDConfigCollection(identifier_keys) + + # Debug output + self.nd_logs = [] + + # Helper variables + self.current_identifier = "" + self.existing_config = {} + self.proposed_config = {} + + # Actions Operations + def actions_overwrite(action): + def decorator(func): + def wrapper(self, *args, **kwargs): + overwrite_action = self.actions_overwrite_map.get(action) + if callable(overwrite_action): + return overwrite_action(self) + else: + return func(self, *args, **kwargs) + return wrapper + return decorator + + @actions_overwrite("create") + def _create(self): + if 
not self.module.check_mode: + return self.request(path=self.path, method="POST", data=self.proposed_config) + + @actions_overwrite("update") + def _update(self): + if not self.module.check_mode: + object_path = "{0}/{1}".format(self.path, self.current_identifier) + return self.request(path=object_path, method="PUT", data=self.proposed_config) + + @actions_overwrite("delete") + def _delete(self): + if not self.module.check_mode: + object_path = "{0}/{1}".format(self.path, self.current_identifier) + self.request(path=object_path, method="DELETE") + + @actions_overwrite("query_all") + def _query_all(self): + return self.query_obj(self.path) + + def format_log(self, identifier, status, after_data, sent_payload_data=None): + item_result = { + "identifier": identifier, + "status": status, + "before": self.existing_config, + "after": deepcopy(after_data) if after_data is not None else self.existing_config, + "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {}, + } + + if not self.module.check_mode and self.url is not None: + item_result.update( + { + "method": self.method, + "response": self.response, + "status": self.status, + "url": self.url, + } + ) + + self.nd_logs.append(item_result) + + # Logs and Outputs formating Operations + def add_logs_and_ouputs(self): + if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + if self.params.get("output_level") in ("debug", "info"): + self.result["previous"] = self.previous.to_list() + if not self.has_modified and self.previous.get_diff_collection(self.existing): + self.result["changed"] = True + if self.stdout: + self.result["stdout"] = self.stdout + + if self.params.get("output_level") == "debug": + self.result["nd_logs"] = self.nd_logs + if self.url is not None: + self.result["httpapi_logs"] = self.httpapi_logs + + if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + self.result["sent"] = self.sent.to_list() + self.result["proposed"] = 
self.proposed.to_list() + + self.result["current"] = self.existing.to_list() + + # Manage State Operations + def manage_state(self, state, new_configs, unwanted_keys=None, override_exceptions=None): + unwanted_keys = unwanted_keys or [] + override_exceptions = override_exceptions or [] + + self.proposed = NDConfigCollection(self.identifier_keys, data=new_configs) + self.proposed.sanitize() + self.previous = self.existing.copy() + + if state in ["merged", "replaced", "overidden"]: + for identifier, config in self.proposed.items(): + + diff_config_info = self.existing.get_diff_config(config, unwanted_keys) + self.current_identifier = identifier + self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) + self.proposed_config = config + request_response = None + sent_payload = None + status = "no_change" + + if diff_config_info != "no_diff": + if state == "merged": + self.existing.merge(config) + self.proposed_config = self.existing[identifier] + else: + self.existing.replace(config) + + if diff_config_info == "changed": + request_response = self._update() + status = "updated" + else: + request_response = self._create() + status= "created" + + if not self.module.check_mode: + self.sent.add(self.proposed_config) + sent_payload = self.proposed_config + else: + request_response = self.proposed_config + + self.format_log(identifier, status, request_response, sent_payload) + + + if state == "overidden": + diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + for identifier in diff_identifiers: + if identifier not in override_exceptions: + self.current_identifier = identifier + self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) + self._delete() + del self.existing[identifier] + self.format_log(identifier, "deleted", after_data={}) + + + elif state == "deleted": + for identifier, config in self.proposed.items(): + if identifier in self.existing.keys(): + self.current_identifier = identifier + self.existing_config = 
deepcopy(self.existing.get_by_key(identifier, {})) + self.proposed_config = config + self._delete() + del self.existing[identifier] + self.format_log(identifier, "deleted", after_data={}) + + # Outputs Operations + def fail_json(self, msg, **kwargs): + self.add_logs_and_ouputs() + + self.result.update(**kwargs) + self.module.fail_json(msg=msg, **self.result) + + def exit_json(self, **kwargs): + self.add_logs_and_ouputs() + + if self.module._diff and self.result.get("changed") is True: + self.result["diff"] = dict( + before=self.previous.to_list(), + after=self.existing.to_list(), + ) + + self.result.update(**kwargs) + self.module.exit_json(**self.result) diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py new file mode 100644 index 00000000..5bf0a0f0 --- /dev/null +++ b/plugins/module_utils/utils.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from copy import deepcopy + + +def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): + if keys is None: + keys = [] + if values is None: + values = [] + + result = deepcopy(dict_to_sanitize) + for k, v in dict_to_sanitize.items(): + if k in keys: + del result[k] + elif v in values or (v is None and remove_none_values): + del result[k] + elif isinstance(v, dict) and recursive: + result[k] = sanitize_dict(v, keys, values) + elif isinstance(v, list) and recursive: + for index, item in enumerate(v): + if isinstance(item, dict): + result[k][index] = sanitize_dict(item, keys, values) + return result \ No newline at end of file diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py new file mode 100644 index 00000000..552df3b7 --- /dev/null +++ b/plugins/modules/nd_local_user.py @@ -0,0 +1,269 @@ 
+#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"} + +DOCUMENTATION = r""" +--- +module: nd_local_user +version_added: "1.4.0" +short_description: Manage local users on Cisco Nexus Dashboard +description: +- Manage local users on Cisco Nexus Dashboard (ND). +- It supports creating, updating, querying, and deleting local users. +author: +- Gaspard Micol (@gmicol) +options: + config: + description: + - The list of the local users to configure. + type: list + elements: dict + suboptions: + email: + description: + - The email address of the local user. + type: str + login_id: + description: + - The login ID of the local user. + - The O(config.login_id) must be defined when creating, updating or deleting a local user. + type: str + required: true + first_name: + description: + - The first name of the local user. + type: str + last_name: + description: + - The last name of the local user. + type: str + user_password: + description: + - The password of the local user. + - Password must have a minimum of 8 characters to a maximum of 64 characters. + - Password must have three of the following; one number, one lower case character, one upper case character, one special character. + - The O(config.user_password) must be defined when creating a new local_user. + type: str + reuse_limitation: + description: + - The number of different passwords a user must use before they can reuse a previous one. + - It defaults to C(0) when unset during creation. + type: int + time_interval_limitation: + description: + - The minimum time period that must pass before a previous password can be reused. + - It defaults to C(0) when unset during creation. 
+ type: int + security_domains: + description: + - The list of Security Domains and Roles for the local user. + - At least, one Security Domain must be defined when creating a new local user. + type: list + elements: dict + suboptions: + name: + description: + - The name of the Security Domain to which the local user is given access. + type: str + required: true + aliases: [ security_domain_name, domain_name ] + roles: + description: + - The Permission Roles of the local user within the Security Domain. + type: list + elements: str + choices: [ fabric_admin, observer, super_admin, support_engineer, approver, designer ] + aliases: [ domains ] + remote_id_claim: + description: + - The remote ID claim of the local user. + type: str + remote_user_authorization: + description: + - To enable/disable the Remote User Authorization of the local user. + - Remote User Authorization is used for signing into Nexus Dashboard when using identity providers that cannot provide authorization claims. + Once this attribute is enabled, the local user ID cannot be used to directly login to Nexus Dashboard. + - It defaults to C(false) when unset during creation. + type: bool + state: + description: + - The desired state of the network resources on the Cisco Nexus Dashboard. + - Use O(state=merged) to create new resources and updates existing ones as defined in your configuration. + Resources on ND that are not specified in the configuration will be left unchanged. + - Use O(state=replaced) to replace the resources specified in the configuration. + - Use O(state=overridden) to enforce the configuration as the single source of truth. + The resources on ND will be modified to exactly match the configuration. + Any resource existing on ND but not present in the configuration will be deleted. Use with extra caution. + - Use O(state=deleted) to remove the resources specified in the configuration from the Cisco Nexus Dashboard. 
+ type: str + default: merged + choices: [ merged, replaced, overridden, deleted ] +extends_documentation_fragment: +- cisco.nd.modules +- cisco.nd.check_mode +notes: +- This module is only supported on Nexus Dashboard having version 4.1.0 or higher. +- This module is not idempotent when creating or updating a local user object when O(config.user_password) is used. +""" + +EXAMPLES = r""" +- name: Create a new local user + cisco.nd.nd_local_user: + config: + - email: user@example.com + login_id: local_user + first_name: User first name + last_name: User last name + user_password: localUserPassword1% + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + name: all + roles: + - observer + - support_engineer + remote_id_claim: remote_user + remote_user_authorization: true + state: merged + register: result + +- name: Create local user with minimal configuration + cisco.nd.nd_local_user: + config: + - login_id: local_user_min + user_password: localUserMinuser_password + security_domain: all + state: merged + +- name: Update local user + cisco.nd.nd_local_user: + config: + - email: udpateduser@example.com + login_id: local_user + first_name: Updated user first name + last_name: Updated user last name + user_password: updatedLocalUserPassword1% + reuse_limitation: 25 + time_interval_limitation: 15 + security_domains: + - name: all + roles: super_admin + - name: ansible_domain + roles: observer + roles: super_admin + remote_id_claim: "" + remote_user_authorization: false + state: replaced + +- name: Delete a local user + cisco.nd.nd_local_user: + config: + - login_id: local_user + state: deleted +""" + +RETURN = r""" +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec, NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resources import NDNetworkResourceModule +from ansible_collections.cisco.nd.plugins.module_utils.constants import 
USER_ROLES_MAPPING + + +# Actions overwrite functions +def quey_all_local_users(nd): + return nd.query_obj(nd.path).get("localusers") + + +def main(): + argument_spec = nd_argument_spec() + argument_spec.update( + config=dict( + type="list", + elements="dict", + options=dict( + email=dict(type="str"), + login_id=dict(type="str", required=True), + first_name=dict(type="str"), + last_name=dict(type="str"), + user_password=dict(type="str", no_log=True), + reuse_limitation=dict(type="int"), + time_interval_limitation=dict(type="int"), + security_domains=dict( + type="list", + elements="dict", + options=dict( + name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), + roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + ), + aliases=["domains"], + ), + remote_id_claim=dict(type="str"), + remote_user_authorization=dict(type="bool"), + ), + ), + override_exceptions=dict(type="list", elements="str"), + state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), + ) + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + ) + + path = "/api/v1/infra/aaa/localUsers" + identifier_keys = ["loginID"] + actions_overwrite_map = {"query_all": quey_all_local_users} + + nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) + + state = nd.params.get("state") + config = nd.params.get("config") + override_exceptions = nd.params.get("override_exceptions") + new_config = [] + for object in config: + payload = { + "email": object.get("email"), + "firstName": object.get("first_name"), + "lastName": object.get("last_name"), + "loginID": object.get("login_id"), + "password": object.get("user_password"), + "remoteIDClaim": object.get("remote_id_claim"), + "xLaunch": object.get("remote_user_authorization"), + } + + if object.get("security_domains"): + payload["rbac"] = { + "domains": { + security_domain.get("name"): { + 
"roles": ( + [USER_ROLES_MAPPING.get(role) for role in security_domain["roles"]] if isinstance(security_domain.get("roles"), list) else [] + ) + } + for security_domain in object["security_domains"] + }, + } + if object.get("reuse_limitation") or object.get("time_interval_limitation"): + payload["passwordPolicy"] = { + "reuseLimitation": object.get("reuse_limitation"), + "timeIntervalLimitation": object.get("time_interval_limitation"), + } + new_config.append(payload) + + nd.manage_state(state=state, new_configs=new_config, unwanted_keys=[["passwordPolicy", "passwordChangeTime"], ["userID"]], override_exceptions=override_exceptions) + + nd.exit_json() + + +if __name__ == "__main__": + main() diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml new file mode 100644 index 00000000..77e55cd1 --- /dev/null +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -0,0 +1,134 @@ +# Test code for the ND modules +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +- name: Test that we have a Nexus Dashboard host, username and password + ansible.builtin.fail: + msg: 'Please define the following variables: ansible_host, ansible_user and ansible_password.' 
+ when: ansible_host is not defined or ansible_user is not defined or ansible_password is not defined + +- name: Set vars + ansible.builtin.set_fact: + nd_info: &nd_info + output_level: '{{ api_key_output_level | default("debug") }}' + +- name: Ensure local users do not exist before test starts + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: ansible_local_user + - login_id: ansible_local_user_2 + state: deleted + +# CREATE +- name: Create local users with full and minimum configuration (check mode) + cisco.nd.nd_local_user: &create_local_user + <<: *nd_info + config: + - email: ansibleuser@example.com + login_id: ansible_local_user + first_name: Ansible first name + last_name: Ansible last name + user_password: ansibleLocalUserPassword1%Test + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + - name: all + roles: + - observer + - support_engineer + remote_id_claim: ansible_remote_user + remote_user_authorization: true + - login_id: ansible_local_user_2 + user_password: ansibleLocalUser2Password1%Test + security_domains: + - name: all + state: merged + check_mode: true + register: cm_create_local_user + +- name: Create local users with full and minimum configuration (normal mode) + cisco.nd.nd_local_user: + <<: *create_local_user + register: nm_create_local_user + +# UPDATE +- name: Update all ansible_local_user's attributes (check mode) + cisco.nd.nd_local_user: &update_first_local_user + <<: *nd_info + config: + - email: updatedansibleuser@example.com + login_id: ansible_local_user + first_name: Updated Ansible first name + last_name: Updated Ansible last name + user_password: updatedAnsibleLocalUserPassword1% + reuse_limitation: 25 + time_interval_limitation: 15 + security_domains: + - name: all + roles: super_admin + remote_id_claim: "" + remote_user_authorization: false + state: replaced + check_mode: true + register: cm_update_local_user + +- name: Update local user (normal mode) + cisco.nd.nd_local_user: + <<: 
*update_first_local_user + register: nm_update_local_user + +- name: Update all ansible_local_user_2's attributes except password + cisco.nd.nd_local_user: &update_second_local_user + <<: *nd_info + config: + - email: secondansibleuser@example.com + login_id: ansible_local_user_2 + first_name: Second Ansible first name + last_name: Second Ansible last name + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + - name: all + roles: fabric_admin + remote_id_claim: ansible_remote_user_2 + remote_user_authorization: true + state: merged + register: nm_update_local_user_2 + +- name: Update all ansible_local_user_2's attributes except password again (idempotency) + cisco.nd.nd_local_user: + <<: *update_second_local_user + register: nm_update_local_user_2_again + + +# DELETE +- name: Delete local user by name (check mode) + cisco.nd.nd_local_user: &delete_local_user + <<: *nd_info + config: + - login_id: ansible_local_user + state: deleted + check_mode: true + register: cm_delete_local_user + +- name: Delete local user by name (normal mode) + cisco.nd.nd_local_user: + <<: *delete_local_user + register: nm_delete_local_user + +- name: Delete local user again (idempotency test) + cisco.nd.nd_local_user: + <<: *delete_local_user + register: nm_delete_local_user_again + + +# CLEAN UP +- name: Ensure local users do not exist + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: ansible_local_user + - login_id: ansible_local_user_2 + state: deleted From e95251747c69fb3c84821c1dfe264104ad619b92 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 15 Jan 2026 11:47:32 -0500 Subject: [PATCH 028/131] [ignore] First Pydantic implementation: Add Pydantic Models for nd_local_user. 
--- .../module_utils/models/local_user_model.py | 142 ++++++++++++++++++ plugins/module_utils/nd_config_collection.py | 1 + plugins/module_utils/nd_network_resources.py | 2 + plugins/modules/nd_local_user.py | 5 +- 4 files changed, 148 insertions(+), 2 deletions(-) create mode 100644 plugins/module_utils/models/local_user_model.py diff --git a/plugins/module_utils/models/local_user_model.py b/plugins/module_utils/models/local_user_model.py new file mode 100644 index 00000000..f8de1f46 --- /dev/null +++ b/plugins/module_utils/models/local_user_model.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json +from typing import List, Dict, Any, Optional +from pydantic import BaseModel, ConfigDict, Field, field_validator + +# TODO: Add Field validation methods +# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel +# TODO: Maybe define our own baseModel +# TODO: Look at ansible aliases +from pydantic import BaseModel, Field, ConfigDict +from typing import List, Dict, Any, Optional + +class SecurityDomainModel(BaseModel): + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + name: str = Field(alias="name") + roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") + + +class LocalUserModel(BaseModel): + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + email: str = Field(default="", alias="email") + login_id: str = Field(alias="loginID") + first_name: str = Field(default="", alias="firstName") + last_name: str = Field(default="", alias="lastName") + user_password: str = Field(alias="password") + 
reuse_limitation: int = Field(default=0, alias="reuseLimitation") + time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") + security_domains: List[SecurityDomainModel] = Field(default_factory=list, alias="domains") + remote_id_claim: str = Field(default="", alias="remoteIDClaim") + remote_user_authorization: bool = Field(default=False, alias="xLaunch") + + def to_api_payload(self, user_roles_mapping: Dict[str, str] = None) -> Dict[str, Any]: + """Convert the model to the specific API payload format required.""" + if user_roles_mapping is None: + user_roles_mapping = {} + + base_data = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + + payload = { + "email": base_data.get("email"), + "firstName": base_data.get("firstName"), + "lastName": base_data.get("lastName"), + "loginID": base_data.get("loginID"), + "password": base_data.get("password"), + "remoteIDClaim": base_data.get("remoteIDClaim"), + "xLaunch": base_data.get("xLaunch"), + } + + if self.security_domains: + payload["rbac"] = { + "domains": { + domain.name: { + "roles": [ + user_roles_mapping.get(role, role) for role in domain.roles + ] + } + for domain in self.security_domains + } + } + + if self.reuse_limitation or self.time_interval_limitation: + payload["passwordPolicy"] = { + "reuseLimitation": self.reuse_limitation, + "timeIntervalLimitation": self.time_interval_limitation, + } + + return payload + + @classmethod + def from_api_payload( + cls, + payload: Dict[str, Any], + reverse_user_roles_mapping: Optional[Dict[str, str]] = None + ) -> 'LocalUserModel': + + if reverse_user_roles_mapping is None: + reverse_user_roles_mapping = {} + + user_data = { + "email": payload.get("email", ""), + "loginID": payload.get("loginID", ""), + "firstName": payload.get("firstName", ""), + "lastName": payload.get("lastName", ""), + "password": payload.get("password", ""), + "remoteIDClaim": payload.get("remoteIDClaim", ""), + "xLaunch": 
payload.get("xLaunch", False), + } + + password_policy = payload.get("passwordPolicy", {}) + user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) + user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) + + domains_data = [] + rbac = payload.get("rbac", {}) + if rbac and "domains" in rbac: + for domain_name, domain_config in rbac["domains"].items(): + # Map API roles back to internal roles + api_roles = domain_config.get("roles", []) + internal_roles = [ + reverse_user_roles_mapping.get(role, role) for role in api_roles + ] + + domain_data = { + "name": domain_name, + "roles": internal_roles + } + domains_data.append(domain_data) + + user_data["domains"] = domains_data + + return cls(**user_data) + + # @classmethod + # def from_api_payload_json( + # cls, + # json_payload: str, + # reverse_user_roles_mapping: Optional[Dict[str, str]] = None + # ) -> 'LocalUserModel': + + # payload = json.loads(json_payload) + # return cls.from_api_payload(payload, reverse_user_roles_mapping) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1cf86756..8f0058bb 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -20,6 +20,7 @@ from collections import MutableMapping iteritems = lambda d: d.iteritems() +# TODO: Adapt to Pydantic Models # NOTE: Single-Index Hybrid Collection for ND Network Resource Module class NDConfigCollection(MutableMapping): diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index b73b24e7..3b549da1 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -14,6 +14,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED # TODO: Make further enhancement to logs and outputs +# TODO: Adapt to Pydantic Models # NOTE: ONLY works 
for new API endpoints introduced in ND v4.1.0 and later class NDNetworkResourceModule(NDModule): @@ -98,6 +99,7 @@ def format_log(self, identifier, status, after_data, sent_payload_data=None): self.nd_logs.append(item_result) # Logs and Outputs formating Operations + # TODO: Move it to different file def add_logs_and_ouputs(self): if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if self.params.get("output_level") in ("debug", "info"): diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 552df3b7..4a5f1ad2 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -181,10 +181,11 @@ # Actions overwrite functions -def quey_all_local_users(nd): +def query_all_local_users(nd): return nd.query_obj(nd.path).get("localusers") +# TODO: Adapt to Pydantic Model def main(): argument_spec = nd_argument_spec() argument_spec.update( @@ -223,7 +224,7 @@ def main(): path = "/api/v1/infra/aaa/localUsers" identifier_keys = ["loginID"] - actions_overwrite_map = {"query_all": quey_all_local_users} + actions_overwrite_map = {"query_all": query_all_local_users} nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) From 727d25e48624d68ce093c2984a3f45bb66598a7a Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 20 Jan 2026 13:17:35 -0500 Subject: [PATCH 029/131] [ignore] Second Pydantic Implementation: Create a NDBaseModel to be inherited from future class models. Modify class models for local_user. 
--- plugins/module_utils/models/base.py | 57 +++++++ plugins/module_utils/models/local_user.py | 116 ++++++++++++++ .../module_utils/models/local_user_model.py | 142 ------------------ 3 files changed, 173 insertions(+), 142 deletions(-) create mode 100644 plugins/module_utils/models/base.py create mode 100644 plugins/module_utils/models/local_user.py delete mode 100644 plugins/module_utils/models/local_user_model.py diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py new file mode 100644 index 00000000..e7301d14 --- /dev/null +++ b/plugins/module_utils/models/base.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from abc import ABC, abstractmethod +from pydantic import BaseModel, ConfigDict +from typing import List, Dict, Any, Optional, ClassVar + + +class NDBaseModel(BaseModel, ABC): + + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + # TODO: find ways to redifine these var in every + identifiers: ClassVar[List[str]] = [] + use_composite_identifiers: ClassVar[bool] = False + + @abstractmethod + def to_payload(self) -> Dict[str, Any]: + pass + + @classmethod + @abstractmethod + def from_response(cls, response: Dict[str, Any]) -> 'NDBaseModel': + pass + + # TODO: Modify to make it more generic and Pydantic + # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") + def get_identifier_value(self) -> Any: + """Generates the internal map key based on the selected mode.""" + # if self.use_composite_keys: + # # Mode: Composite (Tuple of ALL keys) + # values = [] + # for key in self.identifier_keys: + # val = config.get(key) + # if val is None: + # return None # Missing a required part + # 
values.append(val) + # return tuple(values) + # else: + # # Mode: Priority (First available key) + # for key in self.identifier_keys: + # if key in config: + # return config[key] + # return None + pass diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py new file mode 100644 index 00000000..7877a5a5 --- /dev/null +++ b/plugins/module_utils/models/local_user.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from pydantic import Field, field_validator +from types import MappingProxyType +from typing import List, Dict, Any, Optional, ClassVar + +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel + +# TODO: Add Field validation methods +# TODO: define our own Field class for string versioning, ansible aliases +# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel +# TODO: Surclass BaseModel -> Priority +# TODO: Look at ansible aliases + +# TODO: use constants.py file in the future +user_roles_mapping = MappingProxyType({}) + + +class LocalUserSecurityDomainModel(NDBaseModel): + + name: str = Field(alias="name") + roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") + + def to_payload(self) -> Dict[str, Any]: + return { + self.name: { + "roles": [ + user_roles_mapping.get(role, role) for role in self.roles + ] + } + } + + @classmethod + def from_response(cls, name: str, domain_config: List[str]) -> 'NDBaseModel': + internal_roles = [user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + + domain_data = { + "name": name, + "roles": internal_roles + } + + return cls(**domain_data) + + +class LocalUserModel(NDBaseModel): + + # TODO: Define a way to generate it (look at NDBaseModel 
comments) + identifiers: ClassVar[List[str]] = ["login_id"] + + # TODO: Use Optinal to remove default values (get them from API response instead) + email: str = Field(default="", alias="email") + login_id: str = Field(alias="loginID") + first_name: str = Field(default="", alias="firstName") + last_name: str = Field(default="", alias="lastName") + user_password: str = Field(alias="password") + reuse_limitation: int = Field(default=0, alias="reuseLimitation") + time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") + security_domains: List[LocalUserSecurityDomainModel] = Field(default_factory=list, alias="domains") + remote_id_claim: str = Field(default="", alias="remoteIDClaim") + remote_user_authorization: bool = Field(default=False, alias="xLaunch") + + def to_payload(self) -> Dict[str, Any]: + """Convert the model to the specific API payload format required.""" + + payload = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + + if self.security_domains: + payload["rbac"] = {"domains": {}} + for domain in self.security_domains: + payload["rbac"]["domains"].update(domain.to_api_payload()) + + if self.reuse_limitation or self.time_interval_limitation: + payload["passwordPolicy"] = { + "reuseLimitation": self.reuse_limitation, + "timeIntervalLimitation": self.time_interval_limitation, + } + + return payload + + @classmethod + def from_response(cls, payload: Dict[str, Any]) -> 'LocalUserModel': + + if reverse_user_roles_mapping is None: + reverse_user_roles_mapping = {} + + user_data = { + "email": payload.get("email"), + "loginID": payload.get("loginID"), + "firstName": payload.get("firstName"), + "lastName": payload.get("lastName"), + "password": payload.get("password"), + "remoteIDClaim": payload.get("remoteIDClaim"), + "xLaunch": payload.get("xLaunch"), + } + + password_policy = payload.get("passwordPolicy", {}) + user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) + 
user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) + + domains_data = [] + rbac = payload.get("rbac", {}) + if rbac and "domains" in rbac: + for domain_name, domain_config in rbac["domains"].items(): + domains_data.append(LocalUserSecurityDomainModel.from_api_response(domain_name, domain_config)) + + user_data["domains"] = domains_data + + return cls(**user_data) diff --git a/plugins/module_utils/models/local_user_model.py b/plugins/module_utils/models/local_user_model.py deleted file mode 100644 index f8de1f46..00000000 --- a/plugins/module_utils/models/local_user_model.py +++ /dev/null @@ -1,142 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2025, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -import json -from typing import List, Dict, Any, Optional -from pydantic import BaseModel, ConfigDict, Field, field_validator - -# TODO: Add Field validation methods -# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel -# TODO: Maybe define our own baseModel -# TODO: Look at ansible aliases -from pydantic import BaseModel, Field, ConfigDict -from typing import List, Dict, Any, Optional - -class SecurityDomainModel(BaseModel): - model_config = ConfigDict( - str_strip_whitespace=True, - use_enum_values=True, - validate_assignment=True, - populate_by_name=True, - ) - - name: str = Field(alias="name") - roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") - - -class LocalUserModel(BaseModel): - model_config = ConfigDict( - str_strip_whitespace=True, - use_enum_values=True, - validate_assignment=True, - populate_by_name=True, - ) - - email: str = Field(default="", alias="email") - login_id: str = Field(alias="loginID") - first_name: str = Field(default="", alias="firstName") - last_name: str = Field(default="", 
alias="lastName") - user_password: str = Field(alias="password") - reuse_limitation: int = Field(default=0, alias="reuseLimitation") - time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") - security_domains: List[SecurityDomainModel] = Field(default_factory=list, alias="domains") - remote_id_claim: str = Field(default="", alias="remoteIDClaim") - remote_user_authorization: bool = Field(default=False, alias="xLaunch") - - def to_api_payload(self, user_roles_mapping: Dict[str, str] = None) -> Dict[str, Any]: - """Convert the model to the specific API payload format required.""" - if user_roles_mapping is None: - user_roles_mapping = {} - - base_data = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) - - payload = { - "email": base_data.get("email"), - "firstName": base_data.get("firstName"), - "lastName": base_data.get("lastName"), - "loginID": base_data.get("loginID"), - "password": base_data.get("password"), - "remoteIDClaim": base_data.get("remoteIDClaim"), - "xLaunch": base_data.get("xLaunch"), - } - - if self.security_domains: - payload["rbac"] = { - "domains": { - domain.name: { - "roles": [ - user_roles_mapping.get(role, role) for role in domain.roles - ] - } - for domain in self.security_domains - } - } - - if self.reuse_limitation or self.time_interval_limitation: - payload["passwordPolicy"] = { - "reuseLimitation": self.reuse_limitation, - "timeIntervalLimitation": self.time_interval_limitation, - } - - return payload - - @classmethod - def from_api_payload( - cls, - payload: Dict[str, Any], - reverse_user_roles_mapping: Optional[Dict[str, str]] = None - ) -> 'LocalUserModel': - - if reverse_user_roles_mapping is None: - reverse_user_roles_mapping = {} - - user_data = { - "email": payload.get("email", ""), - "loginID": payload.get("loginID", ""), - "firstName": payload.get("firstName", ""), - "lastName": payload.get("lastName", ""), - "password": payload.get("password", ""), - 
"remoteIDClaim": payload.get("remoteIDClaim", ""), - "xLaunch": payload.get("xLaunch", False), - } - - password_policy = payload.get("passwordPolicy", {}) - user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) - user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) - - domains_data = [] - rbac = payload.get("rbac", {}) - if rbac and "domains" in rbac: - for domain_name, domain_config in rbac["domains"].items(): - # Map API roles back to internal roles - api_roles = domain_config.get("roles", []) - internal_roles = [ - reverse_user_roles_mapping.get(role, role) for role in api_roles - ] - - domain_data = { - "name": domain_name, - "roles": internal_roles - } - domains_data.append(domain_data) - - user_data["domains"] = domains_data - - return cls(**user_data) - - # @classmethod - # def from_api_payload_json( - # cls, - # json_payload: str, - # reverse_user_roles_mapping: Optional[Dict[str, str]] = None - # ) -> 'LocalUserModel': - - # payload = json.loads(json_payload) - # return cls.from_api_payload(payload, reverse_user_roles_mapping) From 49b307b354d143945d889fcac8be749bf6b1714f Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 22 Jan 2026 01:04:05 -0500 Subject: [PATCH 030/131] [ignore] Pydantic Models: Modify and Clean both local_user.py and base.py based on comments. Add a get method and get_identifier_value function to NDBaseModel. 
--- plugins/module_utils/models/base.py | 43 ++++++------ plugins/module_utils/models/local_user.py | 82 ++++++++++------------- 2 files changed, 57 insertions(+), 68 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index e7301d14..bdd1b9c2 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -11,6 +11,7 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import List, Dict, Any, Optional, ClassVar +from typing_extensions import Self class NDBaseModel(BaseModel, ABC): @@ -22,7 +23,7 @@ class NDBaseModel(BaseModel, ABC): populate_by_name=True, ) - # TODO: find ways to redifine these var in every + # TODO: find ways to redifine these var in every future NDBaseModels identifiers: ClassVar[List[str]] = [] use_composite_identifiers: ClassVar[bool] = False @@ -32,26 +33,28 @@ def to_payload(self) -> Dict[str, Any]: @classmethod @abstractmethod - def from_response(cls, response: Dict[str, Any]) -> 'NDBaseModel': + def from_response(cls, response: Dict[str, Any]) -> Self: pass - # TODO: Modify to make it more generic and Pydantic + def get(self, field: str, default: Any = None) -> Any: + """Custom get method to mimic dictionary behavior.""" + return getattr(self, field, default) + + # TODO: Modify to make it more generic and Pydantic | might change and be moved in different Generic Class/Model # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") def get_identifier_value(self) -> Any: - """Generates the internal map key based on the selected mode.""" - # if self.use_composite_keys: - # # Mode: Composite (Tuple of ALL keys) - # values = [] - # for key in self.identifier_keys: - # val = config.get(key) - # if val is None: - # return None # Missing a required part - # values.append(val) - # return tuple(values) - # else: - # # Mode: Priority (First available key) - # for key in self.identifier_keys: - # if key in config: 
- # return config[key] - # return None - pass + """Generates the internal map key based on the selected mode.""" + if self.use_composite_identifiers: + # Mode: Composite (Tuple of ALL keys) + values = [] + for identifier in self.identifiers: + value = self.get(identifier) + if value is None: + return None # Missing a required part | Add Error Handling method here + values.append(value) + return tuple(values) + else: + # Mode: Priority (First available key) + for identifier in self.identifiers: + return self.get(identifier) + return None diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 7877a5a5..28cea27c 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,9 +8,10 @@ __metaclass__ = type -from pydantic import Field, field_validator +from pydantic import Field, field_validator, SecretStr from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar +from typing_extensions import Self from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel @@ -20,7 +21,7 @@ # TODO: Surclass BaseModel -> Priority # TODO: Look at ansible aliases -# TODO: use constants.py file in the future +# TODO: To be moved in constants.py file user_roles_mapping = MappingProxyType({}) @@ -39,15 +40,11 @@ def to_payload(self) -> Dict[str, Any]: } @classmethod - def from_response(cls, name: str, domain_config: List[str]) -> 'NDBaseModel': - internal_roles = [user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] - - domain_data = { - "name": name, - "roles": internal_roles - } - - return cls(**domain_data) + def from_response(cls, name: str, domain_config: List[str]) -> Self: + return cls( + name=name, + roles=[user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + ) class LocalUserModel(NDBaseModel): @@ -55,17 +52,17 @@ class LocalUserModel(NDBaseModel): # TODO: Define a way to 
generate it (look at NDBaseModel comments) identifiers: ClassVar[List[str]] = ["login_id"] - # TODO: Use Optinal to remove default values (get them from API response instead) - email: str = Field(default="", alias="email") + email: Optional[str] = Field(alias="email") login_id: str = Field(alias="loginID") - first_name: str = Field(default="", alias="firstName") - last_name: str = Field(default="", alias="lastName") - user_password: str = Field(alias="password") - reuse_limitation: int = Field(default=0, alias="reuseLimitation") - time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") - security_domains: List[LocalUserSecurityDomainModel] = Field(default_factory=list, alias="domains") - remote_id_claim: str = Field(default="", alias="remoteIDClaim") - remote_user_authorization: bool = Field(default=False, alias="xLaunch") + first_name: Optional[str] = Field(default="", alias="firstName") + last_name: Optional[str] = Field(default="", alias="lastName") + # TODO: Check secrets manipulation when tracking changes while maintaining security + user_password: Optional[SecretStr] = Field(alias="password") + reuse_limitation: Optional[int] = Field(default=0, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=0, alias="timeIntervalLimitation") + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(alias="domains") + remote_id_claim: Optional[str] = Field(default="", alias="remoteIDClaim") + remote_user_authorization: Optional[bool] = Field(default=False, alias="xLaunch") def to_payload(self) -> Dict[str, Any]: """Convert the model to the specific API payload format required.""" @@ -86,31 +83,20 @@ def to_payload(self) -> Dict[str, Any]: return payload @classmethod - def from_response(cls, payload: Dict[str, Any]) -> 'LocalUserModel': + def from_response(cls, response: Dict[str, Any]) -> Self: - if reverse_user_roles_mapping is None: - reverse_user_roles_mapping = {} - - user_data = { - "email": 
payload.get("email"), - "loginID": payload.get("loginID"), - "firstName": payload.get("firstName"), - "lastName": payload.get("lastName"), - "password": payload.get("password"), - "remoteIDClaim": payload.get("remoteIDClaim"), - "xLaunch": payload.get("xLaunch"), - } - - password_policy = payload.get("passwordPolicy", {}) - user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) - user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) - - domains_data = [] - rbac = payload.get("rbac", {}) - if rbac and "domains" in rbac: - for domain_name, domain_config in rbac["domains"].items(): - domains_data.append(LocalUserSecurityDomainModel.from_api_response(domain_name, domain_config)) - - user_data["domains"] = domains_data - - return cls(**user_data) + return cls( + email=response.get("email"), + login_id=response.get("loginID"), + first_name=response.get("firstName"), + last_name=response.get("lastName"), + user_password=response.get("password"), + reuse_limitation=response.get("passwordPolicy", {}).get("reuseLimitation"), + time_interval_limitation=response.get("passwordPolicy", {}).get("timeIntervalLimitation"), + security_domains=[ + LocalUserSecurityDomainModel.from_response(name, domain_config) + for name, domain_config in response.get("rbac", {}).get("domains", {}).items() + ], + remote_id_claim=response.get("remoteIDClaim"), + remote_user_authorization=response.get("xLaunch"), + ) From 5608592672ad33e5d254086ca3dc7526b98ee941 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 00:56:49 -0500 Subject: [PATCH 031/131] [ignore] Pydantic ND base models and local_user models: Final proposition of core design adding new methods which will be used in NDConfigCollection and NDNetworkResourceModule classes as well as basic error handling and simple docstrings. 
--- plugins/module_utils/models/base.py | 124 ++++++++++++++---- plugins/module_utils/models/local_user.py | 146 ++++++++++++++-------- 2 files changed, 192 insertions(+), 78 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index bdd1b9c2..a7eabf17 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -10,51 +10,127 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import List, Dict, Any, Optional, ClassVar +from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal from typing_extensions import Self class NDBaseModel(BaseModel, ABC): - + """ + Base model for all Nexus Dashboard API objects. + + Supports three identifier strategies: + - single: One unique required field (e.g., ["login_id"]) + - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) + - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) + """ + model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, + extra='ignore' ) - - # TODO: find ways to redifine these var in every future NDBaseModels + + # Subclasses MUST define these identifiers: ClassVar[List[str]] = [] - use_composite_identifiers: ClassVar[bool] = False - + identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + + # Optional: fields to exclude from diffs (e.g., passwords) + exclude_from_diff: ClassVar[List[str]] = [] + @abstractmethod def to_payload(self) -> Dict[str, Any]: + """ + Convert model to API payload format. 
+ """ pass @classmethod @abstractmethod def from_response(cls, response: Dict[str, Any]) -> Self: + """ + Create model instance from API response. + """ pass - def get(self, field: str, default: Any = None) -> Any: - """Custom get method to mimic dictionary behavior.""" - return getattr(self, field, default) - - # TODO: Modify to make it more generic and Pydantic | might change and be moved in different Generic Class/Model - # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") - def get_identifier_value(self) -> Any: - """Generates the internal map key based on the selected mode.""" - if self.use_composite_identifiers: - # Mode: Composite (Tuple of ALL keys) + def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: + """ + Extract identifier value(s) from this instance: + - single identifier: Returns field value. + - composite identifiers: Returns tuple of all field values. + - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. + """ + if not self.identifiers: + raise ValueError(f"{self.__class__.__name__} has no identifiers defined") + + if self.identifier_strategy == "single": + value = getattr(self, self.identifiers[0], None) + if value is None: + raise ValueError( + f"Single identifier field '{self.identifiers[0]}' is None" + ) + return value + + elif self.identifier_strategy == "composite": values = [] - for identifier in self.identifiers: - value = self.get(identifier) + missing = [] + + for field in self.identifiers: + value = getattr(self, field, None) if value is None: - return None # Missing a required part | Add Error Handling method here + missing.append(field) values.append(value) + + # NOTE: might not be needed in the future with field_validator + if missing: + raise ValueError( + f"Composite identifier fields {missing} are None. 
" + f"All required: {self.identifiers}" + ) + return tuple(values) + + elif self.identifier_strategy == "hierarchical": + for field in self.identifiers: + value = getattr(self, field, None) + if value is not None: + return (field, value) + + raise ValueError( + f"No non-None value in hierarchical fields {self.identifiers}" + ) + else: - # Mode: Priority (First available key) - for identifier in self.identifiers: - return self.get(identifier) - return None + raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") + + def to_diff_dict(self) -> Dict[str, Any]: + """ + Export for diff comparison (excludes sensitive fields). + """ + return self.model_dump( + by_alias=True, + exclude_none=True, + exclude=set(self.exclude_from_diff) + ) + +# NOTE: Maybe make it a seperate BaseModel +class NDNestedModel(NDBaseModel): + """ + Base for nested models without identifiers. + """ + + identifiers: ClassVar[List[str]] = [] + + def to_payload(self) -> Dict[str, Any]: + """ + Convert model to API payload format. + """ + return self.model_dump(by_alias=True, exclude_none=True) + + @classmethod + def from_response(cls, response: Dict[str, Any]) -> Self: + """ + Create model instance from API response. 
+ """ + return cls.model_validate(response) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 28cea27c..b7069126 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -8,95 +8,133 @@ __metaclass__ = type -from pydantic import Field, field_validator, SecretStr +from pydantic import Field, SecretStr from types import MappingProxyType -from typing import List, Dict, Any, Optional, ClassVar +from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self -from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel +from models.base import NDBaseModel, NDNestedModel -# TODO: Add Field validation methods -# TODO: define our own Field class for string versioning, ansible aliases -# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel -# TODO: Surclass BaseModel -> Priority -# TODO: Look at ansible aliases +# TODO: Move it to constants.py and import it +USER_ROLES_MAPPING = MappingProxyType({ + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", +}) -# TODO: To be moved in constants.py file -user_roles_mapping = MappingProxyType({}) +class LocalUserSecurityDomainModel(NDNestedModel): + """Security domain configuration for local user (nested model).""" -class LocalUserSecurityDomainModel(NDBaseModel): - - name: str = Field(alias="name") - roles: list[str] = Field(default_factory=lambda: 
["observer"], alias="roles") - + # Fields + name: str + roles: Optional[List[str]] = None + def to_payload(self) -> Dict[str, Any]: - return { + + return { self.name: { "roles": [ - user_roles_mapping.get(role, role) for role in self.roles + USER_ROLES_MAPPING.get(role, role) + for role in (self.roles or []) ] } } - + @classmethod - def from_response(cls, name: str, domain_config: List[str]) -> Self: + def from_response(cls, name: str, domain_config: Dict[str, Any]) -> Self: + + # NOTE: Maybe create a function from it to be moved to utils.py and to be imported + reverse_mapping = {value: key for key, value in USER_ROLES_MAPPING.items()} + return cls( name=name, - roles=[user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + roles=[ + reverse_mapping.get(role, role) + for role in domain_config.get("roles", []) + ] ) class LocalUserModel(NDBaseModel): + """ + Local user configuration. - # TODO: Define a way to generate it (look at NDBaseModel comments) + Identifier: login_id (single field) + """ + + # Identifier configuration identifiers: ClassVar[List[str]] = ["login_id"] - - email: Optional[str] = Field(alias="email") - login_id: str = Field(alias="loginID") - first_name: Optional[str] = Field(default="", alias="firstName") - last_name: Optional[str] = Field(default="", alias="lastName") - # TODO: Check secrets manipulation when tracking changes while maintaining security - user_password: Optional[SecretStr] = Field(alias="password") - reuse_limitation: Optional[int] = Field(default=0, alias="reuseLimitation") - time_interval_limitation: Optional[int] = Field(default=0, alias="timeIntervalLimitation") - security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(alias="domains") - remote_id_claim: Optional[str] = Field(default="", alias="remoteIDClaim") - remote_user_authorization: Optional[bool] = Field(default=False, alias="xLaunch") - + identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + 
exclude_from_diff: ClassVar[List[str]] = ["user_password"] + + # Fields + login_id: str = Field(..., alias="loginID") + email: Optional[str] = None + first_name: Optional[str] = Field(default=None, alias="firstName") + last_name: Optional[str] = Field(default=None, alias="lastName") + user_password: Optional[SecretStr] = Field(default=None, alias="password") + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="domains") + remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") + remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") + def to_payload(self) -> Dict[str, Any]: - """Convert the model to the specific API payload format required.""" + payload = self.model_dump( + by_alias=True, + exclude={ + 'domains', + 'security_domains', + 'reuseLimitation', + 'reuse_limitation', + 'timeIntervalLimitation', + 'time_interval_limitation' + }, + exclude_none=True + ) - payload = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + if self.user_password: + payload["password"] = self.user_password.get_secret_value() if self.security_domains: payload["rbac"] = {"domains": {}} for domain in self.security_domains: - payload["rbac"]["domains"].update(domain.to_api_payload()) - - if self.reuse_limitation or self.time_interval_limitation: - payload["passwordPolicy"] = { - "reuseLimitation": self.reuse_limitation, - "timeIntervalLimitation": self.time_interval_limitation, - } - + payload["rbac"]["domains"].update(domain.to_payload()) + + if self.reuse_limitation is not None or self.time_interval_limitation is not None: + payload["passwordPolicy"] = {} + if self.reuse_limitation is not None: + payload["passwordPolicy"]["reuseLimitation"] = self.reuse_limitation + if 
self.time_interval_limitation is not None: + payload["passwordPolicy"]["timeIntervalLimitation"] = self.time_interval_limitation + return payload - + @classmethod def from_response(cls, response: Dict[str, Any]) -> Self: + password_policy = response.get("passwordPolicy", {}) + rbac = response.get("rbac", {}) + domains = rbac.get("domains", {}) + + security_domains = [ + LocalUserSecurityDomainModel.from_response(name, config) + for name, config in domains.items() + ] if domains else None return cls( - email=response.get("email"), login_id=response.get("loginID"), + email=response.get("email"), first_name=response.get("firstName"), last_name=response.get("lastName"), user_password=response.get("password"), - reuse_limitation=response.get("passwordPolicy", {}).get("reuseLimitation"), - time_interval_limitation=response.get("passwordPolicy", {}).get("timeIntervalLimitation"), - security_domains=[ - LocalUserSecurityDomainModel.from_response(name, domain_config) - for name, domain_config in response.get("rbac", {}).get("domains", {}).items() - ], + reuse_limitation=password_policy.get("reuseLimitation"), + time_interval_limitation=password_policy.get("timeIntervalLimitation"), + security_domains=security_domains, remote_id_claim=response.get("remoteIDClaim"), - remote_user_authorization=response.get("xLaunch"), + remote_user_authorization=response.get("xLaunch") ) From 4677805ced68d70a64f728f0e0f91d8d3d1d8102 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 13:09:33 -0500 Subject: [PATCH 032/131] [ignore] Pydantic ND Config Collection: Final proposition of core design changing existing methods and adding new ones which will be used in NDNetworkResourceModule class as well as basic error handling and simple docstrings. 
--- plugins/module_utils/nd_config_collection.py | 515 ++++++++++--------- 1 file changed, 266 insertions(+), 249 deletions(-) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 8f0058bb..2f256d30 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -8,289 +8,306 @@ __metaclass__ = type -import sys +from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable from copy import deepcopy -from functools import reduce -# Python 2 and 3 compatibility (To be removed in the future) -if sys.version_info[0] >= 3: - from collections.abc import MutableMapping - iteritems = lambda d: d.items() -else: - from collections import MutableMapping - iteritems = lambda d: d.iteritems() +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from models.base import NDBaseModel -# TODO: Adapt to Pydantic Models -# NOTE: Single-Index Hybrid Collection for ND Network Resource Module -class NDConfigCollection(MutableMapping): +# Type aliases +# NOTE: Maybe add more type aliases in the future if needed +ModelType = TypeVar('ModelType', bound=NDBaseModel) +IdentifierKey = Union[str, int, Tuple[Any, ...]] - def __init__(self, identifier_keys, data=None, use_composite_keys=False): - self.identifier_keys = identifier_keys - self.use_composite_keys = use_composite_keys - - # Dual Storage - self._list = [] - self._map = {} + +class NDConfigCollection(Generic[ModelType]): + """ + Nexus Dashboard configuration collection for NDBaseModel instances. 
+ """ + + def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType]] = None): + """ + Initialize collection. + """ + self._model_class = model_class - if data: - for item in data: + # Dual storage + self._items: List[ModelType] = [] + self._index: Dict[IdentifierKey, int] = {} + + if items: + for item in items: self.add(item) - # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") - def _get_identifier_value(self, config): - """Generates the internal map key based on the selected mode.""" - if self.use_composite_keys: - # Mode: Composite (Tuple of ALL keys) - values = [] - for key in self.identifier_keys: - val = config.get(key) - if val is None: - return None # Missing a required part - values.append(val) - return tuple(values) - else: - # Mode: Priority (First available key) - for key in self.identifier_keys: - if key in config: - return config[key] - return None - - # Magic Methods - def __getitem__(self, key): - return self._map[key] - - def __setitem__(self, key, value): - if key in self._map: - old_ref = self._map[key] - try: - idx = self._list.index(old_ref) - self._list[idx] = value - self._map[key] = value - except ValueError: - pass - else: - # Add new - self._list.append(value) - self._map[key] = value - - def __delitem__(self, key): - if key in self._map: - obj_ref = self._map[key] - del self._map[key] - self._list.remove(obj_ref) + def _extract_key(self, item: ModelType) -> IdentifierKey: + """ + Extract identifier key from item. + """ + try: + return item.get_identifier_value() + except Exception as e: + raise ValueError(f"Failed to extract identifier: {e}") from e + + def _rebuild_index(self) -> None: + """Rebuild index from scratch (O(n) operation).""" + self._index.clear() + for index, item in enumerate(self._items): + key = self._extract_key(item) + self._index[key] = index + + # Core CRUD Operations + + def add(self, item: ModelType) -> IdentifierKey: + """ + Add item to collection (O(1) operation). 
+ """ + if not isinstance(item, self._model_class): + raise TypeError( + f"Item must be instance of {self._model_class.__name__}, " + f"got {type(item).__name__}" + ) + + key = self._extract_key(item) + + if key in self._index: + raise ValueError( + f"Item with identifier {key} already exists. Use replace() to update" + ) + + position = len(self._items) + self._items.append(item) + self._index[key] = position + + return key + + def get(self, key: IdentifierKey) -> Optional[ModelType]: + """ + Get item by identifier key (O(1) operation). + """ + index = self._index.get(key) + return self._items[index] if index is not None else None + + def replace(self, item: ModelType) -> bool: + """ + Replace existing item with same identifier (O(1) operation). + """ + if not isinstance(item, self._model_class): + raise TypeError( + f"Item must be instance of {self._model_class.__name__}, " + f"got {type(item).__name__}" + ) + + key = self._extract_key(item) + index = self._index.get(key) + + if index is None: + return False + + self._items[index] = item + return True + + def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[ModelType, ModelType], ModelType]] = None) -> ModelType: + """ + Merge item with existing, or add if not present. 
+ """ + key = self._extract_key(item) + existing = self.get(key) + + if existing is None: + self.add(item) + return item + + # Custom or default merge + if custom_merge_function: + merged = custom_merge_function(existing, item) else: - raise KeyError(key) - - def __iter__(self): - return iter(self._map) - - def __len__(self): - return len(self._list) + # Default merge + existing_data = existing.model_dump() + new_data = item.model_dump(exclude_unset=True) + merged_data = self._deep_merge(existing_data, new_data) + merged = self._model_class.model_validate(merged_data) + + self.replace(merged) + return merged - def __eq__(self, other): - if isinstance(other, NDConfigCollection): - return self._list == other._list - elif isinstance(other, list): - return self._list == other - elif isinstance(other, dict): - return self._map == other - return False + def _deep_merge(self, base: Dict, update: Dict) -> Dict: + """Recursively merge dictionaries.""" + result = base.copy() + + for key, value in update.items(): + if value is None: + continue + + if key in result and isinstance(result[key], dict) and isinstance(value, dict): + result[key] = self._deep_merge(result[key], value) + else: + result[key] = value + + return result + + def delete(self, key: IdentifierKey) -> bool: + """ + Delete item by identifier (O(n) operation due to index rebuild) + """ + index = self._index.get(key) + + if index is None: + return False + + del self._items[index] + self._rebuild_index() + + return True + + # Diff Operations + + def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: + """ + Compare single item against collection. 
+ """ + try: + key = self._extract_key(new_item) + except ValueError: + return "new" + + existing = self.get(key) + + if existing is None: + return "new" - def __ne__(self, other): - return not self.__eq__(other) + existing_data = existing.to_diff_dict() + new_data = new_item.to_diff_dict() + + if unwanted_keys: + existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) + new_data = self._remove_unwanted_keys(new_data, unwanted_keys) - def __repr__(self): - return str(self._list) + is_subset = self._issubset(new_data, existing_data) + + return "no_diff" if is_subset else "changed" + + def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> bool: + """ + Check if two collections differ. + """ + if not isinstance(other, NDConfigCollection): + raise TypeError("Argument must be NDConfigCollection") + + if len(self) != len(other): + return True - # Helper Methods - def _filter_dict(self, data, ignore_keys): - return {k: v for k, v in iteritems(data) if k not in ignore_keys} + for item in other: + if self.get_diff_config(item, unwanted_keys) != "no_diff": + return True - def _issubset(self, subset, superset): + for key in self.keys(): + if other.get(key) is None: + return True + + return False + + def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: + """ + Get identifiers in self but not in other. 
+ """ + current_keys = set(self.keys()) + other_keys = set(other.keys()) + return list(current_keys - other_keys) + + def _issubset(self, subset: Any, superset: Any) -> bool: + """Check if subset is contained in superset.""" if type(subset) is not type(superset): return False - + if not isinstance(subset, dict): if isinstance(subset, list): return all(item in superset for item in subset) return subset == superset - - for key, value in iteritems(subset): + + for key, value in subset.items(): if value is None: continue - + if key not in superset: return False - - superset_value = superset.get(key) - - if not self._issubset(value, superset_value): + + if not self._issubset(value, superset[key]): return False + return True - def _remove_unwanted_keys(self, data, unwanted_keys): + def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: + """Remove unwanted keys from dict (supports nested paths).""" + data = deepcopy(data) + for key in unwanted_keys: if isinstance(key, str): if key in data: del data[key] + elif isinstance(key, list) and len(key) > 0: - key_path, last = key[:-1], key[-1] try: - parent = reduce(lambda d, k: d.get(k) if isinstance(d, dict) else None, key_path, data) - if isinstance(parent, dict) and last in parent: - del parent[last] - except (KeyError, TypeError): + parent = data + for k in key[:-1]: + if isinstance(parent, dict) and k in parent: + parent = parent[k] + else: + break + else: + if isinstance(parent, dict) and key[-1] in parent: + del parent[key[-1]] + except (KeyError, TypeError, IndexError): pass + return data - - # Core Operations - def to_list(self): - return self._list + + # Collection Operations - def to_dict(self): - return self._map - - def copy(self): - return NDConfigCollection(self.identifier_keys, deepcopy(self._list), self.use_composite_keys) - - def add(self, config): - ident = self._get_identifier_value(config) - if ident is None: - mode = "Composite" if self.use_composite_keys else 
"Priority" - raise ValueError("[{0} Mode] Config missing required keys: {1}".format(mode, self.identifier_keys)) - - if ident in self._map: - self.__setitem__(ident, config) - else: - self._list.append(config) - self._map[ident] = config - - def merge(self, new_config): - ident = self._get_identifier_value(new_config) - if ident and ident in self._map: - self._map[ident].update(new_config) - else: - self.add(new_config) - - def replace(self, new_config): - ident = self._get_identifier_value(new_config) - if ident: - self[ident] = new_config - else: - self.add(new_config) - - def remove(self, identifiers): - # Try Map Removal - try: - target_key = self._get_identifier_value(identifiers) - if target_key and target_key in self._map: - self.__delitem__(target_key) - return - except Exception: - pass - - # Fallback: Linear Removal - to_remove = [] - for config in self._list: - match = True - for k, v in iteritems(identifiers): - if config.get(k) != v: - match = False - break - if match: - to_remove.append(self._get_identifier_value(config)) - - for ident in to_remove: - if ident in self._map: - self.__delitem__(ident) - - def get_by_key(self, key, default=None): - return self._map.get(key, default) - - def get_by_idenfiers(self, identifiers, default=None): - # Try Map Lookup - target_key = self._get_identifier_value(identifiers) - if target_key and target_key in self._map: - return self._map[target_key] - - # Fallback: Linear Lookup - valid_search_keys = [k for k in identifiers if k in self.identifier_keys] - if not valid_search_keys: - return default - - for config in self._list: - match = True - for k in valid_search_keys: - if config.get(k) != identifiers[k]: - match = False - break - if match: - return config - return default - - # Diff logic - def get_diff_config(self, new_config, unwanted_keys=None): - unwanted_keys = unwanted_keys or [] - - ident = self._get_identifier_value(new_config) - - if not ident or ident not in self._map: - return "new" - - existing = 
deepcopy(self._map[ident]) - sent = deepcopy(new_config) - - self._remove_unwanted_keys(existing, unwanted_keys) - self._remove_unwanted_keys(sent, unwanted_keys) - - is_subset = self._issubset(sent, existing) - - if is_subset: - return "no_diff" - else: - return "changed" - - def get_diff_collection(self, new_collection, unwanted_keys=None): - if not isinstance(new_collection, NDConfigCollection): - raise TypeError("Argument must be an NDConfigCollection") - - if len(self) != len(new_collection): - return True - - for item in new_collection.to_list(): - if self.get_diff_config(item, unwanted_keys) != "no_diff": - return True - - for ident in self._map: - if ident not in new_collection._map: - return True - - return False - - def get_diff_identifiers(self, new_collection): - current_identifiers = set(self.config_collection.keys()) - other_identifiers = set(new_collection.config_collection.keys()) - - return list(current_identifiers - other_identifiers) + def __len__(self) -> int: + """Return number of items.""" + return len(self._items) + + def __iter__(self): + """Iterate over items.""" + return iter(self._items) - # Sanitize Operations - def sanitize(self, keys_to_remove=None, values_to_remove=None, remove_none_values=False): - keys_to_remove = keys_to_remove or [] - values_to_remove = values_to_remove or [] + def keys(self) -> List[IdentifierKey]: + """Get all identifier keys.""" + return list(self._index.keys()) - def recursive_clean(obj): - if isinstance(obj, dict): - keys = list(obj.keys()) - for k in keys: - v = obj[k] - if k in keys_to_remove or v in values_to_remove or (remove_none_values and v is None): - del obj[k] - continue - if isinstance(v, (dict, list)): - recursive_clean(v) - elif isinstance(obj, list): - for item in obj: - recursive_clean(item) + def copy(self) -> "NDConfigCollection[ModelType]": + """Create deep copy of collection.""" + return NDConfigCollection( + model_class=self._model_class, + items=deepcopy(self._items) + ) - for item in 
self._list: - recursive_clean(item) + + # Serialization + + def to_list(self, **kwargs) -> List[Dict]: + """ + Export as list of dicts (with aliases). + """ + return [item.model_dump(by_alias=True, exclude_none=True, **kwargs) for item in self._items] + + def to_payload_list(self) -> List[Dict[str, Any]]: + """ + Export as list of API payloads. + """ + return [item.to_payload() for item in self._items] + + @classmethod + def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + """ + Create collection from list of dicts. + """ + items = [model_class.model_validate(item_data) for item_data in data] + return cls(model_class=model_class, items=items) + + @classmethod + def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + """ + Create collection from API response. + """ + items = [model_class.from_response(item_data) for item_data in response_data] + return cls(model_class=model_class, items=items) From 146063f1d2a0aff319be6eac414204e1ce0853a7 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 13:51:54 -0500 Subject: [PATCH 033/131] [ignore] Pydantic Base ND Network Resource Module: Final proposition of core design changing existing methods and adding new ones which will be used in the future as a base for ND network resource modules as well as basic error handling and simple docstrings. 
--- plugins/module_utils/nd_network_resources.py | 561 ++++++++++++++----- 1 file changed, 411 insertions(+), 150 deletions(-) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index 3b549da1..ab7df9e2 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -9,196 +9,457 @@ __metaclass__ = type from copy import deepcopy -from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from typing import Optional, List, Dict, Any, Callable, Literal +from pydantic import ValidationError -# TODO: Make further enhancement to logs and outputs -# TODO: Adapt to Pydantic Models -# NOTE: ONLY works for new API endpoints introduced in ND v4.1.0 and later -class NDNetworkResourceModule(NDModule): +# TODO: To be replaced with: +# from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +# from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +# from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from nd import NDModule +from nd_config_collection import NDConfigCollection +from models.base import NDBaseModel +from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED - def __init__(self, module, path, identifier_keys, use_composite_keys=False, actions_overwrite_map=None): - super().__init__(module) - # Initial variables +class NDNetworkResourceModule(NDModule): + """ + Generic Network Resource Module for Nexus Dashboard. 
+ """ + + def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_overwrite_map: Optional[Dict[str, Callable]] = None): + """ + Initialize the Network Resource Module. + """ + super().__init__(module) + + # Configuration self.path = path + self.model_class = model_class self.actions_overwrite_map = actions_overwrite_map or {} - self.identifier_keys = identifier_keys - self.use_composite_keys = use_composite_keys - - # Initial data - self.init_all_data = self._query_all() - - # Info ouput - self.existing = NDConfigCollection(identifier_keys, data=self.init_all_data) - self.previous = NDConfigCollection(identifier_keys) - self.proposed = NDConfigCollection(identifier_keys) - self.sent = NDConfigCollection(identifier_keys) - - # Debug output - self.nd_logs = [] - - # Helper variables - self.current_identifier = "" - self.existing_config = {} - self.proposed_config = {} - - # Actions Operations - def actions_overwrite(action): + + # Initialize collections + try: + init_all_data = self._query_all() + + self.existing = NDConfigCollection.from_api_response( + response_data=init_all_data, + model_class=model_class + ) + self.previous = NDConfigCollection(model_class=model_class) + self.proposed = NDConfigCollection(model_class=model_class) + self.sent = NDConfigCollection(model_class=model_class) + + except Exception as e: + self.fail_json( + msg=f"Initialization failed: {str(e)}", + error=str(e) + ) + + # Operation tracking + self.nd_logs: List[Dict[str, Any]] = [] + + # Current operation context + self.current_identifier = None + self.existing_config: Dict[str, Any] = {} + self.proposed_config: Dict[str, Any] = {} + + # Action Decorator + + @staticmethod + def actions_overwrite(action: str): + """ + Decorator to allow overriding default action operations. 
+ """ def decorator(func): def wrapper(self, *args, **kwargs): overwrite_action = self.actions_overwrite_map.get(action) if callable(overwrite_action): - return overwrite_action(self) + return overwrite_action(self, *args, **kwargs) else: return func(self, *args, **kwargs) return wrapper return decorator - + + # Action Operations + @actions_overwrite("create") - def _create(self): - if not self.module.check_mode: + def _create(self) -> Optional[Dict[str, Any]]: + """ + Create a new configuration object. + """ + if self.module.check_mode: + return self.proposed_config + + try: return self.request(path=self.path, method="POST", data=self.proposed_config) - + except Exception as e: + raise Exception(f"Create failed for {self.current_identifier}: {e}") from e + @actions_overwrite("update") - def _update(self): - if not self.module.check_mode: - object_path = "{0}/{1}".format(self.path, self.current_identifier) + def _update(self) -> Optional[Dict[str, Any]]: + """ + Update an existing configuration object. 
+ """ + if self.module.check_mode: + return self.proposed_config + + try: + object_path = f"{self.path}/{self.current_identifier}" return self.request(path=object_path, method="PUT", data=self.proposed_config) - + except Exception as e: + raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + @actions_overwrite("delete") - def _delete(self): - if not self.module.check_mode: - object_path = "{0}/{1}".format(self.path, self.current_identifier) + def _delete(self) -> None: + """Delete a configuration object.""" + if self.module.check_mode: + return + + try: + object_path = f"{self.path}/{self.current_identifier}" self.request(path=object_path, method="DELETE") + except Exception as e: + raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e @actions_overwrite("query_all") - def _query_all(self): - return self.query_obj(self.path) - - def format_log(self, identifier, status, after_data, sent_payload_data=None): - item_result = { + def _query_all(self) -> List[Dict[str, Any]]: + """ + Query all configuration objects from device. + """ + try: + result = self.query_obj(self.path) + return result or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e + + # Logging + + def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: + """ + Create and append a log entry. 
+ """ + log_entry = { "identifier": identifier, "status": status, - "before": self.existing_config, + "before": deepcopy(self.existing_config), "after": deepcopy(after_data) if after_data is not None else self.existing_config, - "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {}, + "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {} } - + + # Add HTTP details if not in check mode if not self.module.check_mode and self.url is not None: - item_result.update( - { - "method": self.method, - "response": self.response, - "status": self.status, - "url": self.url, - } + log_entry.update({ + "method": self.method, + "response": self.response, + "status": self.status, + "url": self.url + }) + + self.nd_logs.append(log_entry) + + # State Management + + def manage_state( + self, state: Literal["merged", "replaced", "overridden", "deleted"], new_configs: List[Dict[str, Any]], unwanted_keys: Optional[List] = None, override_exceptions: Optional[List] = None) -> None: + """ + Manage state according to desired configuration. 
+ """ + unwanted_keys = unwanted_keys or [] + override_exceptions = override_exceptions or [] + + # Parse and validate configs + try: + parsed_items = [] + for config in new_configs: + try: + # Parse config into model + item = self.model_class.model_validate(config) + parsed_items.append(item) + except ValidationError as e: + self.fail_json( + msg=f"Invalid configuration: {e}", + config=config, + validation_errors=e.errors() + ) + return + + # Create proposed collection + self.proposed = NDConfigCollection( + model_class=self.model_class, + items=parsed_items ) + + # Save previous state + self.previous = self.existing.copy() - self.nd_logs.append(item_result) - - # Logs and Outputs formating Operations - # TODO: Move it to different file - def add_logs_and_ouputs(self): - if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - if self.params.get("output_level") in ("debug", "info"): + except Exception as e: + self.fail_json( + msg=f"Failed to prepare configurations: {e}", + error=str(e) + ) + return + + # Execute state operations + if state in ["merged", "replaced", "overridden"]: + self._manage_create_update_state(state, unwanted_keys) + + if state == "overridden": + self._manage_override_deletions(override_exceptions) + + elif state == "deleted": + self._manage_delete_state() + + else: + self.fail_json(msg=f"Invalid state: {state}") + + def _manage_create_update_state(self,state: Literal["merged", "replaced", "overridden"], unwanted_keys: List) -> None: + """ + Handle merged/replaced/overridden states. 
+ """ + for proposed_item in self.proposed: + try: + # Extract identifier + identifier = proposed_item.get_identifier_value() + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + self.existing_config = ( + existing_item.model_dump(by_alias=True, exclude_none=True) + if existing_item + else {} + ) + + # Determine diff status + diff_status = self.existing.get_diff_config( + proposed_item, + unwanted_keys=unwanted_keys + ) + + # No changes needed + if diff_status == "no_diff": + self.format_log( + identifier=identifier, + status="no_change", + after_data=self.existing_config + ) + continue + + # Prepare final config based on state + if state == "merged" and existing_item: + # Merge with existing + merged_item = self.existing.merge(proposed_item) + final_item = merged_item + else: + # Replace or create + if existing_item: + self.existing.replace(proposed_item) + else: + self.existing.add(proposed_item) + final_item = proposed_item + + # Convert to API payload + self.proposed_config = final_item.to_payload() + + # Execute API operation + if diff_status == "changed": + response = self._update() + operation_status = "updated" + else: + response = self._create() + operation_status = "created" + + # Track sent payload + if not self.module.check_mode: + self.sent.add(final_item) + sent_payload = self.proposed_config + else: + sent_payload = None + + # Log operation + self.format_log( + identifier=identifier, + status=operation_status, + after_data=( + response if not self.module.check_mode + else final_item.model_dump(by_alias=True, exclude_none=True) + ), + sent_payload_data=sent_payload + ) + + except Exception as e: + error_msg = f"Failed to process {identifier}: {e}" + + self.format_log( + identifier=identifier, + status="no_change", + after_data=self.existing_config + ) + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + def 
_manage_override_deletions(self, override_exceptions: List) -> None: + """ + Delete items not in proposed config (for overridden state). + """ + diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + + for identifier in diff_identifiers: + if identifier in override_exceptions: + continue + + try: + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + if not existing_item: + continue + + self.existing_config = existing_item.model_dump( + by_alias=True, + exclude_none=True + ) + + # Execute delete + self._delete() + + # Remove from collection + self.existing.delete(identifier) + + # Log deletion + self.format_log( + identifier=identifier, + status="deleted", + after_data={} + ) + + except Exception as e: + error_msg = f"Failed to delete {identifier}: {e}" + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + def _manage_delete_state(self) -> None: + """Handle deleted state.""" + for proposed_item in self.proposed: + try: + identifier = proposed_item.get_identifier_value() + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + if not existing_item: + # Already deleted or doesn't exist + self.format_log( + identifier=identifier, + status="no_change", + after_data={} + ) + continue + + self.existing_config = existing_item.model_dump( + by_alias=True, + exclude_none=True + ) + + # Execute delete + self._delete() + + # Remove from collection + self.existing.delete(identifier) + + # Log deletion + self.format_log( + identifier=identifier, + status="deleted", + after_data={} + ) + + except Exception as e: + error_msg = f"Failed to delete {identifier}: {e}" + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + # Output Formatting + + def add_logs_and_outputs(self) -> None: + """Add logs 
and outputs to module result based on output_level.""" + output_level = self.params.get("output_level", "normal") + state = self.params.get("state") + + # Add previous state for certain states and output levels + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + if output_level in ("debug", "info"): self.result["previous"] = self.previous.to_list() + + # Check if there were changes if not self.has_modified and self.previous.get_diff_collection(self.existing): self.result["changed"] = True + + # Add stdout if present if self.stdout: self.result["stdout"] = self.stdout - - if self.params.get("output_level") == "debug": + + # Add debug information + if output_level == "debug": self.result["nd_logs"] = self.nd_logs + if self.url is not None: self.result["httpapi_logs"] = self.httpapi_logs - - if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - self.result["sent"] = self.sent.to_list() - self.result["proposed"] = self.proposed.to_list() - - self.result["current"] = self.existing.to_list() - - # Manage State Operations - def manage_state(self, state, new_configs, unwanted_keys=None, override_exceptions=None): - unwanted_keys = unwanted_keys or [] - override_exceptions = override_exceptions or [] - - self.proposed = NDConfigCollection(self.identifier_keys, data=new_configs) - self.proposed.sanitize() - self.previous = self.existing.copy() - - if state in ["merged", "replaced", "overidden"]: - for identifier, config in self.proposed.items(): - - diff_config_info = self.existing.get_diff_config(config, unwanted_keys) - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self.proposed_config = config - request_response = None - sent_payload = None - status = "no_change" - - if diff_config_info != "no_diff": - if state == "merged": - self.existing.merge(config) - self.proposed_config = self.existing[identifier] - else: - self.existing.replace(config) - - if diff_config_info == 
"changed": - request_response = self._update() - status = "updated" - else: - request_response = self._create() - status= "created" - - if not self.module.check_mode: - self.sent.add(self.proposed_config) - sent_payload = self.proposed_config - else: - request_response = self.proposed_config - - self.format_log(identifier, status, request_response, sent_payload) - - if state == "overidden": - diff_identifiers = self.previous.get_diff_identifiers(self.proposed) - for identifier in diff_identifiers: - if identifier not in override_exceptions: - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self._delete() - del self.existing[identifier] - self.format_log(identifier, "deleted", after_data={}) + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + self.result["sent"] = self.sent.to_payload_list() + self.result["proposed"] = self.proposed.to_list() - - elif state == "deleted": - for identifier, config in self.proposed.items(): - if identifier in self.existing.keys(): - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self.proposed_config = config - self._delete() - del self.existing[identifier] - self.format_log(identifier, "deleted", after_data={}) - - # Outputs Operations - def fail_json(self, msg, **kwargs): - self.add_logs_and_ouputs() - + # Always include current state + self.result["current"] = self.existing.to_list() + + # Module Exit Methods + + def fail_json(self, msg: str, **kwargs) -> None: + """ + Exit module with failure. + """ + self.add_logs_and_outputs() self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result) - - def exit_json(self, **kwargs): - self.add_logs_and_ouputs() - + + def exit_json(self, **kwargs) -> None: + """ + Exit module successfully. 
+ """ + self.add_logs_and_outputs() + + # Add diff if module supports it if self.module._diff and self.result.get("changed") is True: - self.result["diff"] = dict( - before=self.previous.to_list(), - after=self.existing.to_list(), - ) - + try: + # Use diff-safe dicts (excludes sensitive fields) + before = [item.to_diff_dict() for item in self.previous] + after = [item.to_diff_dict() for item in self.existing] + + self.result["diff"] = dict( + before=before, + after=after + ) + except Exception: + pass # Don't fail on diff generation + self.result.update(**kwargs) self.module.exit_json(**self.result) From 09404e1161da4952511317da2f11442c59437e91 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 14:37:44 -0500 Subject: [PATCH 034/131] [ignore] Modify nd_local_user based on Pydantic implementation and changes added to NDNetworkResourceModule. --- plugins/modules/nd_local_user.py | 91 +++++++++++++++----------------- 1 file changed, 43 insertions(+), 48 deletions(-) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 4a5f1ad2..3dcaf1a4 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -175,23 +175,34 @@ """ from ansible.module_utils.basic import AnsibleModule -from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec, NDModule -from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resources import NDNetworkResourceModule -from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +# TODO: To be replaced with: +# from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +# from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule +# from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +# from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +from 
module_utils.nd import nd_argument_spec +from module_utils.nd_network_resources import NDNetworkResourceModule +from module_utils.models.local_user import LocalUserModel +from module_utils.constants import USER_ROLES_MAPPING -# Actions overwrite functions -def query_all_local_users(nd): - return nd.query_obj(nd.path).get("localusers") +# NOTE: Maybe Add the overwrite action in the LocalUserModel +def query_all_local_users(nd_module): + """ + Custom query_all action to extract 'localusers' from response. + """ + response = nd_module.query_obj(nd_module.path) + return response.get("localusers", []) -# TODO: Adapt to Pydantic Model +# NOTE: Maybe Add More aliases like in the LocalUserModel / Revisit the argmument_spec def main(): argument_spec = nd_argument_spec() argument_spec.update( config=dict( type="list", elements="dict", + required=True, options=dict( email=dict(type="str"), login_id=dict(type="str", required=True), @@ -221,49 +232,33 @@ def main(): argument_spec=argument_spec, supports_check_mode=True, ) - - path = "/api/v1/infra/aaa/localUsers" - identifier_keys = ["loginID"] - actions_overwrite_map = {"query_all": query_all_local_users} - - nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) - - state = nd.params.get("state") - config = nd.params.get("config") - override_exceptions = nd.params.get("override_exceptions") - new_config = [] - for object in config: - payload = { - "email": object.get("email"), - "firstName": object.get("first_name"), - "lastName": object.get("last_name"), - "loginID": object.get("login_id"), - "password": object.get("user_password"), - "remoteIDClaim": object.get("remote_id_claim"), - "xLaunch": object.get("remote_user_authorization"), - } - - if object.get("security_domains"): - payload["rbac"] = { - "domains": { - security_domain.get("name"): { - "roles": ( - [USER_ROLES_MAPPING.get(role) for role in security_domain["roles"]] if isinstance(security_domain.get("roles"), list) 
else [] - ) - } - for security_domain in object["security_domains"] - }, - } - if object.get("reuse_limitation") or object.get("time_interval_limitation"): - payload["passwordPolicy"] = { - "reuseLimitation": object.get("reuse_limitation"), - "timeIntervalLimitation": object.get("time_interval_limitation"), + + try: + # Create NDNetworkResourceModule with LocalUserModel + nd_module = NDNetworkResourceModule( + module=module, + path="/api/v1/infra/aaa/localUsers", + model_class=LocalUserModel, + actions_overwrite_map={ + "query_all": query_all_local_users } - new_config.append(payload) - - nd.manage_state(state=state, new_configs=new_config, unwanted_keys=[["passwordPolicy", "passwordChangeTime"], ["userID"]], override_exceptions=override_exceptions) + ) + + # Manage state + nd_module.manage_state( + state=module.params["state"], + new_configs=module.params["config"], + unwanted_keys=[ + ["passwordPolicy", "passwordChangeTime"], # Nested path + ["userID"] # Simple key + ], + override_exceptions=module.params.get("override_exceptions") + ) - nd.exit_json() + nd_module.exit_json() + + except Exception as e: + module.fail_json(msg=f"Module execution failed: {str(e)}") if __name__ == "__main__": From 08bc60435f55418068f056bcefc3c87a72966bbe Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 17 Feb 2026 13:46:10 -0500 Subject: [PATCH 035/131] [ignore] Add api_endpoints for configuring endpoints and orchestrators for orchestrating crud api operations with model instances and endpoints. 
---
 plugins/module_utils/api_endpoints/base.py    | 178 ++++++++++++++++++
 plugins/module_utils/api_endpoints/enums.py   |  46 +++++
 .../module_utils/api_endpoints/local_user.py  | 178 ++++++++++++++++++
 plugins/module_utils/api_endpoints/mixins.py  |  25 +++
 plugins/module_utils/orchestrators/base.py    |  79 ++++++++
 .../module_utils/orchestrators/local_user.py  |  42 +++++
 6 files changed, 548 insertions(+)
 create mode 100644 plugins/module_utils/api_endpoints/base.py
 create mode 100644 plugins/module_utils/api_endpoints/enums.py
 create mode 100644 plugins/module_utils/api_endpoints/local_user.py
 create mode 100644 plugins/module_utils/api_endpoints/mixins.py
 create mode 100644 plugins/module_utils/orchestrators/base.py
 create mode 100644 plugins/module_utils/orchestrators/local_user.py

diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py
new file mode 100644
index 00000000..1a9cd768
--- /dev/null
+++ b/plugins/module_utils/api_endpoints/base.py
@@ -0,0 +1,178 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2026, Allen Robel (@allenrobel)
+# Copyright: (c) 2026, Gaspard Micol (@gmicol)
+
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from abc import ABC, abstractmethod
+from pydantic import BaseModel, ConfigDict
+from typing import Final, Union, Tuple, Any
+
+IdentifierKey = Union[str, int, Tuple[Any, ...], None]
+
+class NDBaseSmartEndpoint(BaseModel, ABC):
+
+    # TODO: maybe to be modified in the future
+    model_config = ConfigDict(validate_assignment=True)
+
+    base_path: str
+
+    @property
+    @abstractmethod
+    def path(self) -> str:
+        pass
+
+    @property
+    @abstractmethod
+    def verb(self) -> str:
+        pass
+
+    # TODO: Maybe to be modified to be more Pydantic
+    # TODO: Maybe change function's name
+    # NOTE: function to set mixins fields from identifiers
+    @abstractmethod
+    def 
set_identifiers(self, identifier: IdentifierKey = None): + pass + + +class NDBasePath: + """ + # Summary + + Centralized API Base Paths + + ## Description + + Provides centralized base path definitions for all ND API endpoints. + This allows API path changes to be managed in a single location. + + ## Usage + + ```python + # Get a complete base path + path = BasePath.control_fabrics("MyFabric", "config-deploy") + # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/MyFabric/config-deploy + + # Build custom paths + path = BasePath.v1("custom", "endpoint") + # Returns: /appcenter/cisco/ndfc/api/v1/custom/endpoint + ``` + + ## Design Notes + + - All base paths are defined as class constants for easy modification + - Helper methods compose paths from base constants + - Use these methods in Pydantic endpoint models to ensure consistency + - If NDFC changes base API paths, only this class needs updating + """ + + # Root API paths + NDFC_API: Final = "/appcenter/cisco/ndfc/api" + ND_INFRA_API: Final = "/api/v1/infra" + ONEMANAGE: Final = "/onemanage" + LOGIN: Final = "/login" + + @classmethod + def api(cls, *segments: str) -> str: + """ + # Summary + + Build path from NDFC API root. + + ## Parameters + + - segments: Path segments to append + + ## Returns + + - Complete path string + + ## Example + + ```python + path = BasePath.api("custom", "endpoint") + # Returns: /appcenter/cisco/ndfc/api/custom/endpoint + ``` + """ + if not segments: + return cls.NDFC_API + return f"{cls.NDFC_API}/{'/'.join(segments)}" + + @classmethod + def v1(cls, *segments: str) -> str: + """ + # Summary + + Build v1 API path. 
+ + ## Parameters + + - segments: Path segments to append after v1 + + ## Returns + + - Complete v1 API path + + ## Example + + ```python + path = BasePath.v1("lan-fabric", "rest") + # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest + ``` + """ + return cls.api("v1", *segments) + + @classmethod + def nd_infra(cls, *segments: str) -> str: + """ + # Summary + + Build ND infra API path. + + ## Parameters + + - segments: Path segments to append after /api/v1/infra + + ## Returns + + - Complete ND infra API path + + ## Example + + ```python + path = BasePath.nd_infra("aaa", "localUsers") + # Returns: /api/v1/infra/aaa/localUsers + ``` + """ + if not segments: + return cls.ND_INFRA_API + return f"{cls.ND_INFRA_API}/{'/'.join(segments)}" + + @classmethod + def nd_infra_aaa(cls, *segments: str) -> str: + """ + # Summary + + Build ND infra AAA API path. + + ## Parameters + + - segments: Path segments to append after aaa (e.g., "localUsers") + + ## Returns + + - Complete ND infra AAA path + + ## Example + + ```python + path = BasePath.nd_infra_aaa("localUsers") + # Returns: /api/v1/infra/aaa/localUsers + ``` + """ + return cls.nd_infra("aaa", *segments) diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/api_endpoints/enums.py new file mode 100644 index 00000000..afb4dd5c --- /dev/null +++ b/plugins/module_utils/api_endpoints/enums.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Enums used in api_endpoints. +""" +from enum import Enum + + +class VerbEnum(str, Enum): + """ + # Summary + + Enum for HTTP verb values used in endpoints. + + ## Members + + - GET: Represents the HTTP GET method. + - POST: Represents the HTTP POST method. + - PUT: Represents the HTTP PUT method. + - DELETE: Represents the HTTP DELETE method. 
+ """ + + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + + +class BooleanStringEnum(str, Enum): + """ + # Summary + + Enum for boolean string values used in query parameters. + + ## Members + + - TRUE: Represents the string "true". + - FALSE: Represents the string "false". + """ + + TRUE = "true" + FALSE = "false" \ No newline at end of file diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py new file mode 100644 index 00000000..de493e40 --- /dev/null +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +ND Infra AAA LocalUsers endpoint models. + +This module contains endpoint definitions for LocalUsers-related operations +in the ND Infra AAA API. +""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type # pylint: disable=invalid-name + +from typing import Literal, Union, Tuple, Any, Final +from mixins import LoginIdMixin +from enums import VerbEnum +from base import NDBaseSmartEndpoint, NDBasePath +from pydantic import Field + +IdentifierKey = Union[str, int, Tuple[Any, ...], None] + +class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): + """ + Base class for ND Infra AAA Local Users endpoints. + + Provides common functionality for all HTTP methods on the + /api/v1/infra/aaa/localUsers endpoint. + """ + + base_path: Final = NDBasePath.nd_infra_aaa("localUsers") + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path. 
+ + ## Returns + + - Complete endpoint path string, optionally including login_id + """ + if self.login_id is not None: + return NDBasePath.nd_infra_aaa("localUsers", self.login_id) + return self.base_path + + def set_identifiers(self, identifier: IdentifierKey = None): + self.login_id = identifier + + +class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users GET Endpoint + + ## Description + + Endpoint to retrieve local users from the ND Infra AAA service. + Optionally retrieve a specific local user by login_id. + + ## Path + + - /api/v1/infra/aaa/localUsers + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - GET + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersGet"] = Field( + default="EpApiV1InfraAaaLocalUsersGet", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.GET + + +class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users POST Endpoint + + ## Description + + Endpoint to create a local user in the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers + + ## Verb + + - POST + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( + default="EpApiV1InfraAaaLocalUsersPost", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.POST + + +class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users PUT Endpoint + + ## Description + + Endpoint to update a local user in the ND Infra AAA service. 
+ + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - PUT + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( + default="EpApiV1InfraAaaLocalUsersPut", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.PUT + + +class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users DELETE Endpoint + + ## Description + + Endpoint to delete a local user from the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - DELETE + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersDelete"] = Field( + default="EpApiV1InfraAaaLocalUsersDelete", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.DELETE diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py new file mode 100644 index 00000000..8ff3218f --- /dev/null +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Reusable mixin classes for endpoint models. + +This module provides mixin classes that can be composed to add common +fields to endpoint models without duplication. 
+""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type # pylint: disable=invalid-name + +from typing import TYPE_CHECKING, Optional +from pydantic import BaseModel, Field + + +class LoginIdMixin(BaseModel): + """Mixin for endpoints that require login_id parameter.""" + + login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py new file mode 100644 index 00000000..120ea475 --- /dev/null +++ b/plugins/module_utils/orchestrators/base.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ..models.base import NDBaseModel +from ..nd import NDModule +from ..api_endpoints.base import NDBaseSmartEndpoint +from typing import Dict, List, Any, Union, ClassVar, Type +from pydantic import BaseModel + + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] + + +# TODO: Revisit naming them "Orchestrator" +class NDBaseOrchestrator(BaseModel): + + model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] + + # NOTE: if not defined by subclasses, return an error as they are required + post_endpoint: NDBaseSmartEndpoint + put_endpoint: NDBaseSmartEndpoint + delete_endpoint: NDBaseSmartEndpoint + get_endpoint: NDBaseSmartEndpoint + + # NOTE: Module Field is always required + # TODO: Replace it with future sender + module: NDModule + + # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}")
+    # TODO: Explore how to make them even more general
+    def create(self, model_instance: NDBaseModel) -> ResponseType:
+        if self.module.check_mode:
+            return model_instance.model_dump()
+
+        try:
+            return self.module.request(path=self.post_endpoint.base_path, method=self.post_endpoint.verb, data=model_instance.model_dump())
+        except Exception as e:
+            raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e
+
+    def update(self, model_instance: NDBaseModel) -> ResponseType:
+        if self.module.check_mode:
+            return model_instance.model_dump()
+
+        try:
+            self.put_endpoint.set_identifiers(model_instance.get_identifier_value())
+            return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.model_dump())
+        except Exception as e:
+            raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e
+
+    def delete(self, model_instance: NDBaseModel) -> ResponseType:
+        if self.module.check_mode:
+            return model_instance.model_dump()
+
+        try:
+            self.delete_endpoint.set_identifiers(model_instance.get_identifier_value())
+            return self.module.request(path=self.delete_endpoint.path, method=self.delete_endpoint.verb)
+        except Exception as e:
+            raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e
+
+    def query_one(self, model_instance: NDBaseModel) -> ResponseType:
+        try:
+            self.get_endpoint.set_identifiers(model_instance.get_identifier_value())
+            return self.module.request(path=self.get_endpoint.path, method=self.get_endpoint.verb)
+        except Exception as e:
+            raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e
+
+    def query_all(self) -> ResponseType:
+        try:
+            result = self.module.query_obj(self.get_endpoint.path)
+            return result or []
+        except Exception as e:
+            raise Exception(f"Query all failed: {e}") from e
\ No newline at end of file
diff --git a/plugins/module_utils/orchestrators/local_user.py 
b/plugins/module_utils/orchestrators/local_user.py new file mode 100644 index 00000000..b156512c --- /dev/null +++ b/plugins/module_utils/orchestrators/local_user.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from .base import NDBaseOrchestrator +from ..models.local_user import LocalUserModel +from typing import Dict, List, Any, Union, Type +from ..api_endpoints.local_user import ( + EpApiV1InfraAaaLocalUsersPost, + EpApiV1InfraAaaLocalUsersPut, + EpApiV1InfraAaaLocalUsersDelete, + EpApiV1InfraAaaLocalUsersGet, +) + + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] + +class LocalUserOrchestrator(NDBaseOrchestrator): + + model_class = Type[LocalUserModel] + + post_endpoint = EpApiV1InfraAaaLocalUsersPost() + put_endpoint = EpApiV1InfraAaaLocalUsersPut() + delete_endpoint = EpApiV1InfraAaaLocalUsersDelete() + get_endpoint = EpApiV1InfraAaaLocalUsersGet() + + def query_all(self): + """ + Custom query_all action to extract 'localusers' from response. + """ + try: + result = self.module.query_obj(self.get_endpoint.base_path) + return result.get("localusers", []) or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e + \ No newline at end of file From 91d177022cd93dd143867c905d08edf070108e50 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 18 Feb 2026 01:23:37 -0500 Subject: [PATCH 036/131] [ignore] Modifiy models/local_user to take full advantage of Pydantic built-in functionalities. Slightly modify models/base.py to enforce identifiers definitions in NDBaseModel subclasses. Added multiple notes to assert next steps. 
--- plugins/module_utils/models/base.py | 48 ++++- plugins/module_utils/models/local_user.py | 216 ++++++++++++++-------- 2 files changed, 183 insertions(+), 81 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index a7eabf17..5a64c7a9 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -10,10 +10,11 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal +from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional from typing_extensions import Self +# TODO: Revisit identifiers strategy (low priority) class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. @@ -22,8 +23,9 @@ class NDBaseModel(BaseModel, ABC): - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) + - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ - + # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, @@ -31,14 +33,38 @@ class NDBaseModel(BaseModel, ABC): populate_by_name=True, extra='ignore' ) - - # Subclasses MUST define these - identifiers: ClassVar[List[str]] = [] - identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + + # TODO: Revisit identifiers strategy (low priority) + identifiers: ClassVar[Optional[List[str]]] = None + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = None # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] + + # TODO: Revisit it with identifiers strategy (low priority) + def __init_subclass__(cls, **kwargs): + """ + Enforce 
configuration for identifiers definition. + """ + super().__init_subclass__(**kwargs) + + # Skip enforcement for nested models + # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) + if cls.__name__ in ['NDNestedModel']: + return + + if not hasattr(cls, "identifiers") or cls.identifiers is None: + raise ValueError( + f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." + f"Example: `identifiers: ClassVar[Optional[List[str]]] = ['login_id']`" + ) + if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: + raise ValueError( + f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." + f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'none']]] = 'single'`" + ) + # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) @abstractmethod def to_payload(self) -> Dict[str, Any]: """ @@ -54,6 +80,8 @@ def from_response(cls, response: Dict[str, Any]) -> Self: """ pass + # TODO: Revisit this function when revisiting identifier strategy (low priority) + # TODO: Add condition when there is no identifiers (high priority) def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: """ Extract identifier value(s) from this instance: @@ -82,7 +110,7 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: missing.append(field) values.append(value) - # NOTE: might not be needed in the future with field_validator + # NOTE: might be redefined with Pydantic (low priority) if missing: raise ValueError( f"Composite identifier fields {missing} are None. " @@ -104,6 +132,7 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") + def to_diff_dict(self) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). 
@@ -114,12 +143,13 @@ def to_diff_dict(self) -> Dict[str, Any]: exclude=set(self.exclude_from_diff) ) -# NOTE: Maybe make it a seperate BaseModel +# TODO: Make it a seperated BaseModel (low priority) class NDNestedModel(NDBaseModel): """ Base for nested models without identifiers. """ + # TODO: Configuration Fields to be clearly defined here (low priority) identifiers: ClassVar[List[str]] = [] def to_payload(self) -> Dict[str, Any]: @@ -133,4 +163,4 @@ def from_response(cls, response: Dict[str, Any]) -> Self: """ Create model instance from API response. """ - return cls.model_validate(response) + return cls.model_validate(response, by_alias=True) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index b7069126..4be05991 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,15 +8,15 @@ __metaclass__ = type -from pydantic import Field, SecretStr +from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel -from models.base import NDBaseModel, NDNestedModel +from .base import NDBaseModel, NDNestedModel -# TODO: Move it to constants.py and import it +# TODO: Move it to constants.py and make a reverse class Map for this USER_ROLES_MAPPING = MappingProxyType({ "fabric_admin": "fabric-admin", "observer": "observer", @@ -31,11 +31,13 @@ class LocalUserSecurityDomainModel(NDNestedModel): """Security domain configuration for local user (nested model).""" # Fields - name: str - roles: Optional[List[str]] = None - - def to_payload(self) -> Dict[str, Any]: + name: str = Field(..., alias="name", exclude=True) + roles: Optional[List[str]] = 
Field(default=None, alias="roles", exclude=True) + + # -- Serialization (Model instance -> API payload) -- + @model_serializer() + def serialize_model(self) -> Dict: return { self.name: { "roles": [ @@ -44,22 +46,12 @@ def to_payload(self) -> Dict[str, Any]: ] } } - - @classmethod - def from_response(cls, name: str, domain_config: Dict[str, Any]) -> Self: - # NOTE: Maybe create a function from it to be moved to utils.py and to be imported - reverse_mapping = {value: key for key, value in USER_ROLES_MAPPING.items()} - - return cls( - name=name, - roles=[ - reverse_mapping.get(role, role) - for role in domain_config.get("roles", []) - ] - ) + # -- Deserialization (API response / Ansible payload -> Model instance) -- + # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed +# TODO: Add field validation (e.g. me, le, choices, etc...) (medium priority) class LocalUserModel(NDBaseModel): """ Local user configuration. @@ -68,73 +60,153 @@ class LocalUserModel(NDBaseModel): """ # Identifier configuration - identifiers: ClassVar[List[str]] = ["login_id"] - identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + # TODO: Revisit this identifiers strategy (low priority) + identifiers: ClassVar[Optional[List[str]]] = ["login_id"] + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "single" + + # Keys management configurations + # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] + unwanted_keys: ClassVar[List[List[str]]]= [ + ["passwordPolicy", "passwordChangeTime"], # Nested path + ["userID"] # Simple key + ] # Fields + # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec login_id: str = Field(..., alias="loginID") - email: Optional[str] = None + email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") user_password: Optional[SecretStr] = Field(default=None, alias="password") - reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") - time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") - security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="domains") + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation", exclude=True) + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation", exclude=True) + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="rbac") remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") - + + # -- Serialization (Model instance -> API payload) -- + + @computed_field(alias="passwordPolicy") + @property + def password_policy(self) -> Optional[Dict[str, int]]: + """Computed nested structure for API payload.""" + if self.reuse_limitation is None and self.time_interval_limitation is None: + return None + + policy = {} + if self.reuse_limitation is not None: + policy["reuseLimitation"] = self.reuse_limitation + if self.time_interval_limitation is not None: + policy["timeIntervalLimitation"] = self.time_interval_limitation + return policy + + @field_serializer("user_password") + def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: + return value.get_secret_value() if value else None + + + @field_serializer("security_domains") + def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> 
Optional[Dict]: + # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) + if not value: + return None + + domains_dict = {} + for domain in value: + domains_dict.update(domain.to_payload()) + + return { + "domains": domains_dict + } + + def to_payload(self) -> Dict[str, Any]: - payload = self.model_dump( - by_alias=True, - exclude={ - 'domains', - 'security_domains', - 'reuseLimitation', - 'reuse_limitation', - 'timeIntervalLimitation', - 'time_interval_limitation' - }, - exclude_none=True - ) + return self.model_dump(by_alias=True, exclude_none=True) - if self.user_password: - payload["password"] = self.user_password.get_secret_value() + # -- Deserialization (API response / Ansible payload -> Model instance) -- - if self.security_domains: - payload["rbac"] = {"domains": {}} - for domain in self.security_domains: - payload["rbac"]["domains"].update(domain.to_payload()) + @model_validator(mode="before") + @classmethod + def deserialize_password_policy(cls, data: Any) -> Any: + if not isinstance(data, dict): + return data - if self.reuse_limitation is not None or self.time_interval_limitation is not None: - payload["passwordPolicy"] = {} - if self.reuse_limitation is not None: - payload["passwordPolicy"]["reuseLimitation"] = self.reuse_limitation - if self.time_interval_limitation is not None: - payload["passwordPolicy"]["timeIntervalLimitation"] = self.time_interval_limitation + password_policy = data.get("passwordPolicy") - return payload - + if password_policy and isinstance(password_policy, dict): + if "reuseLimitation" in password_policy: + data["reuse_limitation"] = password_policy["reuseLimitation"] + if "timeIntervalLimitation" in password_policy: + data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] + + # Remove the nested structure from data to avoid conflicts + # (since it's a computed field, not a real field) + data.pop("passwordPolicy", None) + + return data + + @field_validator("security_domains", 
mode="before") @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - password_policy = response.get("passwordPolicy", {}) - rbac = response.get("rbac", {}) - domains = rbac.get("domains", {}) + def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: + if value is None: + return None + + # If already in list format (Ansible module representation), return as-is + if isinstance(value, list): + return value + + # If in the nested dict format (API representation) + if isinstance(value, dict) and "domains" in value: + domains_dict = value["domains"] + domains_list = [] + + for domain_name, domain_data in domains_dict.items(): + domains_list.append({ + "name": domain_name, + "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])] + }) + + return domains_list - security_domains = [ - LocalUserSecurityDomainModel.from_response(name, config) - for name, config in domains.items() - ] if domains else None + return value + + # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) + @classmethod + def from_response(cls, response: Dict[str, Any]) -> Self: + return cls.model_validate(response, by_alias=True) - return cls( - login_id=response.get("loginID"), - email=response.get("email"), - first_name=response.get("firstName"), - last_name=response.get("lastName"), - user_password=response.get("password"), - reuse_limitation=password_policy.get("reuseLimitation"), - time_interval_limitation=password_policy.get("timeIntervalLimitation"), - security_domains=security_domains, - remote_id_claim=response.get("remoteIDClaim"), - remote_user_authorization=response.get("xLaunch") + + # -- Extra -- + + # TODO: to generate from Fields (low priority) + def get_argument_spec(self): + return dict( + config=dict( + type="list", + elements="dict", + required=True, + options=dict( + email=dict(type="str"), + login_id=dict(type="str", required=True), + first_name=dict(type="str"), + 
last_name=dict(type="str"), + user_password=dict(type="str", no_log=True), + reuse_limitation=dict(type="int"), + time_interval_limitation=dict(type="int"), + security_domains=dict( + type="list", + elements="dict", + options=dict( + name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), + roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + ), + aliases=["domains"], + ), + remote_id_claim=dict(type="str"), + remote_user_authorization=dict(type="bool"), + ), + ), + override_exceptions=dict(type="list", elements="str"), + state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), ) From 9df5de1dbdca5bfebe32d516298b7697de041925 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 24 Feb 2026 12:57:37 -0500 Subject: [PATCH 037/131] [ignore] Adapt the Network Resource Module architecture for ND to smart endpoints and Pydantic models modification (works for merge and replace states). Add comments for next steps. 
--- plugins/module_utils/api_endpoints/base.py | 5 +- .../module_utils/api_endpoints/local_user.py | 1 + plugins/module_utils/models/base.py | 25 ++- plugins/module_utils/models/local_user.py | 12 +- plugins/module_utils/nd_config_collection.py | 76 ++------ plugins/module_utils/nd_network_resources.py | 163 ++++++------------ plugins/module_utils/orchestrators/base.py | 27 +-- .../module_utils/orchestrators/local_user.py | 12 +- plugins/module_utils/utils.py | 26 ++- plugins/modules/nd_local_user.py | 63 +------ 10 files changed, 159 insertions(+), 251 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 1a9cd768..747c3283 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -15,11 +15,14 @@ IdentifierKey = Union[str, int, Tuple[Any, ...], None] +# TODO: Rename it to APIEndpoint +# NOTE: This is a very minimalist endpoint package -> needs to be enhanced class NDBaseSmartEndpoint(BaseModel, ABC): # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) + # TODO: to remove base_path: str @abstractmethod @@ -34,7 +37,7 @@ def verb(self) -> str: # TODO: Maybe to be modifed to be more Pydantic # TODO: Maybe change function's name - # NOTE: function to set mixins fields from identifiers + # NOTE: function to set endpoints attribute fields from identifiers @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index de493e40..61f52ad8 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -31,6 +31,7 @@ class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): /api/v1/infra/aaa/localUsers endpoint. 
""" + # TODO: Remove it base_path: Final = NDBasePath.nd_infra_aaa("localUsers") @property diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 5a64c7a9..db7fd9ae 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -40,6 +40,7 @@ class NDBaseModel(BaseModel, ABC): # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] + unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) def __init_subclass__(cls, **kwargs): @@ -65,8 +66,9 @@ def __init_subclass__(cls, **kwargs): ) # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) + # NOTE: Should we use keyword arguments? @abstractmethod - def to_payload(self) -> Dict[str, Any]: + def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. """ @@ -74,7 +76,7 @@ def to_payload(self) -> Dict[str, Any]: @classmethod @abstractmethod - def from_response(cls, response: Dict[str, Any]) -> Self: + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: """ Create model instance from API response. """ @@ -142,6 +144,25 @@ def to_diff_dict(self) -> Dict[str, Any]: exclude_none=True, exclude=set(self.exclude_from_diff) ) + + # NOTE: initialize and return a deep copy of the instance? + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... 
+ def merge(self, other_model: "NDBaseModel") -> Self: + if not isinstance(other_model, type(self)): + # TODO: Change error message + return TypeError("models are not of the same type.") + + for field, value in other_model: + if value is None: + continue + + current_value = getattr(self, field) + if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): + setattr(self, field, current_value.merge(value)) + + else: + setattr(self, field, value) + return self # TODO: Make it a seperated BaseModel (low priority) class NDNestedModel(NDBaseModel): diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 4be05991..ea511097 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -67,14 +67,14 @@ class LocalUserModel(NDBaseModel): # Keys management configurations # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List[List[str]]]= [ + unwanted_keys: ClassVar[List]= [ ["passwordPolicy", "passwordChangeTime"], # Nested path ["userID"] # Simple key ] # Fields # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec - login_id: str = Field(..., alias="loginID") + login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") @@ -121,8 +121,8 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) } - def to_payload(self) -> Dict[str, Any]: - return self.model_dump(by_alias=True, exclude_none=True) + def to_payload(self, **kwargs) -> Dict[str, Any]: + return self.model_dump(by_alias=True, exclude_none=True, **kwargs) # -- Deserialization (API response / Ansible payload -> Model instance) -- @@ -173,8 +173,8 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - return cls.model_validate(response, by_alias=True) + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(response, by_alias=True, **kwargs) # -- Extra -- diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 2f256d30..a25287aa 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -12,24 +12,26 @@ from copy import deepcopy # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from models.base import NDBaseModel +from .models.base import NDBaseModel +from .utils import issubset # Type aliases # NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) +# TODO: Defined the same acros multiple files -> maybe move to constants.py IdentifierKey = Union[str, int, Tuple[Any, ...]] - +# TODO:Might make it a Pydantic RootModel 
(low priority but medium impact on NDNetworkResourceModule) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType]] = None): + def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): """ Initialize collection. """ - self._model_class = model_class + self._model_class: ModelType = model_class # Dual storage self._items: List[ModelType] = [] @@ -39,6 +41,7 @@ def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType] for item in items: self.add(item) + # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ Extract identifier key from item. @@ -48,6 +51,7 @@ def _extract_key(self, item: ModelType) -> IdentifierKey: except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e + # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" self._index.clear() @@ -105,8 +109,8 @@ def replace(self, item: ModelType) -> bool: self._items[index] = item return True - - def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[ModelType, ModelType], ModelType]] = None) -> ModelType: + + def merge(self, item: ModelType) -> ModelType: """ Merge item with existing, or add if not present. 
""" @@ -116,35 +120,11 @@ def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[Model if existing is None: self.add(item) return item - - # Custom or default merge - if custom_merge_function: - merged = custom_merge_function(existing, item) else: - # Default merge - existing_data = existing.model_dump() - new_data = item.model_dump(exclude_unset=True) - merged_data = self._deep_merge(existing_data, new_data) - merged = self._model_class.model_validate(merged_data) - + merged = existing.merge(item) self.replace(merged) return merged - - def _deep_merge(self, base: Dict, update: Dict) -> Dict: - """Recursively merge dictionaries.""" - result = base.copy() - - for key, value in update.items(): - if value is None: - continue - - if key in result and isinstance(result[key], dict) and isinstance(value, dict): - result[key] = self._deep_merge(result[key], value) - else: - result[key] = value - - return result - + def delete(self, key: IdentifierKey) -> bool: """ Delete item by identifier (O(n) operation due to index rebuild) @@ -161,6 +141,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations + # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
@@ -182,7 +163,7 @@ def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Unio existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) new_data = self._remove_unwanted_keys(new_data, unwanted_keys) - is_subset = self._issubset(new_data, existing_data) + is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" @@ -214,28 +195,7 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I other_keys = set(other.keys()) return list(current_keys - other_keys) - def _issubset(self, subset: Any, superset: Any) -> bool: - """Check if subset is contained in superset.""" - if type(subset) is not type(superset): - return False - - if not isinstance(subset, dict): - if isinstance(subset, list): - return all(item in superset for item in subset) - return subset == superset - - for key, value in subset.items(): - if value is None: - continue - - if key not in superset: - return False - - if not self._issubset(value, superset[key]): - return False - - return True - + # TODO: Maybe not necessary def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) @@ -282,8 +242,8 @@ def copy(self) -> "NDConfigCollection[ModelType]": items=deepcopy(self._items) ) - # Serialization - + # Collection Serialization + def to_list(self, **kwargs) -> List[Dict]: """ Export as list of dicts (with aliases). @@ -301,7 +261,7 @@ def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigC """ Create collection from list of dicts. 
""" - items = [model_class.model_validate(item_data) for item_data in data] + items = [model_class.model_validate(item_data, by_name=True) for item_data in data] return cls(model_class=model_class, items=items) @classmethod diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index ab7df9e2..d52fb9de 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -9,8 +9,9 @@ __metaclass__ = type from copy import deepcopy -from typing import Optional, List, Dict, Any, Callable, Literal +from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError +from ansible.module_utils.basic import AnsibleModule # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule @@ -20,36 +21,48 @@ from nd import NDModule from nd_config_collection import NDConfigCollection from models.base import NDBaseModel +from .orchestrators.base import NDBaseOrchestrator from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED - +# TODO: replace path and verbs with smart Endpoint (Top priority) +# TODO: Rename it (low priority) +# TODO: Revisit Deserialization in every method (high priority) class NDNetworkResourceModule(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_overwrite_map: Optional[Dict[str, Callable]] = None): + def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ + # TODO: Revisit Module initialization and configuration (medium priority). e.g., use instead: + # nd_module = NDModule() super().__init__(module) # Configuration - self.path = path + # TODO: make sure `model_class` is the same as the one in `model_orchestrator`. 
if not, error out (high priority) self.model_class = model_class - self.actions_overwrite_map = actions_overwrite_map or {} + self.model_orchestrator = model_orchestrator(module=module) + # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) + self.state = self.params["state"] + self.ansible_config = self.params["config"] + # Initialize collections + # TODO: Revisit collections initialization especially `init_all_data` (medium priority) + # TODO: Revisit class variables `previous`, `existing`, etc... (medium priority) + self.nd_config_collection = NDConfigCollection[model_class] try: - init_all_data = self._query_all() + init_all_data = self.model_orchestrator.query_all() - self.existing = NDConfigCollection.from_api_response( + self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, model_class=model_class ) - self.previous = NDConfigCollection(model_class=model_class) - self.proposed = NDConfigCollection(model_class=model_class) - self.sent = NDConfigCollection(model_class=model_class) + self.previous = self.nd_config_collection(model_class=model_class) + self.proposed = self.nd_config_collection(model_class=model_class) + self.sent = self.nd_config_collection(model_class=model_class) except Exception as e: self.fail_json( @@ -59,83 +72,10 @@ def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_ov # Operation tracking self.nd_logs: List[Dict[str, Any]] = [] - - # Current operation context - self.current_identifier = None - self.existing_config: Dict[str, Any] = {} - self.proposed_config: Dict[str, Any] = {} - - # Action Decorator - - @staticmethod - def actions_overwrite(action: str): - """ - Decorator to allow overriding default action operations. 
- """ - def decorator(func): - def wrapper(self, *args, **kwargs): - overwrite_action = self.actions_overwrite_map.get(action) - if callable(overwrite_action): - return overwrite_action(self, *args, **kwargs) - else: - return func(self, *args, **kwargs) - return wrapper - return decorator - - # Action Operations - - @actions_overwrite("create") - def _create(self) -> Optional[Dict[str, Any]]: - """ - Create a new configuration object. - """ - if self.module.check_mode: - return self.proposed_config - - try: - return self.request(path=self.path, method="POST", data=self.proposed_config) - except Exception as e: - raise Exception(f"Create failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("update") - def _update(self) -> Optional[Dict[str, Any]]: - """ - Update an existing configuration object. - """ - if self.module.check_mode: - return self.proposed_config - - try: - object_path = f"{self.path}/{self.current_identifier}" - return self.request(path=object_path, method="PUT", data=self.proposed_config) - except Exception as e: - raise Exception(f"Update failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("delete") - def _delete(self) -> None: - """Delete a configuration object.""" - if self.module.check_mode: - return - - try: - object_path = f"{self.path}/{self.current_identifier}" - self.request(path=object_path, method="DELETE") - except Exception as e: - raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("query_all") - def _query_all(self) -> List[Dict[str, Any]]: - """ - Query all configuration objects from device. 
- """ - try: - result = self.query_obj(self.path) - return result or [] - except Exception as e: - raise Exception(f"Query all failed: {e}") from e - + # Logging - + # NOTE: format log placeholder + # TODO: use a proper logger (low priority) def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: """ Create and append a log entry. @@ -159,20 +99,20 @@ def format_log(self, identifier, status: Literal["created", "updated", "deleted" self.nd_logs.append(log_entry) - # State Management - - def manage_state( - self, state: Literal["merged", "replaced", "overridden", "deleted"], new_configs: List[Dict[str, Any]], unwanted_keys: Optional[List] = None, override_exceptions: Optional[List] = None) -> None: + # State Management (core function) + # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) + def manage_state(self) -> None: """ Manage state according to desired configuration. 
""" unwanted_keys = unwanted_keys or [] - override_exceptions = override_exceptions or [] # Parse and validate configs + # TODO: move it to init() (top priority) + # TODO: Modify it if NDConfigCollection becomes a Pydantic RootModel (low priority) try: parsed_items = [] - for config in new_configs: + for config in self.ansible_config: try: # Parse config into model item = self.model_class.model_validate(config) @@ -186,7 +126,7 @@ def manage_state( return # Create proposed collection - self.proposed = NDConfigCollection( + self.proposed = self.nd_config_collection( model_class=self.model_class, items=parsed_items ) @@ -202,27 +142,29 @@ def manage_state( return # Execute state operations - if state in ["merged", "replaced", "overridden"]: - self._manage_create_update_state(state, unwanted_keys) + if self.state in ["merged", "replaced", "overridden"]: + self._manage_create_update_state() - if state == "overridden": - self._manage_override_deletions(override_exceptions) + if self.state == "overridden": + self._manage_override_deletions() - elif state == "deleted": + elif self.state == "deleted": self._manage_delete_state() + # TODO: not needed with Ansible `argument_spec` validation. Keep it for now but needs to be removed (low priority) else: - self.fail_json(msg=f"Invalid state: {state}") + self.fail_json(msg=f"Invalid state: {self.state}") - def _manage_create_update_state(self,state: Literal["merged", "replaced", "overridden"], unwanted_keys: List) -> None: + + def _manage_create_update_state(self) -> None: """ Handle merged/replaced/overridden states. 
""" for proposed_item in self.proposed: try: # Extract identifier + # TODO: Remove self.current_identifier, get it directly into the action functions identifier = proposed_item.get_identifier_value() - self.current_identifier = identifier existing_item = self.existing.get(identifier) self.existing_config = ( @@ -232,10 +174,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr ) # Determine diff status - diff_status = self.existing.get_diff_config( - proposed_item, - unwanted_keys=unwanted_keys - ) + diff_status = self.existing.get_diff_config(proposed_item) # No changes needed if diff_status == "no_diff": @@ -247,7 +186,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr continue # Prepare final config based on state - if state == "merged" and existing_item: + if self.state == "merged" and existing_item: # Merge with existing merged_item = self.existing.merge(proposed_item) final_item = merged_item @@ -264,16 +203,16 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr # Execute API operation if diff_status == "changed": - response = self._update() + response = self.model_orchestrator.update(final_item) operation_status = "updated" else: - response = self._create() + response = self.model_orchestrator.create(final_item) operation_status = "created" # Track sent payload if not self.module.check_mode: self.sent.add(final_item) - sent_payload = self.proposed_config + sent_payload = final_item else: sent_payload = None @@ -297,7 +236,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr after_data=self.existing_config ) - if not self.module.params.get("ignore_errors", False): + if not self.params.get("ignore_errors", False): self.fail_json( msg=error_msg, identifier=str(identifier), @@ -305,6 +244,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr ) return + # TODO: Refactor with orchestrator (Top priority) 
def _manage_override_deletions(self, override_exceptions: List) -> None: """ Delete items not in proposed config (for overridden state). @@ -351,6 +291,7 @@ def _manage_override_deletions(self, override_exceptions: List) -> None: ) return + # TODO: Refactor with orchestrator (Top priority) def _manage_delete_state(self) -> None: """Handle deleted state.""" for proposed_item in self.proposed: @@ -398,7 +339,7 @@ def _manage_delete_state(self) -> None: return # Output Formatting - + # TODO: move to separate Class (results) -> align it with rest_send PR def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.params.get("output_level", "normal") diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 120ea475..e2d9fa75 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -24,39 +24,43 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - post_endpoint: NDBaseSmartEndpoint - put_endpoint: NDBaseSmartEndpoint - delete_endpoint: NDBaseSmartEndpoint - get_endpoint: NDBaseSmartEndpoint + # TODO: change name from http method to crud (e.g. post -> create) + post_endpoint: Type[NDBaseSmartEndpoint] + put_endpoint: Type[NDBaseSmartEndpoint] + delete_endpoint: Type[NDBaseSmartEndpoint] + get_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender module: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore how to make them even more general + # TODO: Explore new ways to make them even more general + # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: - return self.module.request(path=self.post_endpoint.base_path, method=self.post_endpoint.verb, data=model_instance.model_dump()) + api_endpoint = self.post_endpoint() + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e + # TODO: Make the same changes as create() with local api_endpoint variable def update(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.model_dump()) + return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Update failed for {self.current_identifier}: {e}") from e def delete(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) @@ -71,7 +75,8 @@ def query_one(self, model_instance: NDBaseModel) -> ResponseType: except Exception as e: raise Exception(f"Query failed for {self.current_identifier}: {e}") from e - def query_all(self) -> ResponseType: + # TODO: Revisit the straegy around the query_all (see local_user's case) + def query_all(self, model_instance: 
NDBaseModel, **kwargs) -> ResponseType: try: result = self.module.query_obj(self.get_endpoint.path) return result or [] diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index b156512c..3810fa83 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -9,8 +9,10 @@ __metaclass__ = type from .base import NDBaseOrchestrator +from ..models.base import NDBaseModel from ..models.local_user import LocalUserModel from typing import Dict, List, Any, Union, Type +from ..api_endpoints.base import NDBaseSmartEndpoint from ..api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, @@ -23,12 +25,12 @@ class LocalUserOrchestrator(NDBaseOrchestrator): - model_class = Type[LocalUserModel] + model_class: Type[NDBaseModel] = LocalUserModel - post_endpoint = EpApiV1InfraAaaLocalUsersPost() - put_endpoint = EpApiV1InfraAaaLocalUsersPut() - delete_endpoint = EpApiV1InfraAaaLocalUsersDelete() - get_endpoint = EpApiV1InfraAaaLocalUsersGet() + post_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost + put_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete + get_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 5bf0a0f0..72ccbcd7 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -9,6 +9,7 @@ __metaclass__ = type from copy import deepcopy +from typing import Any def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): @@ -29,4 +30,27 @@ def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remo for index, item in enumerate(v): if isinstance(item, dict): result[k][index] = sanitize_dict(item, keys, 
values) - return result \ No newline at end of file + return result + + +def issubset(subset: Any, superset: Any) -> bool: + """Check if subset is contained in superset.""" + if type(subset) is not type(superset): + return False + + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + + for key, value in subset.items(): + if value is None: + continue + + if key not in superset: + return False + + if not issubset(value, superset[key]): + return False + + return True diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 3dcaf1a4..901549fb 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -180,53 +180,15 @@ # from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel # from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING -from module_utils.nd import nd_argument_spec -from module_utils.nd_network_resources import NDNetworkResourceModule -from module_utils.models.local_user import LocalUserModel -from module_utils.constants import USER_ROLES_MAPPING +from ..module_utils.nd import nd_argument_spec +from ..module_utils.nd_network_resources import NDNetworkResourceModule +from ..module_utils.models.local_user import LocalUserModel +from ..module_utils.orchestrators.local_user import LocalUserOrchestrator -# NOTE: Maybe Add the overwrite action in the LocalUserModel -def query_all_local_users(nd_module): - """ - Custom query_all action to extract 'localusers' from response. 
- """ - response = nd_module.query_obj(nd_module.path) - return response.get("localusers", []) - - -# NOTE: Maybe Add More aliases like in the LocalUserModel / Revisit the argmument_spec def main(): argument_spec = nd_argument_spec() - argument_spec.update( - config=dict( - type="list", - elements="dict", - required=True, - options=dict( - email=dict(type="str"), - login_id=dict(type="str", required=True), - first_name=dict(type="str"), - last_name=dict(type="str"), - user_password=dict(type="str", no_log=True), - reuse_limitation=dict(type="int"), - time_interval_limitation=dict(type="int"), - security_domains=dict( - type="list", - elements="dict", - options=dict( - name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), - ), - aliases=["domains"], - ), - remote_id_claim=dict(type="str"), - remote_user_authorization=dict(type="bool"), - ), - ), - override_exceptions=dict(type="list", elements="str"), - state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), - ) + argument_spec.update(LocalUserModel.get_argument_spec()) module = AnsibleModule( argument_spec=argument_spec, @@ -237,23 +199,12 @@ def main(): # Create NDNetworkResourceModule with LocalUserModel nd_module = NDNetworkResourceModule( module=module, - path="/api/v1/infra/aaa/localUsers", model_class=LocalUserModel, - actions_overwrite_map={ - "query_all": query_all_local_users - } + model_orchestrator=LocalUserOrchestrator, ) # Manage state - nd_module.manage_state( - state=module.params["state"], - new_configs=module.params["config"], - unwanted_keys=[ - ["passwordPolicy", "passwordChangeTime"], # Nested path - ["userID"] # Simple key - ], - override_exceptions=module.params.get("override_exceptions") - ) + nd_module.manage_state() nd_module.exit_json() From 0dca2b504a44e92cd611a267c57cbe2963755a2d Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 25 
Feb 2026 08:24:28 -0500 Subject: [PATCH 038/131] [ignore] Default to none and update condition for regarding in models/base.py. --- plugins/module_utils/models/base.py | 8 +++++--- plugins/module_utils/models/local_user.py | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index db7fd9ae..4ddeacd0 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,6 +15,7 @@ # TODO: Revisit identifiers strategy (low priority) +# TODO: add kwargs to every sub method class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. @@ -26,6 +27,7 @@ class NDBaseModel(BaseModel, ABC): - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ # TODO: revisit initial Model Configurations (low priority) + # TODO: enable extra model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, @@ -36,7 +38,7 @@ class NDBaseModel(BaseModel, ABC): # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = None + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "none" # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] @@ -51,7 +53,7 @@ def __init_subclass__(cls, **kwargs): # Skip enforcement for nested models # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) - if cls.__name__ in ['NDNestedModel']: + if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return if not hasattr(cls, "identifiers") or cls.identifiers is None: @@ -146,7 +148,7 @@ def to_diff_dict(self) -> Dict[str, Any]: ) # NOTE: initialize and return a deep copy of the instance? 
- # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel") -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index ea511097..77307d07 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -74,6 +74,7 @@ class LocalUserModel(NDBaseModel): # Fields # NOTE: `alias` are NOT the ansible aliases. they are the equivalent attribute's names from the API spec + # TODO: use extra for generating argument_spec (low priority) login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") From 3d3cda47975e3c133a5d5adcb2d0c5f7e76a197c Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:38:50 -0500 Subject: [PATCH 039/131] [ignore] Add choice for when no identifier is needed. 
Add quick comments and changes to models/local_user.py and api_endpoints/base.py --- plugins/module_utils/api_endpoints/base.py | 6 ++--- plugins/module_utils/models/base.py | 29 +++++++++++----------- plugins/module_utils/models/local_user.py | 6 ++--- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 747c3283..90ef5c87 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -35,9 +35,9 @@ def path(self) -> str: def verb(self) -> str: pass - # TODO: Maybe to be modifed to be more Pydantic - # TODO: Maybe change function's name - # NOTE: function to set endpoints attribute fields from identifiers + # TODO: Maybe to be modifed to be more Pydantic (low priority) + # TODO: Maybe change function's name (low priority) + # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 4ddeacd0..159acb93 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,7 +15,6 @@ # TODO: Revisit identifiers strategy (low priority) -# TODO: add kwargs to every sub method class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
@@ -24,24 +23,24 @@ class NDBaseModel(BaseModel, ABC): - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) + - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ # TODO: revisit initial Model Configurations (low priority) - # TODO: enable extra model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, - extra='ignore' + extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "none" + # TODO: Rvisit no identifiers strategy naming (`singleton`) (low priority) + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" # Optional: fields to exclude from diffs (e.g., passwords) - exclude_from_diff: ClassVar[List[str]] = [] + exclude_from_diff: ClassVar[List] = [] unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) @@ -52,7 +51,7 @@ def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) # Skip enforcement for nested models - # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) + # TODO: Remove if `NDNestedModel` is a separated BaseModel (low conditional priority) if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -64,11 +63,10 @@ def __init_subclass__(cls, **kwargs): if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: raise 
ValueError( f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." - f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'none']]] = 'single'`" + f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" ) # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) - # NOTE: Should we use keyword arguments? @abstractmethod def to_payload(self, **kwargs) -> Dict[str, Any]: """ @@ -85,16 +83,15 @@ def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: pass # TODO: Revisit this function when revisiting identifier strategy (low priority) - # TODO: Add condition when there is no identifiers (high priority) - def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: + def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ Extract identifier value(s) from this instance: - single identifier: Returns field value. - composite identifiers: Returns tuple of all field values. - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. 
""" - if not self.identifiers: - raise ValueError(f"{self.__class__.__name__} has no identifiers defined") + if not self.identifiers and self.identifier_strategy != "singleton": + raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") if self.identifier_strategy == "single": value = getattr(self, self.identifiers[0], None) @@ -133,6 +130,10 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: f"No non-None value in hierarchical fields {self.identifiers}" ) + # TODO: Revisit condition when there is no identifiers (low priority) + elif self.identifier_strategy == "singleton": + return self.identifier_strategy + else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") @@ -166,7 +167,7 @@ def merge(self, other_model: "NDBaseModel") -> Self: setattr(self, field, value) return self -# TODO: Make it a seperated BaseModel (low priority) +# TODO: Make it a seperated BaseModel? (low conditional priority) class NDNestedModel(NDBaseModel): """ Base for nested models without identifiers. diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 77307d07..ed09666d 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -16,7 +16,7 @@ # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel from .base import NDBaseModel, NDNestedModel -# TODO: Move it to constants.py and make a reverse class Map for this +# TODO: Move it to constants.py and make a reverse class Map for this (low priority) USER_ROLES_MAPPING = MappingProxyType({ "fabric_admin": "fabric-admin", "observer": "observer", @@ -51,7 +51,7 @@ def serialize_model(self) -> Dict: # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed -# TODO: Add field validation (e.g. me, le, choices, etc...) 
(medium priority) +# TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) class LocalUserModel(NDBaseModel): """ Local user configuration. @@ -62,7 +62,7 @@ class LocalUserModel(NDBaseModel): # Identifier configuration # TODO: Revisit this identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = ["login_id"] - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "single" + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" # Keys management configurations # TODO: Revisit these configurations (low priority) From b7654042e117ec5e3e5681856e3712aae5908a7e Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:42:12 -0500 Subject: [PATCH 040/131] [ignore] Complete orchestrators/base.py by making simple CRUD operations methods that work for single_identifier strategy (meant to be overridden if needed). --- plugins/module_utils/orchestrators/base.py | 48 ++++++++++--------- .../module_utils/orchestrators/local_user.py | 9 ++-- 2 files changed, 29 insertions(+), 28 deletions(-) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index e2d9fa75..611f39a6 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -24,61 +24,63 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - # TODO: change name from http method to crud (e.g. 
post -> create) - post_endpoint: Type[NDBaseSmartEndpoint] - put_endpoint: Type[NDBaseSmartEndpoint] + create_endpoint: Type[NDBaseSmartEndpoint] + update_endpoint: Type[NDBaseSmartEndpoint] delete_endpoint: Type[NDBaseSmartEndpoint] - get_endpoint: Type[NDBaseSmartEndpoint] + query_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required - # TODO: Replace it with future sender + # TODO: Replace it with future sender (low priority) module: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. "api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore new ways to make them even more general + # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) # TODO: Revisit Deserialization - def create(self, model_instance: NDBaseModel) -> ResponseType: + def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - api_endpoint = self.post_endpoint() + api_endpoint = self.create_endpoint() return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e - # TODO: Make the same changes as create() with local api_endpoint variable - def update(self, model_instance: NDBaseModel) -> ResponseType: + def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.to_payload()) + api_endpoint = self.update_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, 
data=model_instance.to_payload()) except Exception as e: - raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e - def delete(self, model_instance: NDBaseModel) -> ResponseType: + def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.delete_endpoint.path, method=self.delete_endpoint.verb) + api_endpoint = self.delete_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: - raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e + raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e - def query_one(self, model_instance: NDBaseModel) -> ResponseType: + def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - self.get_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.get_endpoint.path, method=self.get_endpoint.verb) + api_endpoint = self.query_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + self.query_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: - raise Exception(f"Query failed for {self.current_identifier}: {e}") from e + raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - result = 
self.module.query_obj(self.get_endpoint.path) + result = self.module.query_obj(self.query_endpoint.path) return result or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e \ No newline at end of file + raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 3810fa83..caacc5aa 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -27,18 +27,17 @@ class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - post_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost - put_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut + create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - get_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.module.query_obj(self.get_endpoint.base_path) + result = self.module.query_obj(self.query_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e - \ No newline at end of file From 9f9d32d5e40088a3b072eedd2e23213bcab2bf69 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:44:23 -0500 Subject: [PATCH 041/131] [ignore] Fix and in nd_config_collections.py. Move to utils.py. 
--- plugins/module_utils/nd_config_collection.py | 42 +++----------------- plugins/module_utils/utils.py | 29 +++++++++++++- 2 files changed, 34 insertions(+), 37 deletions(-) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index a25287aa..fa6662c9 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -18,10 +18,10 @@ # Type aliases # NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) -# TODO: Defined the same acros multiple files -> maybe move to constants.py +# TODO: Defined the same acros multiple files -> maybe move to constants.py (low priority) IdentifierKey = Union[str, int, Tuple[Any, ...]] -# TODO:Might make it a Pydantic RootModel (low priority but medium impact on NDNetworkResourceModule) +# TODO: Make it a Pydantic RootModel? (low conditional priority but medium impact on NDNetworkResourceModule) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. @@ -59,7 +59,7 @@ def _rebuild_index(self) -> None: key = self._extract_key(item) self._index[key] = index - # Core CRUD Operations + # Core Operations def add(self, item: ModelType) -> IdentifierKey: """ @@ -142,7 +142,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) - def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: + def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
""" @@ -158,16 +158,12 @@ def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Unio existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() - - if unwanted_keys: - existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) - new_data = self._remove_unwanted_keys(new_data, unwanted_keys) is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" - def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> bool: + def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: """ Check if two collections differ. """ @@ -178,7 +174,7 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_k return True for item in other: - if self.get_diff_config(item, unwanted_keys) != "no_diff": + if self.get_diff_config(item) != "no_diff": return True for key in self.keys(): @@ -195,32 +191,6 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I other_keys = set(other.keys()) return list(current_keys - other_keys) - # TODO: Maybe not necessary - def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: - """Remove unwanted keys from dict (supports nested paths).""" - data = deepcopy(data) - - for key in unwanted_keys: - if isinstance(key, str): - if key in data: - del data[key] - - elif isinstance(key, list) and len(key) > 0: - try: - parent = data - for k in key[:-1]: - if isinstance(parent, dict) and k in parent: - parent = parent[k] - else: - break - else: - if isinstance(parent, dict) and key[-1] in parent: - del parent[key[-1]] - except (KeyError, TypeError, IndexError): - pass - - return data - # Collection Operations def __len__(self) -> int: diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 72ccbcd7..a7c1d3dc 100644 --- a/plugins/module_utils/utils.py +++ 
b/plugins/module_utils/utils.py @@ -9,7 +9,7 @@ __metaclass__ = type from copy import deepcopy -from typing import Any +from typing import Any, Dict, List, Union def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): @@ -54,3 +54,30 @@ def issubset(subset: Any, superset: Any) -> bool: return False return True + + +# TODO: Might not necessary with Pydantic validation and serialization built-in methods +def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: + """Remove unwanted keys from dict (supports nested paths).""" + data = deepcopy(data) + + for key in unwanted_keys: + if isinstance(key, str): + if key in data: + del data[key] + + elif isinstance(key, list) and len(key) > 0: + try: + parent = data + for k in key[:-1]: + if isinstance(parent, dict) and k in parent: + parent = parent[k] + else: + break + else: + if isinstance(parent, dict) and key[-1] in parent: + del parent[key[-1]] + except (KeyError, TypeError, IndexError): + pass + + return data From 5398a80affe5127aabdfbdae9bca998a12caacd4 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 14:01:24 -0500 Subject: [PATCH 042/131] [ignore] Rename NDNetworkResourceModule to NDStateMachine. Add file for NDNestedModel. Add types.file. Various Renaming and small Modifications across the repo. WIP. 
--- plugins/module_utils/api_endpoints/base.py | 2 +- .../module_utils/api_endpoints/local_user.py | 3 +- plugins/module_utils/constants.py | 21 ++++--- plugins/module_utils/models/base.py | 58 +++++++------------ plugins/module_utils/models/local_user.py | 28 ++++----- plugins/module_utils/models/nested.py | 22 +++++++ plugins/module_utils/nd.py | 5 -- plugins/module_utils/nd_config_collection.py | 28 +++++---- ...twork_resources.py => nd_state_machine.py} | 23 ++++---- plugins/module_utils/orchestrators/base.py | 8 +-- .../module_utils/orchestrators/local_user.py | 5 +- plugins/module_utils/types.py | 14 +++++ plugins/modules/nd_local_user.py | 7 +-- 13 files changed, 115 insertions(+), 109 deletions(-) create mode 100644 plugins/module_utils/models/nested.py rename plugins/module_utils/{nd_network_resources.py => nd_state_machine.py} (95%) create mode 100644 plugins/module_utils/types.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 90ef5c87..0355a1de 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -12,8 +12,8 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final, Union, Tuple, Any +from ..types import IdentifierKey -IdentifierKey = Union[str, int, Tuple[Any, ...], None] # TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 61f52ad8..666782ab 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -20,8 +20,7 @@ from enums import VerbEnum from base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field - -IdentifierKey = Union[str, int, Tuple[Any, ...], None] +from ..types import IdentifierKey class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, 
NDBaseSmartEndpoint): """ diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index cbba61b3..7bb7e95d 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -9,6 +9,18 @@ __metaclass__ = type +from typing import Dict +from types import MappingProxyType +from copy import deepcopy + +class NDConstantMapping(Dict): + + def __init__(self, data: Dict): + new_dict = deepcopy(data) + for k,v in data.items(): + new_dict[v] = k + return MappingProxyType(new_dict) + OBJECT_TYPES = { "tenant": "OST_TENANT", "vrf": "OST_VRF", @@ -175,12 +187,3 @@ ND_SETUP_NODE_DEPLOYMENT_TYPE = {"physical": "cimc", "virtual": "vnode"} BACKUP_TYPE = {"config_only": "config-only", None: "config-only", "": "config-only", "full": "full"} - -USER_ROLES_MAPPING = { - "fabric_admin": "fabric-admin", - "observer": "observer", - "super_admin": "super-admin", - "support_engineer": "support-engineer", - "approver": "approver", - "designer": "designer", -} diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 159acb93..ca672fd5 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,6 +15,7 @@ # TODO: Revisit identifiers strategy (low priority) +# NOTE: what about List of NestedModels? -> make it a separate Sub Model class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
@@ -36,11 +37,12 @@ class NDBaseModel(BaseModel, ABC): # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - # TODO: Rvisit no identifiers strategy naming (`singleton`) (low priority) + # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List] = [] + # TODO: To be removed in the future (see local_user model) unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) @@ -51,7 +53,6 @@ def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) # Skip enforcement for nested models - # TODO: Remove if `NDNestedModel` is a separated BaseModel (low conditional priority) if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -65,22 +66,26 @@ def __init_subclass__(cls, **kwargs): f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" ) - - # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) - @abstractmethod + def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. """ - pass + return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - @classmethod - @abstractmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + def to_config(self, **kwargs) -> Dict[str, Any]: """ - Create model instance from API response. + Convert model to Ansible config format. 
""" - pass + return self.model_dump(by_name=True, exclude_none=True, **kwargs) + + @classmethod + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(response, by_alias=True, **kwargs) + + @classmethod + def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(ansible_config, by_name=True, **kwargs) # TODO: Revisit this function when revisiting identifier strategy (low priority) def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: @@ -132,25 +137,26 @@ def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: # TODO: Revisit condition when there is no identifiers (low priority) elif self.identifier_strategy == "singleton": - return self.identifier_strategy + return None else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") - def to_diff_dict(self) -> Dict[str, Any]: + def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ return self.model_dump( by_alias=True, exclude_none=True, - exclude=set(self.exclude_from_diff) + exclude=set(self.exclude_from_diff), + **kwargs ) # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace - def merge(self, other_model: "NDBaseModel") -> Self: + def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") @@ -166,25 +172,3 @@ def merge(self, other_model: "NDBaseModel") -> Self: else: setattr(self, field, value) return self - -# TODO: Make it a seperated BaseModel? (low conditional priority) -class NDNestedModel(NDBaseModel): - """ - Base for nested models without identifiers. 
- """ - - # TODO: Configuration Fields to be clearly defined here (low priority) - identifiers: ClassVar[List[str]] = [] - - def to_payload(self) -> Dict[str, Any]: - """ - Convert model to API payload format. - """ - return self.model_dump(by_alias=True, exclude_none=True) - - @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - """ - Create model instance from API response. - """ - return cls.model_validate(response, by_alias=True) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index ed09666d..dba35aee 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -13,11 +13,14 @@ from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel -from .base import NDBaseModel, NDNestedModel - -# TODO: Move it to constants.py and make a reverse class Map for this (low priority) -USER_ROLES_MAPPING = MappingProxyType({ +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from .base import NDBaseModel +from .nested import NDNestedModel +from ..constants import NDConstantMapping + +# Constant defined here as it is only used in this model +USER_ROLES_MAPPING = NDConstantMapping({ "fabric_admin": "fabric-admin", "observer": "observer", "super_admin": "super-admin", @@ -31,7 +34,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): """Security domain configuration for local user (nested model).""" # Fields - name: str = Field(..., alias="name", exclude=True) + name: str = Field(alias="name", exclude=True) roles: Optional[List[str]] = Field(default=None, alias="roles", exclude=True) # -- Serialization (Model 
instance -> API payload) -- @@ -47,8 +50,7 @@ def serialize_model(self) -> Dict: } } - # -- Deserialization (API response / Ansible payload -> Model instance) -- - # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed + # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity # TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) @@ -121,10 +123,6 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) "domains": domains_dict } - - def to_payload(self, **kwargs) -> Dict[str, Any]: - return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - # -- Deserialization (API response / Ansible payload -> Model instance) -- @model_validator(mode="before") @@ -172,12 +170,6 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: return value - # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) - @classmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: - return cls.model_validate(response, by_alias=True, **kwargs) - - # -- Extra -- # TODO: to generate from Fields (low priority) diff --git a/plugins/module_utils/models/nested.py b/plugins/module_utils/models/nested.py new file mode 100644 index 00000000..f2560819 --- /dev/null +++ b/plugins/module_utils/models/nested.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import List, ClassVar +from .base import NDBaseModel + + +class NDNestedModel(NDBaseModel): + """ + Base for nested models without identifiers. 
+ """ + + # NOTE: model_config, ClassVar, and Fields can be overwritten here if needed + + identifiers: ClassVar[List[str]] = [] diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 5f528bb8..07af68e5 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -239,13 +239,8 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: -<<<<<<< HEAD if data: info = self.connection.send_request(method, uri, json.dumps(data)) -======= - if data is not None: - info = conn.send_request(method, uri, json.dumps(data)) ->>>>>>> 7c967c3 ([minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher.) else: info = self.connection.send_request(method, uri) self.result["data"] = data diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index fa6662c9..364519b8 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -14,14 +14,12 @@ # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from .models.base import NDBaseModel from .utils import issubset +from .types import IdentifierKey # Type aliases -# NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) -# TODO: Defined the same acros multiple files -> maybe move to constants.py (low priority) -IdentifierKey = Union[str, int, Tuple[Any, ...]] -# TODO: Make it a Pydantic RootModel? (low conditional priority but medium impact on NDNetworkResourceModule) + class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. 
@@ -156,9 +154,9 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" + # TODO: make a diff class level method for NDBaseModel existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() - is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" @@ -214,30 +212,30 @@ def copy(self) -> "NDConfigCollection[ModelType]": # Collection Serialization - def to_list(self, **kwargs) -> List[Dict]: + def to_ansible_config(self, **kwargs) -> List[Dict]: """ - Export as list of dicts (with aliases). + Export as an Ansible config. """ - return [item.model_dump(by_alias=True, exclude_none=True, **kwargs) for item in self._items] + return [item.to_config(**kwargs) for item in self._items] - def to_payload_list(self) -> List[Dict[str, Any]]: + def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: """ Export as list of API payloads. """ - return [item.to_payload() for item in self._items] + return [item.to_payload(**kwargs) for item in self._items] @classmethod - def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ - Create collection from list of dicts. + Create collection from Ansible config. """ - items = [model_class.model_validate(item_data, by_name=True) for item_data in data] + items = [model_class.from_config(item_data, **kwargs) for item_data in data] return cls(model_class=model_class, items=items) @classmethod - def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ Create collection from API response. 
""" - items = [model_class.from_response(item_data) for item_data in response_data] + items = [model_class.from_response(item_data, **kwargs) for item_data in response_data] return cls(model_class=model_class, items=items) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_state_machine.py similarity index 95% rename from plugins/module_utils/nd_network_resources.py rename to plugins/module_utils/nd_state_machine.py index d52fb9de..5306bfe8 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_state_machine.py @@ -24,26 +24,24 @@ from .orchestrators.base import NDBaseOrchestrator from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: replace path and verbs with smart Endpoint (Top priority) -# TODO: Rename it (low priority) + # TODO: Revisit Deserialization in every method (high priority) -class NDNetworkResourceModule(NDModule): +class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_orchestrator: Type[NDBaseOrchestrator]): + def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ # TODO: Revisit Module initialization and configuration (medium priority). e.g., use instead: # nd_module = NDModule() super().__init__(module) - + # Configuration - # TODO: make sure `model_class` is the same as the one in `model_orchestrator`. 
if not, error out (high priority) - self.model_class = model_class self.model_orchestrator = model_orchestrator(module=module) + self.model_class = self.model_orchestrator.model_class # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) self.state = self.params["state"] self.ansible_config = self.params["config"] @@ -52,17 +50,17 @@ def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_ # Initialize collections # TODO: Revisit collections initialization especially `init_all_data` (medium priority) # TODO: Revisit class variables `previous`, `existing`, etc... (medium priority) - self.nd_config_collection = NDConfigCollection[model_class] + self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, - model_class=model_class + model_class=self.model_class ) - self.previous = self.nd_config_collection(model_class=model_class) - self.proposed = self.nd_config_collection(model_class=model_class) - self.sent = self.nd_config_collection(model_class=model_class) + self.previous = self.nd_config_collection(model_class=self.model_class) + self.proposed = self.nd_config_collection(model_class=self.model_class) + self.sent = self.nd_config_collection(model_class=self.model_class) except Exception as e: self.fail_json( @@ -340,6 +338,7 @@ def _manage_delete_state(self) -> None: # Output Formatting # TODO: move to separate Class (results) -> align it with rest_send PR + # TODO: return a defined ordered list of config (for integration test) def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.params.get("output_level", "normal") diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 611f39a6..db72b740 100644 --- 
a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -27,7 +27,8 @@ class NDBaseOrchestrator(BaseModel): create_endpoint: Type[NDBaseSmartEndpoint] update_endpoint: Type[NDBaseSmartEndpoint] delete_endpoint: Type[NDBaseSmartEndpoint] - query_endpoint: Type[NDBaseSmartEndpoint] + query_one_endpoint: Type[NDBaseSmartEndpoint] + query_all_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) @@ -70,9 +71,8 @@ def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - api_endpoint = self.query_endpoint() + api_endpoint = self.query_one_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - self.query_endpoint.set_identifiers(model_instance.get_identifier_value()) return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e @@ -80,7 +80,7 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - result = self.module.query_obj(self.query_endpoint.path) + result = self.module.query_obj(self.query_all_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index caacc5aa..ef2aa36a 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -30,14 +30,15 @@ class LocalUserOrchestrator(NDBaseOrchestrator): create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost update_endpoint: 
Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - query_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.module.query_obj(self.query_endpoint.base_path) + result = self.module.query_obj(self.query_all_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/types.py b/plugins/module_utils/types.py new file mode 100644 index 00000000..124aedd5 --- /dev/null +++ b/plugins/module_utils/types.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Any, Union, Tuple + + +IdentifierKey = Union[str, int, Tuple[Any, ...]] diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 901549fb..67fb3e80 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -181,7 +181,7 @@ # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel # from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING from ..module_utils.nd import nd_argument_spec -from ..module_utils.nd_network_resources import NDNetworkResourceModule +from ..module_utils.nd_state_machine import NDStateMachine from ..module_utils.models.local_user import LocalUserModel from ..module_utils.orchestrators.local_user import LocalUserOrchestrator @@ -194,12 +194,11 @@ def 
main(): argument_spec=argument_spec, supports_check_mode=True, ) - + try: # Create NDNetworkResourceModule with LocalUserModel - nd_module = NDNetworkResourceModule( + nd_module = NDStateMachine( module=module, - model_class=LocalUserModel, model_orchestrator=LocalUserOrchestrator, ) From cd44c8b546e7eb14013db5c10acb7a72f86c7e81 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 14:09:18 -0500 Subject: [PATCH 043/131] [ignore] Make a small change to NDModule request function. --- plugins/module_utils/nd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 07af68e5..42b1b118 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -239,7 +239,7 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: - if data: + if data is not None: info = self.connection.send_request(method, uri, json.dumps(data)) else: info = self.connection.send_request(method, uri) From 3d86efb68d3e101bdc1f2b42d3f2ca9223d619cd Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Mon, 2 Mar 2026 17:59:17 -0500 Subject: [PATCH 044/131] [ignore] Modify nd_state_machine to work with orchestrators/models/api_endpoints. Adapt api_endpoints, models, orchestrators accordingly. Integration Tests passing for nd_local_user module. Still WIP. 
--- plugins/module_utils/api_endpoints/base.py | 6 +- .../module_utils/api_endpoints/local_user.py | 6 +- plugins/module_utils/constants.py | 9 +- plugins/module_utils/models/base.py | 3 +- plugins/module_utils/models/local_user.py | 5 +- plugins/module_utils/nd_state_machine.py | 237 ++++++++---------- plugins/module_utils/orchestrators/base.py | 34 ++- .../module_utils/orchestrators/local_user.py | 2 +- plugins/modules/nd_local_user.py | 4 +- requirements.txt | 3 +- .../network-integration.requirements.txt | 3 +- 11 files changed, 140 insertions(+), 172 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 0355a1de..832476ed 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -25,13 +25,13 @@ class NDBaseSmartEndpoint(BaseModel, ABC): # TODO: to remove base_path: str - @abstractmethod @property + @abstractmethod def path(self) -> str: pass - - @abstractmethod + @property + @abstractmethod def verb(self) -> str: pass diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 666782ab..cae1326b 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -16,9 +16,9 @@ __metaclass__ = type # pylint: disable=invalid-name from typing import Literal, Union, Tuple, Any, Final -from mixins import LoginIdMixin -from enums import VerbEnum -from base import NDBaseSmartEndpoint, NDBasePath +from .mixins import LoginIdMixin +from .enums import VerbEnum +from .base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field from ..types import IdentifierKey diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 7bb7e95d..784a7f51 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -16,10 +16,13 @@ class NDConstantMapping(Dict): def __init__(self, data: Dict): - 
new_dict = deepcopy(data) + self.new_dict = deepcopy(data) for k,v in data.items(): - new_dict[v] = k - return MappingProxyType(new_dict) + self.new_dict[v] = k + self.new_dict = MappingProxyType(self.new_dict) + + def get_dict(self): + return self.new_dict OBJECT_TYPES = { "tenant": "OST_TENANT", diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index ca672fd5..7b569a58 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -32,6 +32,7 @@ class NDBaseModel(BaseModel, ABC): use_enum_values=True, validate_assignment=True, populate_by_name=True, + arbitrary_types_allowed=True, extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) @@ -77,7 +78,7 @@ def to_config(self, **kwargs) -> Dict[str, Any]: """ Convert model to Ansible config format. """ - return self.model_dump(by_name=True, exclude_none=True, **kwargs) + return self.model_dump(by_alias=False, exclude_none=True, **kwargs) @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index dba35aee..713d6040 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -27,7 +27,7 @@ "support_engineer": "support-engineer", "approver": "approver", "designer": "designer", -}) +}).get_dict() class LocalUserSecurityDomainModel(NDNestedModel): @@ -173,7 +173,8 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # -- Extra -- # TODO: to generate from Fields (low priority) - def get_argument_spec(self): + @classmethod + def get_argument_spec(cls) -> Dict: return dict( config=dict( type="list", diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 5306bfe8..5b1f770c 100644 --- a/plugins/module_utils/nd_state_machine.py +++ 
b/plugins/module_utils/nd_state_machine.py @@ -16,16 +16,16 @@ # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule # from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -# from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +# from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey # from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -from nd import NDModule -from nd_config_collection import NDConfigCollection -from models.base import NDBaseModel +from .nd import NDModule +from .nd_config_collection import NDConfigCollection from .orchestrators.base import NDBaseOrchestrator -from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from .types import IdentifierKey +from .constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: Revisit Deserialization in every method (high priority) class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. @@ -35,16 +35,21 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ Initialize the Network Resource Module. """ - # TODO: Revisit Module initialization and configuration (medium priority). 
e.g., use instead: + # TODO: Revisit Module initialization and configuration # nd_module = NDModule() - super().__init__(module) + self.module = module + self.nd_module = NDModule(module) + + # Operation tracking + self.nd_logs: List[Dict[str, Any]] = [] + self.result: Dict[str, Any] = {"changed": False} # Configuration - self.model_orchestrator = model_orchestrator(module=module) + self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) - self.state = self.params["state"] - self.ansible_config = self.params["config"] + self.state = self.module.params["state"] + self.ansible_config = self.module.params.get("config", []) # Initialize collections @@ -53,46 +58,64 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() - + self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, model_class=self.model_class ) - self.previous = self.nd_config_collection(model_class=self.model_class) + # Save previous state + self.previous = self.existing.copy() self.proposed = self.nd_config_collection(model_class=self.model_class) self.sent = self.nd_config_collection(model_class=self.model_class) - + + for config in self.ansible_config: + try: + # Parse config into model + item = self.model_class.from_config(config) + self.proposed.add(item) + except ValidationError as e: + self.fail_json( + msg=f"Invalid configuration: {e}", + config=config, + validation_errors=e.errors() + ) + return + except Exception as e: self.fail_json( msg=f"Initialization failed: {str(e)}", error=str(e) ) - - # Operation tracking - self.nd_logs: List[Dict[str, Any]] = [] # Logging # NOTE: format log placeholder # TODO: use a proper logger (low priority) - 
def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: + def format_log( + self, + identifier: IdentifierKey, + operation_status: Literal["no_change", "created", "updated", "deleted"], + before: Optional[Dict[str, Any]] = None, + after: Optional[Dict[str, Any]] = None, + payload: Optional[Dict[str, Any]] = None, + ) -> None: """ Create and append a log entry. """ log_entry = { "identifier": identifier, - "status": status, - "before": deepcopy(self.existing_config), - "after": deepcopy(after_data) if after_data is not None else self.existing_config, - "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {} + "operation_status": operation_status, + "before": before, + "after": after, + "payload": payload, } # Add HTTP details if not in check mode - if not self.module.check_mode and self.url is not None: + if not self.module.check_mode and self.nd_module.url is not None: log_entry.update({ - "method": self.method, - "response": self.response, - "status": self.status, - "url": self.url + "method": self.nd_module.method, + "response": self.nd_module.response, + "status": self.nd_module.status, + "url": self.nd_module.url }) self.nd_logs.append(log_entry) @@ -103,42 +126,6 @@ def manage_state(self) -> None: """ Manage state according to desired configuration. 
""" - unwanted_keys = unwanted_keys or [] - - # Parse and validate configs - # TODO: move it to init() (top priority) - # TODO: Modify it if NDConfigCollection becomes a Pydantic RootModel (low priority) - try: - parsed_items = [] - for config in self.ansible_config: - try: - # Parse config into model - item = self.model_class.model_validate(config) - parsed_items.append(item) - except ValidationError as e: - self.fail_json( - msg=f"Invalid configuration: {e}", - config=config, - validation_errors=e.errors() - ) - return - - # Create proposed collection - self.proposed = self.nd_config_collection( - model_class=self.model_class, - items=parsed_items - ) - - # Save previous state - self.previous = self.existing.copy() - - except Exception as e: - self.fail_json( - msg=f"Failed to prepare configurations: {e}", - error=str(e) - ) - return - # Execute state operations if self.state in ["merged", "replaced", "overridden"]: self._manage_create_update_state() @@ -159,18 +146,10 @@ def _manage_create_update_state(self) -> None: Handle merged/replaced/overridden states. 
""" for proposed_item in self.proposed: + # Extract identifier + identifier = proposed_item.get_identifier_value() + existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: - # Extract identifier - # TODO: Remove self.current_identifier, get it directly into the action functions - identifier = proposed_item.get_identifier_value() - - existing_item = self.existing.get(identifier) - self.existing_config = ( - existing_item.model_dump(by_alias=True, exclude_none=True) - if existing_item - else {} - ) - # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) @@ -178,51 +157,44 @@ def _manage_create_update_state(self) -> None: if diff_status == "no_diff": self.format_log( identifier=identifier, - status="no_change", - after_data=self.existing_config + operation_status="no_change", + before=existing_config, + after=existing_config, ) continue # Prepare final config based on state - if self.state == "merged" and existing_item: + if self.state == "merged": # Merge with existing merged_item = self.existing.merge(proposed_item) final_item = merged_item else: # Replace or create - if existing_item: + if diff_status == "changed": self.existing.replace(proposed_item) else: self.existing.add(proposed_item) final_item = proposed_item - - # Convert to API payload - self.proposed_config = final_item.to_payload() - + # Execute API operation if diff_status == "changed": - response = self.model_orchestrator.update(final_item) + if not self.module.check_mode: + response = self.model_orchestrator.update(final_item) + self.sent.add(final_item) operation_status = "updated" - else: - response = self.model_orchestrator.create(final_item) + elif diff_status == "new": + if not self.module.check_mode: + response = self.model_orchestrator.create(final_item) + self.sent.add(final_item) operation_status = "created" - # Track sent payload - if not self.module.check_mode: - self.sent.add(final_item) - sent_payload = 
final_item - else: - sent_payload = None - # Log operation self.format_log( identifier=identifier, - status=operation_status, - after_data=( - response if not self.module.check_mode - else final_item.model_dump(by_alias=True, exclude_none=True) - ), - sent_payload_data=sent_payload + operation_status=operation_status, + before=existing_config, + after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), + payload=final_item.to_payload(), ) except Exception as e: @@ -230,11 +202,12 @@ def _manage_create_update_state(self) -> None: self.format_log( identifier=identifier, - status="no_change", - after_data=self.existing_config + operation_status="no_change", + before=existing_config, + after=existing_config, ) - if not self.params.get("ignore_errors", False): + if not self.module.params.get("ignore_errors", False): self.fail_json( msg=error_msg, identifier=str(identifier), @@ -243,30 +216,21 @@ def _manage_create_update_state(self) -> None: return # TODO: Refactor with orchestrator (Top priority) - def _manage_override_deletions(self, override_exceptions: List) -> None: + def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" diff_identifiers = self.previous.get_diff_identifiers(self.proposed) for identifier in diff_identifiers: - if identifier in override_exceptions: - continue - try: - self.current_identifier = identifier - existing_item = self.existing.get(identifier) if not existing_item: continue - - self.existing_config = existing_item.model_dump( - by_alias=True, - exclude_none=True - ) - + # Execute delete - self._delete() + if not self.module.check_mode: + response = self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) @@ -274,8 +238,10 @@ def _manage_override_deletions(self, override_exceptions: List) -> None: # Log deletion self.format_log( identifier=identifier, - status="deleted", - after_data={} + operation_status="deleted", + before=existing_item.to_config(), + after={}, + ) except Exception as e: @@ -295,25 +261,21 @@ def _manage_delete_state(self) -> None: for proposed_item in self.proposed: try: identifier = proposed_item.get_identifier_value() - self.current_identifier = identifier existing_item = self.existing.get(identifier) if not existing_item: # Already deleted or doesn't exist self.format_log( identifier=identifier, - status="no_change", - after_data={} + operation_status="no_change", + before={}, + after={}, ) continue - self.existing_config = existing_item.model_dump( - by_alias=True, - exclude_none=True - ) - # Execute delete - self._delete() + if not self.module.check_mode: + response = self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) @@ -321,8 +283,9 @@ def _manage_delete_state(self) -> None: # Log deletion self.format_log( identifier=identifier, - status="deleted", - after_data={} + operation_status="deleted", + before=existing_item.to_config(), + after={}, ) except Exception as e: @@ -341,35 +304,35 @@ def _manage_delete_state(self) -> None: # TODO: return a defined ordered list of config (for integration test) def add_logs_and_outputs(self) -> 
None: """Add logs and outputs to module result based on output_level.""" - output_level = self.params.get("output_level", "normal") - state = self.params.get("state") + output_level = self.module.params.get("output_level", "normal") + state = self.module.params.get("state") # Add previous state for certain states and output levels if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if output_level in ("debug", "info"): - self.result["previous"] = self.previous.to_list() + self.result["previous"] = self.previous.to_ansible_config() # Check if there were changes - if not self.has_modified and self.previous.get_diff_collection(self.existing): + if self.previous.get_diff_collection(self.existing): self.result["changed"] = True # Add stdout if present - if self.stdout: - self.result["stdout"] = self.stdout + if self.nd_module.stdout: + self.result["stdout"] = self.nd_module.stdout # Add debug information if output_level == "debug": self.result["nd_logs"] = self.nd_logs - if self.url is not None: - self.result["httpapi_logs"] = self.httpapi_logs + if self.nd_module.url is not None: + self.result["httpapi_logs"] = self.nd_module.httpapi_logs if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent.to_payload_list() - self.result["proposed"] = self.proposed.to_list() + self.result["proposed"] = self.proposed.to_ansible_config() # Always include current state - self.result["current"] = self.existing.to_list() + self.result["current"] = self.existing.to_ansible_config() # Module Exit Methods diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index db72b740..924ea4b0 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -11,8 +11,8 @@ from ..models.base import NDBaseModel from ..nd import NDModule from ..api_endpoints.base import NDBaseSmartEndpoint -from typing import Dict, List, Any, Union, ClassVar, Type -from pydantic import BaseModel 
+from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from pydantic import BaseModel, ConfigDict ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] @@ -21,6 +21,13 @@ # TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): + model_config = ConfigDict( + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + arbitrary_types_allowed=True, + ) + model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required @@ -32,40 +39,31 @@ class NDBaseOrchestrator(BaseModel): # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) - module: NDModule + sender: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. "api/v1/infra/aaa/LocalUsers/{loginID}") # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.create_endpoint() - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.update_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb, 
data=model_instance.to_payload()) except Exception as e: raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.delete_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e @@ -73,14 +71,14 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.query_one_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e # TODO: Revisit the straegy around the query_all (see local_user's case) - def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: + def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: - result = self.module.query_obj(self.query_all_endpoint.path) + result = self.sender.query_obj(self.query_all_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index ef2aa36a..46a4ea07 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -38,7 +38,7 @@ def query_all(self): Custom query_all action to extract 'localusers' from 
response. """ try: - result = self.module.query_obj(self.query_all_endpoint.base_path) + result = self.sender.query_obj(self.query_all_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 67fb3e80..b6acee72 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -177,9 +177,9 @@ from ansible.module_utils.basic import AnsibleModule # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec -# from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule +# from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -# from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator from ..module_utils.nd import nd_argument_spec from ..module_utils.nd_state_machine import NDStateMachine from ..module_utils.models.local_user import LocalUserModel diff --git a/requirements.txt b/requirements.txt index 514632d1..98907e9a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ requests_toolbelt jsonpath-ng -lxml \ No newline at end of file +lxml +pydantic==2.12.5 \ No newline at end of file diff --git a/tests/integration/network-integration.requirements.txt b/tests/integration/network-integration.requirements.txt index 514632d1..98907e9a 100644 --- a/tests/integration/network-integration.requirements.txt +++ b/tests/integration/network-integration.requirements.txt @@ -1,3 +1,4 @@ requests_toolbelt jsonpath-ng -lxml \ No newline at end of file +lxml +pydantic==2.12.5 \ No newline at end of file 
From 5badb5ac4953fa67ed01697438da2511e7f01ff1 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 11:46:31 -0500 Subject: [PATCH 045/131] [ignore] Add proper path dependencies and Ran black formatting. --- plugins/module_utils/api_endpoints/base.py | 5 +- plugins/module_utils/api_endpoints/enums.py | 2 +- .../module_utils/api_endpoints/local_user.py | 13 +- plugins/module_utils/api_endpoints/mixins.py | 2 +- plugins/module_utils/constants.py | 7 +- plugins/module_utils/models/base.py | 60 +++--- plugins/module_utils/models/local_user.py | 75 +++----- plugins/module_utils/models/nested.py | 2 +- plugins/module_utils/nd_config_collection.py | 94 +++++----- plugins/module_utils/nd_state_machine.py | 171 +++++++----------- plugins/module_utils/orchestrators/base.py | 9 +- .../module_utils/orchestrators/local_user.py | 12 +- plugins/module_utils/utils.py | 16 +- plugins/modules/nd_api_key.py | 1 - plugins/modules/nd_local_user.py | 25 +-- 15 files changed, 204 insertions(+), 290 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 832476ed..954c1f6a 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -12,13 +12,12 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final, Union, Tuple, Any -from ..types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey # TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced class NDBaseSmartEndpoint(BaseModel, ABC): - # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) @@ -29,7 +28,7 @@ class NDBaseSmartEndpoint(BaseModel, ABC): @abstractmethod def path(self) -> str: pass - + @property @abstractmethod def verb(self) -> str: diff --git a/plugins/module_utils/api_endpoints/enums.py 
b/plugins/module_utils/api_endpoints/enums.py index afb4dd5c..ced62ba7 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/api_endpoints/enums.py @@ -43,4 +43,4 @@ class BooleanStringEnum(str, Enum): """ TRUE = "true" - FALSE = "false" \ No newline at end of file + FALSE = "false" diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index cae1326b..72639495 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -16,11 +16,12 @@ __metaclass__ = type # pylint: disable=invalid-name from typing import Literal, Union, Tuple, Any, Final -from .mixins import LoginIdMixin -from .enums import VerbEnum -from .base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field -from ..types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey + class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): """ @@ -105,7 +106,7 @@ class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): """ class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( - default="EpApiV1InfraAaaLocalUsersPost", + default="EpApiV1InfraAaaLocalUsersPost", description="Class name for backward compatibility", frozen=True, ) @@ -136,7 +137,7 @@ class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): """ class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( - default="EpApiV1InfraAaaLocalUsersPut", + default="EpApiV1InfraAaaLocalUsersPut", description="Class name for backward compatibility", frozen=True, ) diff --git 
a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py index 8ff3218f..9516c9ce 100644 --- a/plugins/module_utils/api_endpoints/mixins.py +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -22,4 +22,4 @@ class LoginIdMixin(BaseModel): """Mixin for endpoints that require login_id parameter.""" - login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") \ No newline at end of file + login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 784a7f51..afa0a2b0 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -13,17 +13,18 @@ from types import MappingProxyType from copy import deepcopy -class NDConstantMapping(Dict): +class NDConstantMapping(Dict): def __init__(self, data: Dict): self.new_dict = deepcopy(data) - for k,v in data.items(): + for k, v in data.items(): self.new_dict[v] = k self.new_dict = MappingProxyType(self.new_dict) - + def get_dict(self): return self.new_dict + OBJECT_TYPES = { "tenant": "OST_TENANT", "vrf": "OST_VRF", diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 7b569a58..94fb9cc5 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -19,13 +19,14 @@ class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
- + Supports three identifier strategies: - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ + # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, @@ -33,14 +34,14 @@ class NDBaseModel(BaseModel, ABC): validate_assignment=True, populate_by_name=True, arbitrary_types_allowed=True, - extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs + extra="allow", # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" - + # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List] = [] # TODO: To be removed in the future (see local_user model) @@ -52,7 +53,7 @@ def __init_subclass__(cls, **kwargs): Enforce configuration for identifiers definition. """ super().__init_subclass__(**kwargs) - + # Skip enforcement for nested models if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -73,7 +74,7 @@ def to_payload(self, **kwargs) -> Dict[str, Any]: Convert model to API payload format. """ return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - + def to_config(self, **kwargs) -> Dict[str, Any]: """ Convert model to Ansible config format. 
@@ -83,11 +84,11 @@ def to_config(self, **kwargs) -> Dict[str, Any]: @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: return cls.model_validate(response, by_alias=True, **kwargs) - + @classmethod def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: return cls.model_validate(ansible_config, by_name=True, **kwargs) - + # TODO: Revisit this function when revisiting identifier strategy (low priority) def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ @@ -98,74 +99,61 @@ def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ if not self.identifiers and self.identifier_strategy != "singleton": raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") - + if self.identifier_strategy == "single": value = getattr(self, self.identifiers[0], None) if value is None: - raise ValueError( - f"Single identifier field '{self.identifiers[0]}' is None" - ) + raise ValueError(f"Single identifier field '{self.identifiers[0]}' is None") return value - + elif self.identifier_strategy == "composite": values = [] missing = [] - + for field in self.identifiers: value = getattr(self, field, None) if value is None: missing.append(field) values.append(value) - + # NOTE: might be redefined with Pydantic (low priority) if missing: - raise ValueError( - f"Composite identifier fields {missing} are None. " - f"All required: {self.identifiers}" - ) - + raise ValueError(f"Composite identifier fields {missing} are None. 
" f"All required: {self.identifiers}") + return tuple(values) - + elif self.identifier_strategy == "hierarchical": for field in self.identifiers: value = getattr(self, field, None) if value is not None: return (field, value) - - raise ValueError( - f"No non-None value in hierarchical fields {self.identifiers}" - ) - + + raise ValueError(f"No non-None value in hierarchical fields {self.identifiers}") + # TODO: Revisit condition when there is no identifiers (low priority) elif self.identifier_strategy == "singleton": return None - + else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") - def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ - return self.model_dump( - by_alias=True, - exclude_none=True, - exclude=set(self.exclude_from_diff), - **kwargs - ) - + return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) + # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... 
-> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") - + for field, value in other_model: if value is None: continue - + current_value = getattr(self, field) if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): setattr(self, field, current_value.merge(value)) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 713d6040..e759a6fb 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,26 +8,25 @@ __metaclass__ = type -from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self - -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel -from .base import NDBaseModel -from .nested import NDNestedModel -from ..constants import NDConstantMapping +from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping # Constant defined here as it is only used in this model -USER_ROLES_MAPPING = NDConstantMapping({ - "fabric_admin": "fabric-admin", - "observer": "observer", - "super_admin": "super-admin", - "support_engineer": "support-engineer", - "approver": "approver", - 
"designer": "designer", -}).get_dict() +USER_ROLES_MAPPING = NDConstantMapping( + { + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", + } +).get_dict() class LocalUserSecurityDomainModel(NDNestedModel): @@ -41,14 +40,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): @model_serializer() def serialize_model(self) -> Dict: - return { - self.name: { - "roles": [ - USER_ROLES_MAPPING.get(role, role) - for role in (self.roles or []) - ] - } - } + return {self.name: {"roles": [USER_ROLES_MAPPING.get(role, role) for role in (self.roles or [])]}} # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity @@ -60,7 +52,7 @@ class LocalUserModel(NDBaseModel): Identifier: login_id (single field) """ - + # Identifier configuration # TODO: Revisit this identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = ["login_id"] @@ -69,11 +61,8 @@ class LocalUserModel(NDBaseModel): # Keys management configurations # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List]= [ - ["passwordPolicy", "passwordChangeTime"], # Nested path - ["userID"] # Simple key - ] - + unwanted_keys: ClassVar[List] = [["passwordPolicy", "passwordChangeTime"], ["userID"]] # Nested path # Simple key + # Fields # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec # TODO: use extra for generating argument_spec (low priority) @@ -96,7 +85,7 @@ def password_policy(self) -> Optional[Dict[str, int]]: """Computed nested structure for API payload.""" if self.reuse_limitation is None and self.time_interval_limitation is None: return None - + policy = {} if self.reuse_limitation is not None: policy["reuseLimitation"] = self.reuse_limitation @@ -108,7 +97,6 @@ def password_policy(self) -> Optional[Dict[str, int]]: def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: return value.get_secret_value() if value else None - @field_serializer("security_domains") def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> Optional[Dict]: # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) @@ -119,9 +107,7 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) for domain in value: domains_dict.update(domain.to_payload()) - return { - "domains": domains_dict - } + return {"domains": domains_dict} # -- Deserialization (API response / Ansible payload -> Model instance) -- @@ -132,17 +118,17 @@ def deserialize_password_policy(cls, data: Any) -> Any: return data password_policy = data.get("passwordPolicy") - + if password_policy and isinstance(password_policy, dict): if "reuseLimitation" in password_policy: data["reuse_limitation"] = password_policy["reuseLimitation"] if "timeIntervalLimitation" in password_policy: data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] - + # Remove the nested structure from data to avoid conflicts # (since it's a computed field, not a real field) data.pop("passwordPolicy", None) - + return data @field_validator("security_domains", mode="before") @@ -150,24 +136,21 @@ def deserialize_password_policy(cls, data: Any) -> Any: def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: if value is None: return None - + # If already 
in list format (Ansible module representation), return as-is if isinstance(value, list): return value - + # If in the nested dict format (API representation) if isinstance(value, dict) and "domains" in value: domains_dict = value["domains"] domains_list = [] - + for domain_name, domain_data in domains_dict.items(): - domains_list.append({ - "name": domain_name, - "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])] - }) - + domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])]}) + return domains_list - + return value # -- Extra -- diff --git a/plugins/module_utils/models/nested.py b/plugins/module_utils/models/nested.py index f2560819..0573e5f8 100644 --- a/plugins/module_utils/models/nested.py +++ b/plugins/module_utils/models/nested.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import List, ClassVar -from .base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel class NDNestedModel(NDBaseModel): diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 364519b8..1aa0e2ec 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -10,27 +10,26 @@ from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable from copy import deepcopy +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from .models.base import NDBaseModel -from .utils import issubset -from .types import IdentifierKey # Type aliases -ModelType = TypeVar('ModelType', bound=NDBaseModel) 
+ModelType = TypeVar("ModelType", bound=NDBaseModel) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - + def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): """ Initialize collection. """ self._model_class: ModelType = model_class - + # Dual storage self._items: List[ModelType] = [] self._index: Dict[IdentifierKey, int] = {} @@ -38,7 +37,7 @@ def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = No if items: for item in items: self.add(item) - + # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ @@ -48,7 +47,7 @@ def _extract_key(self, item: ModelType) -> IdentifierKey: return item.get_identifier_value() except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e - + # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" @@ -56,55 +55,47 @@ def _rebuild_index(self) -> None: for index, item in enumerate(self._items): key = self._extract_key(item) self._index[key] = index - + # Core Operations - + def add(self, item: ModelType) -> IdentifierKey: """ Add item to collection (O(1) operation). """ if not isinstance(item, self._model_class): - raise TypeError( - f"Item must be instance of {self._model_class.__name__}, " - f"got {type(item).__name__}" - ) - + raise TypeError(f"Item must be instance of {self._model_class.__name__}, " f"got {type(item).__name__}") + key = self._extract_key(item) - + if key in self._index: - raise ValueError( - f"Item with identifier {key} already exists. Use replace() to update" - ) - + raise ValueError(f"Item with identifier {key} already exists. 
Use replace() to update") + position = len(self._items) self._items.append(item) self._index[key] = position - + return key - + def get(self, key: IdentifierKey) -> Optional[ModelType]: """ Get item by identifier key (O(1) operation). """ index = self._index.get(key) return self._items[index] if index is not None else None - + def replace(self, item: ModelType) -> bool: """ Replace existing item with same identifier (O(1) operation). """ if not isinstance(item, self._model_class): - raise TypeError( - f"Item must be instance of {self._model_class.__name__}, " - f"got {type(item).__name__}" - ) - + raise TypeError(f"Item must be instance of {self._model_class.__name__}, " f"got {type(item).__name__}") + key = self._extract_key(item) index = self._index.get(key) - + if index is None: return False - + self._items[index] = item return True @@ -114,7 +105,7 @@ def merge(self, item: ModelType) -> ModelType: """ key = self._extract_key(item) existing = self.get(key) - + if existing is None: self.add(item) return item @@ -128,17 +119,17 @@ def delete(self, key: IdentifierKey) -> bool: Delete item by identifier (O(n) operation due to index rebuild) """ index = self._index.get(key) - + if index is None: return False - + del self._items[index] self._rebuild_index() - + return True - + # Diff Operations - + # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) 
def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: """ @@ -148,9 +139,9 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha key = self._extract_key(new_item) except ValueError: return "new" - + existing = self.get(key) - + if existing is None: return "new" @@ -158,16 +149,16 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() is_subset = issubset(new_data, existing_data) - + return "no_diff" if is_subset else "changed" - + def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: """ Check if two collections differ. """ if not isinstance(other, NDConfigCollection): raise TypeError("Argument must be NDConfigCollection") - + if len(self) != len(other): return True @@ -178,9 +169,9 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: for key in self.keys(): if other.get(key) is None: return True - + return False - + def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: """ Get identifiers in self but not in other. @@ -190,11 +181,11 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I return list(current_keys - other_keys) # Collection Operations - + def __len__(self) -> int: """Return number of items.""" return len(self._items) - + def __iter__(self): """Iterate over items.""" return iter(self._items) @@ -205,10 +196,7 @@ def keys(self) -> List[IdentifierKey]: def copy(self) -> "NDConfigCollection[ModelType]": """Create deep copy of collection.""" - return NDConfigCollection( - model_class=self._model_class, - items=deepcopy(self._items) - ) + return NDConfigCollection(model_class=self._model_class, items=deepcopy(self._items)) # Collection Serialization @@ -217,13 +205,13 @@ def to_ansible_config(self, **kwargs) -> List[Dict]: Export as an Ansible config. 
""" return [item.to_config(**kwargs) for item in self._items] - + def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: """ Export as list of API payloads. """ return [item.to_payload(**kwargs) for item in self._items] - + @classmethod def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ @@ -231,7 +219,7 @@ def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **k """ items = [model_class.from_config(item_data, **kwargs) for item_data in data] return cls(model_class=model_class, items=items) - + @classmethod def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 5b1f770c..be5849d4 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -12,31 +12,25 @@ from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError from ansible.module_utils.basic import AnsibleModule - -# TODO: To be replaced with: -# from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -# from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator -# from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -from .nd import NDModule -from .nd_config_collection import NDConfigCollection -from .orchestrators.base import NDBaseOrchestrator -from .types import IdentifierKey -from .constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from 
ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +# TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) +# TODO: class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - + def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ - # TODO: Revisit Module initialization and configuration - # nd_module = NDModule() + # TODO: Revisit Module initialization and configuration with rest_send self.module = module self.nd_module = NDModule(module) @@ -51,18 +45,13 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.state = self.module.params["state"] self.ansible_config = self.module.params.get("config", []) - # Initialize collections - # TODO: Revisit collections initialization especially `init_all_data` (medium priority) # TODO: Revisit class variables `previous`, `existing`, etc... 
(medium priority) self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() - self.existing = self.nd_config_collection.from_api_response( - response_data=init_all_data, - model_class=self.model_class - ) + self.existing = self.nd_config_collection.from_api_response(response_data=init_all_data, model_class=self.model_class) # Save previous state self.previous = self.existing.copy() self.proposed = self.nd_config_collection(model_class=self.model_class) @@ -74,30 +63,23 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest item = self.model_class.from_config(config) self.proposed.add(item) except ValidationError as e: - self.fail_json( - msg=f"Invalid configuration: {e}", - config=config, - validation_errors=e.errors() - ) + self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) return except Exception as e: - self.fail_json( - msg=f"Initialization failed: {str(e)}", - error=str(e) - ) + self.fail_json(msg=f"Initialization failed: {str(e)}", error=str(e)) # Logging # NOTE: format log placeholder # TODO: use a proper logger (low priority) def format_log( - self, - identifier: IdentifierKey, - operation_status: Literal["no_change", "created", "updated", "deleted"], - before: Optional[Dict[str, Any]] = None, - after: Optional[Dict[str, Any]] = None, - payload: Optional[Dict[str, Any]] = None, - ) -> None: + self, + identifier: IdentifierKey, + operation_status: Literal["no_change", "created", "updated", "deleted"], + before: Optional[Dict[str, Any]] = None, + after: Optional[Dict[str, Any]] = None, + payload: Optional[Dict[str, Any]] = None, + ) -> None: """ Create and append a log entry. 
""" @@ -108,18 +90,15 @@ def format_log( "after": after, "payload": payload, } - + # Add HTTP details if not in check mode if not self.module.check_mode and self.nd_module.url is not None: - log_entry.update({ - "method": self.nd_module.method, - "response": self.nd_module.response, - "status": self.nd_module.status, - "url": self.nd_module.url - }) - + log_entry.update( + {"method": self.nd_module.method, "response": self.nd_module.response, "status": self.nd_module.status, "url": self.nd_module.url} + ) + self.nd_logs.append(log_entry) - + # State Management (core function) # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) def manage_state(self) -> None: @@ -129,17 +108,17 @@ def manage_state(self) -> None: # Execute state operations if self.state in ["merged", "replaced", "overridden"]: self._manage_create_update_state() - + if self.state == "overridden": self._manage_override_deletions() - + elif self.state == "deleted": self._manage_delete_state() - + # TODO: not needed with Ansible `argument_spec` validation. 
Keep it for now but needs to be removed (low priority) + # TODO: boil down an Exception instead of using `fail_json` method else: self.fail_json(msg=f"Invalid state: {self.state}") - def _manage_create_update_state(self) -> None: """ @@ -152,7 +131,7 @@ def _manage_create_update_state(self) -> None: try: # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) - + # No changes needed if diff_status == "no_diff": self.format_log( @@ -162,7 +141,7 @@ def _manage_create_update_state(self) -> None: after=existing_config, ) continue - + # Prepare final config based on state if self.state == "merged": # Merge with existing @@ -187,7 +166,7 @@ def _manage_create_update_state(self) -> None: response = self.model_orchestrator.create(final_item) self.sent.add(final_item) operation_status = "created" - + # Log operation self.format_log( identifier=identifier, @@ -196,32 +175,27 @@ def _manage_create_update_state(self) -> None: after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), payload=final_item.to_payload(), ) - + except Exception as e: error_msg = f"Failed to process {identifier}: {e}" - + self.format_log( identifier=identifier, operation_status="no_change", before=existing_config, after=existing_config, ) - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - - # TODO: Refactor with orchestrator (Top priority) + def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" diff_identifiers = self.previous.get_diff_identifiers(self.proposed) - + for identifier in diff_identifiers: try: existing_item = self.existing.get(identifier) @@ -231,37 +205,31 @@ def _manage_override_deletions(self) -> None: # Execute delete if not self.module.check_mode: response = self.model_orchestrator.delete(existing_item) - + # Remove from collection self.existing.delete(identifier) - + # Log deletion self.format_log( identifier=identifier, operation_status="deleted", before=existing_item.to_config(), after={}, - ) - + except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - - # TODO: Refactor with orchestrator (Top priority) + def _manage_delete_state(self) -> None: """Handle deleted state.""" for proposed_item in self.proposed: try: identifier = proposed_item.get_identifier_value() - + existing_item = self.existing.get(identifier) if not existing_item: # Already deleted or doesn't exist @@ -272,14 +240,14 @@ def _manage_delete_state(self) -> None: after={}, ) continue - + # Execute delete if not self.module.check_mode: response = self.model_orchestrator.delete(existing_item) - + # Remove from collection self.existing.delete(identifier) - + # Log deletion self.format_log( identifier=identifier, @@ -287,18 +255,14 @@ def _manage_delete_state(self) -> None: before=existing_item.to_config(), after={}, ) - + except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - + # Output Formatting # TODO: move to separate Class (results) -> align it with rest_send PR # TODO: return a defined 
ordered list of config (for integration test) @@ -306,36 +270,36 @@ def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.module.params.get("output_level", "normal") state = self.module.params.get("state") - + # Add previous state for certain states and output levels if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if output_level in ("debug", "info"): self.result["previous"] = self.previous.to_ansible_config() - + # Check if there were changes if self.previous.get_diff_collection(self.existing): self.result["changed"] = True - + # Add stdout if present if self.nd_module.stdout: self.result["stdout"] = self.nd_module.stdout - + # Add debug information if output_level == "debug": self.result["nd_logs"] = self.nd_logs - + if self.nd_module.url is not None: self.result["httpapi_logs"] = self.nd_module.httpapi_logs - + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent.to_payload_list() self.result["proposed"] = self.proposed.to_ansible_config() - + # Always include current state self.result["current"] = self.existing.to_ansible_config() - + # Module Exit Methods - + def fail_json(self, msg: str, **kwargs) -> None: """ Exit module with failure. @@ -343,26 +307,23 @@ def fail_json(self, msg: str, **kwargs) -> None: self.add_logs_and_outputs() self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result) - + def exit_json(self, **kwargs) -> None: """ Exit module successfully. 
""" self.add_logs_and_outputs() - + # Add diff if module supports it if self.module._diff and self.result.get("changed") is True: try: # Use diff-safe dicts (excludes sensitive fields) before = [item.to_diff_dict() for item in self.previous] after = [item.to_diff_dict() for item in self.existing] - - self.result["diff"] = dict( - before=before, - after=after - ) + + self.result["diff"] = dict(before=before, after=after) except Exception: pass # Don't fail on diff generation - + self.result.update(**kwargs) self.module.exit_json(**self.result) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 924ea4b0..f9a63fa1 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,11 +8,11 @@ __metaclass__ = type -from ..models.base import NDBaseModel -from ..nd import NDModule -from ..api_endpoints.base import NDBaseSmartEndpoint -from typing import Dict, List, Any, Union, ClassVar, Type, Optional from pydantic import BaseModel, ConfigDict +from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] @@ -20,7 +20,6 @@ # TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): - model_config = ConfigDict( use_enum_values=True, validate_assignment=True, diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 46a4ea07..04f7707f 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,12 +8,12 @@ __metaclass__ = type -from .base import NDBaseOrchestrator -from ..models.base import NDBaseModel 
-from ..models.local_user import LocalUserModel from typing import Dict, List, Any, Union, Type -from ..api_endpoints.base import NDBaseSmartEndpoint -from ..api_endpoints.local_user import ( +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, EpApiV1InfraAaaLocalUsersDelete, @@ -23,8 +23,8 @@ ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] -class LocalUserOrchestrator(NDBaseOrchestrator): +class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index a7c1d3dc..0bf7cfc8 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -37,22 +37,22 @@ def issubset(subset: Any, superset: Any) -> bool: """Check if subset is contained in superset.""" if type(subset) is not type(superset): return False - + if not isinstance(subset, dict): if isinstance(subset, list): return all(item in superset for item in subset) return subset == superset - + for key, value in subset.items(): if value is None: continue - + if key not in superset: return False - + if not issubset(value, superset[key]): return False - + return True @@ -60,12 +60,12 @@ def issubset(subset: Any, superset: Any) -> bool: def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) - + for key in 
unwanted_keys: if isinstance(key, str): if key in data: del data[key] - + elif isinstance(key, list) and len(key) > 0: try: parent = data @@ -79,5 +79,5 @@ def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) del parent[key[-1]] except (KeyError, TypeError, IndexError): pass - + return data diff --git a/plugins/modules/nd_api_key.py b/plugins/modules/nd_api_key.py index c00428a9..1a3e4823 100644 --- a/plugins/modules/nd_api_key.py +++ b/plugins/modules/nd_api_key.py @@ -146,7 +146,6 @@ def main(): nd.existing = nd.previous = nd.query_objs(path, key="apiKeys") if state == "present": - if len(api_key_name) > 32 or len(api_key_name) < 1: nd.fail_json("A length of 1 to 32 characters is allowed.") elif re.search(r"[^a-zA-Z0-9_.-]", api_key_name): diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index b6acee72..a6972c07 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -175,15 +175,10 @@ """ from ansible.module_utils.basic import AnsibleModule -# TODO: To be replaced with: -# from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec -# from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine -# from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator -from ..module_utils.nd import nd_argument_spec -from ..module_utils.nd_state_machine import NDStateMachine -from ..module_utils.models.local_user import LocalUserModel -from ..module_utils.orchestrators.local_user import LocalUserOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from 
ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator def main(): @@ -196,17 +191,17 @@ def main(): ) try: - # Create NDNetworkResourceModule with LocalUserModel - nd_module = NDStateMachine( + # Initialize StateMachine + nd_state_machine = NDStateMachine( module=module, model_orchestrator=LocalUserOrchestrator, ) - + # Manage state - nd_module.manage_state() + nd_state_machine.manage_state() + + nd_state_machine.exit_json() - nd_module.exit_json() - except Exception as e: module.fail_json(msg=f"Module execution failed: {str(e)}") From eec86e0f74e2ddeaff4f8062d11c4276106a9ba7 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:02:02 -0500 Subject: [PATCH 046/131] [ignore] Clean code for sanity purposes (except Pydantic import checks. --- plugins/module_utils/api_endpoints/base.py | 2 +- plugins/module_utils/api_endpoints/enums.py | 5 +++++ plugins/module_utils/api_endpoints/local_user.py | 4 ++-- plugins/module_utils/api_endpoints/mixins.py | 2 +- plugins/module_utils/models/base.py | 5 +++-- plugins/module_utils/models/local_user.py | 2 -- plugins/module_utils/nd_config_collection.py | 3 +-- plugins/module_utils/nd_state_machine.py | 1 - plugins/module_utils/orchestrators/base.py | 6 ++---- plugins/module_utils/orchestrators/local_user.py | 8 +++----- plugins/module_utils/orchestrators/types.py | 13 +++++++++++++ plugins/module_utils/types.py | 1 - 12 files changed, 31 insertions(+), 21 deletions(-) create mode 100644 plugins/module_utils/orchestrators/types.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 954c1f6a..8428ffe8 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -11,7 +11,7 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import Final, Union, Tuple, Any +from typing import Final from 
ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/api_endpoints/enums.py index ced62ba7..18a7f5eb 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/api_endpoints/enums.py @@ -7,6 +7,11 @@ """ Enums used in api_endpoints. """ + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + from enum import Enum diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 72639495..890b38e7 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -13,9 +13,9 @@ from __future__ import absolute_import, division, print_function -__metaclass__ = type # pylint: disable=invalid-name +__metaclass__ = type -from typing import Literal, Union, Tuple, Any, Final +from typing import Literal, Final from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py index 9516c9ce..56cdcfc5 100644 --- a/plugins/module_utils/api_endpoints/mixins.py +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -15,7 +15,7 @@ __metaclass__ = type # pylint: disable=invalid-name -from typing import TYPE_CHECKING, Optional +from typing import Optional from pydantic import BaseModel, Field diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 94fb9cc5..8cdcc765 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from abc import ABC, abstractmethod 
+from abc import ABC from pydantic import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional from typing_extensions import Self @@ -144,7 +144,8 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) # NOTE: initialize and return a deep copy of the instance? - # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? + # -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index e759a6fb..fe2f2bb5 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,9 +8,7 @@ __metaclass__ = type -from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal -from typing_extensions import Self from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1aa0e2ec..5fd9886d 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -8,13 +8,12 @@ __metaclass__ = type -from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable +from typing import TypeVar, 
Generic, Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - # Type aliases ModelType = TypeVar("ModelType", bound=NDBaseModel) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index be5849d4..923f0b69 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,7 +8,6 @@ __metaclass__ = type -from copy import deepcopy from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError from ansible.module_utils.basic import AnsibleModule diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index f9a63fa1..4df0797d 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -9,13 +9,11 @@ __metaclass__ = type from pydantic import BaseModel, ConfigDict -from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint - - -ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType # TODO: Revisit naming them "Orchestrator" diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 04f7707f..d30b29f8 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,11 +8,12 @@ 
__metaclass__ = type -from typing import Dict, List, Any, Union, Type +from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, @@ -21,9 +22,6 @@ ) -ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] - - class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel @@ -33,7 +31,7 @@ class LocalUserOrchestrator(NDBaseOrchestrator): query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet - def query_all(self): + def query_all(self) -> ResponseType: """ Custom query_all action to extract 'localusers' from response. 
""" diff --git a/plugins/module_utils/orchestrators/types.py b/plugins/module_utils/orchestrators/types.py new file mode 100644 index 00000000..b721c65b --- /dev/null +++ b/plugins/module_utils/orchestrators/types.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Any, Union, List, Dict + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] diff --git a/plugins/module_utils/types.py b/plugins/module_utils/types.py index 124aedd5..3111a095 100644 --- a/plugins/module_utils/types.py +++ b/plugins/module_utils/types.py @@ -10,5 +10,4 @@ from typing import Any, Union, Tuple - IdentifierKey = Union[str, int, Tuple[Any, ...]] From 9e8bde48cc20962044dbfe841fea3f7f4b32234e Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:18:35 -0500 Subject: [PATCH 047/131] [ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues. 
--- plugins/module_utils/api_endpoints/base.py | 180 ------------------ plugins/module_utils/api_endpoints/mixins.py | 25 --- plugins/module_utils/endpoints/base.py | 7 + .../{api_endpoints => endpoints}/enums.py | 2 +- plugins/module_utils/endpoints/mixins.py | 3 +- .../v1/infra_aaa_local_users.py} | 32 ++-- plugins/module_utils/nd_state_machine.py | 1 + plugins/module_utils/orchestrators/base.py | 4 +- .../module_utils/orchestrators/local_user.py | 24 +-- 9 files changed, 41 insertions(+), 237 deletions(-) delete mode 100644 plugins/module_utils/api_endpoints/base.py delete mode 100644 plugins/module_utils/api_endpoints/mixins.py rename plugins/module_utils/{api_endpoints => endpoints}/enums.py (97%) rename plugins/module_utils/{api_endpoints/local_user.py => endpoints/v1/infra_aaa_local_users.py} (74%) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py deleted file mode 100644 index 8428ffe8..00000000 --- a/plugins/module_utils/api_endpoints/base.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@allenrobel) -# Copyright: (c) 2026, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -from abc import ABC, abstractmethod -from pydantic import BaseModel, ConfigDict -from typing import Final -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey - - -# TODO: Rename it to APIEndpoint -# NOTE: This is a very minimalist endpoint package -> needs to be enhanced -class NDBaseSmartEndpoint(BaseModel, ABC): - # TODO: maybe to be modified in the future - model_config = ConfigDict(validate_assignment=True) - - # TODO: to remove - base_path: str - - @property - @abstractmethod - def path(self) -> str: - pass - - @property - @abstractmethod - def verb(self) -> str: - pass - - # 
TODO: Maybe to be modifed to be more Pydantic (low priority) - # TODO: Maybe change function's name (low priority) - # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration - @abstractmethod - def set_identifiers(self, identifier: IdentifierKey = None): - pass - - -class NDBasePath: - """ - # Summary - - Centralized API Base Paths - - ## Description - - Provides centralized base path definitions for all ND API endpoints. - This allows API path changes to be managed in a single location. - - ## Usage - - ```python - # Get a complete base path - path = BasePath.control_fabrics("MyFabric", "config-deploy") - # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/MyFabric/config-deploy - - # Build custom paths - path = BasePath.v1("custom", "endpoint") - # Returns: /appcenter/cisco/ndfc/api/v1/custom/endpoint - ``` - - ## Design Notes - - - All base paths are defined as class constants for easy modification - - Helper methods compose paths from base constants - - Use these methods in Pydantic endpoint models to ensure consistency - - If NDFC changes base API paths, only this class needs updating - """ - - # Root API paths - NDFC_API: Final = "/appcenter/cisco/ndfc/api" - ND_INFRA_API: Final = "/api/v1/infra" - ONEMANAGE: Final = "/onemanage" - LOGIN: Final = "/login" - - @classmethod - def api(cls, *segments: str) -> str: - """ - # Summary - - Build path from NDFC API root. - - ## Parameters - - - segments: Path segments to append - - ## Returns - - - Complete path string - - ## Example - - ```python - path = BasePath.api("custom", "endpoint") - # Returns: /appcenter/cisco/ndfc/api/custom/endpoint - ``` - """ - if not segments: - return cls.NDFC_API - return f"{cls.NDFC_API}/{'/'.join(segments)}" - - @classmethod - def v1(cls, *segments: str) -> str: - """ - # Summary - - Build v1 API path. 
- - ## Parameters - - - segments: Path segments to append after v1 - - ## Returns - - - Complete v1 API path - - ## Example - - ```python - path = BasePath.v1("lan-fabric", "rest") - # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest - ``` - """ - return cls.api("v1", *segments) - - @classmethod - def nd_infra(cls, *segments: str) -> str: - """ - # Summary - - Build ND infra API path. - - ## Parameters - - - segments: Path segments to append after /api/v1/infra - - ## Returns - - - Complete ND infra API path - - ## Example - - ```python - path = BasePath.nd_infra("aaa", "localUsers") - # Returns: /api/v1/infra/aaa/localUsers - ``` - """ - if not segments: - return cls.ND_INFRA_API - return f"{cls.ND_INFRA_API}/{'/'.join(segments)}" - - @classmethod - def nd_infra_aaa(cls, *segments: str) -> str: - """ - # Summary - - Build ND infra AAA API path. - - ## Parameters - - - segments: Path segments to append after aaa (e.g., "localUsers") - - ## Returns - - - Complete ND infra AAA path - - ## Example - - ```python - path = BasePath.nd_infra_aaa("localUsers") - # Returns: /api/v1/infra/aaa/localUsers - ``` - """ - return cls.nd_infra("aaa", *segments) diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py deleted file mode 100644 index 56cdcfc5..00000000 --- a/plugins/module_utils/api_endpoints/mixins.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@allenrobel) -# Copyright: (c) 2026, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -Reusable mixin classes for endpoint models. - -This module provides mixin classes that can be composed to add common -fields to endpoint models without duplication. 
-""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type # pylint: disable=invalid-name - -from typing import Optional -from pydantic import BaseModel, Field - - -class LoginIdMixin(BaseModel): - """Mixin for endpoints that require login_id parameter.""" - - login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index 9da9620e..72dafda4 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -1,4 +1,5 @@ # Copyright: (c) 2026, Allen Robel (@arobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ @@ -25,6 +26,7 @@ Field, ) from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import IdentifierKey class NDEndpointBaseModel(BaseModel, ABC): @@ -132,3 +134,8 @@ def verb(self) -> HttpVerbEnum: None """ + + # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration + @abstractmethod + def set_identifiers(self, identifier: IdentifierKey = None): + pass diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/endpoints/enums.py similarity index 97% rename from plugins/module_utils/api_endpoints/enums.py rename to plugins/module_utils/endpoints/enums.py index 18a7f5eb..802b8fe8 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/endpoints/enums.py @@ -5,7 +5,7 @@ # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ -Enums used in api_endpoints. +Enums used in endpoints. 
""" from __future__ import absolute_import, division, print_function diff --git a/plugins/module_utils/endpoints/mixins.py b/plugins/module_utils/endpoints/mixins.py index 47695611..22d9a2dc 100644 --- a/plugins/module_utils/endpoints/mixins.py +++ b/plugins/module_utils/endpoints/mixins.py @@ -1,4 +1,5 @@ -# Copyright: (c) 2026, Allen Robel (@arobel) +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py similarity index 74% rename from plugins/module_utils/api_endpoints/local_user.py rename to plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 890b38e7..1e1d7823 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -16,14 +16,14 @@ __metaclass__ = type from typing import Literal, Final -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): +class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): """ Base class for ND Infra AAA Local Users endpoints. 
@@ -53,7 +53,7 @@ def set_identifiers(self, identifier: IdentifierKey = None): self.login_id = identifier -class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersGet(_V1InfraAaaLocalUsersBase): """ # Summary @@ -74,8 +74,8 @@ class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): - GET """ - class_name: Literal["EpApiV1InfraAaaLocalUsersGet"] = Field( - default="EpApiV1InfraAaaLocalUsersGet", + class_name: Literal["V1InfraAaaLocalUsersGet"] = Field( + default="V1InfraAaaLocalUsersGet", description="Class name for backward compatibility", frozen=True, ) @@ -86,7 +86,7 @@ def verb(self) -> VerbEnum: return VerbEnum.GET -class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersPost(_V1InfraAaaLocalUsersBase): """ # Summary @@ -105,8 +105,8 @@ class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): - POST """ - class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( - default="EpApiV1InfraAaaLocalUsersPost", + class_name: Literal["V1InfraAaaLocalUsersPost"] = Field( + default="V1InfraAaaLocalUsersPost", description="Class name for backward compatibility", frozen=True, ) @@ -117,7 +117,7 @@ def verb(self) -> VerbEnum: return VerbEnum.POST -class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersPut(_V1InfraAaaLocalUsersBase): """ # Summary @@ -136,8 +136,8 @@ class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): - PUT """ - class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( - default="EpApiV1InfraAaaLocalUsersPut", + class_name: Literal["V1InfraAaaLocalUsersPut"] = Field( + default="V1InfraAaaLocalUsersPut", description="Class name for backward compatibility", frozen=True, ) @@ -148,7 +148,7 @@ def verb(self) -> VerbEnum: return VerbEnum.PUT -class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersDelete(_V1InfraAaaLocalUsersBase): """ # Summary @@ -167,8 
+167,8 @@ class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): - DELETE """ - class_name: Literal["EpApiV1InfraAaaLocalUsersDelete"] = Field( - default="EpApiV1InfraAaaLocalUsersDelete", + class_name: Literal["V1InfraAaaLocalUsersDelete"] = Field( + default="V1InfraAaaLocalUsersDelete", description="Class name for backward compatibility", frozen=True, ) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 923f0b69..ae0a67ce 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -125,6 +125,7 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier + response = {} identifier = proposed_item.get_identifier_value() existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 4df0797d..8c84de8e 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,8 +12,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType # TODO: Revisit naming them "Orchestrator" diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index d30b29f8..bea4a486 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ 
b/plugins/module_utils/orchestrators/local_user.py @@ -12,24 +12,24 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( - EpApiV1InfraAaaLocalUsersPost, - EpApiV1InfraAaaLocalUsersPut, - EpApiV1InfraAaaLocalUsersDelete, - EpApiV1InfraAaaLocalUsersGet, +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( + V1InfraAaaLocalUsersPost, + V1InfraAaaLocalUsersPut, + V1InfraAaaLocalUsersDelete, + V1InfraAaaLocalUsersGet, ) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet + query_all_endpoint: 
Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ From a6bf7bb9871daadef71c017cfa2fa97848676d74 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 15:06:04 -0500 Subject: [PATCH 048/131] [ignore] Remove NDModule inheritence from NDStateMachine. Add first iteration of (Mock Pydantic objects/methods) to pass sanity checks for Pydantic importation. --- plugins/module_utils/nd_state_machine.py | 6 +- plugins/module_utils/pydantic_compat.py | 200 +++++++++++++++++++++++ 2 files changed, 203 insertions(+), 3 deletions(-) create mode 100644 plugins/module_utils/pydantic_compat.py diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index ae0a67ce..e68010fb 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -19,8 +19,8 @@ # TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) -# TODO: -class NDStateMachine(NDModule): +# TODO: Remove inheritence from NDModule (Top Priority) +class NDStateMachine: """ Generic Network Resource Module for Nexus Dashboard. """ @@ -31,7 +31,7 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ # TODO: Revisit Module initialization and configuration with rest_send self.module = module - self.nd_module = NDModule(module) + self.nd_module = NDModule(self.module) # Operation tracking self.nd_logs: List[Dict[str, Any]] = [] diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py new file mode 100644 index 00000000..f1d90fe3 --- /dev/null +++ b/plugins/module_utils/pydantic_compat.py @@ -0,0 +1,200 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +# pylint: disable=too-few-public-methods +""" +Pydantic compatibility layer. 
+ +This module provides a single location for Pydantic imports with fallback +implementations when Pydantic is not available. This ensures consistent +behavior across all modules and follows the DRY principle. +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import traceback +from typing import TYPE_CHECKING, Any, Callable, Union + +if TYPE_CHECKING: + # Type checkers always see the real Pydantic types + from pydantic import ( + AfterValidator, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + PydanticExperimentalWarning, + StrictBool, + ValidationError, + field_serializer, + model_serializer, + field_validator, + model_validator, + validator, + ) +else: + # Runtime: try to import, with fallback + try: + from pydantic import ( + AfterValidator, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + PydanticExperimentalWarning, + StrictBool, + ValidationError, + field_serializer, + model_serializer, + field_validator, + model_validator, + validator, + ) + except ImportError: + HAS_PYDANTIC = False # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR: Union[str, None] = traceback.format_exc() # pylint: disable=invalid-name + + # Fallback: Minimal BaseModel replacement + class BaseModel: + """Fallback BaseModel when pydantic is not available.""" + + model_config = {"validate_assignment": False, "use_enum_values": False} + + def __init__(self, **kwargs): + """Accept keyword arguments and set them as attributes.""" + for key, value in kwargs.items(): + setattr(self, key, value) + + def model_dump(self, exclude_none: bool = False, exclude_defaults: bool = False) -> dict: # pylint: disable=unused-argument + """Return a dictionary of field names and values. 
+ + Args: + exclude_none: If True, exclude fields with None values + exclude_defaults: Accepted for API compatibility but not implemented in fallback + """ + result = {} + for key, value in self.__dict__.items(): + if exclude_none and value is None: + continue + result[key] = value + return result + + # Fallback: ConfigDict that does nothing + def ConfigDict(**kwargs) -> dict: # pylint: disable=unused-argument,invalid-name + """Pydantic ConfigDict fallback when pydantic is not available.""" + return kwargs + + # Fallback: Field that does nothing + def Field(**kwargs) -> Any: # pylint: disable=unused-argument,invalid-name + """Pydantic Field fallback when pydantic is not available.""" + if "default_factory" in kwargs: + return kwargs["default_factory"]() + return kwargs.get("default") + + # Fallback: field_serializer decorator that does nothing + def field_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic field_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: model_serializer decorator that does nothing + def model_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: field_validator decorator that does nothing + def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name + """Pydantic field_validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: AfterValidator that returns the function unchanged + def AfterValidator(func): # pylint: disable=invalid-name + """Pydantic AfterValidator fallback when pydantic is not available.""" + return func + + # Fallback: BeforeValidator that returns the function unchanged + def BeforeValidator(func): # pylint: disable=invalid-name + """Pydantic 
BeforeValidator fallback when pydantic is not available.""" + return func + + # Fallback: PydanticExperimentalWarning + PydanticExperimentalWarning = Warning + + # Fallback: StrictBool + StrictBool = bool + + # Fallback: ValidationError + class ValidationError(Exception): + """ + Pydantic ValidationError fallback when pydantic is not available. + """ + + def __init__(self, message="A custom error occurred."): + self.message = message + super().__init__(self.message) + + def __str__(self): + return f"ValidationError: {self.message}" + + # Fallback: model_validator decorator that does nothing + def model_validator(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: validator decorator that does nothing + def validator(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + else: + HAS_PYDANTIC = True # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name + +# Set HAS_PYDANTIC for when TYPE_CHECKING is True +if TYPE_CHECKING: + HAS_PYDANTIC = True # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name + +__all__ = [ + "AfterValidator", + "BaseModel", + "BeforeValidator", + "ConfigDict", + "Field", + "HAS_PYDANTIC", + "PYDANTIC_IMPORT_ERROR", + "PydanticExperimentalWarning", + "StrictBool", + "ValidationError", + "field_serializer", + "model_serializer", + "field_validator", + "model_validator", + "validator", +] From f54a1377f67d2d31b313d89e13f4475d138461a4 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 17:19:47 -0500 Subject: [PATCH 049/131] [ignore] Rename NDBaseSmartEndpoint to NDBaseEndpoint. Fix importation issues. 
--- plugins/module_utils/endpoints/base.py | 4 +++- .../endpoints/v1/infra_aaa_local_users.py | 4 ++-- plugins/module_utils/orchestrators/base.py | 14 +++++++------- plugins/module_utils/orchestrators/local_user.py | 14 +++++++------- plugins/modules/nd_local_user.py | 5 ++++- 5 files changed, 23 insertions(+), 18 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index 72dafda4..e8fa710e 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -26,7 +26,7 @@ Field, ) from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey class NDEndpointBaseModel(BaseModel, ABC): @@ -135,6 +135,8 @@ def verb(self) -> HttpVerbEnum: None """ + # TODO: Maybe to be modifed to be more Pydantic (low priority) + # TODO: Maybe change function's name (low priority) # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 1e1d7823..0008b188 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -18,12 +18,12 @@ from typing import Literal, Final from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import 
NDBaseEndpoint, NDBasePath from pydantic import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): +class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): """ Base class for ND Infra AAA Local Users endpoints. diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 8c84de8e..b0e34b61 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,8 +12,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType # TODO: Revisit naming them "Orchestrator" @@ -28,11 +28,11 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - create_endpoint: Type[NDBaseSmartEndpoint] - update_endpoint: Type[NDBaseSmartEndpoint] - delete_endpoint: Type[NDBaseSmartEndpoint] - query_one_endpoint: Type[NDBaseSmartEndpoint] - query_all_endpoint: Type[NDBaseSmartEndpoint] + create_endpoint: Type[NDBaseEndpoint] + update_endpoint: Type[NDBaseEndpoint] + delete_endpoint: Type[NDBaseEndpoint] + query_one_endpoint: Type[NDBaseEndpoint] + query_all_endpoint: Type[NDBaseEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) diff --git a/plugins/module_utils/orchestrators/local_user.py 
b/plugins/module_utils/orchestrators/local_user.py index bea4a486..5e52a00b 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -12,8 +12,8 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( V1InfraAaaLocalUsersPost, V1InfraAaaLocalUsersPut, @@ -25,11 +25,11 @@ class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet + create_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index a6972c07..6f296065 100644 
--- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -198,8 +198,11 @@ def main(): ) # Manage state + # TODO: return module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) nd_state_machine.manage_state() - + nd_state_machine.exit_json() except Exception as e: From ea22129fb5960a1abfffd1dfec6164e53b6151cb Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 4 Mar 2026 11:12:27 -0500 Subject: [PATCH 050/131] [ignore] Replace all pydantic imports with pydantic_compat. Fix sanity issues. --- plugins/module_utils/constants.py | 4 ++++ .../endpoints/v1/infra_aaa_local_users.py | 2 +- plugins/module_utils/models/base.py | 9 ++++----- plugins/module_utils/models/local_user.py | 19 ++++++++++++------ plugins/module_utils/nd_state_machine.py | 2 +- plugins/module_utils/orchestrators/base.py | 2 +- plugins/module_utils/pydantic_compat.py | 20 ++++++++++++++++++- plugins/modules/nd_local_user.py | 3 ++- 8 files changed, 45 insertions(+), 16 deletions(-) diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index afa0a2b0..563041a0 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -16,6 +16,7 @@ class NDConstantMapping(Dict): def __init__(self, data: Dict): + self.data = data self.new_dict = deepcopy(data) for k, v in data.items(): self.new_dict[v] = k @@ -24,6 +25,9 @@ def __init__(self, data: Dict): def get_dict(self): return self.new_dict + def get_original_data(self): + return list(self.data.keys()) + OBJECT_TYPES = { "tenant": "OST_TENANT", diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 0008b188..d1013e24 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -19,7 +19,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import 
LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath -from pydantic import Field +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 8cdcc765..67ce5de0 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -9,9 +9,8 @@ __metaclass__ = type from abc import ABC -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional -from typing_extensions import Self # TODO: Revisit identifiers strategy (low priority) @@ -82,11 +81,11 @@ def to_config(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=False, exclude_none=True, **kwargs) @classmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + def from_response(cls, response: Dict[str, Any], **kwargs) -> "NDBaseModel": return cls.model_validate(response, by_alias=True, **kwargs) @classmethod - def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: + def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> "NDBaseModel": return cls.model_validate(ansible_config, by_name=True, **kwargs) # TODO: Revisit this function when revisiting identifier strategy (low priority) @@ -146,7 +145,7 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? # -> similar to NDCOnfigCollection... 
-> add argument to make it optional either replace - def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: + def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index fe2f2bb5..0575c1be 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -9,7 +9,15 @@ __metaclass__ = type from typing import List, Dict, Any, Optional, ClassVar, Literal -from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ( + Field, + SecretStr, + model_serializer, + field_serializer, + field_validator, + model_validator, + computed_field, +) from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping @@ -24,7 +32,7 @@ "approver": "approver", "designer": "designer", } -).get_dict() +) class LocalUserSecurityDomainModel(NDNestedModel): @@ -38,7 +46,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): @model_serializer() def serialize_model(self) -> Dict: - return {self.name: {"roles": [USER_ROLES_MAPPING.get(role, role) for role in (self.roles or [])]}} + return {self.name: {"roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in (self.roles or [])]}} # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity @@ -145,7 +153,7 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: domains_list = [] for domain_name, domain_data in domains_dict.items(): - domains_list.append({"name": domain_name, 
"roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])]}) + domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in domain_data.get("roles", [])]}) return domains_list @@ -174,7 +182,7 @@ def get_argument_spec(cls) -> Dict: elements="dict", options=dict( name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + roles=dict(type="list", elements="str", choices=USER_ROLES_MAPPING.get_original_data()), ), aliases=["domains"], ), @@ -182,6 +190,5 @@ def get_argument_spec(cls) -> Dict: remote_user_authorization=dict(type="bool"), ), ), - override_exceptions=dict(type="list", elements="str"), state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), ) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index e68010fb..81d6a966 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import Optional, List, Dict, Any, Literal, Type -from pydantic import ValidationError +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index b0e34b61..1a3b1921 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing 
import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index f1d90fe3..e8924cd2 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -32,12 +32,14 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, model_serializer, field_validator, model_validator, validator, + computed_field, ) else: # Runtime: try to import, with fallback @@ -50,12 +52,14 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, model_serializer, field_validator, model_validator, validator, + computed_field, ) except ImportError: HAS_PYDANTIC = False # pylint: disable=invalid-name @@ -106,7 +110,7 @@ def decorator(func): return func return decorator - + # Fallback: model_serializer decorator that does nothing def model_serializer(*args, **kwargs): # pylint: disable=unused-argument """Pydantic model_serializer fallback when pydantic is not available.""" @@ -125,6 +129,15 @@ def decorator(func): return decorator + # Fallback: computed_field decorator that does nothing + def computed_field(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic computed_field fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + # Fallback: AfterValidator that returns the function unchanged def AfterValidator(func): # pylint: disable=invalid-name """Pydantic AfterValidator fallback when pydantic is not available.""" @@ -141,6 +154,9 @@ def BeforeValidator(func): # pylint: disable=invalid-name # Fallback: StrictBool StrictBool = bool + # Fallback: SecretStr + SecretStr = str + # Fallback: ValidationError class ValidationError(Exception): """ @@ -191,10 +207,12 @@ def decorator(func): 
"PYDANTIC_IMPORT_ERROR", "PydanticExperimentalWarning", "StrictBool", + "SecretStr", "ValidationError", "field_serializer", "model_serializer", "field_validator", "model_validator", "validator", + "computed_field", ] diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 6f296065..65f2e464 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -27,6 +27,7 @@ - The list of the local users to configure. type: list elements: dict + required: True suboptions: email: description: @@ -202,7 +203,7 @@ def main(): # output = nd_state_machine.manage_state() # module.exit_json(**output) nd_state_machine.manage_state() - + nd_state_machine.exit_json() except Exception as e: From ced470310ef08e1c2a3c1d21f0075ffdd7d81a97 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 6 Mar 2026 13:35:38 -0500 Subject: [PATCH 051/131] [ignore] Add NDOutput class. Modify NDStateMachine and nd_local_user accordingly --- plugins/module_utils/models/base.py | 7 +- plugins/module_utils/models/local_user.py | 3 +- plugins/module_utils/nd_config_collection.py | 3 +- plugins/module_utils/nd_output.py | 70 +++++++ plugins/module_utils/nd_state_machine.py | 186 +++---------------- plugins/module_utils/orchestrators/base.py | 3 - plugins/module_utils/utils.py | 2 +- plugins/modules/nd_local_user.py | 12 +- 8 files changed, 107 insertions(+), 179 deletions(-) create mode 100644 plugins/module_utils/nd_output.py diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 67ce5de0..14c04945 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -143,12 +143,11 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) # NOTE: initialize and return a deep copy of the instance? - # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? 
- # -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": if not isinstance(other_model, type(self)): - # TODO: Change error message - return TypeError("models are not of the same type.") + return TypeError( + f"NDBaseModel.merge method requires models of the same type. self of type {type(self)} and other_model of type {type(other_model)}" + ) for field, value in other_model: if value is None: diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 0575c1be..e2e7faf8 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -71,7 +71,6 @@ class LocalUserModel(NDBaseModel): # Fields # NOTE: `alias` are NOT the ansible aliases. they are the equivalent attribute's names from the API spec - # TODO: use extra for generating argument_spec (low priority) login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") @@ -161,7 +160,7 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # -- Extra -- - # TODO: to generate from Fields (low priority) + # TODO: to generate from Fields: use extra for generating argument_spec (low priority) @classmethod def get_argument_spec(cls) -> Dict: return dict( diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 5fd9886d..1f751822 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -37,7 +37,6 @@ def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = No for item in items: self.add(item) - # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ Extract identifier key from item. 
@@ -144,7 +143,7 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" - # TODO: make a diff class level method for NDBaseModel + # TODO: make a diff class level method for NDBaseModel (high priority) existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() is_subset = issubset(new_data, existing_data) diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py new file mode 100644 index 00000000..027592df --- /dev/null +++ b/plugins/module_utils/nd_output.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Dict, Any, Optional, List, Union +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection + + +class NDOutput: + def __init__(self, module: AnsibleModule): + self._output_level: str = module.params.get("output_level", "normal") + self._changed: bool = False + self._before: Union[NDConfigCollection, List] = [] + self._after: Union[NDConfigCollection, List] = [] + self._diff: Union[NDConfigCollection, List] = [] + self._proposed: Union[NDConfigCollection, List] = [] + self._logs: List = [] + self._extra: Dict[str, Any] = {} + + def format(self, **kwargs): + if isinstance(self._before, NDConfigCollection) and isinstance(self._after, NDConfigCollection) and self._before.get_diff_collection(self._after): + self._changed = True + + output = { + "output_level": self._output_level, + "changed": self._changed, + "after": self._after.to_ansible_config() if isinstance(self._after, NDConfigCollection) else self._after, + "before": self._before.to_ansible_config() if isinstance(self._before, NDConfigCollection) else 
self._before, + "diff": self._diff.to_ansible_config() if isinstance(self._diff, NDConfigCollection) else self._diff, + } + + if self._output_level in ("debug", "info"): + output["proposed"] = self._proposed.to_ansible_config() if isinstance(self._proposed, NDConfigCollection) else self._proposed + if self._output_level == "debug": + output["logs"] = "Not yet implemented" + + if self._extra: + output.update(self._extra) + + output.update(**kwargs) + + return output + + def assign( + self, + after: Optional[NDConfigCollection] = None, + before: Optional[NDConfigCollection] = None, + diff: Optional[NDConfigCollection] = None, + proposed: Optional[NDConfigCollection] = None, + logs: Optional[List] = None, + **kwargs + ) -> None: + if isinstance(after, NDConfigCollection): + self._after = after + if isinstance(before, NDConfigCollection): + self._before = before + if isinstance(diff, NDConfigCollection): + self._diff = diff + if isinstance(proposed, NDConfigCollection): + self._proposed = proposed + if isinstance(logs, List): + self._logs = logs + self._extra.update(**kwargs) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 81d6a966..4146926e 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,18 +8,15 @@ __metaclass__ = type -from typing import Optional, List, Dict, Any, Literal, Type +from typing import List, Dict, Any, Type from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator -from 
ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) -# TODO: Remove inheritence from NDModule (Top Priority) class NDStateMachine: """ Generic Network Resource Module for Nexus Dashboard. @@ -34,29 +31,27 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_module = NDModule(self.module) # Operation tracking - self.nd_logs: List[Dict[str, Any]] = [] - self.result: Dict[str, Any] = {"changed": False} + self.output = NDOutput(self.module) # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class - # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) + # TODO: Revisit these class variables when udpating Module intialization and configuration (low priority) self.state = self.module.params["state"] - self.ansible_config = self.module.params.get("config", []) # Initialize collections - # TODO: Revisit class variables `previous`, `existing`, etc... 
(medium priority) self.nd_config_collection = NDConfigCollection[self.model_class] try: - init_all_data = self.model_orchestrator.query_all() - - self.existing = self.nd_config_collection.from_api_response(response_data=init_all_data, model_class=self.model_class) - # Save previous state - self.previous = self.existing.copy() - self.proposed = self.nd_config_collection(model_class=self.model_class) + response_data = self.model_orchestrator.query_all() + # State of configuration objects in ND before change execution + self.before = self.nd_config_collection.from_api_response(response_data=response_data, model_class=self.model_class) + # State of current configuration objects in ND during change execution + self.existing = self.before.copy() + # Ongoing collection of configuration objects that were changed self.sent = self.nd_config_collection(model_class=self.model_class) - - for config in self.ansible_config: + # Collection of configuration objects given by user + self.proposed = self.nd_config_collection(model_class=self.model_class) + for config in self.module.params.get("config", []): try: # Parse config into model item = self.model_class.from_config(config) @@ -64,42 +59,11 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest except ValidationError as e: self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) return - + self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) except Exception as e: - self.fail_json(msg=f"Initialization failed: {str(e)}", error=str(e)) - - # Logging - # NOTE: format log placeholder - # TODO: use a proper logger (low priority) - def format_log( - self, - identifier: IdentifierKey, - operation_status: Literal["no_change", "created", "updated", "deleted"], - before: Optional[Dict[str, Any]] = None, - after: Optional[Dict[str, Any]] = None, - payload: Optional[Dict[str, Any]] = None, - ) -> None: - """ - Create and append a log entry. 
- """ - log_entry = { - "identifier": identifier, - "operation_status": operation_status, - "before": before, - "after": after, - "payload": payload, - } - - # Add HTTP details if not in check mode - if not self.module.check_mode and self.nd_module.url is not None: - log_entry.update( - {"method": self.nd_module.method, "response": self.nd_module.response, "status": self.nd_module.status, "url": self.nd_module.url} - ) - - self.nd_logs.append(log_entry) + self.fail_json(msg=f"NDStateMachine initialization failed: {str(e)}", error=str(e)) # State Management (core function) - # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) def manage_state(self) -> None: """ Manage state according to desired configuration. @@ -114,7 +78,6 @@ def manage_state(self) -> None: elif self.state == "deleted": self._manage_delete_state() - # TODO: not needed with Ansible `argument_spec` validation. Keep it for now but needs to be removed (low priority) # TODO: boil down an Exception instead of using `fail_json` method else: self.fail_json(msg=f"Invalid state: {self.state}") @@ -125,28 +88,19 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier - response = {} identifier = proposed_item.get_identifier_value() - existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) # No changes needed if diff_status == "no_diff": - self.format_log( - identifier=identifier, - operation_status="no_change", - before=existing_config, - after=existing_config, - ) continue # Prepare final config based on state if self.state == "merged": # Merge with existing - merged_item = self.existing.merge(proposed_item) - final_item = merged_item + final_item = self.existing.merge(proposed_item) else: # Replace or create if diff_status == "changed": @@ -158,34 +112,18 @@ def 
_manage_create_update_state(self) -> None: # Execute API operation if diff_status == "changed": if not self.module.check_mode: - response = self.model_orchestrator.update(final_item) + self.model_orchestrator.update(final_item) self.sent.add(final_item) - operation_status = "updated" elif diff_status == "new": if not self.module.check_mode: - response = self.model_orchestrator.create(final_item) + self.model_orchestrator.create(final_item) self.sent.add(final_item) - operation_status = "created" # Log operation - self.format_log( - identifier=identifier, - operation_status=operation_status, - before=existing_config, - after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), - payload=final_item.to_payload(), - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to process {identifier}: {e}" - - self.format_log( - identifier=identifier, - operation_status="no_change", - before=existing_config, - after=existing_config, - ) - if not self.module.params.get("ignore_errors", False): self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return @@ -194,7 +132,7 @@ def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" - diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + diff_identifiers = self.before.get_diff_identifiers(self.proposed) for identifier in diff_identifiers: try: @@ -204,18 +142,13 @@ def _manage_override_deletions(self) -> None: # Execute delete if not self.module.check_mode: - response = self.model_orchestrator.delete(existing_item) + self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) # Log deletion - self.format_log( - identifier=identifier, - operation_status="deleted", - before=existing_item.to_config(), - after={}, - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" @@ -232,29 +165,17 @@ def _manage_delete_state(self) -> None: existing_item = self.existing.get(identifier) if not existing_item: - # Already deleted or doesn't exist - self.format_log( - identifier=identifier, - operation_status="no_change", - before={}, - after={}, - ) continue # Execute delete if not self.module.check_mode: - response = self.model_orchestrator.delete(existing_item) + self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) # Log deletion - self.format_log( - identifier=identifier, - operation_status="deleted", - before=existing_item.to_config(), - after={}, - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" @@ -263,67 +184,10 @@ def _manage_delete_state(self) -> None: self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - # Output Formatting - # TODO: move to separate Class (results) -> align it with rest_send PR - # TODO: return a defined ordered list of config (for integration test) - def add_logs_and_outputs(self) -> None: - """Add logs and outputs to module result based on output_level.""" - output_level = self.module.params.get("output_level", "normal") - state = self.module.params.get("state") 
- - # Add previous state for certain states and output levels - if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - if output_level in ("debug", "info"): - self.result["previous"] = self.previous.to_ansible_config() - - # Check if there were changes - if self.previous.get_diff_collection(self.existing): - self.result["changed"] = True - - # Add stdout if present - if self.nd_module.stdout: - self.result["stdout"] = self.nd_module.stdout - - # Add debug information - if output_level == "debug": - self.result["nd_logs"] = self.nd_logs - - if self.nd_module.url is not None: - self.result["httpapi_logs"] = self.nd_module.httpapi_logs - - if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - self.result["sent"] = self.sent.to_payload_list() - self.result["proposed"] = self.proposed.to_ansible_config() - - # Always include current state - self.result["current"] = self.existing.to_ansible_config() - # Module Exit Methods def fail_json(self, msg: str, **kwargs) -> None: """ Exit module with failure. """ - self.add_logs_and_outputs() - self.result.update(**kwargs) - self.module.fail_json(msg=msg, **self.result) - - def exit_json(self, **kwargs) -> None: - """ - Exit module successfully. 
- """ - self.add_logs_and_outputs() - - # Add diff if module supports it - if self.module._diff and self.result.get("changed") is True: - try: - # Use diff-safe dicts (excludes sensitive fields) - before = [item.to_diff_dict() for item in self.previous] - after = [item.to_diff_dict() for item in self.existing] - - self.result["diff"] = dict(before=before, after=after) - except Exception: - pass # Don't fail on diff generation - - self.result.update(**kwargs) - self.module.exit_json(**self.result) + self.module.fail_json(msg=msg) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 1a3b1921..1a8b4f10 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -16,7 +16,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -# TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): model_config = ConfigDict( use_enum_values=True, @@ -40,7 +39,6 @@ class NDBaseOrchestrator(BaseModel): # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) - # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.create_endpoint() @@ -72,7 +70,6 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e - # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: result = self.sender.query_obj(self.query_all_endpoint.path) diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 0bf7cfc8..e09bd499 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -56,7 +56,7 @@ def issubset(subset: Any, superset: Any) -> bool: return True -# TODO: Might not necessary with Pydantic validation and serialization built-in methods +# TODO: Might not necessary with Pydantic validation and serialization built-in methods (see models/local_user) def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 65f2e464..d1d871fe 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -128,10 +128,10 @@ reuse_limitation: 20 time_interval_limitation: 10 security_domains: - name: all - roles: - - observer - - support_engineer + - name: all + roles: + - observer + - support_engineer remote_id_claim: remote_user remote_user_authorization: true state: merged @@ -204,10 +204,10 @@ def main(): # module.exit_json(**output) nd_state_machine.manage_state() - nd_state_machine.exit_json() + 
module.exit_json(**nd_state_machine.output.format()) except Exception as e: - module.fail_json(msg=f"Module execution failed: {str(e)}") + module.fail_json(msg=f"Module execution failed: {str(e)}", **nd_state_machine.output.format()) if __name__ == "__main__": From d619c3fde81fe06acef4d9b45c7a6e0bffa29e5b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 10 Mar 2026 13:36:50 -0400 Subject: [PATCH 052/131] [ignore] Update NDOutput class. Remove all fail_json dependencies in NDStateMachineand add custom Exception for it in common/exceptions dir. Set json mode for to_diff_dict method in NDBaseModel. --- plugins/module_utils/common/exceptions.py | 17 ++++++++++++ plugins/module_utils/models/base.py | 4 +-- plugins/module_utils/nd_output.py | 7 +++-- plugins/module_utils/nd_state_machine.py | 32 +++++++---------------- 4 files changed, 31 insertions(+), 29 deletions(-) create mode 100644 plugins/module_utils/common/exceptions.py diff --git a/plugins/module_utils/common/exceptions.py b/plugins/module_utils/common/exceptions.py new file mode 100644 index 00000000..f0ae4400 --- /dev/null +++ b/plugins/module_utils/common/exceptions.py @@ -0,0 +1,17 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +class NDStateMachineError(Exception): + """ + Raised when NDStateMachine is failing. + """ + + pass diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 14c04945..30e5de5e 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -72,7 +72,7 @@ def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. 
""" - return self.model_dump(by_alias=True, exclude_none=True, **kwargs) + return self.model_dump(by_alias=True, exclude_none=True, mode="json", **kwargs) def to_config(self, **kwargs) -> Dict[str, Any]: """ @@ -140,7 +140,7 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ - return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) + return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), mode="json", **kwargs) # NOTE: initialize and return a deep copy of the instance? def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py index 027592df..dbfc2cd2 100644 --- a/plugins/module_utils/nd_output.py +++ b/plugins/module_utils/nd_output.py @@ -9,13 +9,12 @@ __metaclass__ = type from typing import Dict, Any, Optional, List, Union -from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection class NDOutput: - def __init__(self, module: AnsibleModule): - self._output_level: str = module.params.get("output_level", "normal") + def __init__(self, output_level: str): + self._output_level: str = output_level self._changed: bool = False self._before: Union[NDConfigCollection, List] = [] self._after: Union[NDConfigCollection, List] = [] @@ -24,7 +23,7 @@ def __init__(self, module: AnsibleModule): self._logs: List = [] self._extra: Dict[str, Any] = {} - def format(self, **kwargs): + def format(self, **kwargs) -> Dict[str, Any]: if isinstance(self._before, NDConfigCollection) and isinstance(self._after, NDConfigCollection) and self._before.get_diff_collection(self._after): self._changed = True diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 4146926e..bd86da3c 100644 --- 
a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,13 +8,14 @@ __metaclass__ = type -from typing import List, Dict, Any, Type +from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError class NDStateMachine: @@ -31,7 +32,7 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_module = NDModule(self.module) # Operation tracking - self.output = NDOutput(self.module) + self.output = NDOutput(output_level=module.params.get("output_level", "normal")) # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) @@ -57,11 +58,10 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest item = self.model_class.from_config(config) self.proposed.add(item) except ValidationError as e: - self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) - return + raise NDStateMachineError(f"Invalid configuration. 
for config {config}: {str(e)}") self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) except Exception as e: - self.fail_json(msg=f"NDStateMachine initialization failed: {str(e)}", error=str(e)) + raise NDStateMachineError(f"Initialization failed: {str(e)}") # State Management (core function) def manage_state(self) -> None: @@ -78,9 +78,8 @@ def manage_state(self) -> None: elif self.state == "deleted": self._manage_delete_state() - # TODO: boil down an Exception instead of using `fail_json` method else: - self.fail_json(msg=f"Invalid state: {self.state}") + raise NDStateMachineError(f"Invalid state: {self.state}") def _manage_create_update_state(self) -> None: """ @@ -125,8 +124,7 @@ def _manage_create_update_state(self) -> None: except Exception as e: error_msg = f"Failed to process {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return + raise NDStateMachineError(error_msg) def _manage_override_deletions(self) -> None: """ @@ -152,10 +150,8 @@ def _manage_override_deletions(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return + raise NDStateMachineError(error_msg) def _manage_delete_state(self) -> None: """Handle deleted state.""" @@ -179,15 +175,5 @@ def _manage_delete_state(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return - - # Module Exit Methods - - def fail_json(self, msg: str, **kwargs) -> None: - """ - Exit module with failure. 
- """ - self.module.fail_json(msg=msg) + raise NDStateMachineError(error_msg) From 624d1e28e1dfa7b68f3bf50fef03afad7cecc31f Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 11 Mar 2026 11:48:38 -0400 Subject: [PATCH 053/131] [ignore] Fix serialization of model with minimal changes to base.py and local_user.py. Add method to NDBaseModel and apply relevant changes to nd_config_collection. --- plugins/module_utils/models/base.py | 211 ++++++++++++------- plugins/module_utils/models/local_user.py | 180 ++++++++++------ plugins/module_utils/nd_config_collection.py | 5 +- plugins/module_utils/pydantic_compat.py | 6 + 4 files changed, 256 insertions(+), 146 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 30e5de5e..79f9ec80 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -9,154 +9,221 @@ __metaclass__ = type from abc import ABC -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict -from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional +from pydantic import BaseModel, ConfigDict +from typing import List, Dict, Any, ClassVar, Set, Tuple, Union, Literal, Optional +from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset -# TODO: Revisit identifiers strategy (low priority) -# NOTE: what about List of NestedModels? -> make it a separate Sub Model class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
- Supports three identifier strategies: - - single: One unique required field (e.g., ["login_id"]) - - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) + Class-level configuration attributes: + identifiers: List of field names used to uniquely identify this object. + identifier_strategy: How identifiers are interpreted. + exclude_from_diff: Fields excluded from diff comparisons. + unwanted_keys: Keys to strip from API responses before processing. + payload_nested_fields: Mapping of {payload_key: [field_names]} for fields + that should be grouped under a nested key in payload mode but remain + flat in config mode. + payload_exclude_fields: Fields to exclude from payload output + (e.g., because they are restructured into nested keys). + config_exclude_fields: Fields to exclude from config output + (e.g., computed payload-only structures). 
""" - # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, arbitrary_types_allowed=True, - extra="allow", # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs + extra="ignore", ) - # TODO: Revisit identifiers strategy (low priority) + # --- Identifier Configuration --- + identifiers: ClassVar[Optional[List[str]]] = None - # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" - # Optional: fields to exclude from diffs (e.g., passwords) - exclude_from_diff: ClassVar[List] = [] - # TODO: To be removed in the future (see local_user model) + # --- Serialization Configuration --- + + exclude_from_diff: ClassVar[Set[str]] = set() unwanted_keys: ClassVar[List] = [] - # TODO: Revisit it with identifiers strategy (low priority) + # Declarative nested-field grouping for payload mode + # e.g., {"passwordPolicy": ["reuse_limitation", "time_interval_limitation"]} + # means: in payload mode, remove these fields from top level and nest them + # under "passwordPolicy" with their alias names. + payload_nested_fields: ClassVar[Dict[str, List[str]]] = {} + + # Fields to explicitly exclude per mode + payload_exclude_fields: ClassVar[Set[str]] = set() + config_exclude_fields: ClassVar[Set[str]] = set() + + # --- Subclass Validation --- + def __init_subclass__(cls, **kwargs): - """ - Enforce configuration for identifiers definition. 
- """ super().__init_subclass__(**kwargs) # Skip enforcement for nested models - if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): + if cls.__name__ == "NDNestedModel" or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return if not hasattr(cls, "identifiers") or cls.identifiers is None: - raise ValueError( - f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." - f"Example: `identifiers: ClassVar[Optional[List[str]]] = ['login_id']`" - ) + raise ValueError(f"Class {cls.__name__} must define 'identifiers'. " f"Example: identifiers: ClassVar[Optional[List[str]]] = ['login_id']") if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: - raise ValueError( - f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." - f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" - ) + raise ValueError(f"Class {cls.__name__} must define 'identifier_strategy'. " f"Example: identifier_strategy: ClassVar[...] = 'single'") - def to_payload(self, **kwargs) -> Dict[str, Any]: + # --- Core Serialization --- + + def _build_payload_nested(self, data: Dict[str, Any]) -> Dict[str, Any]: """ - Convert model to API payload format. + Apply payload_nested_fields: pull specified fields out of the top-level + dict and group them under their declared parent key. 
""" - return self.model_dump(by_alias=True, exclude_none=True, mode="json", **kwargs) + if not self.payload_nested_fields: + return data + + result = dict(data) + + for nested_key, field_names in self.payload_nested_fields.items(): + nested_dict = {} + for field_name in field_names: + # Resolve the alias for this field + field_info = self.__class__.model_fields.get(field_name) + if field_info is None: + continue + + alias = field_info.alias or field_name + + # Pull value from the serialized data (which uses aliases in payload mode) + if alias in result: + nested_dict[alias] = result.pop(alias) + + if nested_dict: + result[nested_key] = nested_dict + + return result + + def to_payload(self, **kwargs) -> Dict[str, Any]: + """Convert model to API payload format (aliased keys, nested structures).""" + data = self.model_dump( + by_alias=True, + exclude_none=True, + mode="json", + context={"mode": "payload"}, + exclude=self.payload_exclude_fields or None, + **kwargs, + ) + return self._build_payload_nested(data) def to_config(self, **kwargs) -> Dict[str, Any]: - """ - Convert model to Ansible config format. 
- """ - return self.model_dump(by_alias=False, exclude_none=True, **kwargs) + """Convert model to Ansible config format (Python field names, flat structure).""" + return self.model_dump( + by_alias=False, + exclude_none=True, + context={"mode": "config"}, + exclude=self.config_exclude_fields or None, + **kwargs, + ) + + # --- Core Deserialization --- @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> "NDBaseModel": + """Create model instance from API response dict.""" return cls.model_validate(response, by_alias=True, **kwargs) @classmethod def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> "NDBaseModel": + """Create model instance from Ansible config dict.""" return cls.model_validate(ansible_config, by_name=True, **kwargs) - # TODO: Revisit this function when revisiting identifier strategy (low priority) - def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: + # --- Identifier Access --- + + def get_identifier_value(self) -> Optional[Union[str, int, Tuple[Any, ...]]]: """ - Extract identifier value(s) from this instance: - - single identifier: Returns field value. - - composite identifiers: Returns tuple of all field values. - - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. + Extract identifier value(s) based on the configured strategy. 
+ + Returns: + - single: The field value + - composite: Tuple of all field values + - hierarchical: Tuple of (field_name, value) for first non-None field + - singleton: None """ - if not self.identifiers and self.identifier_strategy != "singleton": - raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") + strategy = self.identifier_strategy - if self.identifier_strategy == "single": + if strategy == "singleton": + return None + + if not self.identifiers: + raise ValueError(f"{self.__class__.__name__} has strategy '{strategy}' but no identifiers defined.") + + if strategy == "single": value = getattr(self, self.identifiers[0], None) if value is None: raise ValueError(f"Single identifier field '{self.identifiers[0]}' is None") return value - elif self.identifier_strategy == "composite": + elif strategy == "composite": values = [] missing = [] - for field in self.identifiers: value = getattr(self, field, None) if value is None: missing.append(field) values.append(value) - - # NOTE: might be redefined with Pydantic (low priority) if missing: raise ValueError(f"Composite identifier fields {missing} are None. 
" f"All required: {self.identifiers}") - return tuple(values) - elif self.identifier_strategy == "hierarchical": + elif strategy == "hierarchical": for field in self.identifiers: value = getattr(self, field, None) if value is not None: return (field, value) - raise ValueError(f"No non-None value in hierarchical fields {self.identifiers}") - # TODO: Revisit condition when there is no identifiers (low priority) - elif self.identifier_strategy == "singleton": - return None - else: - raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") + raise ValueError(f"Unknown identifier strategy: {strategy}") + + # --- Diff & Merge --- def to_diff_dict(self, **kwargs) -> Dict[str, Any]: + """Export for diff comparison, excluding sensitive fields.""" + return self.model_dump( + by_alias=True, + exclude_none=True, + exclude=self.exclude_from_diff or None, + mode="json", + **kwargs, + ) + + def get_diff(self, other: "NDBaseModel") -> bool: + """Diff comparison.""" + self_data = self.to_diff_dict() + other_data = other.to_diff_dict() + return issubset(other_data, self_data) + + def merge(self, other: "NDBaseModel") -> "NDBaseModel": """ - Export for diff comparison (excludes sensitive fields). - """ - return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), mode="json", **kwargs) + Merge another model's non-None values into this instance. + Recursively merges nested NDBaseModel fields. - # NOTE: initialize and return a deep copy of the instance? - def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": - if not isinstance(other_model, type(self)): - return TypeError( - f"NDBaseModel.merge method requires models of the same type. self of type {type(self)} and other_model of type {type(other_model)}" - ) + Returns self for chaining. + """ + if not isinstance(other, type(self)): + raise TypeError(f"Cannot merge {type(other).__name__} into {type(self).__name__}. 
" f"Both must be the same type.") - for field, value in other_model: + for field_name, value in other: if value is None: continue - current_value = getattr(self, field) - if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): - setattr(self, field, current_value.merge(value)) - + current = getattr(self, field_name) + if isinstance(current, NDBaseModel) and isinstance(value, NDBaseModel): + current.merge(value) else: - setattr(self, field, value) + setattr(self, field_name, value) + return self diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index e2e7faf8..0320d3c1 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -16,13 +16,14 @@ field_serializer, field_validator, model_validator, - computed_field, + FieldSerializationInfo, + SerializationInfo, ) from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping -# Constant defined here as it is only used in this model + USER_ROLES_MAPPING = NDConstantMapping( { "fabric_admin": "fabric-admin", @@ -36,131 +37,155 @@ class LocalUserSecurityDomainModel(NDNestedModel): - """Security domain configuration for local user (nested model).""" + """ + Security domain with assigned roles for a local user. 
- # Fields - name: str = Field(alias="name", exclude=True) - roles: Optional[List[str]] = Field(default=None, alias="roles", exclude=True) + Canonical form (config): {"name": "all", "roles": ["observer", "support_engineer"]} + API payload form: {"all": {"roles": ["observer", "support-engineer"]}} + """ - # -- Serialization (Model instance -> API payload) -- + name: str = Field(alias="name") + roles: Optional[List[str]] = Field(default=None, alias="roles") @model_serializer() - def serialize_model(self) -> Dict: - return {self.name: {"roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in (self.roles or [])]}} + def serialize(self, info: SerializationInfo) -> Any: + mode = (info.context or {}).get("mode", "payload") - # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity + if mode == "config": + result = {"name": self.name} + if self.roles is not None: + result["roles"] = list(self.roles) + return result + + # Payload mode: nested dict with API role names + api_roles = [USER_ROLES_MAPPING.get_dict().get(role, role) for role in (self.roles or [])] + return {self.name: {"roles": api_roles}} -# TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) class LocalUserModel(NDBaseModel): """ - Local user configuration. + Local user configuration for Nexus Dashboard. + + Identifier: login_id (single) - Identifier: login_id (single field) + Serialization notes: + - In payload mode, `reuse_limitation` and `time_interval_limitation` + are nested under `passwordPolicy` (handled by base class via + `payload_nested_fields`). + - In config mode, they remain as flat top-level fields. + - `security_domains` serializes as a nested dict in payload mode + and a flat list of dicts in config mode. 
""" - # Identifier configuration - # TODO: Revisit this identifiers strategy (low priority) + # --- Identifier Configuration --- + identifiers: ClassVar[Optional[List[str]]] = ["login_id"] identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" - # Keys management configurations - # TODO: Revisit these configurations (low priority) - exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List] = [["passwordPolicy", "passwordChangeTime"], ["userID"]] # Nested path # Simple key + # --- Serialization Configuration --- + + exclude_from_diff: ClassVar[set] = {"user_password"} + unwanted_keys: ClassVar[List] = [ + ["passwordPolicy", "passwordChangeTime"], + ["userID"], + ] + + # In payload mode, nest these fields under "passwordPolicy" + payload_nested_fields: ClassVar[Dict[str, List[str]]] = { + "passwordPolicy": ["reuse_limitation", "time_interval_limitation"], + } + + # --- Fields --- - # Fields - # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") user_password: Optional[SecretStr] = Field(default=None, alias="password") - reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation", exclude=True) - time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation", exclude=True) + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="rbac") remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") - # -- Serialization (Model instance -> API payload) -- - - @computed_field(alias="passwordPolicy") - @property - def password_policy(self) -> Optional[Dict[str, int]]: - """Computed nested structure for API payload.""" - if self.reuse_limitation is None and self.time_interval_limitation is None: - return None - - policy = {} - if self.reuse_limitation is not None: - policy["reuseLimitation"] = self.reuse_limitation - if self.time_interval_limitation is not None: - policy["timeIntervalLimitation"] = self.time_interval_limitation - return policy + # --- Serializers --- @field_serializer("user_password") def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: return value.get_secret_value() if value else None @field_serializer("security_domains") - def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> Optional[Dict]: - # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) 
+ def serialize_security_domains( + self, + value: Optional[List[LocalUserSecurityDomainModel]], + info: FieldSerializationInfo, + ) -> Any: if not value: return None + mode = (info.context or {}).get("mode", "payload") + + if mode == "config": + return [domain.model_dump(context=info.context) for domain in value] + + # Payload mode: merge all domain dicts into {"domains": {...}} domains_dict = {} for domain in value: - domains_dict.update(domain.to_payload()) - + domains_dict.update(domain.model_dump(context=info.context)) return {"domains": domains_dict} - # -- Deserialization (API response / Ansible payload -> Model instance) -- + # --- Validators (Deserialization) --- @model_validator(mode="before") @classmethod - def deserialize_password_policy(cls, data: Any) -> Any: + def flatten_password_policy(cls, data: Any) -> Any: + """ + Flatten nested passwordPolicy from API response into top-level fields. + This is the inverse of the payload_nested_fields nesting. + """ if not isinstance(data, dict): return data - password_policy = data.get("passwordPolicy") - - if password_policy and isinstance(password_policy, dict): - if "reuseLimitation" in password_policy: - data["reuse_limitation"] = password_policy["reuseLimitation"] - if "timeIntervalLimitation" in password_policy: - data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] - - # Remove the nested structure from data to avoid conflicts - # (since it's a computed field, not a real field) - data.pop("passwordPolicy", None) + policy = data.pop("passwordPolicy", None) + if isinstance(policy, dict): + if "reuseLimitation" in policy: + data.setdefault("reuseLimitation", policy["reuseLimitation"]) + if "timeIntervalLimitation" in policy: + data.setdefault("timeIntervalLimitation", policy["timeIntervalLimitation"]) return data @field_validator("security_domains", mode="before") @classmethod - def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: + def normalize_security_domains(cls, 
value: Any) -> Optional[List[Dict]]: + """ + Accept security_domains in either format: + - List of dicts (Ansible config): [{"name": "all", "roles": [...]}] + - Nested dict (API response): {"domains": {"all": {"roles": [...]}}} + Always normalizes to the list-of-dicts form for model storage. + """ if value is None: return None - # If already in list format (Ansible module representation), return as-is + # Already normalized (from Ansible config) if isinstance(value, list): return value - # If in the nested dict format (API representation) + # API response format if isinstance(value, dict) and "domains" in value: - domains_dict = value["domains"] - domains_list = [] - - for domain_name, domain_data in domains_dict.items(): - domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in domain_data.get("roles", [])]}) - - return domains_list + reverse_mapping = {v: k for k, v in USER_ROLES_MAPPING.get_dict().items()} + return [ + { + "name": domain_name, + "roles": [reverse_mapping.get(role, role) for role in domain_data.get("roles", [])], + } + for domain_name, domain_data in value["domains"].items() + ] return value - # -- Extra -- + # --- Argument Spec --- - # TODO: to generate from Fields: use extra for generating argument_spec (low priority) @classmethod def get_argument_spec(cls) -> Dict: return dict( @@ -180,8 +205,19 @@ def get_argument_spec(cls) -> Dict: type="list", elements="dict", options=dict( - name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=USER_ROLES_MAPPING.get_original_data()), + name=dict( + type="str", + required=True, + aliases=[ + "security_domain_name", + "domain_name", + ], + ), + roles=dict( + type="list", + elements="str", + choices=USER_ROLES_MAPPING.get_original_data(), + ), ), aliases=["domains"], ), @@ -189,5 +225,9 @@ def get_argument_spec(cls) -> Dict: remote_user_authorization=dict(type="bool"), ), 
), - state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), + state=dict( + type="str", + default="merged", + choices=["merged", "replaced", "overridden", "deleted"], + ), ) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1f751822..364b8a8f 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -143,10 +143,7 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" - # TODO: make a diff class level method for NDBaseModel (high priority) - existing_data = existing.to_diff_dict() - new_data = new_item.to_diff_dict() - is_subset = issubset(new_data, existing_data) + is_subset = existing.get_diff(new_item) return "no_diff" if is_subset else "changed" diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index e8924cd2..4456018a 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -40,6 +40,8 @@ model_validator, validator, computed_field, + FieldSerializationInfo, + SerializationInfo, ) else: # Runtime: try to import, with fallback @@ -60,6 +62,8 @@ model_validator, validator, computed_field, + FieldSerializationInfo, + SerializationInfo, ) except ImportError: HAS_PYDANTIC = False # pylint: disable=invalid-name @@ -215,4 +219,6 @@ def decorator(func): "model_validator", "validator", "computed_field", + "FieldSerializationInfo", + "SerializationInfo", ] From 054435134d2b04e785db03e16a626482b34c442d Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 11 Mar 2026 13:56:54 -0400 Subject: [PATCH 054/131] [ignore] Complete nd_local_user integration test for creation and update asserts. 
--- .../targets/nd_local_user/tasks/main.yml | 296 +++++++++++++++++- 1 file changed, 288 insertions(+), 8 deletions(-) diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml index 77e55cd1..de8ad5ed 100644 --- a/tests/integration/targets/nd_local_user/tasks/main.yml +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -46,15 +46,125 @@ - name: all state: merged check_mode: true - register: cm_create_local_user + register: cm_create_local_users - name: Create local users with full and minimum configuration (normal mode) cisco.nd.nd_local_user: <<: *create_local_user - register: nm_create_local_user + register: nm_create_local_users + +- name: Asserts for local users creation tasks + ansible.builtin.assert: + that: + - cm_create_local_users is changed + - cm_create_local_users.after | length == 3 + - cm_create_local_users.after.0.login_id == "admin" + - cm_create_local_users.after.0.first_name == "admin" + - cm_create_local_users.after.0.remote_user_authorization == false + - cm_create_local_users.after.0.reuse_limitation == 0 + - cm_create_local_users.after.0.security_domains | length == 1 + - cm_create_local_users.after.0.security_domains.0.name == "all" + - cm_create_local_users.after.0.security_domains.0.roles | length == 1 + - cm_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - cm_create_local_users.after.0.time_interval_limitation == 0 + - cm_create_local_users.after.1.email == "ansibleuser@example.com" + - cm_create_local_users.after.1.first_name == "Ansible first name" + - cm_create_local_users.after.1.last_name == "Ansible last name" + - cm_create_local_users.after.1.login_id == "ansible_local_user" + - cm_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - cm_create_local_users.after.1.remote_user_authorization == true + - cm_create_local_users.after.1.reuse_limitation == 20 + - cm_create_local_users.after.1.security_domains | 
length == 1 + - cm_create_local_users.after.1.security_domains.0.name == "all" + - cm_create_local_users.after.1.security_domains.0.roles | length == 2 + - cm_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - cm_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - cm_create_local_users.after.1.time_interval_limitation == 10 + - cm_create_local_users.after.2.login_id == "ansible_local_user_2" + - cm_create_local_users.after.2.security_domains | length == 1 + - cm_create_local_users.after.2.security_domains.0.name == "all" + - cm_create_local_users.before | length == 1 + - cm_create_local_users.before.0.login_id == "admin" + - cm_create_local_users.before.0.first_name == "admin" + - cm_create_local_users.before.0.remote_user_authorization == false + - cm_create_local_users.before.0.reuse_limitation == 0 + - cm_create_local_users.before.0.security_domains | length == 1 + - cm_create_local_users.before.0.security_domains.0.name == "all" + - cm_create_local_users.before.0.security_domains.0.roles | length == 1 + - cm_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - cm_create_local_users.before.0.time_interval_limitation == 0 + - cm_create_local_users.diff == [] + - cm_create_local_users.proposed.0.email == "ansibleuser@example.com" + - cm_create_local_users.proposed.0.first_name == "Ansible first name" + - cm_create_local_users.proposed.0.last_name == "Ansible last name" + - cm_create_local_users.proposed.0.login_id == "ansible_local_user" + - cm_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - cm_create_local_users.proposed.0.remote_user_authorization == true + - cm_create_local_users.proposed.0.reuse_limitation == 20 + - cm_create_local_users.proposed.0.security_domains | length == 1 + - cm_create_local_users.proposed.0.security_domains.0.name == "all" + - cm_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - 
cm_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - cm_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - cm_create_local_users.proposed.0.time_interval_limitation == 10 + - cm_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - cm_create_local_users.proposed.1.security_domains | length == 1 + - cm_create_local_users.proposed.1.security_domains.0.name == "all" + - nm_create_local_users is changed + - nm_create_local_users.after.0.first_name == "admin" + - nm_create_local_users.after.0.remote_user_authorization == false + - nm_create_local_users.after.0.reuse_limitation == 0 + - nm_create_local_users.after.0.security_domains | length == 1 + - nm_create_local_users.after.0.security_domains.0.name == "all" + - nm_create_local_users.after.0.security_domains.0.roles | length == 1 + - nm_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - nm_create_local_users.after.0.time_interval_limitation == 0 + - nm_create_local_users.after.1.email == "ansibleuser@example.com" + - nm_create_local_users.after.1.first_name == "Ansible first name" + - nm_create_local_users.after.1.last_name == "Ansible last name" + - nm_create_local_users.after.1.login_id == "ansible_local_user" + - nm_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - nm_create_local_users.after.1.remote_user_authorization == true + - nm_create_local_users.after.1.reuse_limitation == 20 + - nm_create_local_users.after.1.security_domains | length == 1 + - nm_create_local_users.after.1.security_domains.0.name == "all" + - nm_create_local_users.after.1.security_domains.0.roles | length == 2 + - nm_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - nm_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - nm_create_local_users.after.1.time_interval_limitation == 10 + - nm_create_local_users.after.2.login_id == "ansible_local_user_2" + - 
nm_create_local_users.after.2.security_domains | length == 1 + - nm_create_local_users.after.2.security_domains.0.name == "all" + - nm_create_local_users.before | length == 1 + - nm_create_local_users.before.0.login_id == "admin" + - nm_create_local_users.before.0.first_name == "admin" + - nm_create_local_users.before.0.remote_user_authorization == false + - nm_create_local_users.before.0.reuse_limitation == 0 + - nm_create_local_users.before.0.security_domains | length == 1 + - nm_create_local_users.before.0.security_domains.0.name == "all" + - nm_create_local_users.before.0.security_domains.0.roles | length == 1 + - nm_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - nm_create_local_users.before.0.time_interval_limitation == 0 + - nm_create_local_users.diff == [] + - nm_create_local_users.proposed.0.email == "ansibleuser@example.com" + - nm_create_local_users.proposed.0.first_name == "Ansible first name" + - nm_create_local_users.proposed.0.last_name == "Ansible last name" + - nm_create_local_users.proposed.0.login_id == "ansible_local_user" + - nm_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - nm_create_local_users.proposed.0.remote_user_authorization == true + - nm_create_local_users.proposed.0.reuse_limitation == 20 + - nm_create_local_users.proposed.0.security_domains | length == 1 + - nm_create_local_users.proposed.0.security_domains.0.name == "all" + - nm_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - nm_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - nm_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - nm_create_local_users.proposed.0.time_interval_limitation == 10 + - nm_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - nm_create_local_users.proposed.1.security_domains | length == 1 + - nm_create_local_users.proposed.1.security_domains.0.name == "all" # UPDATE -- name: Update all 
ansible_local_user's attributes (check mode) +- name: Replace all ansible_local_user's attributes (check mode) cisco.nd.nd_local_user: &update_first_local_user <<: *nd_info config: @@ -72,12 +182,12 @@ remote_user_authorization: false state: replaced check_mode: true - register: cm_update_local_user + register: cm_replace_local_user -- name: Update local user (normal mode) +- name: Replace all ansible_local_user's attributes (normal mode) cisco.nd.nd_local_user: <<: *update_first_local_user - register: nm_update_local_user + register: nm_replace_local_user - name: Update all ansible_local_user_2's attributes except password cisco.nd.nd_local_user: &update_second_local_user @@ -95,12 +205,178 @@ remote_id_claim: ansible_remote_user_2 remote_user_authorization: true state: merged - register: nm_update_local_user_2 + register: nm_merge_local_user_2 - name: Update all ansible_local_user_2's attributes except password again (idempotency) cisco.nd.nd_local_user: <<: *update_second_local_user - register: nm_update_local_user_2_again + register: nm_merge_local_user_2_again + + +- name: Override local users with minimum configuration + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: admin + first_name: admin + remote_user_authorization: false + reuse_limitation: 0 + time_interval_limitation: 0 + security_domains: + - name: all + roles: + - super_admin + - email: overrideansibleuser@example.com + login_id: ansible_local_user + first_name: Overridden Ansible first name + last_name: Overriden Ansible last name + user_password: overideansibleLocalUserPassword1% + reuse_limitation: 15 + time_interval_limitation: 5 + security_domains: + - name: all + roles: + - observer + remote_id_claim: ansible_remote_user + remote_user_authorization: true + - login_id: ansible_local_user_3 + user_password: ansibleLocalUser3Password1%Test + security_domains: + - name: all + state: overridden + register: nm_override_local_users + +- name: Asserts for local users update tasks + 
ansible.builtin.assert: + that: + - cm_replace_local_user is changed + - cm_replace_local_user.after | length == 3 + - cm_replace_local_user.after.0.login_id == "ansible_local_user_2" + - cm_replace_local_user.after.0.security_domains | length == 1 + - cm_replace_local_user.after.0.security_domains.0.name == "all" + - cm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.after.1.first_name == "Updated Ansible first name" + - cm_replace_local_user.after.1.last_name == "Updated Ansible last name" + - cm_replace_local_user.after.1.login_id == "ansible_local_user" + - cm_replace_local_user.after.1.remote_id_claim == "" + - cm_replace_local_user.after.1.remote_user_authorization == false + - cm_replace_local_user.after.1.reuse_limitation == 25 + - cm_replace_local_user.after.1.security_domains | length == 1 + - cm_replace_local_user.after.1.security_domains.0.name == "all" + - cm_replace_local_user.after.1.security_domains.0.roles | length == 1 + - cm_replace_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.after.1.time_interval_limitation == 15 + - cm_replace_local_user.after.2.login_id == "admin" + - cm_replace_local_user.after.2.first_name == "admin" + - cm_replace_local_user.after.2.remote_user_authorization == false + - cm_replace_local_user.after.2.reuse_limitation == 0 + - cm_replace_local_user.after.2.security_domains | length == 1 + - cm_replace_local_user.after.2.security_domains.0.name == "all" + - cm_replace_local_user.after.2.security_domains.0.roles | length == 1 + - cm_replace_local_user.after.2.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.after.2.time_interval_limitation == 0 + - cm_replace_local_user.before | length == 3 + - cm_replace_local_user.before.2.first_name == "admin" + - cm_replace_local_user.before.2.remote_user_authorization == false + - cm_replace_local_user.before.2.reuse_limitation == 0 + - 
cm_replace_local_user.before.2.security_domains | length == 1 + - cm_replace_local_user.before.2.security_domains.0.name == "all" + - cm_replace_local_user.before.2.security_domains.0.roles | length == 1 + - cm_replace_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.before.2.time_interval_limitation == 0 + - cm_replace_local_user.before.1.email == "ansibleuser@example.com" + - cm_replace_local_user.before.1.first_name == "Ansible first name" + - cm_replace_local_user.before.1.last_name == "Ansible last name" + - cm_replace_local_user.before.1.login_id == "ansible_local_user" + - cm_replace_local_user.before.1.remote_id_claim == "ansible_remote_user" + - cm_replace_local_user.before.1.remote_user_authorization == true + - cm_replace_local_user.before.1.reuse_limitation == 20 + - cm_replace_local_user.before.1.security_domains | length == 1 + - cm_replace_local_user.before.1.security_domains.0.name == "all" + - cm_replace_local_user.before.1.security_domains.0.roles | length == 2 + - cm_replace_local_user.before.1.security_domains.0.roles.0 == "observer" + - cm_replace_local_user.before.1.security_domains.0.roles.1 == "support_engineer" + - cm_replace_local_user.before.1.time_interval_limitation == 10 + - cm_replace_local_user.before.0.login_id == "ansible_local_user_2" + - cm_replace_local_user.before.0.security_domains | length == 1 + - cm_replace_local_user.before.0.security_domains.0.name == "all" + - cm_replace_local_user.diff == [] + - cm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" + - cm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" + - cm_replace_local_user.proposed.0.login_id == "ansible_local_user" + - cm_replace_local_user.proposed.0.remote_id_claim == "" + - cm_replace_local_user.proposed.0.remote_user_authorization == false + - cm_replace_local_user.proposed.0.reuse_limitation == 
25 + - cm_replace_local_user.proposed.0.security_domains | length == 1 + - cm_replace_local_user.proposed.0.security_domains.0.name == "all" + - cm_replace_local_user.proposed.0.security_domains.0.roles | length == 1 + - cm_replace_local_user.proposed.0.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.proposed.0.time_interval_limitation == 15 + - nm_replace_local_user is changed + - nm_replace_local_user.after | length == 3 + - nm_replace_local_user.after.0.login_id == "ansible_local_user_2" + - nm_replace_local_user.after.0.security_domains | length == 1 + - nm_replace_local_user.after.0.security_domains.0.name == "all" + - nm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.after.1.first_name == "Updated Ansible first name" + - nm_replace_local_user.after.1.last_name == "Updated Ansible last name" + - nm_replace_local_user.after.1.login_id == "ansible_local_user" + - nm_replace_local_user.after.1.remote_id_claim == "" + - nm_replace_local_user.after.1.remote_user_authorization == false + - nm_replace_local_user.after.1.reuse_limitation == 25 + - nm_replace_local_user.after.1.security_domains | length == 1 + - nm_replace_local_user.after.1.security_domains.0.name == "all" + - nm_replace_local_user.after.1.security_domains.0.roles | length == 1 + - nm_replace_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - nm_replace_local_user.after.1.time_interval_limitation == 15 + - nm_replace_local_user.after.2.login_id == "admin" + - nm_replace_local_user.after.2.first_name == "admin" + - nm_replace_local_user.after.2.remote_user_authorization == false + - nm_replace_local_user.after.2.reuse_limitation == 0 + - nm_replace_local_user.after.2.security_domains | length == 1 + - nm_replace_local_user.after.2.security_domains.0.name == "all" + - nm_replace_local_user.after.2.security_domains.0.roles | length == 1 + - nm_replace_local_user.after.2.security_domains.0.roles.0 == "super_admin" + - 
nm_replace_local_user.after.2.time_interval_limitation == 0 + - nm_replace_local_user.before | length == 3 + - nm_replace_local_user.before.2.first_name == "admin" + - nm_replace_local_user.before.2.remote_user_authorization == false + - nm_replace_local_user.before.2.reuse_limitation == 0 + - nm_replace_local_user.before.2.security_domains | length == 1 + - nm_replace_local_user.before.2.security_domains.0.name == "all" + - nm_replace_local_user.before.2.security_domains.0.roles | length == 1 + - nm_replace_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - nm_replace_local_user.before.2.time_interval_limitation == 0 + - nm_replace_local_user.before.1.email == "ansibleuser@example.com" + - nm_replace_local_user.before.1.first_name == "Ansible first name" + - nm_replace_local_user.before.1.last_name == "Ansible last name" + - nm_replace_local_user.before.1.login_id == "ansible_local_user" + - nm_replace_local_user.before.1.remote_id_claim == "ansible_remote_user" + - nm_replace_local_user.before.1.remote_user_authorization == true + - nm_replace_local_user.before.1.reuse_limitation == 20 + - nm_replace_local_user.before.1.security_domains | length == 1 + - nm_replace_local_user.before.1.security_domains.0.name == "all" + - nm_replace_local_user.before.1.security_domains.0.roles | length == 2 + - nm_replace_local_user.before.1.security_domains.0.roles.0 == "observer" + - nm_replace_local_user.before.1.security_domains.0.roles.1 == "support_engineer" + - nm_replace_local_user.before.1.time_interval_limitation == 10 + - nm_replace_local_user.before.0.login_id == "ansible_local_user_2" + - nm_replace_local_user.before.0.security_domains | length == 1 + - nm_replace_local_user.before.0.security_domains.0.name == "all" + - nm_replace_local_user.diff == [] + - nm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" + - 
nm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" + - nm_replace_local_user.proposed.0.login_id == "ansible_local_user" + - nm_replace_local_user.proposed.0.remote_id_claim == "" + - nm_replace_local_user.proposed.0.remote_user_authorization == false + - nm_replace_local_user.proposed.0.reuse_limitation == 25 + - nm_replace_local_user.proposed.0.security_domains | length == 1 + - nm_replace_local_user.proposed.0.security_domains.0.name == "all" + - nm_replace_local_user.proposed.0.security_domains.0.roles | length == 1 + - nm_replace_local_user.proposed.0.security_domains.0.roles.0 == "super_admin" + - nm_replace_local_user.proposed.0.time_interval_limitation == 15 # DELETE @@ -123,6 +399,9 @@ <<: *delete_local_user register: nm_delete_local_user_again +- name: Asserts for local users deletion tasks + ansible.builtin.assert: + that: # CLEAN UP - name: Ensure local users do not exist @@ -131,4 +410,5 @@ config: - login_id: ansible_local_user - login_id: ansible_local_user_2 + - login_id: ansible_local_user_3 state: deleted From 9ce24604f7e2da3156db470f94576f9d94ed12b9 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 11:45:29 -0400 Subject: [PATCH 055/131] [ignore] Finish integration test file for nd_local_user module. Remove Generic Class inheritance from NDConfigCollection. Clean Pydantic imports.
--- plugins/module_utils/common/exceptions.py | 2 +- plugins/module_utils/models/base.py | 2 +- plugins/module_utils/nd_config_collection.py | 33 +-- plugins/module_utils/nd_output.py | 2 +- plugins/module_utils/nd_state_machine.py | 9 +- plugins/module_utils/utils.py | 2 +- plugins/modules/nd_local_user.py | 2 +- .../targets/nd_local_user/tasks/main.yml | 267 +++++++++++++++++- 8 files changed, 276 insertions(+), 43 deletions(-) diff --git a/plugins/module_utils/common/exceptions.py b/plugins/module_utils/common/exceptions.py index f0ae4400..3730e5d5 100644 --- a/plugins/module_utils/common/exceptions.py +++ b/plugins/module_utils/common/exceptions.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 79f9ec80..21fb983e 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -9,7 +9,7 @@ __metaclass__ = type from abc import ABC -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Set, Tuple, Union, Literal, Optional from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 364b8a8f..d34ca462 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -11,33 +11,30 @@ from typing import TypeVar, Generic, Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset from 
ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# Type aliases -ModelType = TypeVar("ModelType", bound=NDBaseModel) -class NDConfigCollection(Generic[ModelType]): +class NDConfigCollection: """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): + def __init__(self, model_class: NDBaseModel, items: Optional[List[NDBaseModel]] = None): """ Initialize collection. """ - self._model_class: ModelType = model_class + self._model_class: NDBaseModel = model_class # Dual storage - self._items: List[ModelType] = [] + self._items: List[NDBaseModel] = [] self._index: Dict[IdentifierKey, int] = {} if items: for item in items: self.add(item) - def _extract_key(self, item: ModelType) -> IdentifierKey: + def _extract_key(self, item: NDBaseModel) -> IdentifierKey: """ Extract identifier key from item. """ @@ -56,7 +53,7 @@ def _rebuild_index(self) -> None: # Core Operations - def add(self, item: ModelType) -> IdentifierKey: + def add(self, item: NDBaseModel) -> IdentifierKey: """ Add item to collection (O(1) operation). """ @@ -74,14 +71,14 @@ def add(self, item: ModelType) -> IdentifierKey: return key - def get(self, key: IdentifierKey) -> Optional[ModelType]: + def get(self, key: IdentifierKey) -> Optional[NDBaseModel]: """ Get item by identifier key (O(1) operation). """ index = self._index.get(key) return self._items[index] if index is not None else None - def replace(self, item: ModelType) -> bool: + def replace(self, item: NDBaseModel) -> bool: """ Replace existing item with same identifier (O(1) operation). """ @@ -97,7 +94,7 @@ def replace(self, item: ModelType) -> bool: self._items[index] = item return True - def merge(self, item: ModelType) -> ModelType: + def merge(self, item: NDBaseModel) -> NDBaseModel: """ Merge item with existing, or add if not present. 
""" @@ -129,7 +126,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) - def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: + def get_diff_config(self, new_item: NDBaseModel) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. """ @@ -147,7 +144,7 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha return "no_diff" if is_subset else "changed" - def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: + def get_diff_collection(self, other: "NDConfigCollection") -> bool: """ Check if two collections differ. """ @@ -167,7 +164,7 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: return False - def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: + def get_diff_identifiers(self, other: "NDConfigCollection") -> List[IdentifierKey]: """ Get identifiers in self but not in other. """ @@ -189,7 +186,7 @@ def keys(self) -> List[IdentifierKey]: """Get all identifier keys.""" return list(self._index.keys()) - def copy(self) -> "NDConfigCollection[ModelType]": + def copy(self) -> "NDConfigCollection": """Create deep copy of collection.""" return NDConfigCollection(model_class=self._model_class, items=deepcopy(self._items)) @@ -208,7 +205,7 @@ def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: return [item.to_payload(**kwargs) for item in self._items] @classmethod - def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": + def from_ansible_config(cls, data: List[Dict], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": """ Create collection from Ansible config. 
""" @@ -216,7 +213,7 @@ def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **k return cls(model_class=model_class, items=items) @classmethod - def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": + def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": """ Create collection from API response. """ diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py index dbfc2cd2..0e5ed6ef 100644 --- a/plugins/module_utils/nd_output.py +++ b/plugins/module_utils/nd_output.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index bd86da3c..3b6c891c 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -41,17 +41,16 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.state = self.module.params["state"] # Initialize collections - self.nd_config_collection = NDConfigCollection[self.model_class] try: response_data = self.model_orchestrator.query_all() # State of configuration objects in ND before change execution - self.before = self.nd_config_collection.from_api_response(response_data=response_data, model_class=self.model_class) + self.before = NDConfigCollection.from_api_response(response_data=response_data, model_class=self.model_class) # State of current configuration objects in ND 
during change execution self.existing = self.before.copy() # Ongoing collection of configuration objects that were changed - self.sent = self.nd_config_collection(model_class=self.model_class) + self.sent = NDConfigCollection(model_class=self.model_class) # Collection of configuration objects given by user - self.proposed = self.nd_config_collection(model_class=self.model_class) + self.proposed = NDConfigCollection(model_class=self.model_class) for config in self.module.params.get("config", []): try: # Parse config into model diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index e09bd499..76e936bb 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index d1d871fe..56e59ad5 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml index de8ad5ed..b7f205ae 100644 --- a/tests/integration/targets/nd_local_user/tasks/main.yml +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -1,5 +1,5 @@ # Test code for the ND modules -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -19,6 +19,7 @@ config: - login_id: ansible_local_user - login_id: ansible_local_user_2 + - login_id: 
ansible_local_user_3 state: deleted # CREATE @@ -217,19 +218,10 @@ cisco.nd.nd_local_user: <<: *nd_info config: - - login_id: admin - first_name: admin - remote_user_authorization: false - reuse_limitation: 0 - time_interval_limitation: 0 - security_domains: - - name: all - roles: - - super_admin - email: overrideansibleuser@example.com login_id: ansible_local_user first_name: Overridden Ansible first name - last_name: Overriden Ansible last name + last_name: Overridden Ansible last name user_password: overideansibleLocalUserPassword1% reuse_limitation: 15 time_interval_limitation: 5 @@ -239,6 +231,15 @@ - observer remote_id_claim: ansible_remote_user remote_user_authorization: true + - login_id: admin + first_name: admin + remote_user_authorization: false + reuse_limitation: 0 + time_interval_limitation: 0 + security_domains: + - name: all + roles: + - super_admin - login_id: ansible_local_user_3 user_password: ansibleLocalUser3Password1%Test security_domains: @@ -254,7 +255,7 @@ - cm_replace_local_user.after.0.login_id == "ansible_local_user_2" - cm_replace_local_user.after.0.security_domains | length == 1 - cm_replace_local_user.after.0.security_domains.0.name == "all" - - cm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.after.1.email == "updatedansibleuser@example.com" - cm_replace_local_user.after.1.first_name == "Updated Ansible first name" - cm_replace_local_user.after.1.last_name == "Updated Ansible last name" - cm_replace_local_user.after.1.login_id == "ansible_local_user" @@ -301,7 +302,7 @@ - cm_replace_local_user.before.0.security_domains | length == 1 - cm_replace_local_user.before.0.security_domains.0.name == "all" - cm_replace_local_user.diff == [] - - cm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com" - cm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" - 
cm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" - cm_replace_local_user.proposed.0.login_id == "ansible_local_user" @@ -318,7 +319,7 @@ - nm_replace_local_user.after.0.login_id == "ansible_local_user_2" - nm_replace_local_user.after.0.security_domains | length == 1 - nm_replace_local_user.after.0.security_domains.0.name == "all" - - nm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.after.1.email == "updatedansibleuser@example.com" - nm_replace_local_user.after.1.first_name == "Updated Ansible first name" - nm_replace_local_user.after.1.last_name == "Updated Ansible last name" - nm_replace_local_user.after.1.login_id == "ansible_local_user" @@ -365,7 +366,7 @@ - nm_replace_local_user.before.0.security_domains | length == 1 - nm_replace_local_user.before.0.security_domains.0.name == "all" - nm_replace_local_user.diff == [] - - nm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com" - nm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" - nm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" - nm_replace_local_user.proposed.0.login_id == "ansible_local_user" @@ -377,6 +378,161 @@ - nm_replace_local_user.proposed.0.security_domains.0.roles | length == 1 - nm_replace_local_user.proposed.0.security_domains.0.roles.0 == "super_admin" - nm_replace_local_user.proposed.0.time_interval_limitation == 15 + - nm_merge_local_user_2 is changed + - nm_merge_local_user_2.after | length == 3 + - nm_merge_local_user_2.after.0.email == "secondansibleuser@example.com" + - nm_merge_local_user_2.after.0.first_name == "Second Ansible first name" + - nm_merge_local_user_2.after.0.last_name == "Second Ansible last name" + - nm_merge_local_user_2.after.0.login_id == "ansible_local_user_2" + - nm_merge_local_user_2.after.0.remote_id_claim == "ansible_remote_user_2" + - 
nm_merge_local_user_2.after.0.remote_user_authorization == true + - nm_merge_local_user_2.after.0.reuse_limitation == 20 + - nm_merge_local_user_2.after.0.security_domains | length == 1 + - nm_merge_local_user_2.after.0.security_domains.0.name == "all" + - nm_merge_local_user_2.after.0.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.after.0.security_domains.0.roles.0 == "fabric_admin" + - nm_merge_local_user_2.after.0.time_interval_limitation == 10 + - nm_merge_local_user_2.after.1.email == "updatedansibleuser@example.com" + - nm_merge_local_user_2.after.1.first_name == "Updated Ansible first name" + - nm_merge_local_user_2.after.1.last_name == "Updated Ansible last name" + - nm_merge_local_user_2.after.1.login_id == "ansible_local_user" + - nm_merge_local_user_2.after.1.remote_user_authorization == false + - nm_merge_local_user_2.after.1.reuse_limitation == 25 + - nm_merge_local_user_2.after.1.security_domains | length == 1 + - nm_merge_local_user_2.after.1.security_domains.0.name == "all" + - nm_merge_local_user_2.after.1.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.after.1.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.after.1.time_interval_limitation == 15 + - nm_merge_local_user_2.after.2.login_id == "admin" + - nm_merge_local_user_2.after.2.first_name == "admin" + - nm_merge_local_user_2.after.2.remote_user_authorization == false + - nm_merge_local_user_2.after.2.reuse_limitation == 0 + - nm_merge_local_user_2.after.2.security_domains | length == 1 + - nm_merge_local_user_2.after.2.security_domains.0.name == "all" + - nm_merge_local_user_2.after.2.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.after.2.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.after.2.time_interval_limitation == 0 + - nm_merge_local_user_2.before | length == 3 + - nm_merge_local_user_2.before.2.first_name == "admin" + - nm_merge_local_user_2.before.2.remote_user_authorization == false + - 
nm_merge_local_user_2.before.2.reuse_limitation == 0 + - nm_merge_local_user_2.before.2.security_domains | length == 1 + - nm_merge_local_user_2.before.2.security_domains.0.name == "all" + - nm_merge_local_user_2.before.2.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.before.2.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.before.2.time_interval_limitation == 0 + - nm_merge_local_user_2.before.1.email == "updatedansibleuser@example.com" + - nm_merge_local_user_2.before.1.first_name == "Updated Ansible first name" + - nm_merge_local_user_2.before.1.last_name == "Updated Ansible last name" + - nm_merge_local_user_2.before.1.login_id == "ansible_local_user" + - nm_merge_local_user_2.before.1.remote_user_authorization == false + - nm_merge_local_user_2.before.1.reuse_limitation == 25 + - nm_merge_local_user_2.before.1.security_domains | length == 1 + - nm_merge_local_user_2.before.1.security_domains.0.name == "all" + - nm_merge_local_user_2.before.1.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.before.1.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.before.1.time_interval_limitation == 15 + - nm_merge_local_user_2.before.0.login_id == "ansible_local_user_2" + - nm_merge_local_user_2.before.0.security_domains | length == 1 + - nm_merge_local_user_2.before.0.security_domains.0.name == "all" + - nm_merge_local_user_2.diff == [] + - nm_merge_local_user_2.proposed.0.email == "secondansibleuser@example.com" + - nm_merge_local_user_2.proposed.0.first_name == "Second Ansible first name" + - nm_merge_local_user_2.proposed.0.last_name == "Second Ansible last name" + - nm_merge_local_user_2.proposed.0.login_id == "ansible_local_user_2" + - nm_merge_local_user_2.proposed.0.remote_id_claim == "ansible_remote_user_2" + - nm_merge_local_user_2.proposed.0.remote_user_authorization == true + - nm_merge_local_user_2.proposed.0.reuse_limitation == 20 + - nm_merge_local_user_2.proposed.0.security_domains | 
length == 1 + - nm_merge_local_user_2.proposed.0.security_domains.0.name == "all" + - nm_merge_local_user_2.proposed.0.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.proposed.0.security_domains.0.roles.0 == "fabric_admin" + - nm_merge_local_user_2.proposed.0.time_interval_limitation == 10 + - nm_merge_local_user_2_again is not changed + - nm_merge_local_user_2_again.after == nm_merge_local_user_2.after + - nm_merge_local_user_2_again.diff == [] + - nm_merge_local_user_2_again.proposed == nm_merge_local_user_2.proposed + - nm_override_local_users is changed + - nm_override_local_users.after | length == 3 + - nm_override_local_users.after.0.email == "overrideansibleuser@example.com" + - nm_override_local_users.after.0.first_name == "Overridden Ansible first name" + - nm_override_local_users.after.0.last_name == "Overridden Ansible last name" + - nm_override_local_users.after.0.login_id == "ansible_local_user" + - nm_override_local_users.after.0.remote_id_claim == "ansible_remote_user" + - nm_override_local_users.after.0.remote_user_authorization == true + - nm_override_local_users.after.0.reuse_limitation == 15 + - nm_override_local_users.after.0.security_domains | length == 1 + - nm_override_local_users.after.0.security_domains.0.name == "all" + - nm_override_local_users.after.0.security_domains.0.roles | length == 1 + - nm_override_local_users.after.0.security_domains.0.roles.0 == "observer" + - nm_override_local_users.after.0.time_interval_limitation == 5 + - nm_override_local_users.after.1.login_id == "admin" + - nm_override_local_users.after.1.first_name == "admin" + - nm_override_local_users.after.1.remote_user_authorization == false + - nm_override_local_users.after.1.reuse_limitation == 0 + - nm_override_local_users.after.1.security_domains | length == 1 + - nm_override_local_users.after.1.security_domains.0.name == "all" + - nm_override_local_users.after.1.security_domains.0.roles | length == 1 + - 
nm_override_local_users.after.1.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.after.1.time_interval_limitation == 0 + - nm_override_local_users.after.2.login_id == "ansible_local_user_3" + - nm_override_local_users.after.2.security_domains.0.name == "all" + - nm_override_local_users.before | length == 3 + - nm_override_local_users.before.2.first_name == "admin" + - nm_override_local_users.before.2.remote_user_authorization == false + - nm_override_local_users.before.2.reuse_limitation == 0 + - nm_override_local_users.before.2.security_domains | length == 1 + - nm_override_local_users.before.2.security_domains.0.name == "all" + - nm_override_local_users.before.2.security_domains.0.roles | length == 1 + - nm_override_local_users.before.2.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.before.2.time_interval_limitation == 0 + - nm_override_local_users.before.1.email == "updatedansibleuser@example.com" + - nm_override_local_users.before.1.first_name == "Updated Ansible first name" + - nm_override_local_users.before.1.last_name == "Updated Ansible last name" + - nm_override_local_users.before.1.login_id == "ansible_local_user" + - nm_override_local_users.before.1.remote_user_authorization == false + - nm_override_local_users.before.1.reuse_limitation == 25 + - nm_override_local_users.before.1.security_domains | length == 1 + - nm_override_local_users.before.1.security_domains.0.name == "all" + - nm_override_local_users.before.1.security_domains.0.roles | length == 1 + - nm_override_local_users.before.1.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.before.1.time_interval_limitation == 15 + - nm_override_local_users.before.0.email == "secondansibleuser@example.com" + - nm_override_local_users.before.0.first_name == "Second Ansible first name" + - nm_override_local_users.before.0.last_name == "Second Ansible last name" + - nm_override_local_users.before.0.login_id == "ansible_local_user_2" + - 
nm_override_local_users.before.0.remote_id_claim == "ansible_remote_user_2" + - nm_override_local_users.before.0.remote_user_authorization == true + - nm_override_local_users.before.0.reuse_limitation == 20 + - nm_override_local_users.before.0.security_domains | length == 1 + - nm_override_local_users.before.0.security_domains.0.name == "all" + - nm_override_local_users.before.0.security_domains.0.roles | length == 1 + - nm_override_local_users.before.0.security_domains.0.roles.0 == "fabric_admin" + - nm_override_local_users.before.0.time_interval_limitation == 10 + - nm_override_local_users.diff == [] + - nm_override_local_users.proposed.0.email == "overrideansibleuser@example.com" + - nm_override_local_users.proposed.0.first_name == "Overridden Ansible first name" + - nm_override_local_users.proposed.0.last_name == "Overridden Ansible last name" + - nm_override_local_users.proposed.0.login_id == "ansible_local_user" + - nm_override_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - nm_override_local_users.proposed.0.remote_user_authorization == true + - nm_override_local_users.proposed.0.reuse_limitation == 15 + - nm_override_local_users.proposed.0.security_domains | length == 1 + - nm_override_local_users.proposed.0.security_domains.0.name == "all" + - nm_override_local_users.proposed.0.security_domains.0.roles | length == 1 + - nm_override_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - nm_override_local_users.proposed.0.time_interval_limitation == 5 + - nm_override_local_users.proposed.1.login_id == "admin" + - nm_override_local_users.proposed.1.first_name == "admin" + - nm_override_local_users.proposed.1.remote_user_authorization == false + - nm_override_local_users.proposed.1.reuse_limitation == 0 + - nm_override_local_users.proposed.1.security_domains | length == 1 + - nm_override_local_users.proposed.1.security_domains.0.name == "all" + - nm_override_local_users.proposed.1.security_domains.0.roles | length == 1 + - 
nm_override_local_users.proposed.1.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.proposed.1.time_interval_limitation == 0 + - nm_override_local_users.proposed.2.login_id == "ansible_local_user_3" + - nm_override_local_users.proposed.2.security_domains.0.name == "all" # DELETE @@ -402,6 +558,87 @@ - name: Asserts for local users deletion tasks ansible.builtin.assert: that: + - cm_delete_local_user is changed + - cm_delete_local_user.after | length == 2 + - cm_delete_local_user.after.0.login_id == "ansible_local_user_3" + - cm_delete_local_user.after.0.security_domains.0.name == "all" + - cm_delete_local_user.after.1.login_id == "admin" + - cm_delete_local_user.after.1.first_name == "admin" + - cm_delete_local_user.after.1.remote_user_authorization == false + - cm_delete_local_user.after.1.reuse_limitation == 0 + - cm_delete_local_user.after.1.security_domains | length == 1 + - cm_delete_local_user.after.1.security_domains.0.name == "all" + - cm_delete_local_user.after.1.security_domains.0.roles | length == 1 + - cm_delete_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - cm_delete_local_user.after.1.time_interval_limitation == 0 + - cm_delete_local_user.before | length == 3 + - cm_delete_local_user.before.0.email == "overrideansibleuser@example.com" + - cm_delete_local_user.before.0.first_name == "Overridden Ansible first name" + - cm_delete_local_user.before.0.last_name == "Overridden Ansible last name" + - cm_delete_local_user.before.0.login_id == "ansible_local_user" + - cm_delete_local_user.before.0.remote_id_claim == "ansible_remote_user" + - cm_delete_local_user.before.0.remote_user_authorization == true + - cm_delete_local_user.before.0.reuse_limitation == 15 + - cm_delete_local_user.before.0.security_domains | length == 1 + - cm_delete_local_user.before.0.security_domains.0.name == "all" + - cm_delete_local_user.before.0.security_domains.0.roles | length == 1 + - 
cm_delete_local_user.before.0.security_domains.0.roles.0 == "observer" + - cm_delete_local_user.before.0.time_interval_limitation == 5 + - cm_delete_local_user.before.1.login_id == "ansible_local_user_3" + - cm_delete_local_user.before.1.security_domains.0.name == "all" + - cm_delete_local_user.before.2.first_name == "admin" + - cm_delete_local_user.before.2.remote_user_authorization == false + - cm_delete_local_user.before.2.reuse_limitation == 0 + - cm_delete_local_user.before.2.security_domains | length == 1 + - cm_delete_local_user.before.2.security_domains.0.name == "all" + - cm_delete_local_user.before.2.security_domains.0.roles | length == 1 + - cm_delete_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - cm_delete_local_user.before.2.time_interval_limitation == 0 + - cm_delete_local_user.diff == [] + - cm_delete_local_user.proposed.0.login_id == "ansible_local_user" + - nm_delete_local_user is changed + - nm_delete_local_user.after | length == 2 + - nm_delete_local_user.after.0.login_id == "ansible_local_user_3" + - nm_delete_local_user.after.0.security_domains.0.name == "all" + - nm_delete_local_user.after.1.login_id == "admin" + - nm_delete_local_user.after.1.first_name == "admin" + - nm_delete_local_user.after.1.remote_user_authorization == false + - nm_delete_local_user.after.1.reuse_limitation == 0 + - nm_delete_local_user.after.1.security_domains | length == 1 + - nm_delete_local_user.after.1.security_domains.0.name == "all" + - nm_delete_local_user.after.1.security_domains.0.roles | length == 1 + - nm_delete_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - nm_delete_local_user.after.1.time_interval_limitation == 0 + - nm_delete_local_user.before | length == 3 + - nm_delete_local_user.before.0.email == "overrideansibleuser@example.com" + - nm_delete_local_user.before.0.first_name == "Overridden Ansible first name" + - nm_delete_local_user.before.0.last_name == "Overridden Ansible last name" + - 
nm_delete_local_user.before.0.login_id == "ansible_local_user" + - nm_delete_local_user.before.0.remote_id_claim == "ansible_remote_user" + - nm_delete_local_user.before.0.remote_user_authorization == true + - nm_delete_local_user.before.0.reuse_limitation == 15 + - nm_delete_local_user.before.0.security_domains | length == 1 + - nm_delete_local_user.before.0.security_domains.0.name == "all" + - nm_delete_local_user.before.0.security_domains.0.roles | length == 1 + - nm_delete_local_user.before.0.security_domains.0.roles.0 == "observer" + - nm_delete_local_user.before.0.time_interval_limitation == 5 + - nm_delete_local_user.before.1.login_id == "ansible_local_user_3" + - nm_delete_local_user.before.1.security_domains.0.name == "all" + - nm_delete_local_user.before.2.first_name == "admin" + - nm_delete_local_user.before.2.remote_user_authorization == false + - nm_delete_local_user.before.2.reuse_limitation == 0 + - nm_delete_local_user.before.2.security_domains | length == 1 + - nm_delete_local_user.before.2.security_domains.0.name == "all" + - nm_delete_local_user.before.2.security_domains.0.roles | length == 1 + - nm_delete_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - nm_delete_local_user.before.2.time_interval_limitation == 0 + - nm_delete_local_user.diff == [] + - nm_delete_local_user.proposed.0.login_id == "ansible_local_user" + - nm_delete_local_user_again is not changed + - nm_delete_local_user_again.after == nm_delete_local_user.after + - nm_delete_local_user_again.before == nm_delete_local_user.after + - nm_delete_local_user_again.diff == [] + - nm_delete_local_user_again.proposed == nm_delete_local_user.proposed # CLEAN UP - name: Ensure local users do not exist From 89843aaab2cb7770fc482a0ff084f700a543f034 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 12:33:23 -0400 Subject: [PATCH 056/131] [ignore] Fix sanity issues by enhancing pydantic_compat.py. Fix Black formatting. 
--- plugins/module_utils/models/local_user.py | 1 - plugins/module_utils/nd_config_collection.py | 3 +-- plugins/module_utils/pydantic_compat.py | 14 ++++++++++++++ 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 0320d3c1..38f2b5d2 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -23,7 +23,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping - USER_ROLES_MAPPING = NDConstantMapping( { "fabric_admin": "fabric-admin", diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index d34ca462..fa574ca2 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -8,13 +8,12 @@ __metaclass__ = type -from typing import TypeVar, Generic, Optional, List, Dict, Any, Literal +from typing import Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - class NDConfigCollection: """ Nexus Dashboard configuration collection for NDBaseModel instances. 
diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index 4456018a..2596d852 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -192,6 +192,20 @@ def decorator(func): return decorator + # Fallback: FieldSerializationInfo placeholder class that does nothing + class FieldSerializationInfo: + """Pydantic FieldSerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + + # Fallback: SerializationInfo placeholder class that does nothing + class SerializationInfo: + """Pydantic SerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + else: HAS_PYDANTIC = True # pylint: disable=invalid-name PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name From 790a26bcc8b3e3e8ad2ef77f545f8c0b05feb7c4 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 12:48:36 -0400 Subject: [PATCH 057/131] [ignore] Remove all TODO comments. 
--- plugins/module_utils/endpoints/base.py | 4 +--- plugins/module_utils/endpoints/v1/infra_aaa_local_users.py | 1 - plugins/module_utils/nd_config_collection.py | 1 - plugins/module_utils/nd_state_machine.py | 2 -- plugins/module_utils/orchestrators/base.py | 2 -- plugins/module_utils/utils.py | 1 - plugins/modules/nd_local_user.py | 3 --- 7 files changed, 1 insertion(+), 13 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index e8fa710e..87b04983 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -134,9 +134,7 @@ def verb(self) -> HttpVerbEnum: None """ - - # TODO: Maybe to be modifed to be more Pydantic (low priority) - # TODO: Maybe change function's name (low priority) + # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index d1013e24..9235afb6 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -31,7 +31,6 @@ class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): /api/v1/infra/aaa/localUsers endpoint. 
""" - # TODO: Remove it base_path: Final = NDBasePath.nd_infra_aaa("localUsers") @property diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index fa574ca2..abcfc0f7 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -42,7 +42,6 @@ def _extract_key(self, item: NDBaseModel) -> IdentifierKey: except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e - # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" self._index.clear() diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 3b6c891c..3840b360 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -27,7 +27,6 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ Initialize the Network Resource Module. 
""" - # TODO: Revisit Module initialization and configuration with rest_send self.module = module self.nd_module = NDModule(self.module) @@ -37,7 +36,6 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class - # TODO: Revisit these class variables when udpating Module intialization and configuration (low priority) self.state = self.module.params["state"] # Initialize collections diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 1a8b4f10..ddcb7569 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -34,11 +34,9 @@ class NDBaseOrchestrator(BaseModel): query_all_endpoint: Type[NDBaseEndpoint] # NOTE: Module Field is always required - # TODO: Replace it with future sender (low priority) sender: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.create_endpoint() diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 76e936bb..2e62c6eb 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -56,7 +56,6 @@ def issubset(subset: Any, superset: Any) -> bool: return True -# TODO: Might not necessary with Pydantic validation and serialization built-in methods (see models/local_user) def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 56e59ad5..f5efea03 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -199,9 +199,6 @@ def main(): ) # Manage state - # TODO: return module output class object: - # output = nd_state_machine.manage_state() - # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) From ceebe4cea76c9b92697f13dbc0131ed8141d48f5 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 13:47:23 -0400 Subject: [PATCH 058/131] [ignore] Update endpoints to match latest nd42_integration branch. Update orchestrators accordingly. 
--- .../endpoints/v1/infra/aaa_local_users.py | 209 +++++++++ .../endpoints/v1/infra_aaa_local_users.py | 178 ------- plugins/module_utils/orchestrators/base.py | 14 +- .../module_utils/orchestrators/local_user.py | 24 +- ..._endpoints_api_v1_infra_aaa_local_users.py | 437 ++++++++++++++++++ 5 files changed, 665 insertions(+), 197 deletions(-) create mode 100644 plugins/module_utils/endpoints/v1/infra/aaa_local_users.py delete mode 100644 plugins/module_utils/endpoints/v1/infra_aaa_local_users.py create mode 100644 tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py diff --git a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py new file mode 100644 index 00000000..925c5548 --- /dev/null +++ b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py @@ -0,0 +1,209 @@ +# Copyright: (c) 2026, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +ND Infra AAA Local Users endpoint models. + +This module contains endpoint definitions for AAA Local Users operations in the ND Infra API. 
+""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Literal + +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import Field +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.base_path import BasePath + +from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey + + +class _EpInfraAaaLocalUsersBase(LoginIdMixin, NDEndpointBaseModel): + """ + Base class for ND Infra AAA Local Users endpoints. + + Provides common functionality for all HTTP methods on the /api/v1/infra/aaa/localUsers endpoint. + """ + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path. + + ## Returns + + - Complete endpoint path string, optionally including login_id + """ + if self.login_id is not None: + return BasePath.path("aaa", "localUsers", self.login_id) + return BasePath.path("aaa", "localUsers") + + def set_identifiers(self, identifier: IdentifierKey = None): + self.login_id = identifier + + +class EpInfraAaaLocalUsersGet(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users GET Endpoint + + ## Description + + Endpoint to retrieve local users from the ND Infra AAA service. + Optionally retrieve a specific local user by login_id. 
+ + ## Path + + - /api/v1/infra/aaa/localUsers + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - GET + + ## Usage + + ```python + # Get all local users + request = EpApiV1InfraAaaLocalUsersGet() + path = request.path + verb = request.verb + + # Get specific local user + request = EpApiV1InfraAaaLocalUsersGet() + request.login_id = "admin" + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersGet"] = Field(default="EpInfraAaaLocalUsersGet", frozen=True, description="Class name for backward compatibility") + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.GET + + +class EpInfraAaaLocalUsersPost(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users POST Endpoint + + ## Description + + Endpoint to create a local user in the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers + + ## Verb + + - POST + + ## Usage + + ```python + request = EpApiV1InfraAaaLocalUsersPost() + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersPost"] = Field( + default="EpInfraAaaLocalUsersPost", frozen=True, description="Class name for backward compatibility" + ) + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.POST + + +class EpInfraAaaLocalUsersPut(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users PUT Endpoint + + ## Description + + Endpoint to update a local user in the ND Infra AAA service. 
+ + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - PUT + + ## Usage + + ```python + request = EpApiV1InfraAaaLocalUsersPut() + request.login_id = "admin" + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersPut"] = Field(default="EpInfraAaaLocalUsersPut", frozen=True, description="Class name for backward compatibility") + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.PUT + + +class EpInfraAaaLocalUsersDelete(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users DELETE Endpoint + + ## Description + + Endpoint to delete a local user from the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - DELETE + + ## Usage + + ```python + request = EpApiV1InfraAaaLocalUsersDelete() + request.login_id = "admin" + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersDelete"] = Field( + default="EpInfraAaaLocalUsersDelete", frozen=True, description="Class name for backward compatibility" + ) + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.DELETE diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py deleted file mode 100644 index 9235afb6..00000000 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@allenrobel) -# Copyright: (c) 2026, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -ND Infra AAA LocalUsers endpoint models. - -This module contains endpoint definitions for LocalUsers-related operations -in the ND Infra AAA API. 
-""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -from typing import Literal, Final -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import Field -from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - - -class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): - """ - Base class for ND Infra AAA Local Users endpoints. - - Provides common functionality for all HTTP methods on the - /api/v1/infra/aaa/localUsers endpoint. - """ - - base_path: Final = NDBasePath.nd_infra_aaa("localUsers") - - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path. - - ## Returns - - - Complete endpoint path string, optionally including login_id - """ - if self.login_id is not None: - return NDBasePath.nd_infra_aaa("localUsers", self.login_id) - return self.base_path - - def set_identifiers(self, identifier: IdentifierKey = None): - self.login_id = identifier - - -class V1InfraAaaLocalUsersGet(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users GET Endpoint - - ## Description - - Endpoint to retrieve local users from the ND Infra AAA service. - Optionally retrieve a specific local user by login_id. 
- - ## Path - - - /api/v1/infra/aaa/localUsers - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - GET - """ - - class_name: Literal["V1InfraAaaLocalUsersGet"] = Field( - default="V1InfraAaaLocalUsersGet", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.GET - - -class V1InfraAaaLocalUsersPost(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users POST Endpoint - - ## Description - - Endpoint to create a local user in the ND Infra AAA service. - - ## Path - - - /api/v1/infra/aaa/localUsers - - ## Verb - - - POST - """ - - class_name: Literal["V1InfraAaaLocalUsersPost"] = Field( - default="V1InfraAaaLocalUsersPost", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.POST - - -class V1InfraAaaLocalUsersPut(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users PUT Endpoint - - ## Description - - Endpoint to update a local user in the ND Infra AAA service. - - ## Path - - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - PUT - """ - - class_name: Literal["V1InfraAaaLocalUsersPut"] = Field( - default="V1InfraAaaLocalUsersPut", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.PUT - - -class V1InfraAaaLocalUsersDelete(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users DELETE Endpoint - - ## Description - - Endpoint to delete a local user from the ND Infra AAA service. 
- - ## Path - - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - DELETE - """ - - class_name: Literal["V1InfraAaaLocalUsersDelete"] = Field( - default="V1InfraAaaLocalUsersDelete", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.DELETE diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index ddcb7569..651a9d30 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,7 +12,7 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType @@ -27,11 +27,11 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - create_endpoint: Type[NDBaseEndpoint] - update_endpoint: Type[NDBaseEndpoint] - delete_endpoint: Type[NDBaseEndpoint] - query_one_endpoint: Type[NDBaseEndpoint] - query_all_endpoint: Type[NDBaseEndpoint] + create_endpoint: Type[NDEndpointBaseModel] + update_endpoint: Type[NDEndpointBaseModel] + delete_endpoint: Type[NDEndpointBaseModel] + query_one_endpoint: Type[NDEndpointBaseModel] + query_all_endpoint: Type[NDEndpointBaseModel] # NOTE: Module Field is always required sender: NDModule @@ -70,7 +70,7 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: 
try: - result = self.sender.query_obj(self.query_all_endpoint.path) + result = self.sender.query_obj(self.query_all_endpoint().path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 5e52a00b..db7bbfdc 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -12,31 +12,31 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( - V1InfraAaaLocalUsersPost, - V1InfraAaaLocalUsersPut, - V1InfraAaaLocalUsersDelete, - V1InfraAaaLocalUsersGet, +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( + EpInfraAaaLocalUsersPost, + EpInfraAaaLocalUsersPut, + EpInfraAaaLocalUsersDelete, + EpInfraAaaLocalUsersGet, ) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet + create_endpoint: Type[NDEndpointBaseModel] = 
EpInfraAaaLocalUsersPost + update_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPut + delete_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersDelete + query_one_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet + query_all_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.sender.query_obj(self.query_all_endpoint.base_path) + result = self.sender.query_obj(self.query_all_endpoint().path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py b/tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py new file mode 100644 index 00000000..71cfd9b6 --- /dev/null +++ b/tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py @@ -0,0 +1,437 @@ +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" +Unit tests for infra_aaa_local_users.py + +Tests the ND Infra AAA endpoint classes +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import pytest # pylint: disable=unused-import +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( + EpInfraAaaLocalUsersDelete, + EpInfraAaaLocalUsersGet, + EpInfraAaaLocalUsersPost, + EpInfraAaaLocalUsersPut, +) +from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum +from ansible_collections.cisco.nd.tests.unit.module_utils.common_utils import ( + does_not_raise, +) + +# ============================================================================= +# Test: EpInfraAaaLocalUsersGet +# 
============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00010(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet basic instantiation + + ## Test + + - Instance can be created + - class_name is set correctly + - verb is GET + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + - EpInfraAaaLocalUsersGet.verb + - EpInfraAaaLocalUsersGet.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + assert instance.class_name == "EpInfraAaaLocalUsersGet" + assert instance.verb == HttpVerbEnum.GET + + +def test_endpoints_api_v1_infra_aaa_00020(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet path without login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers" when login_id is None + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers" + + +def test_endpoints_api_v1_infra_aaa_00030(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.path + - EpInfraAaaLocalUsersGet.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +def test_endpoints_api_v1_infra_aaa_00040(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet login_id can be set at instantiation + + ## Test + + - login_id can be provided during instantiation + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet(login_id="testuser") + assert instance.login_id == "testuser" + assert instance.path == "/api/v1/infra/aaa/localUsers/testuser" + + +# 
============================================================================= +# Test: EpInfraAaaLocalUsersPost +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00100(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPost basic instantiation + + ## Test + + - Instance can be created + - class_name is set correctly + - verb is POST + + ## Classes and Methods + + - EpInfraAaaLocalUsersPost.__init__() + - EpInfraAaaLocalUsersPost.verb + - EpInfraAaaLocalUsersPost.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPost() + assert instance.class_name == "EpInfraAaaLocalUsersPost" + assert instance.verb == HttpVerbEnum.POST + + +def test_endpoints_api_v1_infra_aaa_00110(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPost path + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers" for POST + + ## Classes and Methods + + - EpInfraAaaLocalUsersPost.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPost() + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers" + + +def test_endpoints_api_v1_infra_aaa_00120(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPost path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersPost.path + - EpInfraAaaLocalUsersPost.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPost() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +# ============================================================================= +# Test: EpInfraAaaLocalUsersPut +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00200(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPut basic instantiation + + ## Test + + - Instance can be created + - class_name is set 
correctly + - verb is PUT + + ## Classes and Methods + + - EpInfraAaaLocalUsersPut.__init__() + - EpInfraAaaLocalUsersPut.verb + - EpInfraAaaLocalUsersPut.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPut() + assert instance.class_name == "EpInfraAaaLocalUsersPut" + assert instance.verb == HttpVerbEnum.PUT + + +def test_endpoints_api_v1_infra_aaa_00210(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPut path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersPut.path + - EpInfraAaaLocalUsersPut.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPut() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +def test_endpoints_api_v1_infra_aaa_00220(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPut with complex login_id + + ## Test + + - login_id with special characters is handled correctly + + ## Classes and Methods + + - EpInfraAaaLocalUsersPut.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPut(login_id="user-name_123") + assert instance.path == "/api/v1/infra/aaa/localUsers/user-name_123" + + +# ============================================================================= +# Test: EpInfraAaaLocalUsersDelete +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00300(): + """ + # Summary + + Verify EpInfraAaaLocalUsersDelete basic instantiation + + ## Test + + - Instance can be created + - class_name is set correctly + - verb is DELETE + + ## Classes and Methods + + - EpInfraAaaLocalUsersDelete.__init__() + - EpInfraAaaLocalUsersDelete.verb + - EpInfraAaaLocalUsersDelete.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersDelete() + assert instance.class_name == "EpInfraAaaLocalUsersDelete" + assert instance.verb == 
HttpVerbEnum.DELETE + + +def test_endpoints_api_v1_infra_aaa_00310(): + """ + # Summary + + Verify EpInfraAaaLocalUsersDelete path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersDelete.path + - EpInfraAaaLocalUsersDelete.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersDelete() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +def test_endpoints_api_v1_infra_aaa_00320(): + """ + # Summary + + Verify EpInfraAaaLocalUsersDelete without login_id + + ## Test + + - path returns base path when login_id is None + + ## Classes and Methods + + - EpInfraAaaLocalUsersDelete.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersDelete() + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers" + + +# ============================================================================= +# Test: All HTTP methods on same endpoint +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00400(): + """ + # Summary + + Verify all HTTP methods work correctly on same resource + + ## Test + + - GET, POST, PUT, DELETE all return correct paths for same login_id + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet + - EpInfraAaaLocalUsersPost + - EpInfraAaaLocalUsersPut + - EpInfraAaaLocalUsersDelete + """ + login_id = "testuser" + + with does_not_raise(): + get_ep = EpInfraAaaLocalUsersGet(login_id=login_id) + post_ep = EpInfraAaaLocalUsersPost(login_id=login_id) + put_ep = EpInfraAaaLocalUsersPut(login_id=login_id) + delete_ep = EpInfraAaaLocalUsersDelete(login_id=login_id) + + # All should have same path when login_id is set + expected_path = "/api/v1/infra/aaa/localUsers/testuser" + assert get_ep.path == expected_path + assert post_ep.path == expected_path + assert put_ep.path == expected_path + 
assert delete_ep.path == expected_path + + # But different verbs + assert get_ep.verb == HttpVerbEnum.GET + assert post_ep.verb == HttpVerbEnum.POST + assert put_ep.verb == HttpVerbEnum.PUT + assert delete_ep.verb == HttpVerbEnum.DELETE + + +# ============================================================================= +# Test: Pydantic validation +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00500(): + """ + # Summary + + Verify Pydantic validation for login_id + + ## Test + + - Empty string is rejected for login_id (min_length=1) + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + """ + with pytest.raises(ValueError): + EpInfraAaaLocalUsersGet(login_id="") + + +def test_endpoints_api_v1_infra_aaa_00510(): + """ + # Summary + + Verify login_id can be None + + ## Test + + - login_id accepts None as valid value + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet(login_id=None) + assert instance.login_id is None + + +def test_endpoints_api_v1_infra_aaa_00520(): + """ + # Summary + + Verify login_id can be modified after instantiation + + ## Test + + - login_id can be changed after object creation + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + assert instance.login_id is None + instance.login_id = "newuser" + assert instance.login_id == "newuser" + assert instance.path == "/api/v1/infra/aaa/localUsers/newuser" From e4c42b3aa40ad3d4b7bc53b06efed1fabfe3f727 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 14:33:06 -0400 Subject: [PATCH 059/131] [ignore] Update pydantic_compat.py to support extra Pydantic methods and classes. 
--- .../module_utils/common/pydantic_compat.py | 57 ++++- plugins/module_utils/endpoints/base.py | 1 - .../endpoints/v1/infra/aaa_local_users.py | 12 +- plugins/module_utils/models/base.py | 2 +- plugins/module_utils/models/local_user.py | 2 +- plugins/module_utils/nd_state_machine.py | 2 +- plugins/module_utils/orchestrators/base.py | 5 +- .../module_utils/orchestrators/local_user.py | 3 +- plugins/module_utils/pydantic_compat.py | 238 ------------------ .../module_utils/endpoints/test_base_model.py | 5 - 10 files changed, 61 insertions(+), 266 deletions(-) delete mode 100644 plugins/module_utils/pydantic_compat.py diff --git a/plugins/module_utils/common/pydantic_compat.py b/plugins/module_utils/common/pydantic_compat.py index e1550a18..b26559d2 100644 --- a/plugins/module_utils/common/pydantic_compat.py +++ b/plugins/module_utils/common/pydantic_compat.py @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -34,10 +33,6 @@ # fmt: on # isort: on -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - import traceback from typing import TYPE_CHECKING, Any, Callable, Union @@ -51,11 +46,16 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, + model_serializer, field_validator, model_validator, validator, + computed_field, + FieldSerializationInfo, + SerializationInfo, ) HAS_PYDANTIC = True # pylint: disable=invalid-name @@ -71,11 +71,16 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, + model_serializer, field_validator, model_validator, validator, + computed_field, + FieldSerializationInfo, + SerializationInfo, ) except ImportError: HAS_PYDANTIC = False # pylint: disable=invalid-name @@ -127,6 +132,15 @@ def decorator(func): return decorator + # Fallback: 
model_serializer decorator that does nothing + def model_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + # Fallback: field_validator decorator that does nothing def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name """Pydantic field_validator fallback when pydantic is not available.""" @@ -136,6 +150,15 @@ def decorator(func): return decorator + # Fallback: computed_field decorator that does nothing + def computed_field(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic computed_field fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + # Fallback: AfterValidator that returns the function unchanged def AfterValidator(func): # pylint: disable=invalid-name """Pydantic AfterValidator fallback when pydantic is not available.""" @@ -152,6 +175,9 @@ def BeforeValidator(func): # pylint: disable=invalid-name # Fallback: StrictBool StrictBool = bool + # Fallback: SecretStr + SecretStr = str + # Fallback: ValidationError class ValidationError(Exception): """ @@ -183,6 +209,20 @@ def decorator(func): return decorator + # Fallback: FieldSerializationInfo placeholder class that does nothing + class FieldSerializationInfo: + """Pydantic FieldSerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + + # Fallback: SerializationInfo placeholder class that does nothing + class SerializationInfo: + """Pydantic SerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + else: HAS_PYDANTIC = True # pylint: disable=invalid-name PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name @@ -234,10 +274,15 @@ def main(): "PYDANTIC_IMPORT_ERROR", "PydanticExperimentalWarning", "StrictBool", + "SecretStr", "ValidationError", 
"field_serializer", + "model_serializer", "field_validator", "model_validator", "require_pydantic", "validator", + "computed_field", + "FieldSerializationInfo", + "SerializationInfo", ] diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index 87b04983..e122b1f7 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -136,6 +136,5 @@ def verb(self) -> HttpVerbEnum: """ # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration - @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py index 925c5548..26660622 100644 --- a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py @@ -10,15 +10,7 @@ from __future__ import absolute_import, annotations, division, print_function -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Literal - +from typing import Literal from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import Field from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin @@ -49,7 +41,7 @@ def path(self) -> str: if self.login_id is not None: return BasePath.path("aaa", "localUsers", self.login_id) return BasePath.path("aaa", "localUsers") - + def set_identifiers(self, identifier: IdentifierKey = None): self.login_id = identifier diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 21fb983e..07b6ee28 100644 --- a/plugins/module_utils/models/base.py +++ 
b/plugins/module_utils/models/base.py @@ -9,7 +9,7 @@ __metaclass__ = type from abc import ABC -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Set, Tuple, Union, Literal, Optional from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 38f2b5d2..a47a4a0a 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import List, Dict, Any, Optional, ClassVar, Literal -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ( +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( Field, SecretStr, model_serializer, diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 3840b360..efed3517 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import Type -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 651a9d30..1f4e3e69 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from 
ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule @@ -70,7 +70,8 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: - result = self.sender.query_obj(self.query_all_endpoint().path) + api_endpoint = self.query_all_endpoint() + result = self.sender.query_obj(api_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index db7bbfdc..689ba9dc 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -36,7 +36,8 @@ def query_all(self) -> ResponseType: Custom query_all action to extract 'localusers' from response. """ try: - result = self.sender.query_obj(self.query_all_endpoint().path) + api_endpoint = self.query_all_endpoint() + result = self.sender.query_obj(api_endpoint.path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py deleted file mode 100644 index 2596d852..00000000 --- a/plugins/module_utils/pydantic_compat.py +++ /dev/null @@ -1,238 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@arobel) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -# pylint: disable=too-few-public-methods -""" -Pydantic compatibility layer. 
- -This module provides a single location for Pydantic imports with fallback -implementations when Pydantic is not available. This ensures consistent -behavior across all modules and follows the DRY principle. -""" - -from __future__ import absolute_import, annotations, division, print_function - -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -import traceback -from typing import TYPE_CHECKING, Any, Callable, Union - -if TYPE_CHECKING: - # Type checkers always see the real Pydantic types - from pydantic import ( - AfterValidator, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - PydanticExperimentalWarning, - StrictBool, - SecretStr, - ValidationError, - field_serializer, - model_serializer, - field_validator, - model_validator, - validator, - computed_field, - FieldSerializationInfo, - SerializationInfo, - ) -else: - # Runtime: try to import, with fallback - try: - from pydantic import ( - AfterValidator, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - PydanticExperimentalWarning, - StrictBool, - SecretStr, - ValidationError, - field_serializer, - model_serializer, - field_validator, - model_validator, - validator, - computed_field, - FieldSerializationInfo, - SerializationInfo, - ) - except ImportError: - HAS_PYDANTIC = False # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR: Union[str, None] = traceback.format_exc() # pylint: disable=invalid-name - - # Fallback: Minimal BaseModel replacement - class BaseModel: - """Fallback BaseModel when pydantic is not available.""" - - model_config = {"validate_assignment": False, "use_enum_values": False} - - def __init__(self, **kwargs): - """Accept keyword arguments and set them as attributes.""" - for key, value in kwargs.items(): - setattr(self, key, value) - - def model_dump(self, exclude_none: bool = False, exclude_defaults: bool = False) -> dict: # pylint: disable=unused-argument - """Return a dictionary of field names and values. 
- - Args: - exclude_none: If True, exclude fields with None values - exclude_defaults: Accepted for API compatibility but not implemented in fallback - """ - result = {} - for key, value in self.__dict__.items(): - if exclude_none and value is None: - continue - result[key] = value - return result - - # Fallback: ConfigDict that does nothing - def ConfigDict(**kwargs) -> dict: # pylint: disable=unused-argument,invalid-name - """Pydantic ConfigDict fallback when pydantic is not available.""" - return kwargs - - # Fallback: Field that does nothing - def Field(**kwargs) -> Any: # pylint: disable=unused-argument,invalid-name - """Pydantic Field fallback when pydantic is not available.""" - if "default_factory" in kwargs: - return kwargs["default_factory"]() - return kwargs.get("default") - - # Fallback: field_serializer decorator that does nothing - def field_serializer(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic field_serializer fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: model_serializer decorator that does nothing - def model_serializer(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic model_serializer fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: field_validator decorator that does nothing - def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name - """Pydantic field_validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: computed_field decorator that does nothing - def computed_field(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic computed_field fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: AfterValidator that returns the function unchanged - def AfterValidator(func): 
# pylint: disable=invalid-name - """Pydantic AfterValidator fallback when pydantic is not available.""" - return func - - # Fallback: BeforeValidator that returns the function unchanged - def BeforeValidator(func): # pylint: disable=invalid-name - """Pydantic BeforeValidator fallback when pydantic is not available.""" - return func - - # Fallback: PydanticExperimentalWarning - PydanticExperimentalWarning = Warning - - # Fallback: StrictBool - StrictBool = bool - - # Fallback: SecretStr - SecretStr = str - - # Fallback: ValidationError - class ValidationError(Exception): - """ - Pydantic ValidationError fallback when pydantic is not available. - """ - - def __init__(self, message="A custom error occurred."): - self.message = message - super().__init__(self.message) - - def __str__(self): - return f"ValidationError: {self.message}" - - # Fallback: model_validator decorator that does nothing - def model_validator(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic model_validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: validator decorator that does nothing - def validator(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: FieldSerializationInfo placeholder class that does nothing - class FieldSerializationInfo: - """Pydantic FieldSerializationInfo fallback when pydantic is not available.""" - - def __init__(self, **kwargs): - pass - - # Fallback: SerializationInfo placeholder class that does nothing - class SerializationInfo: - """Pydantic SerializationInfo fallback when pydantic is not available.""" - - def __init__(self, **kwargs): - pass - - else: - HAS_PYDANTIC = True # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name - -# Set HAS_PYDANTIC for when TYPE_CHECKING is True -if TYPE_CHECKING: 
- HAS_PYDANTIC = True # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name - -__all__ = [ - "AfterValidator", - "BaseModel", - "BeforeValidator", - "ConfigDict", - "Field", - "HAS_PYDANTIC", - "PYDANTIC_IMPORT_ERROR", - "PydanticExperimentalWarning", - "StrictBool", - "SecretStr", - "ValidationError", - "field_serializer", - "model_serializer", - "field_validator", - "model_validator", - "validator", - "computed_field", - "FieldSerializationInfo", - "SerializationInfo", -] diff --git a/tests/unit/module_utils/endpoints/test_base_model.py b/tests/unit/module_utils/endpoints/test_base_model.py index e2db13be..b07ed4ce 100644 --- a/tests/unit/module_utils/endpoints/test_base_model.py +++ b/tests/unit/module_utils/endpoints/test_base_model.py @@ -102,7 +102,6 @@ def test_base_model_00200(): with pytest.raises(TypeError, match=match): class _BadEndpoint(NDEndpointBaseModel): - @property def path(self) -> str: return "/api/v1/test/bad" @@ -135,7 +134,6 @@ def test_base_model_00300(): """ class _MiddleABC(NDEndpointBaseModel, ABC): - @property @abstractmethod def extra(self) -> str: @@ -185,7 +183,6 @@ def test_base_model_00310(): """ class _MiddleABC2(NDEndpointBaseModel, ABC): - @property @abstractmethod def extra(self) -> str: @@ -195,7 +192,6 @@ def extra(self) -> str: with pytest.raises(TypeError, match=match): class _BadConcreteFromMiddle(_MiddleABC2): - @property def path(self) -> str: return "/api/v1/test/bad-middle" @@ -232,7 +228,6 @@ def test_base_model_00400(): with pytest.raises(TypeError, match=r'Literal\["_ExampleEndpoint"\]') as exc_info: class _ExampleEndpoint(NDEndpointBaseModel): - @property def path(self) -> str: return "/api/v1/test/example" From fb135c795b48d678f90ed703bd6f9050a682a852 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Thu, 12 Mar 2026 14:40:08 -0400 Subject: [PATCH 060/131] Fabric Module Tests Working --- plugins/module_utils/endpoints/base_path.py | 29 + .../endpoints/base_paths_manage.py | 134 ++ 
.../module_utils/endpoints/endpoint_mixins.py | 88 ++ .../module_utils/endpoints/query_params.py | 323 ++++ .../endpoints/v1/manage_fabrics.py | 626 ++++++++ plugins/module_utils/enums.py | 153 ++ .../models/nd_manage_fabric/enums.py | 116 ++ .../nd_manage_fabric/manage_fabric_ibgp.py | 1370 ++++++++++++++++ .../orchestrators/manage_fabric.py | 43 + plugins/module_utils/pydantic_compat.py | 4 + plugins/modules/nd_manage_fabric_ibgp.py | 1393 +++++++++++++++++ 11 files changed, 4279 insertions(+) create mode 100644 plugins/module_utils/endpoints/base_path.py create mode 100644 plugins/module_utils/endpoints/base_paths_manage.py create mode 100644 plugins/module_utils/endpoints/endpoint_mixins.py create mode 100644 plugins/module_utils/endpoints/query_params.py create mode 100644 plugins/module_utils/endpoints/v1/manage_fabrics.py create mode 100644 plugins/module_utils/enums.py create mode 100644 plugins/module_utils/models/nd_manage_fabric/enums.py create mode 100644 plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py create mode 100644 plugins/module_utils/orchestrators/manage_fabric.py create mode 100644 plugins/modules/nd_manage_fabric_ibgp.py diff --git a/plugins/module_utils/endpoints/base_path.py b/plugins/module_utils/endpoints/base_path.py new file mode 100644 index 00000000..b1ddb5eb --- /dev/null +++ b/plugins/module_utils/endpoints/base_path.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Centralized base paths for ND API endpoints. + +This module provides a single location to manage all API Infra base paths, +allowing easy modification when API paths change. All endpoint classes +should use these path builders for consistency. 
+""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from typing import Final + +# Root API paths +ND_ANALYZE_API: Final = "/api/v1/analyze" +ND_INFRA_API: Final = "/api/v1/infra" +ND_MANAGE_API: Final = "/api/v1/manage" +ND_ONEMANAGE_API: Final = "/api/v1/onemanage" +ND_MSO_API: Final = "/mso" +NDFC_API: Final = "/appcenter/cisco/ndfc/api" +LOGIN: Final = "/login" diff --git a/plugins/module_utils/endpoints/base_paths_manage.py b/plugins/module_utils/endpoints/base_paths_manage.py new file mode 100644 index 00000000..069cd7ec --- /dev/null +++ b/plugins/module_utils/endpoints/base_paths_manage.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Centralized base paths for ND Manage API endpoints. + +/api/v1/manage + +This module provides a single location to manage all API Manage base paths, +allowing easy modification when API paths change. All endpoint classes +should use these path builders for consistency. +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from typing import Final + +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base_path import ND_MANAGE_API + + +class BasePath: + """ + # Summary + + API Endpoints for ND Manage + + ## Description + + Provides centralized endpoint definitions for all ND Manage API endpoints. + This allows API path changes to be managed in a single location. 
+ + ## Usage + + ```python + from ansible_collections.cisco.nd.plugins.module_utils.ep.base_paths_manage import BasePath + + # Get a complete base path for ND Manage + path = BasePath.nd_manage("inventory", "switches") + # Returns: /api/v1/manage/inventory/switches + + # Leverage a convenience method + path = BasePath.nd_manage_inventory("switches") + # Returns: /api/v1/manage/inventory/switches + ``` + + ## Design Notes + + - All base paths are defined as class constants for easy modification + - Helper methods compose paths from base constants + - Use these methods in Pydantic endpoint models to ensure consistency + - If ND Manage changes base API paths, only this class needs updating + """ + + API: Final = ND_MANAGE_API + + @classmethod + def nd_manage(cls, *segments: str) -> str: + """ + # Summary + + Build ND manage API path. + + ## Parameters + + - segments: Path segments to append after /api/v1/manage + + ## Returns + + - Complete ND manage API path + + ## Example + + ```python + path = BasePath.nd_manage("inventory", "switches") + # Returns: /api/v1/manage/inventory/switches + ``` + """ + if not segments: + return cls.API + return f"{cls.API}/{'/'.join(segments)}" + + @classmethod + def nd_manage_inventory(cls, *segments: str) -> str: + """ + # Summary + + Build ND manage inventory API path. + + ## Parameters + + - segments: Path segments to append after inventory (e.g., "switches") + + ## Returns + + - Complete ND manage inventory path + + ## Example + + ```python + path = BasePath.nd_manage_inventory("switches") + # Returns: /api/v1/manage/inventory/switches + ``` + """ + return cls.nd_manage("inventory", *segments) + + @classmethod + def nd_manage_fabrics(cls, *segments: str) -> str: + """ + # Summary + + Build ND manage fabrics API path. 
+ + ## Parameters + + - segments: Path segments to append after fabrics (e.g., "my-fabric") + + ## Returns + + - Complete ND manage fabrics path + + ## Example + + ```python + path = BasePath.nd_manage_fabrics() + # Returns: /api/v1/manage/fabrics + ``` + """ + return cls.nd_manage("fabrics", *segments) diff --git a/plugins/module_utils/endpoints/endpoint_mixins.py b/plugins/module_utils/endpoints/endpoint_mixins.py new file mode 100644 index 00000000..e472e92f --- /dev/null +++ b/plugins/module_utils/endpoints/endpoint_mixins.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Reusable mixin classes for endpoint models. + +This module provides mixin classes that can be composed to add common +fields to endpoint models without duplication. +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from typing import Optional + +from ansible_collections.cisco.nd.plugins.module_utils.enums import BooleanStringEnum +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, Field + + +class ClusterNameMixin(BaseModel): + """Mixin for endpoints that require cluster_name parameter.""" + + cluster_name: Optional[str] = Field(default=None, min_length=1, description="Cluster name") + + +class FabricNameMixin(BaseModel): + """Mixin for endpoints that require fabric_name parameter.""" + + fabric_name: Optional[str] = Field(default=None, min_length=1, max_length=64, description="Fabric name") + + +class ForceShowRunMixin(BaseModel): + """Mixin for endpoints that require force_show_run parameter.""" + + force_show_run: BooleanStringEnum = Field(default=BooleanStringEnum.FALSE, description="Force show running config") + + +class HealthCategoryMixin(BaseModel): + """Mixin for endpoints that 
require health_category parameter.""" + + health_category: Optional[str] = Field(default=None, min_length=1, description="Health category") + + +class InclAllMsdSwitchesMixin(BaseModel): + """Mixin for endpoints that require incl_all_msd_switches parameter.""" + + incl_all_msd_switches: BooleanStringEnum = Field(default=BooleanStringEnum.FALSE, description="Include all MSD switches") + + +class LinkUuidMixin(BaseModel): + """Mixin for endpoints that require link_uuid parameter.""" + + link_uuid: Optional[str] = Field(default=None, min_length=1, description="Link UUID") + + +class LoginIdMixin(BaseModel): + """Mixin for endpoints that require login_id parameter.""" + + login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") + + +class NetworkNameMixin(BaseModel): + """Mixin for endpoints that require network_name parameter.""" + + network_name: Optional[str] = Field(default=None, min_length=1, max_length=64, description="Network name") + + +class NodeNameMixin(BaseModel): + """Mixin for endpoints that require node_name parameter.""" + + node_name: Optional[str] = Field(default=None, min_length=1, description="Node name") + + +class SwitchSerialNumberMixin(BaseModel): + """Mixin for endpoints that require switch_sn parameter.""" + + switch_sn: Optional[str] = Field(default=None, min_length=1, description="Switch serial number") + + +class VrfNameMixin(BaseModel): + """Mixin for endpoints that require vrf_name parameter.""" + + vrf_name: Optional[str] = Field(default=None, min_length=1, max_length=64, description="VRF name") diff --git a/plugins/module_utils/endpoints/query_params.py b/plugins/module_utils/endpoints/query_params.py new file mode 100644 index 00000000..dd082f05 --- /dev/null +++ b/plugins/module_utils/endpoints/query_params.py @@ -0,0 +1,323 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Query 
parameter classes for API endpoints. + +This module provides composable query parameter classes for building +URL query strings. Supports endpoint-specific parameters and Lucene-style +filtering with type safety via Pydantic. +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from abc import ABC, abstractmethod +from enum import Enum +from typing import Optional, Union +from urllib.parse import quote + +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, Field, field_validator + + +class QueryParams(ABC): + """ + # Summary + + Abstract Base Class for Query Parameters + + ## Description + + Base class for all query parameter types. Subclasses implement + `to_query_string()` to convert their parameters to URL query string format. + + ## Design + + This allows composition of different query parameter types: + + - Endpoint-specific parameters (e.g., forceShowRun, ticketId) + - Generic Lucene-style filtering (e.g., filter, max, sort) + - Future parameter types can be added without changing existing code + """ + + @abstractmethod + def to_query_string(self) -> str: + """ + # Summary + + Convert parameters to URL query string format. + + ## Returns + + - Query string (without leading '?') + - Empty string if no parameters are set + + ### Example return value + + ```python + "forceShowRun=true&ticketId=12345" + ``` + """ + + def is_empty(self) -> bool: + """ + # Summary + + Check if any parameters are set. + + ## Returns + + - True if no parameters are set + - False if at least one parameter is set + """ + return len(self.to_query_string()) == 0 + + +class EndpointQueryParams(BaseModel): + """ + # Summary + + Endpoint-Specific Query Parameters + + ## Description + + Query parameters specific to a particular endpoint. + These are typed and validated by Pydantic. 
+ + ## Usage + + Subclass this for each endpoint that needs custom query parameters: + + ```python + class ConfigDeployQueryParams(EndpointQueryParams): + force_show_run: bool = False + include_all_msd_switches: bool = False + + def to_query_string(self) -> str: + params = [f"forceShowRun={str(self.force_show_run).lower()}"] + params.append(f"inclAllMSDSwitches={str(self.include_all_msd_switches).lower()}") + return "&".join(params) + ``` + """ + + def to_query_string(self) -> str: + """ + # Summary + + - Default implementation: convert all fields to key=value pairs. + - Override this method for custom formatting. + """ + params = [] + for field_name, field_value in self.model_dump(exclude_none=True).items(): + # Convert snake_case to camelCase for API compatibility + api_key = self._to_camel_case(field_name) + + # Handle different value types + if isinstance(field_value, bool): + api_value = str(field_value).lower() + elif isinstance(field_value, Enum): + # Get the enum's value (e.g., "true" or "false") + api_value = field_value.value + else: + api_value = str(field_value) + + params.append(f"{api_key}={api_value}") + return "&".join(params) + + @staticmethod + def _to_camel_case(snake_str: str) -> str: + """Convert snake_case to camelCase.""" + components = snake_str.split("_") + return components[0] + "".join(x.title() for x in components[1:]) + + def is_empty(self) -> bool: + """Check if any parameters are set.""" + return len(self.model_dump(exclude_none=True, exclude_defaults=True)) == 0 + + +class LuceneQueryParams(BaseModel): + """ + # Summary + + Lucene-Style Query Parameters + + ## Description + + Generic Lucene-style filtering query parameters for ND API. + Supports filtering, pagination, and sorting. 
+ + ## Parameters + + - filter: Lucene filter expression (e.g., "name:MyFabric AND state:deployed") + - max: Maximum number of results to return + - offset: Offset for pagination + - sort: Sort field and direction (e.g., "name:asc", "created:desc") + - fields: Comma-separated list of fields to return + + ## Usage + + ```python + lucene = LuceneQueryParams( + filter="name:Fabric*", + max=100, + sort="name:asc" + ) + query_string = lucene.to_query_string() + # Returns: "filter=name:Fabric*&max=100&sort=name:asc" + ``` + + ## Lucene Filter Examples + + - Single field: `name:MyFabric` + - Wildcard: `name:Fabric*` + - Multiple conditions: `name:MyFabric AND state:deployed` + - Range: `created:[2024-01-01 TO 2024-12-31]` + - OR conditions: `state:deployed OR state:pending` + - NOT conditions: `NOT state:deleted` + """ + + filter: Optional[str] = Field(default=None, description="Lucene filter expression") + max: Optional[int] = Field(default=None, ge=1, le=10000, description="Maximum results") + offset: Optional[int] = Field(default=None, ge=0, description="Pagination offset") + sort: Optional[str] = Field(default=None, description="Sort field and direction (e.g., 'name:asc')") + fields: Optional[str] = Field(default=None, description="Comma-separated list of fields to return") + + @field_validator("sort") + @classmethod + def validate_sort(cls, value): + """Validate sort format: field:direction.""" + if value is not None and ":" in value: + parts = value.split(":") + if len(parts) == 2 and parts[1].lower() not in ["asc", "desc"]: + raise ValueError("Sort direction must be 'asc' or 'desc'") + return value + + def to_query_string(self, url_encode: bool = True) -> str: + """ + Convert to URL query string format. 
+ + ### Parameters + - url_encode: If True, URL-encode parameter values (default: True) + + ### Returns + - URL query string with encoded values + """ + params = [] + for field_name, field_value in self.model_dump(exclude_none=True).items(): + if field_value is not None: + # URL-encode the value if requested + encoded_value = quote(str(field_value), safe="") if url_encode else str(field_value) + params.append(f"{field_name}={encoded_value}") + return "&".join(params) + + def is_empty(self) -> bool: + """Check if any filter parameters are set.""" + return all(v is None for v in self.model_dump().values()) + + +class CompositeQueryParams: + """ + # Summary + + Composite Query Parameters + + ## Description + + Composes multiple query parameter types into a single query string. + This allows combining endpoint-specific parameters with Lucene filtering. + + ## Design Pattern + + Uses composition to combine different query parameter types without + inheritance. Each parameter type can be independently configured and tested. + + ## Usage + + ```python + # Endpoint-specific params + endpoint_params = ConfigDeployQueryParams( + force_show_run=True, + include_all_msd_switches=False + ) + + # Lucene filtering params + lucene_params = LuceneQueryParams( + filter="name:MySwitch*", + max=50, + sort="name:asc" + ) + + # Compose them together + composite = CompositeQueryParams() + composite.add(endpoint_params) + composite.add(lucene_params) + + query_string = composite.to_query_string() + # Returns: "forceShowRun=true&inclAllMSDSwitches=false&filter=name:MySwitch*&max=50&sort=name:asc" + ``` + """ + + def __init__(self) -> None: + self._param_groups: list[Union[EndpointQueryParams, LuceneQueryParams]] = [] + + def add(self, params: Union[EndpointQueryParams, LuceneQueryParams]) -> "CompositeQueryParams": + """ + # Summary + + Add a query parameter group to the composite. 
+ + ## Parameters + + - params: EndpointQueryParams or LuceneQueryParams instance + + ## Returns + + - Self (for method chaining) + + ## Example + + ```python + composite = CompositeQueryParams() + composite.add(endpoint_params).add(lucene_params) + ``` + """ + self._param_groups.append(params) + return self + + def to_query_string(self, url_encode: bool = True) -> str: + """ + # Summary + + Build complete query string from all parameter groups. + + ## Parameters + + - url_encode: If True, URL-encode parameter values (default: True) + + ## Returns + + - Complete query string (without leading '?') + - Empty string if no parameters are set + """ + parts = [] + for param_group in self._param_groups: + if not param_group.is_empty(): + # LuceneQueryParams supports url_encode parameter, EndpointQueryParams doesn't + if isinstance(param_group, LuceneQueryParams): + parts.append(param_group.to_query_string(url_encode=url_encode)) + else: + parts.append(param_group.to_query_string()) + return "&".join(parts) + + def is_empty(self) -> bool: + """Check if any parameters are set across all groups.""" + return all(param_group.is_empty() for param_group in self._param_groups) + + def clear(self) -> None: + """Remove all parameter groups.""" + self._param_groups.clear() diff --git a/plugins/module_utils/endpoints/v1/manage_fabrics.py b/plugins/module_utils/endpoints/v1/manage_fabrics.py new file mode 100644 index 00000000..e3396d8a --- /dev/null +++ b/plugins/module_utils/endpoints/v1/manage_fabrics.py @@ -0,0 +1,626 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +ND Manage Fabrics endpoint models. + +This module contains endpoint definitions for fabric-related operations +in the ND Manage API. 
+ + ## Endpoints + + - `EpApiV1ManageFabricsGet` - Get a specific fabric by name + (GET /api/v1/manage/fabrics/{fabric_name}) + - `EpApiV1ManageFabricsListGet` - List all fabrics with optional filtering + (GET /api/v1/manage/fabrics) + - `EpApiV1ManageFabricsPost` - Create a new fabric + (POST /api/v1/manage/fabrics) + - `EpApiV1ManageFabricsPut` - Update a specific fabric + (PUT /api/v1/manage/fabrics/{fabric_name}) + - `EpApiV1ManageFabricsDelete` - Delete a specific fabric + (DELETE /api/v1/manage/fabrics/{fabric_name}) + - `EpApiV1ManageFabricsSummaryGet` - Get summary for a specific fabric + (GET /api/v1/manage/fabrics/{fabric_name}/summary) + """ + + from __future__ import absolute_import, annotations, division, print_function + + # from plugins.module_utils.endpoints.base import NDBaseEndpoint + + # pylint: disable=invalid-name + __metaclass__ = type + # pylint: enable=invalid-name + + from typing import Literal, Optional, Final + + from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum + from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base_paths_manage import BasePath + from ansible_collections.cisco.nd.plugins.module_utils.endpoints.endpoint_mixins import FabricNameMixin + from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath + from ansible_collections.cisco.nd.plugins.module_utils.endpoints.query_params import EndpointQueryParams + from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict, Field + from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey + + # Common config for basic validation + COMMON_CONFIG = ConfigDict(validate_assignment=True) + + + class FabricsEndpointParams(EndpointQueryParams): + """ + # Summary + + Endpoint-specific query parameters for the fabrics endpoint.
+ + ## Parameters + + - cluster_name: Name of the target Nexus Dashboard cluster to execute this API, + in a multi-cluster deployment (optional) + + ## Usage + + ```python + params = FabricsEndpointParams(cluster_name="cluster1") + query_string = params.to_query_string() + # Returns: "clusterName=cluster1" + ``` + """ + + cluster_name: Optional[str] = Field( + default=None, + min_length=1, + description="Name of the target Nexus Dashboard cluster to execute this API, in a multi-cluster deployment", + ) + + +class _EpApiV1ManageFabricsBase(FabricNameMixin, BaseModel): + """ + Base class for ND Manage Fabrics endpoints. + + Provides common functionality for all HTTP methods on the + /api/v1/manage/fabrics endpoint. + """ + + # TODO: Remove it + base_path: Final = BasePath.nd_manage_fabrics() + + def set_identifiers(self, identifier: IdentifierKey = None): + self.fabric_name = identifier + +class EpApiV1ManageFabricsGet(_EpApiV1ManageFabricsBase): + """ + # Summary + + ND Manage Fabrics GET Endpoint + + ## Description + + Endpoint to retrieve details for a specific named fabric from the ND Manage service. + The fabric name is a required path parameter. Optionally filter by cluster name + using the clusterName query parameter in multi-cluster deployments. 
+ + ## Path + + - /api/v1/manage/fabrics/{fabric_name} + - /api/v1/manage/fabrics/{fabric_name}?clusterName=cluster1 + + ## Verb + + - GET + + ## Raises + + - `ValueError` if `fabric_name` is not set when accessing `path` + + ## Usage + + ```python + # Get details for a specific fabric + request = EpApiV1ManageFabricsGet() + request.fabric_name = "my-fabric" + path = request.path + verb = request.verb + # Path will be: /api/v1/manage/fabrics/my-fabric + + # Get fabric details targeting a specific cluster in a multi-cluster deployment + request = EpApiV1ManageFabricsGet() + request.fabric_name = "my-fabric" + request.endpoint_params.cluster_name = "cluster1" + path = request.path + verb = request.verb + # Path will be: /api/v1/manage/fabrics/my-fabric?clusterName=cluster1 + ``` + """ + + model_config = COMMON_CONFIG + + class_name: Literal["EpApiV1ManageFabricsGet"] = Field( + default="EpApiV1ManageFabricsGet", description="Class name for backward compatibility" + ) + + endpoint_params: FabricsEndpointParams = Field( + default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" + ) + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path with the fabric name and optional query string. 
+ + ## Returns + + - Complete endpoint path string including fabric_name and optional query parameters + + ## Raises + + - `ValueError` if `fabric_name` is None + """ + if self.fabric_name is None: + raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") + base_path = BasePath.nd_manage_fabrics(self.fabric_name) + query_string = self.endpoint_params.to_query_string() + if query_string: + return f"{base_path}?{query_string}" + return base_path + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.GET + + +class FabricsListEndpointParams(EndpointQueryParams): + """ + # Summary + + Query parameters for the ``GET /api/v1/manage/fabrics`` list endpoint. + + ## Parameters + + - cluster_name: Name of the target Nexus Dashboard cluster (multi-cluster deployments) + - category: Filter by fabric category (``"fabric"`` or ``"fabricGroup"``) + - filter: Lucene-format filter string + - max: Maximum number of records to return + - offset: Number of records to skip for pagination + - sort: Sort field with optional ``:desc`` suffix + + ## Usage + + ```python + params = FabricsListEndpointParams(category="fabric", max=10, offset=0) + query_string = params.to_query_string() + # Returns: "category=fabric&max=10&offset=0" + ``` + """ + + cluster_name: Optional[str] = Field( + default=None, + min_length=1, + description="Name of the target Nexus Dashboard cluster to execute this API, in a multi-cluster deployment", + ) + + category: Optional[str] = Field( + default=None, + description="Filter by category of fabric (fabric or fabricGroup)", + ) + + filter: Optional[str] = Field( + default=None, + description="Lucene format filter - Filter the response based on this filter field", + ) + + max: Optional[int] = Field( + default=None, + ge=1, + description="Number of records to return", + ) + + offset: Optional[int] = Field( + default=None, + ge=0, + description="Number of records to skip 
for pagination", + ) + + sort: Optional[str] = Field( + default=None, + description="Sort the records by the declared fields in either ascending (default) or descending (:desc) order", + ) + + +class EpApiV1ManageFabricsListGet(_EpApiV1ManageFabricsBase): + """ + # Summary + + ND Manage Fabrics List GET Endpoint + + ## Description + + Endpoint to list all fabrics from the ND Manage service. + Supports optional query parameters for filtering, pagination, and sorting. + + ## Path + + - ``/api/v1/manage/fabrics`` + - ``/api/v1/manage/fabrics?category=fabric&max=10`` + + ## Verb + + - GET + + ## Raises + + - None + + ## Usage + + ```python + # List all fabrics + ep = EpApiV1ManageFabricsListGet() + path = ep.path + verb = ep.verb + # Path: /api/v1/manage/fabrics + + # List fabrics with filtering and pagination + ep = EpApiV1ManageFabricsListGet() + ep.endpoint_params.category = "fabric" + ep.endpoint_params.max = 10 + path = ep.path + # Path: /api/v1/manage/fabrics?category=fabric&max=10 + ``` + """ + + model_config = COMMON_CONFIG + + class_name: Literal["EpApiV1ManageFabricsListGet"] = Field( + default="EpApiV1ManageFabricsListGet", description="Class name for backward compatibility" + ) + + endpoint_params: FabricsListEndpointParams = Field( + default_factory=FabricsListEndpointParams, description="Endpoint-specific query parameters" + ) + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path with optional query string. 
+ + ## Returns + + - Complete endpoint path string including optional query parameters + + ## Raises + + - None + """ + base_path = BasePath.nd_manage_fabrics() + query_string = self.endpoint_params.to_query_string() + if query_string: + return f"{base_path}?{query_string}" + return base_path + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.GET + + +class EpApiV1ManageFabricsPost(BaseModel): + """ + # Summary + + ND Manage Fabrics POST Endpoint + + ## Description + + Endpoint to create a new fabric via the ND Manage service. + The request body must conform to the ``baseFabric`` schema (discriminated + by ``category``). For standard fabrics the category is ``"fabric"`` and + the body includes ``name`` plus fabric-specific properties such as + ``location``, ``licenseTier``, ``telemetryCollection``, etc. + + ## Path + + - ``/api/v1/manage/fabrics`` + - ``/api/v1/manage/fabrics?clusterName=cluster1`` + + ## Verb + + - POST + + ## Request Body (application/json) + + ``baseFabric`` schema — for a standard fabric use ``category: "fabric"`` + with at minimum: + + - ``name`` (str, required): Name of the fabric + - ``category`` (str, required): ``"fabric"`` + + ## Raises + + - None + + ## Usage + + ```python + ep = EpApiV1ManageFabricsPost() + rest_send.path = ep.path + rest_send.verb = ep.verb + rest_send.payload = { + "name": "my-fabric", + "category": "fabric", + "telemetryCollection": True, + "telemetryCollectionType": "inBand", + } + ``` + """ + + model_config = COMMON_CONFIG + + class_name: Literal["EpApiV1ManageFabricsPost"] = Field( + default="EpApiV1ManageFabricsPost", description="Class name for backward compatibility" + ) + + endpoint_params: FabricsEndpointParams = Field( + default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" + ) + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path with optional query string. 
+ + ## Returns + + - Complete endpoint path string + + ## Raises + + - None + """ + base_path = BasePath.nd_manage_fabrics() + query_string = self.endpoint_params.to_query_string() + if query_string: + return f"{base_path}?{query_string}" + return base_path + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.POST + + +class EpApiV1ManageFabricsPut(_EpApiV1ManageFabricsBase): + """ + # Summary + + ND Manage Fabrics PUT Endpoint + + ## Description + + Endpoint to update an existing fabric via the ND Manage service. + The fabric name is a required path parameter. The request body must + conform to the ``baseFabric`` schema (same shape as POST/create). + + ## Path + + - ``/api/v1/manage/fabrics/{fabric_name}`` + - ``/api/v1/manage/fabrics/{fabric_name}?clusterName=cluster1`` + + ## Verb + + - PUT + + ## Request Body (application/json) + + ``baseFabric`` schema — same as create (POST). + + ## Raises + + - `ValueError` if `fabric_name` is not set when accessing `path` + + ## Usage + + ```python + ep = EpApiV1ManageFabricsPut() + ep.fabric_name = "my-fabric" + rest_send.path = ep.path + rest_send.verb = ep.verb + rest_send.payload = { + "name": "my-fabric", + "category": "fabric", + "telemetryCollection": False, + } + ``` + """ + + model_config = COMMON_CONFIG + + class_name: Literal["EpApiV1ManageFabricsPut"] = Field( + default="EpApiV1ManageFabricsPut", description="Class name for backward compatibility" + ) + + endpoint_params: FabricsEndpointParams = Field( + default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" + ) + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path with the fabric name and optional query string. 
+ + ## Returns + + - Complete endpoint path string + + ## Raises + + - `ValueError` if `fabric_name` is None + """ + if self.fabric_name is None: + raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") + base_path = BasePath.nd_manage_fabrics(self.fabric_name) + query_string = self.endpoint_params.to_query_string() + if query_string: + return f"{base_path}?{query_string}" + return base_path + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.PUT + + +class EpApiV1ManageFabricsDelete(_EpApiV1ManageFabricsBase): + """ + # Summary + + ND Manage Fabrics DELETE Endpoint + + ## Description + + Endpoint to delete a specific fabric from the ND Manage service. + The fabric name is a required path parameter. + + ## Path + + - ``/api/v1/manage/fabrics/{fabric_name}`` + - ``/api/v1/manage/fabrics/{fabric_name}?clusterName=cluster1`` + + ## Verb + + - DELETE + + ## Raises + + - `ValueError` if `fabric_name` is not set when accessing `path` + + ## Usage + + ```python + ep = EpApiV1ManageFabricsDelete() + ep.fabric_name = "my-fabric" + rest_send.path = ep.path + rest_send.verb = ep.verb + ``` + """ + + model_config = COMMON_CONFIG + + class_name: Literal["EpApiV1ManageFabricsDelete"] = Field( + default="EpApiV1ManageFabricsDelete", description="Class name for backward compatibility" + ) + + endpoint_params: FabricsEndpointParams = Field( + default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" + ) + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path with the fabric name and optional query string. 
+ + ## Returns + + - Complete endpoint path string + + ## Raises + + - `ValueError` if `fabric_name` is None + """ + if self.fabric_name is None: + raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") + base_path = BasePath.nd_manage_fabrics(self.fabric_name) + query_string = self.endpoint_params.to_query_string() + if query_string: + return f"{base_path}?{query_string}" + return base_path + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.DELETE + + +class EpApiV1ManageFabricsSummaryGet(_EpApiV1ManageFabricsBase): + """ + # Summary + + ND Manage Fabrics Summary GET Endpoint + + ## Description + + Endpoint to retrieve summary information for a specific fabric from + the ND Manage service. The fabric name is a required path parameter. + + ## Path + + - ``/api/v1/manage/fabrics/{fabric_name}/summary`` + - ``/api/v1/manage/fabrics/{fabric_name}/summary?clusterName=cluster1`` + + ## Verb + + - GET + + ## Raises + + - `ValueError` if `fabric_name` is not set when accessing `path` + + ## Usage + + ```python + ep = EpApiV1ManageFabricsSummaryGet() + ep.fabric_name = "my-fabric" + path = ep.path + verb = ep.verb + # Path: /api/v1/manage/fabrics/my-fabric/summary + ``` + """ + + model_config = COMMON_CONFIG + + class_name: Literal["EpApiV1ManageFabricsSummaryGet"] = Field( + default="EpApiV1ManageFabricsSummaryGet", description="Class name for backward compatibility" + ) + + endpoint_params: FabricsEndpointParams = Field( + default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" + ) + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path with the fabric name and optional query string. 
+ + ## Returns + + - Complete endpoint path string + + ## Raises + + - `ValueError` if `fabric_name` is None + """ + if self.fabric_name is None: + raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") + base_path = BasePath.nd_manage_fabrics(self.fabric_name, "summary") + query_string = self.endpoint_params.to_query_string() + if query_string: + return f"{base_path}?{query_string}" + return base_path + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.GET diff --git a/plugins/module_utils/enums.py b/plugins/module_utils/enums.py new file mode 100644 index 00000000..a9eccb4d --- /dev/null +++ b/plugins/module_utils/enums.py @@ -0,0 +1,153 @@ +# -*- coding: utf-8 -*- +# pylint: disable=wrong-import-position +# pylint: disable=missing-module-docstring +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +# Summary + +Enum definitions for Nexus Dashboard Ansible modules. + +## Enums + +- HttpVerbEnum: Enum for HTTP verb values used in endpoints. +- OperationType: Enum for operation types used by Results to determine if changes have occurred. +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from enum import Enum + + +class BooleanStringEnum(str, Enum): + """ + # Summary + + Enum for boolean string values used in query parameters. + + ## Members + + - TRUE: Represents the string "true". + - FALSE: Represents the string "false". + """ + + TRUE = "true" + FALSE = "false" + + +class HttpVerbEnum(str, Enum): + """ + # Summary + + Enum for HTTP verb values used in endpoints. + + ## Members + + - GET: Represents the HTTP GET method. + - POST: Represents the HTTP POST method. + - PUT: Represents the HTTP PUT method. 
+ - DELETE: Represents the HTTP DELETE method. + - PATCH: Represents the HTTP PATCH method. + """ + + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + PATCH = "PATCH" + + @classmethod + def values(cls) -> list[str]: + """ + # Summary + + Returns a list of all enum values. + + ## Returns + + - A list of string values representing the enum members. + """ + return sorted([member.value for member in cls]) + + +class OperationType(Enum): + """ + # Summary + + Enumeration for operation types. + + Used by Results to determine if changes have occurred based on the operation type. + + - QUERY: Represents a query operation which does not change state. + - CREATE: Represents a create operation which adds new resources. + - UPDATE: Represents an update operation which modifies existing resources. + - DELETE: Represents a delete operation which removes resources. + + # Usage + + ```python + from plugins.module_utils.enums import OperationType + class MyModule: + def __init__(self): + self.operation_type = OperationType.QUERY + ``` + + The above informs the Results class that the current operation is a query, and thus + no changes should be expected. + + Specifically, Results._determine_if_changed() will return False for QUERY operations, + while it will evaluate CREATE, UPDATE, and DELETE operations in more detail to + determine if any changes have occurred. + """ + + QUERY = "query" + CREATE = "create" + UPDATE = "update" + DELETE = "delete" + + def changes_state(self) -> bool: + """ + # Summary + + Return True if this operation type can change controller state. 
+ + ## Returns + + - `bool`: True if operation can change state, False otherwise + + ## Examples + + ```python + OperationType.QUERY.changes_state() # Returns False + OperationType.CREATE.changes_state() # Returns True + OperationType.DELETE.changes_state() # Returns True + ``` + """ + return self in ( + OperationType.CREATE, + OperationType.UPDATE, + OperationType.DELETE, + ) + + def is_read_only(self) -> bool: + """ + # Summary + + Return True if this operation type is read-only. + + ## Returns + + - `bool`: True if operation is read-only, False otherwise + + ## Examples + + ```python + OperationType.QUERY.is_read_only() # Returns True + OperationType.CREATE.is_read_only() # Returns False + ``` + """ + return self == OperationType.QUERY diff --git a/plugins/module_utils/models/nd_manage_fabric/enums.py b/plugins/module_utils/models/nd_manage_fabric/enums.py new file mode 100644 index 00000000..aa084802 --- /dev/null +++ b/plugins/module_utils/models/nd_manage_fabric/enums.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +# pylint: disable=wrong-import-position +# pylint: disable=missing-module-docstring +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +# Summary + +Enum definitions for Nexus Dashboard Ansible modules. + +## Enums + +- HttpVerbEnum: Enum for HTTP verb values used in endpoints. +- OperationType: Enum for operation types used by Results to determine if changes have occurred. +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from enum import Enum + +class FabricTypeEnum(str, Enum): + """ + # Summary + + Enumeration of supported fabric types for discriminated union. 
+ + ## Values + + - `VXLAN_IBGP` - VXLAN fabric with iBGP overlay + """ + + VXLAN_IBGP = "vxlanIbgp" + + +class AlertSuspendEnum(str, Enum): + """ + # Summary + + Enumeration for alert suspension states. + + ## Values + + - `ENABLED` - Alerts are enabled + - `DISABLED` - Alerts are disabled + """ + + ENABLED = "enabled" + DISABLED = "disabled" + + +class LicenseTierEnum(str, Enum): + """ + # Summary + + Enumeration for license tier options. + + ## Values + + - `ESSENTIALS` - Essentials license tier + - `PREMIER` - Premier license tier + """ + + ESSENTIALS = "essentials" + PREMIER = "premier" + + +class ReplicationModeEnum(str, Enum): + """ + # Summary + + Enumeration for replication modes. + + ## Values + + - `MULTICAST` - Multicast replication + - `INGRESS` - Ingress replication + """ + + MULTICAST = "multicast" + INGRESS = "ingress" + + +class OverlayModeEnum(str, Enum): + """ + # Summary + + Enumeration for overlay modes. + + ## Values + + - `CLI` - CLI based configuration + - `CONFIG_PROFILE` - Configuration profile based + """ + + CLI = "cli" + CONFIG_PROFILE = "config-profile" + + +class LinkStateRoutingProtocolEnum(str, Enum): + """ + # Summary + + Enumeration for underlay routing protocols. 
+ + ## Values + + - `OSPF` - Open Shortest Path First + - `ISIS` - Intermediate System to Intermediate System + """ + + OSPF = "ospf" + ISIS = "isis" diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py new file mode 100644 index 00000000..27ed0b09 --- /dev/null +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py @@ -0,0 +1,1370 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import re +# from datetime import datetime +from enum import Enum +from typing import List, Dict, Any, Optional, ClassVar, Literal + +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ( + BaseModel, + ConfigDict, + Field, + field_validator, + model_validator, +) +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.enums import ( + FabricTypeEnum, + AlertSuspendEnum, + LicenseTierEnum, + OverlayModeEnum, + ReplicationModeEnum, + LinkStateRoutingProtocolEnum, +) + + +""" +# Comprehensive Pydantic models for iBGP VXLAN fabric management via Nexus Dashboard + +This module provides comprehensive Pydantic models for creating, updating, and deleting +iBGP VXLAN fabrics through the Nexus Dashboard Fabric Controller (NDFC) API. 
+ +## Models Overview + +- `LocationModel` - Geographic location coordinates +- `NetflowExporterModel` - Netflow exporter configuration +- `NetflowRecordModel` - Netflow record configuration +- `NetflowMonitorModel` - Netflow monitor configuration +- `NetflowSettingsModel` - Complete netflow settings +- `BootstrapSubnetModel` - Bootstrap subnet configuration +- `FabricDesignSettingsModel` - Fabric designer settings +- `TelemetryFlowCollectionModel` - Telemetry flow collection settings +- `TelemetrySettingsModel` - Complete telemetry configuration +- `ExternalStreamingSettingsModel` - External streaming configuration +- `VxlanIbgpManagementModel` - iBGP VXLAN specific management settings +- `FabricModel` - Complete fabric creation model +- `FabricDeleteModel` - Fabric deletion model + +## Usage + +```python +# Create a new iBGP VXLAN fabric +fabric_data = { + "name": "MyFabric", + "location": {"latitude": 37.7749, "longitude": -122.4194}, + "management": { + "type": "vxlanIbgp", + "bgp_asn": "65001", + "site_id": "65001" + } +} +fabric = FabricModel(**fabric_data) +``` +""" + +class LocationModel(NDNestedModel): + """ + # Summary + + Geographic location coordinates for the fabric. + + ## Raises + + - `ValueError` - If latitude or longitude are outside valid ranges + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + latitude: float = Field( + description="Latitude coordinate (-90 to 90)", + ge=-90.0, + le=90.0 + ) + longitude: float = Field( + description="Longitude coordinate (-180 to 180)", + ge=-180.0, + le=180.0 + ) + + +class NetflowExporterModel(NDNestedModel): + """ + # Summary + + Netflow exporter configuration for telemetry. 
+ + ## Raises + + - `ValueError` - If UDP port is outside valid range or IP address is invalid + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + exporter_name: str = Field(alias="exporterName", description="Name of the netflow exporter") + exporter_ip: str = Field(alias="exporterIp", description="IP address of the netflow collector") + vrf: str = Field(description="VRF name for the exporter", default="management") + source_interface_name: str = Field(alias="sourceInterfaceName", description="Source interface name") + udp_port: int = Field(alias="udpPort", description="UDP port for netflow export", ge=1, le=65535) + + +class NetflowRecordModel(NDNestedModel): + """ + # Summary + + Netflow record configuration defining flow record templates. + + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + record_name: str = Field(alias="recordName", description="Name of the netflow record") + record_template: str = Field(alias="recordTemplate", description="Template type for the record") + layer2_record: bool = Field(alias="layer2Record", description="Enable layer 2 record fields", default=False) + + +class NetflowMonitorModel(NDNestedModel): + """ + # Summary + + Netflow monitor configuration linking records to exporters. 
+ + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + monitor_name: str = Field(alias="monitorName", description="Name of the netflow monitor") + record_name: str = Field(alias="recordName", description="Associated record name") + exporter1_name: str = Field(alias="exporter1Name", description="Primary exporter name") + exporter2_name: str = Field(alias="exporter2Name", description="Secondary exporter name", default="") + + +class NetflowSettingsModel(NDNestedModel): + """ + # Summary + + Complete netflow configuration including exporters, records, and monitors. + + ## Raises + + - `ValueError` - If netflow lists are inconsistent with netflow enabled state + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + netflow: bool = Field(description="Enable netflow collection", default=False) + netflow_exporter_collection: List[NetflowExporterModel] = Field( + alias="netflowExporterCollection", + description="List of netflow exporters", + default_factory=list + ) + netflow_record_collection: List[NetflowRecordModel] = Field( + alias="netflowRecordCollection", + description="List of netflow records", + default_factory=list + ) + netflow_monitor_collection: List[NetflowMonitorModel] = Field( + alias="netflowMonitorCollection", + description="List of netflow monitors", + default_factory=list + ) + + +class BootstrapSubnetModel(NDNestedModel): + """ + # Summary + + Bootstrap subnet configuration for fabric initialization. 
+ + ## Raises + + - `ValueError` - If IP addresses or subnet prefix are invalid + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + start_ip: str = Field(alias="startIp", description="Starting IP address of the bootstrap range") + end_ip: str = Field(alias="endIp", description="Ending IP address of the bootstrap range") + default_gateway: str = Field(alias="defaultGateway", description="Default gateway for bootstrap subnet") + subnet_prefix: int = Field(alias="subnetPrefix", description="Subnet prefix length", ge=8, le=30) + + +class FabricDesignSettingsModel(NDNestedModel): + """ + # Summary + + Fabric designer settings for automated fabric deployment. + + ## Raises + + - `ValueError` - If leaf/spine/border counts are invalid + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + link_capacity: str = Field(alias="linkCapacity", description="Link capacity (e.g., '400Gb')", default="400Gb") + leaf_count: int = Field(alias="leafCount", description="Number of leaf switches", ge=1, le=128) + leaf_model: str = Field(alias="leafModel", description="Leaf switch model") + spine_count: int = Field(alias="spineCount", description="Number of spine switches", ge=1, le=32) + spine_model: str = Field(alias="spineModel", description="Spine switch model") + border_count: int = Field(alias="borderCount", description="Number of border switches", ge=0, le=32, default=0) + border_model: Optional[str] = Field(alias="borderModel", description="Border switch model", default=None) + leaf_vpc_pair_policy: str = Field( + alias="leafVpcPairPolicy", + description="Leaf vPC pairing policy", + default="pairWithPhysicalPeerLink" + ) + border_vpc_pair_policy: str = Field( + alias="borderVpcPairPolicy", + description="Border vPC pairing policy", + default="pairWithPhysicalPeerLink" + ) + designer_management_ip_pool: str = 
Field( + alias="designerManagementIPPool", + description="Management IP pool for designer" + ) + designer_management_gateway: str = Field( + alias="designerManagementGateway", + description="Management gateway for designer" + ) + spine_to_leaf_distance: int = Field( + alias="spineToLeafDistance", + description="Cable distance from spine to leaf", + ge=1, + le=100, + default=20 + ) + airflow_direction: str = Field(alias="airflowDirection", description="Airflow direction", default="frontToBack") + breakout_spine_interfaces: bool = Field( + alias="breakoutSpineInterfaces", + description="Enable spine interface breakout", + default=False + ) + cabling_type: str = Field(alias="cablingType", description="Cabling type", default="fiber") + designer_password: Optional[str] = Field(alias="designerPassword", description="Designer password", default=None) + + +class TelemetryFlowCollectionModel(NDNestedModel): + """ + # Summary + + Telemetry flow collection configuration. + + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + traffic_analytics: str = Field(alias="trafficAnalytics", description="Traffic analytics state", default="enabled") + traffic_analytics_scope: str = Field( + alias="trafficAnalyticsScope", + description="Traffic analytics scope", + default="intraFabric" + ) + operating_mode: str = Field(alias="operatingMode", description="Operating mode", default="flowTelemetry") + udp_categorization: str = Field(alias="udpCategorization", description="UDP categorization", default="enabled") + + +class TelemetryMicroburstModel(NDNestedModel): + """ + # Summary + + Microburst detection configuration. 
+ + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + microburst: bool = Field(description="Enable microburst detection", default=False) + sensitivity: str = Field(description="Microburst sensitivity level", default="low") + + +class TelemetryAnalysisSettingsModel(NDNestedModel): + """ + # Summary + + Telemetry analysis configuration. + + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + is_enabled: bool = Field(alias="isEnabled", description="Enable telemetry analysis", default=False) + + +class TelemetryEnergyManagementModel(NDNestedModel): + """ + # Summary + + Energy management telemetry configuration. + + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + cost: float = Field(description="Energy cost per unit", default=1.2) + + +class TelemetryNasExportSettingsModel(NDNestedModel): + """ + # Summary + + NAS export settings for telemetry. + + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + export_type: str = Field(alias="exportType", description="Export type", default="full") + export_format: str = Field(alias="exportFormat", description="Export format", default="json") + + +class TelemetryNasModel(NDNestedModel): + """ + # Summary + + NAS (Network Attached Storage) telemetry configuration. 
+ + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + server: str = Field(description="NAS server address", default="") + export_settings: TelemetryNasExportSettingsModel = Field( + alias="exportSettings", + description="NAS export settings", + default_factory=TelemetryNasExportSettingsModel + ) + + +class TelemetrySettingsModel(NDNestedModel): + """ + # Summary + + Complete telemetry configuration for the fabric. + + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + flow_collection: TelemetryFlowCollectionModel = Field( + alias="flowCollection", + description="Flow collection settings", + default_factory=TelemetryFlowCollectionModel + ) + microburst: TelemetryMicroburstModel = Field( + description="Microburst detection settings", + default_factory=TelemetryMicroburstModel + ) + analysis_settings: TelemetryAnalysisSettingsModel = Field( + alias="analysisSettings", + description="Analysis settings", + default_factory=TelemetryAnalysisSettingsModel + ) + nas: TelemetryNasModel = Field( + description="NAS telemetry configuration", + default_factory=TelemetryNasModel + ) + energy_management: TelemetryEnergyManagementModel = Field( + alias="energyManagement", + description="Energy management settings", + default_factory=TelemetryEnergyManagementModel + ) + + +class ExternalStreamingSettingsModel(NDNestedModel): + """ + # Summary + + External streaming configuration for events and data export. 
+ + ## Raises + + None + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + email: List[Dict[str, Any]] = Field(description="Email streaming configuration", default_factory=list) + message_bus: List[Dict[str, Any]] = Field(alias="messageBus", description="Message bus configuration", default_factory=list) + syslog: Dict[str, Any] = Field( + description="Syslog streaming configuration", + default_factory=lambda: { + "collectionSettings": {"anomalies": []}, + "facility": "", + "servers": [] + } + ) + webhooks: List[Dict[str, Any]] = Field(description="Webhook configuration", default_factory=list) + + +class VxlanIbgpManagementModel(NDNestedModel): + """ + # Summary + + Comprehensive iBGP VXLAN fabric management configuration. + + This model contains all settings specific to iBGP VXLAN fabric types including + overlay configuration, underlay routing, multicast settings, and advanced features. + + ## Raises + + - `ValueError` - If BGP ASN, VLAN ranges, or IP ranges are invalid + - `TypeError` - If required string fields are not provided + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + # Fabric Type (required for discriminated union) + type: Literal[FabricTypeEnum.VXLAN_IBGP] = Field(description="Fabric management type", default=FabricTypeEnum.VXLAN_IBGP) + + # Core iBGP Configuration + bgp_asn: str = Field(alias="bgpAsn", description="BGP Autonomous System Number") + site_id: Optional[str] = Field(alias="siteId", description="Site identifier for the fabric", default="") + + # Missing Fields + name: Optional[str] = Field(description="Fabric name", min_length=1, max_length=64, default="") + border_count: Optional[int] = Field(alias="borderCount", description="Number of border switches", ge=0, le=32, default=0) + breakout_spine_interfaces: Optional[bool] = 
Field(alias="breakoutSpineInterfaces", description="Enable breakout spine interfaces", default=False) + designer_use_robot_password: Optional[bool] = Field(alias="designerUseRobotPassword", description="Use robot password for designer", default=False) + leaf_count: Optional[int] = Field(alias="leafCount", description="Number of leaf switches", ge=1, le=128, default=1) + spine_count: Optional[int] = Field(alias="spineCount", description="Number of spine switches", ge=1, le=32, default=1) + vrf_lite_ipv6_subnet_range: Optional[str] = Field(alias="vrfLiteIpv6SubnetRange", description="VRF Lite IPv6 subnet range", default="fd00::a33:0/112") + vrf_lite_ipv6_subnet_target_mask: Optional[int] = Field(alias="vrfLiteIpv6SubnetTargetMask", description="VRF Lite IPv6 subnet target mask", ge=112, le=128, default=126) + + + # Network Addressing + bgp_loopback_ip_range: str = Field( + alias="bgpLoopbackIpRange", + description="BGP loopback IP range", + default="10.2.0.0/22" + ) + nve_loopback_ip_range: str = Field( + alias="nveLoopbackIpRange", + description="NVE loopback IP range", + default="10.3.0.0/22" + ) + anycast_rendezvous_point_ip_range: str = Field( + alias="anycastRendezvousPointIpRange", + description="Anycast RP IP range", + default="10.254.254.0/24" + ) + intra_fabric_subnet_range: str = Field( + alias="intraFabricSubnetRange", + description="Intra-fabric subnet range", + default="10.4.0.0/16" + ) + + # VLAN and VNI Ranges + l2_vni_range: str = Field(alias="l2VniRange", description="Layer 2 VNI range", default="30000-49000") + l3_vni_range: str = Field(alias="l3VniRange", description="Layer 3 VNI range", default="50000-59000") + network_vlan_range: str = Field(alias="networkVlanRange", description="Network VLAN range", default="2300-2999") + vrf_vlan_range: str = Field(alias="vrfVlanRange", description="VRF VLAN range", default="2000-2299") + + # Overlay Configuration + overlay_mode: OverlayModeEnum = Field(alias="overlayMode", description="Overlay configuration 
mode", default=OverlayModeEnum.CLI) + replication_mode: ReplicationModeEnum = Field( + alias="replicationMode", + description="Multicast replication mode", + default=ReplicationModeEnum.MULTICAST + ) + multicast_group_subnet: str = Field( + alias="multicastGroupSubnet", + description="Multicast group subnet", + default="239.1.1.0/25" + ) + auto_generate_multicast_group_address: bool = Field( + alias="autoGenerateMulticastGroupAddress", + description="Auto-generate multicast group addresses", + default=False + ) + underlay_multicast_group_address_limit: int = Field( + alias="underlayMulticastGroupAddressLimit", + description="Underlay multicast group address limit", + ge=1, + le=255, + default=128 + ) + tenant_routed_multicast: bool = Field( + alias="tenantRoutedMulticast", + description="Enable tenant routed multicast", + default=False + ) + + # Underlay Configuration + link_state_routing_protocol: LinkStateRoutingProtocolEnum = Field( + alias="linkStateRoutingProtocol", + description="Underlay routing protocol", + default=LinkStateRoutingProtocolEnum.OSPF + ) + ospf_area_id: str = Field(alias="ospfAreaId", description="OSPF area ID", default="0.0.0.0") + fabric_interface_type: str = Field(alias="fabricInterfaceType", description="Fabric interface type", default="p2p") + + # Advanced Features + target_subnet_mask: int = Field(alias="targetSubnetMask", description="Target subnet mask", ge=24, le=31, default=30) + anycast_gateway_mac: str = Field( + alias="anycastGatewayMac", + description="Anycast gateway MAC address", + default="2020.0000.00aa" + ) + fabric_mtu: int = Field(alias="fabricMtu", description="Fabric MTU size", ge=1500, le=9216, default=9216) + l2_host_interface_mtu: int = Field( + alias="l2HostInterfaceMtu", + description="L2 host interface MTU", + ge=1500, + le=9216, + default=9216 + ) + + # VPC Configuration + vpc_domain_id_range: str = Field(alias="vpcDomainIdRange", description="vPC domain ID range", default="1-1000") + vpc_peer_link_vlan: str = 
Field(alias="vpcPeerLinkVlan", description="vPC peer link VLAN", default="3600") + vpc_peer_link_enable_native_vlan: bool = Field( + alias="vpcPeerLinkEnableNativeVlan", + description="Enable native VLAN on vPC peer link", + default=False + ) + vpc_peer_keep_alive_option: str = Field( + alias="vpcPeerKeepAliveOption", + description="vPC peer keep-alive option", + default="loopback" + ) + vpc_auto_recovery_timer: int = Field( + alias="vpcAutoRecoveryTimer", + description="vPC auto recovery timer", + ge=240, + le=3600, + default=360 + ) + vpc_delay_restore_timer: int = Field( + alias="vpcDelayRestoreTimer", + description="vPC delay restore timer", + ge=1, + le=3600, + default=150 + ) + + # Loopback Configuration + bgp_loopback_id: int = Field(alias="bgpLoopbackId", description="BGP loopback interface ID", ge=0, le=1023, default=0) + nve_loopback_id: int = Field(alias="nveLoopbackId", description="NVE loopback interface ID", ge=0, le=1023, default=1) + route_reflector_count: int = Field( + alias="routeReflectorCount", + description="Number of route reflectors", + ge=1, + le=4, + default=2 + ) + + # Templates + vrf_template: str = Field(alias="vrfTemplate", description="VRF template", default="Default_VRF_Universal") + network_template: str = Field(alias="networkTemplate", description="Network template", default="Default_Network_Universal") + vrf_extension_template: str = Field( + alias="vrfExtensionTemplate", + description="VRF extension template", + default="Default_VRF_Extension_Universal" + ) + network_extension_template: str = Field( + alias="networkExtensionTemplate", + description="Network extension template", + default="Default_Network_Extension_Universal" + ) + + # Optional Advanced Settings + performance_monitoring: bool = Field(alias="performanceMonitoring", description="Enable performance monitoring", default=False) + tenant_dhcp: bool = Field(alias="tenantDhcp", description="Enable tenant DHCP", default=True) + advertise_physical_ip: bool = 
Field(alias="advertisePhysicalIp", description="Advertise physical IP", default=False) + advertise_physical_ip_on_border: bool = Field( + alias="advertisePhysicalIpOnBorder", + description="Advertise physical IP on border", + default=True + ) + + # Protocol Settings + bgp_authentication: bool = Field(alias="bgpAuthentication", description="Enable BGP authentication", default=False) + bgp_authentication_key_type: str = Field( + alias="bgpAuthenticationKeyType", + description="BGP authentication key type", + default="3des" + ) + bfd: bool = Field(description="Enable BFD", default=False) + bfd_ibgp: bool = Field(alias="bfdIbgp", description="Enable BFD for iBGP", default=False) + + # Management Settings + nxapi: bool = Field(description="Enable NX-API", default=False) + nxapi_http: bool = Field(alias="nxapiHttp", description="Enable NX-API HTTP", default=True) + nxapi_https_port: int = Field(alias="nxapiHttpsPort", description="NX-API HTTPS port", ge=1, le=65535, default=443) + nxapi_http_port: int = Field(alias="nxapiHttpPort", description="NX-API HTTP port", ge=1, le=65535, default=80) + + # Bootstrap Settings + day0_bootstrap: bool = Field(alias="day0Bootstrap", description="Enable day-0 bootstrap", default=False) + bootstrap_subnet_collection: List[BootstrapSubnetModel] = Field( + alias="bootstrapSubnetCollection", + description="Bootstrap subnet collection", + default_factory=list + ) + + # Netflow Settings + netflow_settings: NetflowSettingsModel = Field( + alias="netflowSettings", + description="Netflow configuration", + default_factory=NetflowSettingsModel + ) + + # Multicast Settings + rendezvous_point_count: int = Field( + alias="rendezvousPointCount", + description="Number of rendezvous points", + ge=1, + le=4, + default=2 + ) + rendezvous_point_loopback_id: int = Field( + alias="rendezvousPointLoopbackId", + description="RP loopback interface ID", + ge=0, + le=1023, + default=254 + ) + + # System Settings + snmp_trap: bool = Field(alias="snmpTrap", 
description="Enable SNMP traps", default=True) + cdp: bool = Field(description="Enable CDP", default=False) + real_time_interface_statistics_collection: bool = Field( + alias="realTimeInterfaceStatisticsCollection", + description="Enable real-time interface statistics", + default=False + ) + tcam_allocation: bool = Field(alias="tcamAllocation", description="Enable TCAM allocation", default=True) + + # VPC Extended Configuration + vpc_peer_link_port_channel_id: str = Field(alias="vpcPeerLinkPortChannelId", description="vPC peer link port-channel ID", default="500") + vpc_ipv6_neighbor_discovery_sync: bool = Field( + alias="vpcIpv6NeighborDiscoverySync", description="Enable vPC IPv6 ND sync", default=True + ) + vpc_layer3_peer_router: bool = Field(alias="vpcLayer3PeerRouter", description="Enable vPC layer-3 peer router", default=True) + vpc_tor_delay_restore_timer: int = Field(alias="vpcTorDelayRestoreTimer", description="vPC TOR delay restore timer", default=30) + fabric_vpc_domain_id: bool = Field(alias="fabricVpcDomainId", description="Enable fabric vPC domain ID", default=False) + shared_vpc_domain_id: int = Field(alias="sharedVpcDomainId", description="Shared vPC domain ID", default=1) + fabric_vpc_qos: bool = Field(alias="fabricVpcQos", description="Enable fabric vPC QoS", default=False) + fabric_vpc_qos_policy_name: str = Field( + alias="fabricVpcQosPolicyName", description="Fabric vPC QoS policy name", default="spine_qos_for_fabric_vpc_peering" + ) + enable_peer_switch: bool = Field(alias="enablePeerSwitch", description="Enable peer switch", default=False) + + # Bootstrap / Day-0 / DHCP + local_dhcp_server: bool = Field(alias="localDhcpServer", description="Enable local DHCP server", default=False) + dhcp_protocol_version: str = Field(alias="dhcpProtocolVersion", description="DHCP protocol version", default="dhcpv4") + dhcp_start_address: str = Field(alias="dhcpStartAddress", description="DHCP start address", default="") + dhcp_end_address: str = 
Field(alias="dhcpEndAddress", description="DHCP end address", default="") + management_gateway: str = Field(alias="managementGateway", description="Management gateway", default="") + management_ipv4_prefix: int = Field(alias="managementIpv4Prefix", description="Management IPv4 prefix length", default=24) + management_ipv6_prefix: int = Field(alias="managementIpv6Prefix", description="Management IPv6 prefix length", default=64) + extra_config_nxos_bootstrap: str = Field(alias="extraConfigNxosBootstrap", description="Extra NX-OS bootstrap config", default="") + un_numbered_bootstrap_loopback_id: int = Field( + alias="unNumberedBootstrapLoopbackId", description="Unnumbered bootstrap loopback ID", default=253 + ) + un_numbered_dhcp_start_address: str = Field(alias="unNumberedDhcpStartAddress", description="Unnumbered DHCP start address", default="") + un_numbered_dhcp_end_address: str = Field(alias="unNumberedDhcpEndAddress", description="Unnumbered DHCP end address", default="") + inband_management: bool = Field(alias="inbandManagement", description="Enable in-band management", default=False) + inband_dhcp_servers: List[str] = Field(alias="inbandDhcpServers", description="In-band DHCP servers", default_factory=list) + seed_switch_core_interfaces: List[str] = Field( + alias="seedSwitchCoreInterfaces", description="Seed switch core interfaces", default_factory=list + ) + spine_switch_core_interfaces: List[str] = Field( + alias="spineSwitchCoreInterfaces", description="Spine switch core interfaces", default_factory=list + ) + + # Backup / Restore + real_time_backup: bool = Field(alias="realTimeBackup", description="Enable real-time backup", default=False) + scheduled_backup: bool = Field(alias="scheduledBackup", description="Enable scheduled backup", default=False) + scheduled_backup_time: str = Field(alias="scheduledBackupTime", description="Scheduled backup time", default="") + + # IPv6 / Dual-Stack + underlay_ipv6: bool = Field(alias="underlayIpv6", 
description="Enable IPv6 underlay", default=False) + ipv6_multicast_group_subnet: str = Field( + alias="ipv6MulticastGroupSubnet", description="IPv6 multicast group subnet", default="ff1e::/121" + ) + tenant_routed_multicast_ipv6: bool = Field( + alias="tenantRoutedMulticastIpv6", description="Enable tenant routed multicast IPv6", default=False + ) + ipv6_link_local: bool = Field(alias="ipv6LinkLocal", description="Enable IPv6 link-local", default=True) + ipv6_subnet_target_mask: int = Field(alias="ipv6SubnetTargetMask", description="IPv6 subnet target mask", default=126) + ipv6_subnet_range: str = Field(alias="ipv6SubnetRange", description="IPv6 subnet range", default="fd00::a04:0/112") + bgp_loopback_ipv6_range: str = Field(alias="bgpLoopbackIpv6Range", description="BGP loopback IPv6 range", default="fd00::a02:0/119") + nve_loopback_ipv6_range: str = Field(alias="nveLoopbackIpv6Range", description="NVE loopback IPv6 range", default="fd00::a03:0/118") + ipv6_anycast_rendezvous_point_ip_range: str = Field( + alias="ipv6AnycastRendezvousPointIpRange", description="IPv6 anycast RP IP range", default="fd00::254:254:0/118" + ) + + # Multicast / Rendezvous Point Extended + mvpn_vrf_route_import_id: bool = Field(alias="mvpnVrfRouteImportId", description="Enable MVPN VRF route import ID", default=True) + mvpn_vrf_route_import_id_range: str = Field( + alias="mvpnVrfRouteImportIdRange", description="MVPN VRF route import ID range", default="" + ) + vrf_route_import_id_reallocation: bool = Field( + alias="vrfRouteImportIdReallocation", description="Enable VRF route import ID reallocation", default=False + ) + l3vni_multicast_group: str = Field(alias="l3vniMulticastGroup", description="L3 VNI multicast group", default="239.1.1.0") + l3_vni_ipv6_multicast_group: str = Field(alias="l3VniIpv6MulticastGroup", description="L3 VNI IPv6 multicast group", default="ff1e::") + rendezvous_point_mode: str = Field(alias="rendezvousPointMode", description="Rendezvous point mode", 
default="asm") + phantom_rendezvous_point_loopback_id1: int = Field( + alias="phantomRendezvousPointLoopbackId1", description="Phantom RP loopback ID 1", default=2 + ) + phantom_rendezvous_point_loopback_id2: int = Field( + alias="phantomRendezvousPointLoopbackId2", description="Phantom RP loopback ID 2", default=3 + ) + phantom_rendezvous_point_loopback_id3: int = Field( + alias="phantomRendezvousPointLoopbackId3", description="Phantom RP loopback ID 3", default=4 + ) + phantom_rendezvous_point_loopback_id4: int = Field( + alias="phantomRendezvousPointLoopbackId4", description="Phantom RP loopback ID 4", default=5 + ) + anycast_loopback_id: int = Field(alias="anycastLoopbackId", description="Anycast loopback ID", default=10) + + # VRF Lite / Sub-Interface + sub_interface_dot1q_range: str = Field(alias="subInterfaceDot1qRange", description="Sub-interface 802.1q range", default="2-511") + vrf_lite_auto_config: str = Field(alias="vrfLiteAutoConfig", description="VRF lite auto-config mode", default="manual") + vrf_lite_subnet_range: str = Field(alias="vrfLiteSubnetRange", description="VRF lite subnet range", default="10.33.0.0/16") + vrf_lite_subnet_target_mask: int = Field(alias="vrfLiteSubnetTargetMask", description="VRF lite subnet target mask", default=30) + auto_unique_vrf_lite_ip_prefix: bool = Field( + alias="autoUniqueVrfLiteIpPrefix", description="Auto unique VRF lite IP prefix", default=False + ) + auto_symmetric_vrf_lite: bool = Field(alias="autoSymmetricVrfLite", description="Auto symmetric VRF lite", default=False) + auto_vrf_lite_default_vrf: bool = Field(alias="autoVrfLiteDefaultVrf", description="Auto VRF lite default VRF", default=False) + auto_symmetric_default_vrf: bool = Field(alias="autoSymmetricDefaultVrf", description="Auto symmetric default VRF", default=False) + default_vrf_redistribution_bgp_route_map: str = Field( + alias="defaultVrfRedistributionBgpRouteMap", description="Default VRF redistribution BGP route map", 
default="extcon-rmap-filter" + ) + + # Per-VRF Loopback + per_vrf_loopback_auto_provision: bool = Field( + alias="perVrfLoopbackAutoProvision", description="Per-VRF loopback auto-provision", default=False + ) + per_vrf_loopback_ip_range: str = Field( + alias="perVrfLoopbackIpRange", description="Per-VRF loopback IP range", default="10.5.0.0/22" + ) + per_vrf_loopback_auto_provision_ipv6: bool = Field( + alias="perVrfLoopbackAutoProvisionIpv6", description="Per-VRF loopback auto-provision IPv6", default=False + ) + per_vrf_loopback_ipv6_range: str = Field( + alias="perVrfLoopbackIpv6Range", description="Per-VRF loopback IPv6 range", default="fd00::a05:0/112" + ) + per_vrf_unique_loopback_auto_provision: bool = Field( + alias="perVrfUniqueLoopbackAutoProvision", description="Per-VRF unique loopback auto-provision", default=False + ) + per_vrf_unique_loopback_ip_range: str = Field( + alias="perVrfUniqueLoopbackIpRange", description="Per-VRF unique loopback IP range", default="10.6.0.0/22" + ) + per_vrf_unique_loopback_auto_provision_v6: bool = Field( + alias="perVrfUniqueLoopbackAutoProvisionV6", description="Per-VRF unique loopback auto-provision IPv6", default=False + ) + per_vrf_unique_loopback_ipv6_range: str = Field( + alias="perVrfUniqueLoopbackIpv6Range", description="Per-VRF unique loopback IPv6 range", default="fd00::a06:0/112" + ) + + # Authentication — BGP Extended + bgp_authentication_key: str = Field(alias="bgpAuthenticationKey", description="BGP authentication key", default="") + + # Authentication — PIM + pim_hello_authentication: bool = Field(alias="pimHelloAuthentication", description="Enable PIM hello authentication", default=False) + pim_hello_authentication_key: str = Field(alias="pimHelloAuthenticationKey", description="PIM hello authentication key", default="") + + # Authentication — BFD + bfd_authentication: bool = Field(alias="bfdAuthentication", description="Enable BFD authentication", default=False) + bfd_authentication_key_id: int = 
Field(alias="bfdAuthenticationKeyId", description="BFD authentication key ID", default=100) + bfd_authentication_key: str = Field(alias="bfdAuthenticationKey", description="BFD authentication key", default="") + bfd_ospf: bool = Field(alias="bfdOspf", description="Enable BFD for OSPF", default=False) + bfd_isis: bool = Field(alias="bfdIsis", description="Enable BFD for IS-IS", default=False) + bfd_pim: bool = Field(alias="bfdPim", description="Enable BFD for PIM", default=False) + + # Authentication — OSPF + ospf_authentication: bool = Field(alias="ospfAuthentication", description="Enable OSPF authentication", default=False) + ospf_authentication_key_id: int = Field(alias="ospfAuthenticationKeyId", description="OSPF authentication key ID", default=127) + ospf_authentication_key: str = Field(alias="ospfAuthenticationKey", description="OSPF authentication key", default="") + + # IS-IS + isis_level: str = Field(alias="isisLevel", description="IS-IS level", default="level-2") + isis_area_number: str = Field(alias="isisAreaNumber", description="IS-IS area number", default="0001") + isis_point_to_point: bool = Field(alias="isisPointToPoint", description="IS-IS point-to-point", default=True) + isis_authentication: bool = Field(alias="isisAuthentication", description="Enable IS-IS authentication", default=False) + isis_authentication_keychain_name: str = Field( + alias="isisAuthenticationKeychainName", description="IS-IS authentication keychain name", default="" + ) + isis_authentication_keychain_key_id: int = Field( + alias="isisAuthenticationKeychainKeyId", description="IS-IS authentication keychain key ID", default=127 + ) + isis_authentication_key: str = Field(alias="isisAuthenticationKey", description="IS-IS authentication key", default="") + isis_overload: bool = Field(alias="isisOverload", description="Enable IS-IS overload bit", default=True) + isis_overload_elapse_time: int = Field(alias="isisOverloadElapseTime", description="IS-IS overload elapse time", 
default=60) + + # MACsec + macsec: bool = Field(description="Enable MACsec", default=False) + macsec_cipher_suite: str = Field(alias="macsecCipherSuite", description="MACsec cipher suite", default="GCM-AES-XPN-256") + macsec_key_string: str = Field(alias="macsecKeyString", description="MACsec key string", default="") + macsec_algorithm: str = Field(alias="macsecAlgorithm", description="MACsec algorithm", default="AES_128_CMAC") + macsec_fallback_key_string: str = Field(alias="macsecFallbackKeyString", description="MACsec fallback key string", default="") + macsec_fallback_algorithm: str = Field(alias="macsecFallbackAlgorithm", description="MACsec fallback algorithm", default="AES_128_CMAC") + macsec_report_timer: int = Field(alias="macsecReportTimer", description="MACsec report timer", default=5) + + # VRF Lite MACsec + vrf_lite_macsec: bool = Field(alias="vrfLiteMacsec", description="Enable VRF lite MACsec", default=False) + vrf_lite_macsec_cipher_suite: str = Field( + alias="vrfLiteMacsecCipherSuite", description="VRF lite MACsec cipher suite", default="GCM-AES-XPN-256" + ) + vrf_lite_macsec_key_string: str = Field(alias="vrfLiteMacsecKeyString", description="VRF lite MACsec key string", default="") + vrf_lite_macsec_algorithm: str = Field( + alias="vrfLiteMacsecAlgorithm", description="VRF lite MACsec algorithm", default="AES_128_CMAC" + ) + vrf_lite_macsec_fallback_key_string: str = Field( + alias="vrfLiteMacsecFallbackKeyString", description="VRF lite MACsec fallback key string", default="" + ) + vrf_lite_macsec_fallback_algorithm: str = Field( + alias="vrfLiteMacsecFallbackAlgorithm", description="VRF lite MACsec fallback algorithm", default="AES_128_CMAC" + ) + + # Quantum Key Distribution / Trustpoint + quantum_key_distribution: bool = Field(alias="quantumKeyDistribution", description="Enable quantum key distribution", default=False) + quantum_key_distribution_profile_name: str = Field( + alias="quantumKeyDistributionProfileName", description="Quantum key 
distribution profile name", default="" + ) + key_management_entity_server_ip: str = Field( + alias="keyManagementEntityServerIp", description="Key management entity server IP", default="" + ) + key_management_entity_server_port: int = Field( + alias="keyManagementEntityServerPort", description="Key management entity server port", default=0 + ) + trustpoint_label: str = Field(alias="trustpointLabel", description="Trustpoint label", default="") + skip_certificate_verification: bool = Field( + alias="skipCertificateVerification", description="Skip certificate verification", default=False + ) + + # BGP / Routing Enhancements + auto_bgp_neighbor_description: bool = Field( + alias="autoBgpNeighborDescription", description="Auto BGP neighbor description", default=True + ) + ibgp_peer_template: str = Field(alias="ibgpPeerTemplate", description="iBGP peer template", default="") + leaf_ibgp_peer_template: str = Field(alias="leafIbgpPeerTemplate", description="Leaf iBGP peer template", default="") + link_state_routing_tag: str = Field(alias="linkStateRoutingTag", description="Link state routing tag", default="UNDERLAY") + static_underlay_ip_allocation: bool = Field( + alias="staticUnderlayIpAllocation", description="Static underlay IP allocation", default=False + ) + router_id_range: str = Field(alias="routerIdRange", description="Router ID range", default="10.2.0.0/23") + + # Security Group Tags (SGT) + security_group_tag: bool = Field(alias="securityGroupTag", description="Enable security group tag", default=False) + security_group_tag_prefix: str = Field(alias="securityGroupTagPrefix", description="SGT prefix", default="SG_") + security_group_tag_mac_segmentation: bool = Field( + alias="securityGroupTagMacSegmentation", description="Enable SGT MAC segmentation", default=False + ) + security_group_tag_id_range: str = Field( + alias="securityGroupTagIdRange", description="SGT ID range", default="10000-14000" + ) + security_group_tag_preprovision: bool = Field( + 
alias="securityGroupTagPreprovision", description="Enable SGT preprovision", default=False + ) + security_group_status: str = Field(alias="securityGroupStatus", description="Security group status", default="enabled") + + # Queuing / QoS + default_queuing_policy: bool = Field(alias="defaultQueuingPolicy", description="Enable default queuing policy", default=False) + default_queuing_policy_cloudscale: str = Field( + alias="defaultQueuingPolicyCloudscale", description="Default queuing policy cloudscale", default="queuing_policy_default_8q_cloudscale" + ) + default_queuing_policy_r_series: str = Field( + alias="defaultQueuingPolicyRSeries", description="Default queuing policy R-Series", default="queuing_policy_default_r_series" + ) + default_queuing_policy_other: str = Field( + alias="defaultQueuingPolicyOther", description="Default queuing policy other", default="queuing_policy_default_other" + ) + aiml_qos: bool = Field(alias="aimlQos", description="Enable AI/ML QoS", default=False) + aiml_qos_policy: str = Field(alias="aimlQosPolicy", description="AI/ML QoS policy", default="400G") + roce_v2: str = Field(alias="roceV2", description="RoCEv2 DSCP value", default="26") + cnp: str = Field(description="CNP value", default="48") + wred_min: int = Field(alias="wredMin", description="WRED minimum threshold", default=950) + wred_max: int = Field(alias="wredMax", description="WRED maximum threshold", default=3000) + wred_drop_probability: int = Field(alias="wredDropProbability", description="WRED drop probability", default=7) + wred_weight: int = Field(alias="wredWeight", description="WRED weight", default=0) + bandwidth_remaining: int = Field(alias="bandwidthRemaining", description="Bandwidth remaining percentage", default=50) + dlb: bool = Field(description="Enable dynamic load balancing", default=False) + dlb_mode: str = Field(alias="dlbMode", description="DLB mode", default="flowlet") + dlb_mixed_mode_default: str = Field(alias="dlbMixedModeDefault", description="DLB 
mixed mode default", default="ecmp") + flowlet_aging: int = Field(alias="flowletAging", description="Flowlet aging interval", default=1) + flowlet_dscp: str = Field(alias="flowletDscp", description="Flowlet DSCP value", default="") + per_packet_dscp: str = Field(alias="perPacketDscp", description="Per-packet DSCP value", default="") + ai_load_sharing: bool = Field(alias="aiLoadSharing", description="Enable AI load sharing", default=False) + priority_flow_control_watch_interval: int = Field( + alias="priorityFlowControlWatchInterval", description="Priority flow control watch interval", default=101 + ) + + # PTP + ptp: bool = Field(description="Enable PTP", default=False) + ptp_loopback_id: int = Field(alias="ptpLoopbackId", description="PTP loopback ID", default=0) + ptp_domain_id: int = Field(alias="ptpDomainId", description="PTP domain ID", default=0) + ptp_vlan_id: int = Field(alias="ptpVlanId", description="PTP VLAN ID", default=2) + + # STP + stp_root_option: str = Field(alias="stpRootOption", description="STP root option", default="mst") + stp_vlan_range: str = Field(alias="stpVlanRange", description="STP VLAN range", default="") + mst_instance_range: str = Field(alias="mstInstanceRange", description="MST instance range", default="0-3,5,7-9") + stp_bridge_priority: int = Field(alias="stpBridgePriority", description="STP bridge priority", default=0) + + # MPLS Handoff + mpls_handoff: bool = Field(alias="mplsHandoff", description="Enable MPLS handoff", default=False) + mpls_loopback_identifier: int = Field(alias="mplsLoopbackIdentifier", description="MPLS loopback identifier", default=101) + mpls_isis_area_number: str = Field(alias="mplsIsisAreaNumber", description="MPLS IS-IS area number", default="0001") + mpls_loopback_ip_range: str = Field(alias="mplsLoopbackIpRange", description="MPLS loopback IP range", default="10.101.0.0/25") + + # Private VLAN + private_vlan: bool = Field(alias="privateVlan", description="Enable private VLAN", default=False) + 
default_private_vlan_secondary_network_template: str = Field( + alias="defaultPrivateVlanSecondaryNetworkTemplate", + description="Default private VLAN secondary network template", + default="Pvlan_Secondary_Network" + ) + allow_vlan_on_leaf_tor_pairing: str = Field( + alias="allowVlanOnLeafTorPairing", description="Allow VLAN on leaf/TOR pairing", default="none" + ) + + # Leaf / TOR + leaf_tor_id_range: bool = Field(alias="leafTorIdRange", description="Enable leaf/TOR ID range", default=False) + leaf_tor_vpc_port_channel_id_range: str = Field( + alias="leafTorVpcPortChannelIdRange", description="Leaf/TOR vPC port-channel ID range", default="1-499" + ) + + # Resource ID Ranges + l3_vni_no_vlan_default_option: bool = Field( + alias="l3VniNoVlanDefaultOption", description="L3 VNI no-VLAN default option", default=False + ) + ip_service_level_agreement_id_range: str = Field( + alias="ipServiceLevelAgreementIdRange", description="IP SLA ID range", default="10000-19999" + ) + object_tracking_number_range: str = Field( + alias="objectTrackingNumberRange", description="Object tracking number range", default="100-299" + ) + service_network_vlan_range: str = Field( + alias="serviceNetworkVlanRange", description="Service network VLAN range", default="3000-3199" + ) + route_map_sequence_number_range: str = Field( + alias="routeMapSequenceNumberRange", description="Route map sequence number range", default="1-65534" + ) + + # DNS / NTP / Syslog Collections + ntp_server_collection: List[str] = Field(default_factory=lambda: ["string"], alias="ntpServerCollection") + ntp_server_vrf_collection: List[str] = Field(default_factory=lambda: ["string"], alias="ntpServerVrfCollection") + dns_collection: List[str] = Field(default_factory=lambda: ["5.192.28.174"], alias="dnsCollection") + dns_vrf_collection: List[str] = Field(default_factory=lambda: ["string"], alias="dnsVrfCollection") + syslog_server_collection: List[str] = Field(default_factory=lambda: ["string"], 
alias="syslogServerCollection") + syslog_server_vrf_collection: List[str] = Field(default_factory=lambda: ["string"], alias="syslogServerVrfCollection") + syslog_severity_collection: List[int] = Field(default_factory=lambda: [7], alias="syslogSeverityCollection", description="Syslog severity levels (0-7)") + + # Extra Config / Pre-Interface Config / AAA / Banner + banner: str = Field(description="Fabric banner text", default="") + extra_config_leaf: str = Field(alias="extraConfigLeaf", description="Extra leaf config", default="") + extra_config_spine: str = Field(alias="extraConfigSpine", description="Extra spine config", default="") + extra_config_tor: str = Field(alias="extraConfigTor", description="Extra TOR config", default="") + extra_config_intra_fabric_links: str = Field( + alias="extraConfigIntraFabricLinks", description="Extra intra-fabric links config", default="" + ) + extra_config_aaa: str = Field(alias="extraConfigAaa", description="Extra AAA config", default="") + aaa: bool = Field(description="Enable AAA", default=False) + pre_interface_config_leaf: str = Field(alias="preInterfaceConfigLeaf", description="Pre-interface leaf config", default="") + pre_interface_config_spine: str = Field(alias="preInterfaceConfigSpine", description="Pre-interface spine config", default="") + pre_interface_config_tor: str = Field(alias="preInterfaceConfigTor", description="Pre-interface TOR config", default="") + + # System / Compliance / OAM / Misc + anycast_border_gateway_advertise_physical_ip: bool = Field( + alias="anycastBorderGatewayAdvertisePhysicalIp", description="Anycast border gateway advertise physical IP", default=False + ) + greenfield_debug_flag: str = Field(alias="greenfieldDebugFlag", description="Greenfield debug flag", default="enable") + interface_statistics_load_interval: int = Field( + alias="interfaceStatisticsLoadInterval", description="Interface statistics load interval", default=10 + ) + nve_hold_down_timer: int = 
Field(alias="nveHoldDownTimer", description="NVE hold-down timer", default=180) + next_generation_oam: bool = Field(alias="nextGenerationOAM", description="Enable next-generation OAM", default=True) + ngoam_south_bound_loop_detect: bool = Field( + alias="ngoamSouthBoundLoopDetect", description="Enable NGOAM south bound loop detect", default=False + ) + ngoam_south_bound_loop_detect_probe_interval: int = Field( + alias="ngoamSouthBoundLoopDetectProbeInterval", description="NGOAM south bound loop detect probe interval", default=300 + ) + ngoam_south_bound_loop_detect_recovery_interval: int = Field( + alias="ngoamSouthBoundLoopDetectRecoveryInterval", description="NGOAM south bound loop detect recovery interval", default=600 + ) + strict_config_compliance_mode: bool = Field( + alias="strictConfigComplianceMode", description="Enable strict config compliance mode", default=False + ) + advanced_ssh_option: bool = Field(alias="advancedSshOption", description="Enable advanced SSH option", default=False) + copp_policy: str = Field(alias="coppPolicy", description="CoPP policy", default="dense") + power_redundancy_mode: str = Field(alias="powerRedundancyMode", description="Power redundancy mode", default="redundant") + host_interface_admin_state: bool = Field( + alias="hostInterfaceAdminState", description="Host interface admin state", default=True + ) + heartbeat_interval: int = Field(alias="heartbeatInterval", description="Heartbeat interval", default=190) + policy_based_routing: bool = Field(alias="policyBasedRouting", description="Enable policy-based routing", default=False) + brownfield_network_name_format: str = Field( + alias="brownfieldNetworkNameFormat", description="Brownfield network name format", default="Auto_Net_VNI$$VNI$$_VLAN$$VLAN_ID$$" + ) + brownfield_skip_overlay_network_attachments: bool = Field( + alias="brownfieldSkipOverlayNetworkAttachments", description="Skip brownfield overlay network attachments", default=False + ) + allow_smart_switch_onboarding: 
bool = Field( + alias="allowSmartSwitchOnboarding", description="Allow smart switch onboarding", default=False + ) + + @field_validator("bgp_asn") + @classmethod + def validate_bgp_asn(cls, value: str) -> str: + """ + # Summary + + Validate BGP ASN format and range. + + ## Raises + + - `ValueError` - If ASN is not numeric or outside valid range (1-4294967295) + """ + if not value.isdigit(): + raise ValueError(f"BGP ASN must be numeric, got: {value}") + + asn_int = int(value) + if not (1 <= asn_int <= 4294967295): + raise ValueError(f"BGP ASN must be between 1 and 4294967295, got: {asn_int}") + + return value + + @field_validator("site_id") + @classmethod + def validate_site_id(cls, value: str) -> str: + """ + # Summary + + Validate site ID format. + + ## Raises + + - `ValueError` - If site ID is not numeric or outside valid range + """ + if not value.isdigit(): + raise ValueError(f"Site ID must be numeric, got: {value}") + + site_id_int = int(value) + if not (1 <= site_id_int <= 65535): + raise ValueError(f"Site ID must be between 1 and 65535, got: {site_id_int}") + + return value + + @field_validator("anycast_gateway_mac") + @classmethod + def validate_mac_address(cls, value: str) -> str: + """ + # Summary + + Validate MAC address format. + + ## Raises + + - `ValueError` - If MAC address format is invalid + """ + mac_pattern = re.compile(r'^([0-9a-fA-F]{4}\.){2}[0-9a-fA-F]{4}$') + if not mac_pattern.match(value): + raise ValueError(f"Invalid MAC address format, expected xxxx.xxxx.xxxx, got: {value}") + + return value.lower() + + +class FabricModel(NDBaseModel): + """ + # Summary + + Complete model for creating a new iBGP VXLAN fabric. + + This model combines all necessary components for fabric creation including + basic fabric properties, management settings, telemetry, and streaming configuration. 
+ + ## Raises + + - `ValueError` - If required fields are missing or invalid + - `TypeError` - If field types don't match expected types + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" # Allow extra fields from API responses + ) + + identifiers: ClassVar[Optional[List[str]]] = ["name"] + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" + + # Basic Fabric Properties + category: Literal["fabric"] = Field(description="Resource category", default="fabric") + name: str = Field(description="Fabric name", min_length=1, max_length=64) + location: Optional[LocationModel] = Field(description="Geographic location of the fabric", default=None) + + # License and Operations + license_tier: LicenseTierEnum = Field(alias="licenseTier", description="License tier", default=LicenseTierEnum.PREMIER) + alert_suspend: AlertSuspendEnum = Field(alias="alertSuspend", description="Alert suspension state", default=AlertSuspendEnum.DISABLED) + telemetry_collection: bool = Field(alias="telemetryCollection", description="Enable telemetry collection", default=False) + telemetry_collection_type: str = Field(alias="telemetryCollectionType", description="Telemetry collection type", default="outOfBand") + telemetry_streaming_protocol: str = Field(alias="telemetryStreamingProtocol", description="Telemetry streaming protocol", default="ipv4") + telemetry_source_interface: str = Field(alias="telemetrySourceInterface", description="Telemetry source interface", default="") + telemetry_source_vrf: str = Field(alias="telemetrySourceVrf", description="Telemetry source VRF", default="") + security_domain: str = Field(alias="securityDomain", description="Security domain", default="all") + + # Core Management Configuration + management: Optional[VxlanIbgpManagementModel] = Field(description="iBGP VXLAN management configuration", default=None) + + # Optional 
Advanced Settings + telemetry_settings: Optional[TelemetrySettingsModel] = Field( + alias="telemetrySettings", + description="Telemetry configuration", + default=None + ) + external_streaming_settings: ExternalStreamingSettingsModel = Field( + alias="externalStreamingSettings", + description="External streaming settings", + default_factory=ExternalStreamingSettingsModel + ) + + @field_validator("name") + @classmethod + def validate_fabric_name(cls, value: str) -> str: + """ + # Summary + + Validate fabric name format and characters. + + ## Raises + + - `ValueError` - If name contains invalid characters or format + """ + if not re.match(r'^[a-zA-Z0-9_-]+$', value): + raise ValueError(f"Fabric name can only contain letters, numbers, underscores, and hyphens, got: {value}") + + return value + + @model_validator(mode='after') + def validate_fabric_consistency(self) -> 'FabricModel': + """ + # Summary + + Validate consistency between fabric settings and management configuration. + + ## Raises + + - `ValueError` - If fabric settings are inconsistent + """ + # Ensure management type matches model type + if self.management is not None and self.management.type != FabricTypeEnum.VXLAN_IBGP: + raise ValueError(f"Management type must be {FabricTypeEnum.VXLAN_IBGP}") + + # Propagate fabric name to management model + if self.management is not None: + self.management.name = self.name + + # Propagate BGP ASN to Site ID management model if not set + if self.management is not None and self.management.site_id == "": + self.management.site_id = self.management.bgp_asn # Default site ID to BGP ASN if not provided + + # Validate telemetry consistency + if self.telemetry_collection and self.telemetry_settings is None: + # Auto-create default telemetry settings if collection is enabled + self.telemetry_settings = TelemetrySettingsModel() + + return self + + # TODO: to generate from Fields (low priority) + @classmethod + def get_argument_spec(cls) -> Dict: + return dict( + state={ + "type": 
"str", + "default": "merged", + "choices": ["merged", "replaced", "deleted", "overridden", "query"], + }, + config={"required": False, "type": "list", "elements": "dict"}, + ) + + +class FabricDeleteModel(BaseModel): + """ + # Summary + + Model for deleting an iBGP VXLAN fabric. + + Only requires the fabric name for identification. + + ## Raises + + - `ValueError` - If fabric name is invalid + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + name: str = Field(description="Name of the fabric to delete", min_length=1, max_length=64) + + @field_validator("name") + @classmethod + def validate_fabric_name(cls, value: str) -> str: + """ + # Summary + + Validate fabric name format for deletion. + + ## Raises + + - `ValueError` - If name format is invalid + """ + if not re.match(r'^[a-zA-Z0-9_-]+$', value): + raise ValueError(f"Fabric name can only contain letters, numbers, underscores, and hyphens, got: {value}") + + return value + + + +# Export all models for external use +__all__ = [ + "LocationModel", + "NetflowExporterModel", + "NetflowRecordModel", + "NetflowMonitorModel", + "NetflowSettingsModel", + "BootstrapSubnetModel", + "FabricDesignSettingsModel", + "TelemetryFlowCollectionModel", + "TelemetryMicroburstModel", + "TelemetryAnalysisSettingsModel", + "TelemetryEnergyManagementModel", + "TelemetrySettingsModel", + "ExternalStreamingSettingsModel", + "VxlanIbgpManagementModel", + "FabricModel", + "FabricDeleteModel", + "FabricTypeEnum", + "AlertSuspendEnum", + "LicenseTierEnum", + "ReplicationModeEnum", + "OverlayModeEnum", + "LinkStateRoutingProtocolEnum" +] \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/manage_fabric.py b/plugins/module_utils/orchestrators/manage_fabric.py new file mode 100644 index 00000000..0d70bdad --- /dev/null +++ b/plugins/module_utils/orchestrators/manage_fabric.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- + +# 
Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Type +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricModel +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage_fabrics import ( + EpApiV1ManageFabricsGet, + EpApiV1ManageFabricsListGet, + EpApiV1ManageFabricsPost, + EpApiV1ManageFabricsPut, + EpApiV1ManageFabricsDelete, +) + + +class ManageFabricOrchestrator(NDBaseOrchestrator): + model_class: Type[NDBaseModel] = FabricModel + + create_endpoint: Type[NDBaseEndpoint] = EpApiV1ManageFabricsPost + update_endpoint: Type[NDBaseEndpoint] = EpApiV1ManageFabricsPut + delete_endpoint: Type[NDBaseEndpoint] = EpApiV1ManageFabricsDelete + query_one_endpoint: Type[NDBaseEndpoint] = EpApiV1ManageFabricsGet + query_all_endpoint: Type[NDBaseEndpoint] = EpApiV1ManageFabricsListGet + + def query_all(self) -> ResponseType: + """ + Custom query_all action to extract 'fabrics' from response. 
+ """ + try: + result = self.sender.query_obj(self.query_all_endpoint.base_path) + return result.get("fabrics", []) or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index e8924cd2..fb43f1b5 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -157,6 +157,10 @@ def BeforeValidator(func): # pylint: disable=invalid-name # Fallback: SecretStr SecretStr = str + # Fallback: IPvAnyAddress and IPvAnyNetwork + IPvAnyAddress = str + IPvAnyNetwork = str + # Fallback: ValidationError class ValidationError(Exception): """ diff --git a/plugins/modules/nd_manage_fabric_ibgp.py b/plugins/modules/nd_manage_fabric_ibgp.py new file mode 100644 index 00000000..a3bf42e6 --- /dev/null +++ b/plugins/modules/nd_manage_fabric_ibgp.py @@ -0,0 +1,1393 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"} + +DOCUMENTATION = r""" +--- +module: nd_manage_fabric_ibgp +version_added: "1.4.0" +short_description: Manage iBGP VXLAN fabrics on Cisco Nexus Dashboard Fabric Controller +description: +- Manage iBGP VXLAN fabrics on Cisco Nexus Dashboard Fabric Controller (NDFC). +- It supports creating, updating, replacing, and deleting iBGP VXLAN fabrics. +author: +- Mike Wiebe (@mwiebe) +options: + config: + description: + - The list of iBGP VXLAN fabrics to configure. + type: list + elements: dict + suboptions: + name: + description: + - The name of the fabric. + - Only letters, numbers, underscores, and hyphens are allowed. 
+ - The O(config.name) must be defined when creating, updating or deleting a fabric. + type: str + required: true + category: + description: + - The resource category. + type: str + default: fabric + location: + description: + - The geographic location of the fabric. + type: dict + suboptions: + latitude: + description: + - Latitude coordinate of the fabric location (-90 to 90). + type: float + required: true + longitude: + description: + - Longitude coordinate of the fabric location (-180 to 180). + type: float + required: true + license_tier: + description: + - The license tier for the fabric. + type: str + default: premier + choices: [ essentials, premier ] + alert_suspend: + description: + - The alert suspension state for the fabric. + type: str + default: disabled + choices: [ enabled, disabled ] + telemetry_collection: + description: + - Enable telemetry collection for the fabric. + type: bool + default: false + telemetry_collection_type: + description: + - The telemetry collection type. + type: str + default: outOfBand + telemetry_streaming_protocol: + description: + - The telemetry streaming protocol. + type: str + default: ipv4 + telemetry_source_interface: + description: + - The telemetry source interface. + type: str + default: "" + telemetry_source_vrf: + description: + - The telemetry source VRF. + type: str + default: "" + security_domain: + description: + - The security domain associated with the fabric. + type: str + default: all + management: + description: + - The iBGP VXLAN management configuration for the fabric. + type: dict + suboptions: + type: + description: + - The fabric management type. Must be C(vxlanIbgp) for iBGP VXLAN fabrics. + type: str + default: vxlanIbgp + choices: [ vxlanIbgp ] + bgp_asn: + description: + - The BGP Autonomous System Number for the fabric. + - Must be a numeric value between 1 and 4294967295. + type: str + required: true + site_id: + description: + - The site identifier for the fabric. 
+ - Must be a numeric value between 1 and 65535. + - Defaults to the value of O(config.management.bgp_asn) if not provided. + type: str + default: "" + target_subnet_mask: + description: + - The target subnet mask for intra-fabric links. + type: int + default: 30 + anycast_gateway_mac: + description: + - The anycast gateway MAC address in xxxx.xxxx.xxxx format. + type: str + default: 2020.0000.00aa + replication_mode: + description: + - The multicast replication mode. + type: str + default: multicast + choices: [ multicast, ingress ] + multicast_group_subnet: + description: + - The multicast group subnet. + type: str + default: "239.1.1.0/25" + auto_generate_multicast_group_address: + description: + - Automatically generate multicast group addresses. + type: bool + default: false + underlay_multicast_group_address_limit: + description: + - The underlay multicast group address limit (1-255). + type: int + default: 128 + tenant_routed_multicast: + description: + - Enable tenant routed multicast. + type: bool + default: false + rendezvous_point_count: + description: + - The number of rendezvous points (1-4). + type: int + default: 2 + rendezvous_point_loopback_id: + description: + - The rendezvous point loopback interface ID (0-1023). + type: int + default: 254 + overlay_mode: + description: + - The overlay configuration mode. + type: str + default: cli + choices: [ cli, config-profile ] + link_state_routing_protocol: + description: + - The underlay link-state routing protocol. + type: str + default: ospf + choices: [ ospf, isis ] + ospf_area_id: + description: + - The OSPF area ID. + type: str + default: "0.0.0.0" + fabric_interface_type: + description: + - The fabric interface type. + type: str + default: p2p + bgp_loopback_id: + description: + - The BGP loopback interface ID (0-1023). + type: int + default: 0 + nve_loopback_id: + description: + - The NVE loopback interface ID (0-1023). 
+ type: int + default: 1 + route_reflector_count: + description: + - The number of BGP route reflectors (1-4). + type: int + default: 2 + bgp_loopback_ip_range: + description: + - The BGP loopback IP address pool. + type: str + default: "10.2.0.0/22" + nve_loopback_ip_range: + description: + - The NVE loopback IP address pool. + type: str + default: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: + description: + - The anycast rendezvous point IP address pool. + type: str + default: "10.254.254.0/24" + intra_fabric_subnet_range: + description: + - The intra-fabric subnet IP address pool. + type: str + default: "10.4.0.0/16" + router_id_range: + description: + - The router ID IP address pool. + type: str + default: "10.2.0.0/23" + l2_vni_range: + description: + - The Layer 2 VNI range. + type: str + default: "30000-49000" + l3_vni_range: + description: + - The Layer 3 VNI range. + type: str + default: "50000-59000" + network_vlan_range: + description: + - The network VLAN range. + type: str + default: "2300-2999" + vrf_vlan_range: + description: + - The VRF VLAN range. + type: str + default: "2000-2299" + sub_interface_dot1q_range: + description: + - The sub-interface 802.1q range. + type: str + default: "2-511" + service_network_vlan_range: + description: + - The service network VLAN range. + type: str + default: "3000-3199" + l3_vni_no_vlan_default_option: + description: + - Enable L3 VNI no-VLAN default option. + type: bool + default: false + fabric_mtu: + description: + - The fabric MTU size (1500-9216). + type: int + default: 9216 + l2_host_interface_mtu: + description: + - The L2 host interface MTU size (1500-9216). + type: int + default: 9216 + vpc_domain_id_range: + description: + - The vPC domain ID range. + type: str + default: "1-1000" + vpc_peer_link_vlan: + description: + - The vPC peer link VLAN ID. + type: str + default: "3600" + vpc_peer_link_enable_native_vlan: + description: + - Enable native VLAN on the vPC peer link. 
+ type: bool + default: false + vpc_peer_keep_alive_option: + description: + - The vPC peer keep-alive option. + type: str + default: loopback + vpc_auto_recovery_timer: + description: + - The vPC auto recovery timer in seconds (240-3600). + type: int + default: 360 + vpc_delay_restore_timer: + description: + - The vPC delay restore timer in seconds (1-3600). + type: int + default: 150 + vpc_peer_link_port_channel_id: + description: + - The vPC peer link port-channel ID. + type: str + default: "500" + vpc_ipv6_neighbor_discovery_sync: + description: + - Enable vPC IPv6 neighbor discovery synchronization. + type: bool + default: true + vpc_layer3_peer_router: + description: + - Enable vPC layer-3 peer router. + type: bool + default: true + vpc_tor_delay_restore_timer: + description: + - The vPC TOR delay restore timer. + type: int + default: 30 + fabric_vpc_domain_id: + description: + - Enable fabric vPC domain ID. + type: bool + default: false + shared_vpc_domain_id: + description: + - The shared vPC domain ID. + type: int + default: 1 + fabric_vpc_qos: + description: + - Enable fabric vPC QoS. + type: bool + default: false + fabric_vpc_qos_policy_name: + description: + - The fabric vPC QoS policy name. + type: str + default: spine_qos_for_fabric_vpc_peering + enable_peer_switch: + description: + - Enable peer switch. + type: bool + default: false + vrf_template: + description: + - The VRF template name. + type: str + default: Default_VRF_Universal + network_template: + description: + - The network template name. + type: str + default: Default_Network_Universal + vrf_extension_template: + description: + - The VRF extension template name. + type: str + default: Default_VRF_Extension_Universal + network_extension_template: + description: + - The network extension template name. + type: str + default: Default_Network_Extension_Universal + performance_monitoring: + description: + - Enable performance monitoring. 
+ type: bool + default: false + tenant_dhcp: + description: + - Enable tenant DHCP. + type: bool + default: true + advertise_physical_ip: + description: + - Advertise physical IP address for NVE loopback. + type: bool + default: false + advertise_physical_ip_on_border: + description: + - Advertise physical IP address on border switches. + type: bool + default: true + anycast_border_gateway_advertise_physical_ip: + description: + - Enable anycast border gateway to advertise physical IP. + type: bool + default: false + snmp_trap: + description: + - Enable SNMP traps. + type: bool + default: true + cdp: + description: + - Enable CDP. + type: bool + default: false + tcam_allocation: + description: + - Enable TCAM allocation. + type: bool + default: true + real_time_interface_statistics_collection: + description: + - Enable real-time interface statistics collection. + type: bool + default: false + interface_statistics_load_interval: + description: + - The interface statistics load interval in seconds. + type: int + default: 10 + greenfield_debug_flag: + description: + - The greenfield debug flag. + type: str + default: enable + nxapi: + description: + - Enable NX-API (HTTPS). + type: bool + default: false + nxapi_https_port: + description: + - The NX-API HTTPS port (1-65535). + type: int + default: 443 + nxapi_http: + description: + - Enable NX-API HTTP. + type: bool + default: true + nxapi_http_port: + description: + - The NX-API HTTP port (1-65535). + type: int + default: 80 + bgp_authentication: + description: + - Enable BGP authentication. + type: bool + default: false + bgp_authentication_key_type: + description: + - The BGP authentication key type. + type: str + default: 3des + bgp_authentication_key: + description: + - The BGP authentication key. + type: str + default: "" + bfd: + description: + - Enable BFD globally. + type: bool + default: false + bfd_ibgp: + description: + - Enable BFD for iBGP sessions. 
+ type: bool + default: false + bfd_ospf: + description: + - Enable BFD for OSPF. + type: bool + default: false + bfd_isis: + description: + - Enable BFD for IS-IS. + type: bool + default: false + bfd_pim: + description: + - Enable BFD for PIM. + type: bool + default: false + bfd_authentication: + description: + - Enable BFD authentication. + type: bool + default: false + bfd_authentication_key_id: + description: + - The BFD authentication key ID. + type: int + default: 100 + bfd_authentication_key: + description: + - The BFD authentication key. + type: str + default: "" + ospf_authentication: + description: + - Enable OSPF authentication. + type: bool + default: false + ospf_authentication_key_id: + description: + - The OSPF authentication key ID. + type: int + default: 127 + ospf_authentication_key: + description: + - The OSPF authentication key. + type: str + default: "" + pim_hello_authentication: + description: + - Enable PIM hello authentication. + type: bool + default: false + pim_hello_authentication_key: + description: + - The PIM hello authentication key. + type: str + default: "" + isis_level: + description: + - The IS-IS level. + type: str + default: level-2 + isis_area_number: + description: + - The IS-IS area number. + type: str + default: "0001" + isis_point_to_point: + description: + - Enable IS-IS point-to-point. + type: bool + default: true + isis_authentication: + description: + - Enable IS-IS authentication. + type: bool + default: false + isis_authentication_keychain_name: + description: + - The IS-IS authentication keychain name. + type: str + default: "" + isis_authentication_keychain_key_id: + description: + - The IS-IS authentication keychain key ID. + type: int + default: 127 + isis_authentication_key: + description: + - The IS-IS authentication key. + type: str + default: "" + isis_overload: + description: + - Enable IS-IS overload bit. 
+ type: bool + default: true + isis_overload_elapse_time: + description: + - The IS-IS overload elapse time in seconds. + type: int + default: 60 + macsec: + description: + - Enable MACsec on intra-fabric links. + type: bool + default: false + macsec_cipher_suite: + description: + - The MACsec cipher suite. + type: str + default: GCM-AES-XPN-256 + macsec_key_string: + description: + - The MACsec primary key string. + type: str + default: "" + macsec_algorithm: + description: + - The MACsec algorithm. + type: str + default: AES_128_CMAC + macsec_fallback_key_string: + description: + - The MACsec fallback key string. + type: str + default: "" + macsec_fallback_algorithm: + description: + - The MACsec fallback algorithm. + type: str + default: AES_128_CMAC + macsec_report_timer: + description: + - The MACsec report timer. + type: int + default: 5 + vrf_lite_macsec: + description: + - Enable MACsec on VRF lite links. + type: bool + default: false + quantum_key_distribution: + description: + - Enable quantum key distribution. + type: bool + default: false + quantum_key_distribution_profile_name: + description: + - The quantum key distribution profile name. + type: str + default: "" + key_management_entity_server_ip: + description: + - The key management entity server IP address. + type: str + default: "" + key_management_entity_server_port: + description: + - The key management entity server port. + type: int + default: 0 + trustpoint_label: + description: + - The trustpoint label. + type: str + default: "" + vrf_lite_auto_config: + description: + - The VRF lite auto-configuration mode. + type: str + default: manual + vrf_lite_subnet_range: + description: + - The VRF lite subnet IP address pool. + type: str + default: "10.33.0.0/16" + vrf_lite_subnet_target_mask: + description: + - The VRF lite subnet target mask. + type: int + default: 30 + vrf_lite_ipv6_subnet_range: + description: + - The VRF lite IPv6 subnet range. 
+ type: str + default: "fd00::a33:0/112" + vrf_lite_ipv6_subnet_target_mask: + description: + - The VRF lite IPv6 subnet target mask (112-128). + type: int + default: 126 + auto_unique_vrf_lite_ip_prefix: + description: + - Enable auto unique VRF lite IP prefix. + type: bool + default: false + auto_symmetric_vrf_lite: + description: + - Enable auto symmetric VRF lite. + type: bool + default: false + auto_vrf_lite_default_vrf: + description: + - Enable auto VRF lite for the default VRF. + type: bool + default: false + auto_symmetric_default_vrf: + description: + - Enable auto symmetric default VRF. + type: bool + default: false + per_vrf_loopback_auto_provision: + description: + - Enable per-VRF loopback auto-provisioning. + type: bool + default: false + per_vrf_loopback_ip_range: + description: + - The per-VRF loopback IP address pool. + type: str + default: "10.5.0.0/22" + per_vrf_loopback_auto_provision_ipv6: + description: + - Enable per-VRF loopback auto-provisioning for IPv6. + type: bool + default: false + per_vrf_loopback_ipv6_range: + description: + - The per-VRF loopback IPv6 address pool. + type: str + default: "fd00::a05:0/112" + underlay_ipv6: + description: + - Enable IPv6 underlay. + type: bool + default: false + ipv6_multicast_group_subnet: + description: + - The IPv6 multicast group subnet. + type: str + default: "ff1e::/121" + tenant_routed_multicast_ipv6: + description: + - Enable tenant routed multicast for IPv6. + type: bool + default: false + ipv6_link_local: + description: + - Enable IPv6 link-local addressing. + type: bool + default: true + ipv6_subnet_target_mask: + description: + - The IPv6 subnet target mask. + type: int + default: 126 + ipv6_subnet_range: + description: + - The IPv6 subnet range. + type: str + default: "fd00::a04:0/112" + bgp_loopback_ipv6_range: + description: + - The BGP loopback IPv6 address pool. + type: str + default: "fd00::a02:0/119" + nve_loopback_ipv6_range: + description: + - The NVE loopback IPv6 address pool. 
+ type: str + default: "fd00::a03:0/118" + ipv6_anycast_rendezvous_point_ip_range: + description: + - The IPv6 anycast rendezvous point IP address pool. + type: str + default: "fd00::254:254:0/118" + auto_bgp_neighbor_description: + description: + - Enable automatic BGP neighbor description. + type: bool + default: true + ibgp_peer_template: + description: + - The iBGP peer template name. + type: str + default: "" + leaf_ibgp_peer_template: + description: + - The leaf iBGP peer template name. + type: str + default: "" + link_state_routing_tag: + description: + - The link state routing tag. + type: str + default: UNDERLAY + static_underlay_ip_allocation: + description: + - Enable static underlay IP allocation. + type: bool + default: false + security_group_tag: + description: + - Enable Security Group Tag (SGT) support. + type: bool + default: false + security_group_tag_prefix: + description: + - The SGT prefix. + type: str + default: SG_ + security_group_tag_mac_segmentation: + description: + - Enable SGT MAC segmentation. + type: bool + default: false + security_group_tag_id_range: + description: + - The SGT ID range. + type: str + default: "10000-14000" + security_group_tag_preprovision: + description: + - Enable SGT pre-provisioning. + type: bool + default: false + security_group_status: + description: + - The security group status. + type: str + default: enabled + default_queuing_policy: + description: + - Enable default queuing policy. + type: bool + default: false + aiml_qos: + description: + - Enable AI/ML QoS. + type: bool + default: false + aiml_qos_policy: + description: + - The AI/ML QoS policy. + type: str + default: 400G + dlb: + description: + - Enable dynamic load balancing. + type: bool + default: false + dlb_mode: + description: + - The DLB mode. + type: str + default: flowlet + ptp: + description: + - Enable Precision Time Protocol (PTP). + type: bool + default: false + ptp_loopback_id: + description: + - The PTP loopback ID. 
+ type: int + default: 0 + ptp_domain_id: + description: + - The PTP domain ID. + type: int + default: 0 + stp_root_option: + description: + - The STP root option. + type: str + default: mst + stp_vlan_range: + description: + - The STP VLAN range. + type: str + default: "" + mst_instance_range: + description: + - The MST instance range. + type: str + default: "0-3,5,7-9" + stp_bridge_priority: + description: + - The STP bridge priority. + type: int + default: 0 + mpls_handoff: + description: + - Enable MPLS handoff. + type: bool + default: false + mpls_loopback_identifier: + description: + - The MPLS loopback identifier. + type: int + default: 101 + mpls_loopback_ip_range: + description: + - The MPLS loopback IP address pool. + type: str + default: "10.101.0.0/25" + private_vlan: + description: + - Enable private VLAN support. + type: bool + default: false + ip_service_level_agreement_id_range: + description: + - The IP SLA ID range. + type: str + default: "10000-19999" + object_tracking_number_range: + description: + - The object tracking number range. + type: str + default: "100-299" + route_map_sequence_number_range: + description: + - The route map sequence number range. + type: str + default: "1-65534" + day0_bootstrap: + description: + - Enable day-0 bootstrap (POAP). + type: bool + default: false + local_dhcp_server: + description: + - Enable local DHCP server for bootstrap. + type: bool + default: false + dhcp_protocol_version: + description: + - The DHCP protocol version for bootstrap. + type: str + default: dhcpv4 + dhcp_start_address: + description: + - The DHCP start address for bootstrap. + type: str + default: "" + dhcp_end_address: + description: + - The DHCP end address for bootstrap. + type: str + default: "" + management_gateway: + description: + - The management gateway for bootstrap. + type: str + default: "" + management_ipv4_prefix: + description: + - The management IPv4 prefix length for bootstrap. 
+ type: int + default: 24 + management_ipv6_prefix: + description: + - The management IPv6 prefix length for bootstrap. + type: int + default: 64 + real_time_backup: + description: + - Enable real-time backup. + type: bool + default: false + scheduled_backup: + description: + - Enable scheduled backup. + type: bool + default: false + scheduled_backup_time: + description: + - The scheduled backup time. + type: str + default: "" + nve_hold_down_timer: + description: + - The NVE hold-down timer in seconds. + type: int + default: 180 + next_generation_oam: + description: + - Enable next-generation OAM. + type: bool + default: true + strict_config_compliance_mode: + description: + - Enable strict configuration compliance mode. + type: bool + default: false + copp_policy: + description: + - The CoPP policy. + type: str + default: dense + power_redundancy_mode: + description: + - The power redundancy mode. + type: str + default: redundant + host_interface_admin_state: + description: + - Enable host interface admin state. + type: bool + default: true + heartbeat_interval: + description: + - The heartbeat interval. + type: int + default: 190 + policy_based_routing: + description: + - Enable policy-based routing. + type: bool + default: false + brownfield_network_name_format: + description: + - The brownfield network name format. + type: str + default: "Auto_Net_VNI$$VNI$$_VLAN$$VLAN_ID$$" + brownfield_skip_overlay_network_attachments: + description: + - Skip brownfield overlay network attachments. + type: bool + default: false + allow_smart_switch_onboarding: + description: + - Allow smart switch onboarding. + type: bool + default: false + aaa: + description: + - Enable AAA. + type: bool + default: false + extra_config_leaf: + description: + - Extra freeform configuration applied to leaf switches. + type: str + default: "" + extra_config_spine: + description: + - Extra freeform configuration applied to spine switches. 
+ type: str + default: "" + extra_config_tor: + description: + - Extra freeform configuration applied to TOR switches. + type: str + default: "" + extra_config_intra_fabric_links: + description: + - Extra freeform configuration applied to intra-fabric links. + type: str + default: "" + extra_config_aaa: + description: + - Extra freeform AAA configuration. + type: str + default: "" + banner: + description: + - The fabric banner text displayed on switch login. + type: str + default: "" + ntp_server_collection: + description: + - The list of NTP server IP addresses. + type: list + elements: str + dns_collection: + description: + - The list of DNS server IP addresses. + type: list + elements: str + syslog_server_collection: + description: + - The list of syslog server IP addresses. + type: list + elements: str + syslog_server_vrf_collection: + description: + - The list of VRFs for syslog servers. + type: list + elements: str + syslog_severity_collection: + description: + - The list of syslog severity levels (0-7). + type: list + elements: int + state: + description: + - The desired state of the fabric resources on the Cisco Nexus Dashboard. + - Use O(state=merged) to create new fabrics and update existing ones as defined in the configuration. + Resources on ND that are not specified in the configuration will be left unchanged. + - Use O(state=replaced) to replace the fabric configuration specified in the configuration. + Any settings not explicitly provided will revert to their defaults. + - Use O(state=overridden) to enforce the configuration as the single source of truth. + Any fabric existing on ND but not present in the configuration will be deleted. Use with extra caution. + - Use O(state=deleted) to remove the fabrics specified in the configuration from the Cisco Nexus Dashboard. 
+ type: str + default: merged + choices: [ merged, replaced, overridden, deleted ] +extends_documentation_fragment: +- cisco.nd.modules +- cisco.nd.check_mode +notes: +- This module is only supported on Nexus Dashboard having version 4.1.0 or higher. +- Only iBGP VXLAN fabric type (C(vxlanIbgp)) is supported by this module. +- When using O(state=replaced) with only required fields, all optional management settings revert to their defaults. +- The O(config.management.bgp_asn) field is required when creating a fabric. +- O(config.management.site_id) defaults to the value of O(config.management.bgp_asn) if not provided. +""" + +EXAMPLES = r""" +- name: Create an iBGP VXLAN fabric using state merged + cisco.nd.nd_manage_fabric_ibgp: + state: merged + config: + - name: my_fabric + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65001" + site_id: "65001" + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00aa" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 
9216 + tenant_dhcp: true + nxapi: true + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: enable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.2.0.0/22" + nve_loopback_ip_range: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.254.0/24" + intra_fabric_subnet_range: "10.4.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.33.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.5.0.0/22" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + register: result + +- name: Update specific fields on an existing fabric using state merged (partial update) + cisco.nd.nd_manage_fabric_ibgp: + state: merged + config: + - name: my_fabric + category: fabric + management: + bgp_asn: "65002" + site_id: "65002" + anycast_gateway_mac: "2020.0000.00bb" + performance_monitoring: true + register: result + +- name: Create or fully replace an iBGP VXLAN fabric using state replaced + cisco.nd.nd_manage_fabric_ibgp: + state: replaced + config: + - name: my_fabric + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65004" + site_id: "65004" + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00dd" + performance_monitoring: true + replication_mode: multicast + multicast_group_subnet: 
"239.1.3.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 3 + rendezvous_point_loopback_id: 253 + vpc_peer_link_vlan: "3700" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 300 + vpc_delay_restore_timer: 120 + vpc_peer_link_port_channel_id: "600" + vpc_ipv6_neighbor_discovery_sync: false + advertise_physical_ip: true + vpc_domain_id_range: "1-800" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9000 + l2_host_interface_mtu: 9000 + tenant_dhcp: false + nxapi: false + nxapi_https_port: 443 + nxapi_http: true + nxapi_http_port: 80 + snmp_trap: false + anycast_border_gateway_advertise_physical_ip: true + greenfield_debug_flag: disable + tcam_allocation: false + real_time_interface_statistics_collection: true + interface_statistics_load_interval: 30 + bgp_loopback_ip_range: "10.22.0.0/22" + nve_loopback_ip_range: "10.23.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.252.0/24" + intra_fabric_subnet_range: "10.24.0.0/16" + l2_vni_range: "40000-59000" + l3_vni_range: "60000-69000" + network_vlan_range: "2400-3099" + vrf_vlan_range: "2100-2399" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.53.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.25.0.0/22" + per_vrf_loopback_auto_provision_ipv6: true + per_vrf_loopback_ipv6_range: "fd00::a25:0/112" + banner: "^ Managed by Ansible ^" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + 
dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + management_ipv6_prefix: 64 + register: result + +- name: Replace fabric with only required fields (all optional settings revert to defaults) + cisco.nd.nd_manage_fabric_ibgp: + state: replaced + config: + - name: my_fabric + category: fabric + management: + type: vxlanIbgp + bgp_asn: "65004" + site_id: "65004" + banner: "^ Managed by Ansible ^" + register: result + +- name: Enforce exact fabric inventory using state overridden (deletes unlisted fabrics) + cisco.nd.nd_manage_fabric_ibgp: + state: overridden + config: + - name: fabric_east + category: fabric + location: + latitude: 40.7128 + longitude: -74.0060 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65010" + site_id: "65010" + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0010" + replication_mode: multicast + multicast_group_subnet: "239.1.10.0/25" + bgp_loopback_ip_range: "10.10.0.0/22" + nve_loopback_ip_range: "10.11.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.10.0/24" + intra_fabric_subnet_range: "10.12.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + - name: fabric_west + category: fabric + location: + latitude: 34.0522 + longitude: -118.2437 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65020" + site_id: "65020" + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0020" + replication_mode: multicast + multicast_group_subnet: "239.1.20.0/25" + bgp_loopback_ip_range: "10.20.0.0/22" + nve_loopback_ip_range: "10.21.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.20.0/24" + intra_fabric_subnet_range: "10.22.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + 
vrf_vlan_range: "2000-2299" + register: result + +- name: Delete a specific fabric using state deleted + cisco.nd.nd_manage_fabric_ibgp: + state: deleted + config: + - name: my_fabric + register: result + +- name: Delete multiple fabrics in a single task + cisco.nd.nd_manage_fabric_ibgp: + state: deleted + config: + - name: fabric_east + - name: fabric_west + - name: fabric_old + register: result +""" + +RETURN = r""" +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricModel +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageFabricOrchestrator + + +def main(): + argument_spec = nd_argument_spec() + argument_spec.update(FabricModel.get_argument_spec()) + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + ) + + try: + # Initialize StateMachine + nd_state_machine = NDStateMachine( + module=module, + model_orchestrator=ManageFabricOrchestrator, + ) + + # Manage state + # TODO: return module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) + nd_state_machine.manage_state() + + module.exit_json(**nd_state_machine.output.format()) + + except Exception as e: + module.fail_json(msg=f"Module execution failed: {str(e)}") + +if __name__ == "__main__": + main() From c6dfe8d66834ba6fefc32b07a3893986c502944d Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:18:35 -0500 Subject: [PATCH 061/131] [ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues. 
--- plugins/module_utils/nd_state_machine.py | 1 + plugins/module_utils/orchestrators/base.py | 5 +++++ .../module_utils/orchestrators/local_user.py | 18 ++++++++++++++++++ 3 files changed, 24 insertions(+) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index efed3517..40434159 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -84,6 +84,7 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier + response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 1f4e3e69..d14dc322 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,8 +12,13 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType +======= +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
class NDBaseOrchestrator(BaseModel): diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 689ba9dc..9ee6a10f 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -12,6 +12,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( @@ -19,17 +20,34 @@ EpInfraAaaLocalUsersPut, EpInfraAaaLocalUsersDelete, EpInfraAaaLocalUsersGet, +======= +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( + V1InfraAaaLocalUsersPost, + V1InfraAaaLocalUsersPut, + V1InfraAaaLocalUsersDelete, + V1InfraAaaLocalUsersGet, +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel +<<<<<<< HEAD create_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPost update_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPut delete_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersDelete query_one_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet query_all_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet +======= + create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) def query_all(self) -> ResponseType: """ From 3a2d1c1f89758aaf8bfbd02a9edf281fbed767c3 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 17:19:47 -0500 Subject: [PATCH 062/131] [ignore] Rename NDBaseSmartEndpoint to NDBaseEndpoint. Fix importation issues. 
--- plugins/module_utils/endpoints/base.py | 2 +- plugins/module_utils/orchestrators/base.py | 7 +------ .../module_utils/orchestrators/local_user.py | 20 +------------------ plugins/modules/nd_local_user.py | 3 +++ 4 files changed, 6 insertions(+), 26 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index e122b1f7..25b8a5a9 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -137,4 +137,4 @@ def verb(self) -> HttpVerbEnum: # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration def set_identifiers(self, identifier: IdentifierKey = None): - pass + pass \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index d14dc322..9f139e81 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,13 +12,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -======= -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
class NDBaseOrchestrator(BaseModel): @@ -79,4 +74,4 @@ def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> R result = self.sender.query_obj(api_endpoint.path) return result or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 9ee6a10f..de6048fc 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -12,7 +12,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( @@ -20,34 +19,17 @@ EpInfraAaaLocalUsersPut, EpInfraAaaLocalUsersDelete, EpInfraAaaLocalUsersGet, -======= -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( - V1InfraAaaLocalUsersPost, - V1InfraAaaLocalUsersPut, - V1InfraAaaLocalUsersDelete, - V1InfraAaaLocalUsersGet, ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel -<<<<<<< HEAD create_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPost update_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPut delete_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersDelete query_one_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet query_all_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet -======= - create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) def query_all(self) -> ResponseType: """ @@ -58,4 +40,4 @@ def query_all(self) -> ResponseType: result = self.sender.query_obj(api_endpoint.path) return result.get("localusers", []) or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index f5efea03..56e59ad5 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -199,6 +199,9 @@ def main(): ) # Manage state + # TODO: return module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) From efdf4a2f13d757066f6298808b1d274027cfa3e6 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 4 Mar 2026 11:12:27 -0500 Subject: [PATCH 063/131] [ignore] Replace all pydantic imports with pydantic_compat. Fix sanity issues. 
--- plugins/module_utils/orchestrators/base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 9f139e81..90518370 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,7 +8,11 @@ __metaclass__ = type +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict +======= +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict +>>>>>>> 6e81ee0 ([ignore] Replace all pydantic imports with pydantic_compat. Fix sanity issues.) from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule From 3f6ed5f77a4c872461156c556da860449cc64d17 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 6 Mar 2026 13:35:38 -0500 Subject: [PATCH 064/131] [ignore] Add NDOutput class. Modify NDStateMachine and nd_local_user accordingly --- plugins/module_utils/nd_state_machine.py | 9 ++++++++- plugins/module_utils/orchestrators/base.py | 4 ---- tests/integration/inventory.networking | 10 +++++----- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 40434159..6d975a30 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,14 +8,22 @@ __metaclass__ = type +<<<<<<< HEAD from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ValidationError +======= +from typing import List, Dict, Any, Type +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError +>>>>>>> 520625b ([ignore] Add NDOutput class. 
Modify NDStateMachine and nd_local_user accordingly) from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError +======= +>>>>>>> 520625b ([ignore] Add NDOutput class. Modify NDStateMachine and nd_local_user accordingly) class NDStateMachine: @@ -84,7 +92,6 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier - response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 90518370..9f139e81 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,11 +8,7 @@ __metaclass__ = type -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict -======= -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict ->>>>>>> 6e81ee0 ([ignore] Replace all pydantic imports with pydantic_compat. Fix sanity issues.) 
from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule diff --git a/tests/integration/inventory.networking b/tests/integration/inventory.networking index 6b37d8f3..2aa818d7 100644 --- a/tests/integration/inventory.networking +++ b/tests/integration/inventory.networking @@ -1,15 +1,15 @@ [nd] -nd ansible_host= +nd-test ansible_host=10.48.161.120 [nd:vars] ansible_connection=ansible.netcommon.httpapi -ansible_python_interpreter=/usr/bin/python3.9 +ansible_python_interpreter=/usr/bin/python3.12 ansible_network_os=cisco.nd.nd ansible_httpapi_validate_certs=False ansible_httpapi_use_ssl=True ansible_httpapi_use_proxy=True -ansible_user=ansible_github_ci -ansible_password= +ansible_user=admin +ansible_password=C1sco123 insights_group= site_name= site_host= @@ -28,4 +28,4 @@ external_management_service_ip= external_data_service_ip= data_ip= data_gateway= -service_package_host=173.36.219.254 +service_package_host=173.36.219.254 From e56d0f2964b57768b1fe1eb682530b3eb0cbbc4a Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 10 Mar 2026 13:36:50 -0400 Subject: [PATCH 065/131] [ignore] Update NDOutput class. Remove all fail_json dependencies in NDStateMachineand add custom Exception for it in common/exceptions dir. Set json mode for to_diff_dict method in NDBaseModel. 
--- plugins/module_utils/nd_state_machine.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 6d975a30..955cf41d 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,22 +8,14 @@ __metaclass__ = type -<<<<<<< HEAD from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ValidationError -======= -from typing import List, Dict, Any, Type -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError ->>>>>>> 520625b ([ignore] Add NDOutput class. Modify NDStateMachine and nd_local_user accordingly) from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError -======= ->>>>>>> 520625b ([ignore] Add NDOutput class. 
Modify NDStateMachine and nd_local_user accordingly) class NDStateMachine: @@ -181,4 +173,4 @@ def _manage_delete_state(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - raise NDStateMachineError(error_msg) + raise NDStateMachineError(error_msg) \ No newline at end of file From 3d2ccbe322189b89e690abd9eb212d2618176d9e Mon Sep 17 00:00:00 2001 From: mwiebe Date: Thu, 12 Mar 2026 18:08:02 -0400 Subject: [PATCH 066/131] Fix fabric module following rebase --- plugins/module_utils/endpoints/base_path.py | 29 --- .../endpoints/base_paths_manage.py | 134 ----------- .../module_utils/endpoints/endpoint_mixins.py | 88 ------- .../endpoints/v1/infra_aaa_local_users.py | 179 -------------- .../endpoints/v1/manage/manage_fabrics.py | 4 +- .../nd_manage_fabric/manage_fabric_ibgp.py | 2 +- plugins/module_utils/pydantic_compat.py | 222 ------------------ 7 files changed, 3 insertions(+), 655 deletions(-) delete mode 100644 plugins/module_utils/endpoints/base_path.py delete mode 100644 plugins/module_utils/endpoints/base_paths_manage.py delete mode 100644 plugins/module_utils/endpoints/endpoint_mixins.py delete mode 100644 plugins/module_utils/endpoints/v1/infra_aaa_local_users.py delete mode 100644 plugins/module_utils/pydantic_compat.py diff --git a/plugins/module_utils/endpoints/base_path.py b/plugins/module_utils/endpoints/base_path.py deleted file mode 100644 index b1ddb5eb..00000000 --- a/plugins/module_utils/endpoints/base_path.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@arobel) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -Centralized base paths for ND API endpoints. - -This module provides a single location to manage all API Infra base paths, -allowing easy modification when API paths change. All endpoint classes -should use these path builders for consistency. 
-""" - -from __future__ import absolute_import, annotations, division, print_function - -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -from typing import Final - -# Root API paths -ND_ANALYZE_API: Final = "/api/v1/analyze" -ND_INFRA_API: Final = "/api/v1/infra" -ND_MANAGE_API: Final = "/api/v1/manage" -ND_ONEMANAGE_API: Final = "/api/v1/onemanage" -ND_MSO_API: Final = "/mso" -NDFC_API: Final = "/appcenter/cisco/ndfc/api" -LOGIN: Final = "/login" diff --git a/plugins/module_utils/endpoints/base_paths_manage.py b/plugins/module_utils/endpoints/base_paths_manage.py deleted file mode 100644 index 069cd7ec..00000000 --- a/plugins/module_utils/endpoints/base_paths_manage.py +++ /dev/null @@ -1,134 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@arobel) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -Centralized base paths for ND Manage API endpoints. - -/api/v1/manage - -This module provides a single location to manage all API Manage base paths, -allowing easy modification when API paths change. All endpoint classes -should use these path builders for consistency. -""" - -from __future__ import absolute_import, annotations, division, print_function - -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -from typing import Final - -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base_path import ND_MANAGE_API - - -class BasePath: - """ - # Summary - - API Endpoints for ND Manage - - ## Description - - Provides centralized endpoint definitions for all ND Manage API endpoints. - This allows API path changes to be managed in a single location. 
- - ## Usage - - ```python - from ansible_collections.cisco.nd.plugins.module_utils.ep.base_paths_manage import BasePath - - # Get a complete base path for ND Manage - path = BasePath.nd_manage("inventory", "switches") - # Returns: /api/v1/manage/inventory/switches - - # Leverage a convenience method - path = BasePath.nd_manage_inventory("switches") - # Returns: /api/v1/manage/inventory/switches - ``` - - ## Design Notes - - - All base paths are defined as class constants for easy modification - - Helper methods compose paths from base constants - - Use these methods in Pydantic endpoint models to ensure consistency - - If ND Manage changes base API paths, only this class needs updating - """ - - API: Final = ND_MANAGE_API - - @classmethod - def nd_manage(cls, *segments: str) -> str: - """ - # Summary - - Build ND manage API path. - - ## Parameters - - - segments: Path segments to append after /api/v1/manage - - ## Returns - - - Complete ND manage API path - - ## Example - - ```python - path = BasePath.nd_manage("inventory", "switches") - # Returns: /api/v1/manage/inventory/switches - ``` - """ - if not segments: - return cls.API - return f"{cls.API}/{'/'.join(segments)}" - - @classmethod - def nd_manage_inventory(cls, *segments: str) -> str: - """ - # Summary - - Build ND manage inventory API path. - - ## Parameters - - - segments: Path segments to append after inventory (e.g., "switches") - - ## Returns - - - Complete ND manage inventory path - - ## Example - - ```python - path = BasePath.nd_manage_inventory("switches") - # Returns: /api/v1/manage/inventory/switches - ``` - """ - return cls.nd_manage("inventory", *segments) - - @classmethod - def nd_manage_fabrics(cls, *segments: str) -> str: - """ - # Summary - - Build ND manage fabrics API path. 
- - ## Parameters - - - segments: Path segments to append after fabrics (e.g., "my-fabric") - - ## Returns - - - Complete ND manage fabrics path - - ## Example - - ```python - path = BasePath.nd_manage_fabrics() - # Returns: /api/v1/manage/fabrics - ``` - """ - return cls.nd_manage("fabrics", *segments) diff --git a/plugins/module_utils/endpoints/endpoint_mixins.py b/plugins/module_utils/endpoints/endpoint_mixins.py deleted file mode 100644 index e472e92f..00000000 --- a/plugins/module_utils/endpoints/endpoint_mixins.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@arobel) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -Reusable mixin classes for endpoint models. - -This module provides mixin classes that can be composed to add common -fields to endpoint models without duplication. -""" - -from __future__ import absolute_import, annotations, division, print_function - -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -from typing import Optional - -from ansible_collections.cisco.nd.plugins.module_utils.enums import BooleanStringEnum -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, Field - - -class ClusterNameMixin(BaseModel): - """Mixin for endpoints that require cluster_name parameter.""" - - cluster_name: Optional[str] = Field(default=None, min_length=1, description="Cluster name") - - -class FabricNameMixin(BaseModel): - """Mixin for endpoints that require fabric_name parameter.""" - - fabric_name: Optional[str] = Field(default=None, min_length=1, max_length=64, description="Fabric name") - - -class ForceShowRunMixin(BaseModel): - """Mixin for endpoints that require force_show_run parameter.""" - - force_show_run: BooleanStringEnum = Field(default=BooleanStringEnum.FALSE, description="Force show running config") - - -class HealthCategoryMixin(BaseModel): - """Mixin for endpoints that 
require health_category parameter.""" - - health_category: Optional[str] = Field(default=None, min_length=1, description="Health category") - - -class InclAllMsdSwitchesMixin(BaseModel): - """Mixin for endpoints that require incl_all_msd_switches parameter.""" - - incl_all_msd_switches: BooleanStringEnum = Field(default=BooleanStringEnum.FALSE, description="Include all MSD switches") - - -class LinkUuidMixin(BaseModel): - """Mixin for endpoints that require link_uuid parameter.""" - - link_uuid: Optional[str] = Field(default=None, min_length=1, description="Link UUID") - - -class LoginIdMixin(BaseModel): - """Mixin for endpoints that require login_id parameter.""" - - login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") - - -class NetworkNameMixin(BaseModel): - """Mixin for endpoints that require network_name parameter.""" - - network_name: Optional[str] = Field(default=None, min_length=1, max_length=64, description="Network name") - - -class NodeNameMixin(BaseModel): - """Mixin for endpoints that require node_name parameter.""" - - node_name: Optional[str] = Field(default=None, min_length=1, description="Node name") - - -class SwitchSerialNumberMixin(BaseModel): - """Mixin for endpoints that require switch_sn parameter.""" - - switch_sn: Optional[str] = Field(default=None, min_length=1, description="Switch serial number") - - -class VrfNameMixin(BaseModel): - """Mixin for endpoints that require vrf_name parameter.""" - - vrf_name: Optional[str] = Field(default=None, min_length=1, max_length=64, description="VRF name") diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py deleted file mode 100644 index d1013e24..00000000 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@allenrobel) -# Copyright: (c) 2026, Gaspard Micol (@gmicol) - -# GNU General 
Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -ND Infra AAA LocalUsers endpoint models. - -This module contains endpoint definitions for LocalUsers-related operations -in the ND Infra AAA API. -""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -from typing import Literal, Final -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import Field -from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - - -class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): - """ - Base class for ND Infra AAA Local Users endpoints. - - Provides common functionality for all HTTP methods on the - /api/v1/infra/aaa/localUsers endpoint. - """ - - # TODO: Remove it - base_path: Final = NDBasePath.nd_infra_aaa("localUsers") - - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path. - - ## Returns - - - Complete endpoint path string, optionally including login_id - """ - if self.login_id is not None: - return NDBasePath.nd_infra_aaa("localUsers", self.login_id) - return self.base_path - - def set_identifiers(self, identifier: IdentifierKey = None): - self.login_id = identifier - - -class V1InfraAaaLocalUsersGet(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users GET Endpoint - - ## Description - - Endpoint to retrieve local users from the ND Infra AAA service. - Optionally retrieve a specific local user by login_id. 
- - ## Path - - - /api/v1/infra/aaa/localUsers - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - GET - """ - - class_name: Literal["V1InfraAaaLocalUsersGet"] = Field( - default="V1InfraAaaLocalUsersGet", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.GET - - -class V1InfraAaaLocalUsersPost(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users POST Endpoint - - ## Description - - Endpoint to create a local user in the ND Infra AAA service. - - ## Path - - - /api/v1/infra/aaa/localUsers - - ## Verb - - - POST - """ - - class_name: Literal["V1InfraAaaLocalUsersPost"] = Field( - default="V1InfraAaaLocalUsersPost", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.POST - - -class V1InfraAaaLocalUsersPut(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users PUT Endpoint - - ## Description - - Endpoint to update a local user in the ND Infra AAA service. - - ## Path - - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - PUT - """ - - class_name: Literal["V1InfraAaaLocalUsersPut"] = Field( - default="V1InfraAaaLocalUsersPut", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.PUT - - -class V1InfraAaaLocalUsersDelete(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users DELETE Endpoint - - ## Description - - Endpoint to delete a local user from the ND Infra AAA service. 
- - ## Path - - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - DELETE - """ - - class_name: Literal["V1InfraAaaLocalUsersDelete"] = Field( - default="V1InfraAaaLocalUsersDelete", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.DELETE diff --git a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py index 2eaffb64..c1cfc56f 100644 --- a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py +++ b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py @@ -40,10 +40,10 @@ # from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base_paths_manage import BasePath from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage.base_path import BasePath -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.endpoint_mixins import FabricNameMixin +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import FabricNameMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.query_params import EndpointQueryParams -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict, Field +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict, Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey # Common config for basic validation diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py index 27ed0b09..c4d93e27 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py @@ -17,7 +17,7 @@ from 
ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ( +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( BaseModel, ConfigDict, Field, diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py deleted file mode 100644 index fb43f1b5..00000000 --- a/plugins/module_utils/pydantic_compat.py +++ /dev/null @@ -1,222 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@arobel) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -# pylint: disable=too-few-public-methods -""" -Pydantic compatibility layer. - -This module provides a single location for Pydantic imports with fallback -implementations when Pydantic is not available. This ensures consistent -behavior across all modules and follows the DRY principle. 
-""" - -from __future__ import absolute_import, annotations, division, print_function - -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -import traceback -from typing import TYPE_CHECKING, Any, Callable, Union - -if TYPE_CHECKING: - # Type checkers always see the real Pydantic types - from pydantic import ( - AfterValidator, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - PydanticExperimentalWarning, - StrictBool, - SecretStr, - ValidationError, - field_serializer, - model_serializer, - field_validator, - model_validator, - validator, - computed_field, - ) -else: - # Runtime: try to import, with fallback - try: - from pydantic import ( - AfterValidator, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - PydanticExperimentalWarning, - StrictBool, - SecretStr, - ValidationError, - field_serializer, - model_serializer, - field_validator, - model_validator, - validator, - computed_field, - ) - except ImportError: - HAS_PYDANTIC = False # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR: Union[str, None] = traceback.format_exc() # pylint: disable=invalid-name - - # Fallback: Minimal BaseModel replacement - class BaseModel: - """Fallback BaseModel when pydantic is not available.""" - - model_config = {"validate_assignment": False, "use_enum_values": False} - - def __init__(self, **kwargs): - """Accept keyword arguments and set them as attributes.""" - for key, value in kwargs.items(): - setattr(self, key, value) - - def model_dump(self, exclude_none: bool = False, exclude_defaults: bool = False) -> dict: # pylint: disable=unused-argument - """Return a dictionary of field names and values. 
- - Args: - exclude_none: If True, exclude fields with None values - exclude_defaults: Accepted for API compatibility but not implemented in fallback - """ - result = {} - for key, value in self.__dict__.items(): - if exclude_none and value is None: - continue - result[key] = value - return result - - # Fallback: ConfigDict that does nothing - def ConfigDict(**kwargs) -> dict: # pylint: disable=unused-argument,invalid-name - """Pydantic ConfigDict fallback when pydantic is not available.""" - return kwargs - - # Fallback: Field that does nothing - def Field(**kwargs) -> Any: # pylint: disable=unused-argument,invalid-name - """Pydantic Field fallback when pydantic is not available.""" - if "default_factory" in kwargs: - return kwargs["default_factory"]() - return kwargs.get("default") - - # Fallback: field_serializer decorator that does nothing - def field_serializer(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic field_serializer fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: model_serializer decorator that does nothing - def model_serializer(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic model_serializer fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: field_validator decorator that does nothing - def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name - """Pydantic field_validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: computed_field decorator that does nothing - def computed_field(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic computed_field fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: AfterValidator that returns the function unchanged - def AfterValidator(func): 
# pylint: disable=invalid-name - """Pydantic AfterValidator fallback when pydantic is not available.""" - return func - - # Fallback: BeforeValidator that returns the function unchanged - def BeforeValidator(func): # pylint: disable=invalid-name - """Pydantic BeforeValidator fallback when pydantic is not available.""" - return func - - # Fallback: PydanticExperimentalWarning - PydanticExperimentalWarning = Warning - - # Fallback: StrictBool - StrictBool = bool - - # Fallback: SecretStr - SecretStr = str - - # Fallback: IPvAnyAddress and IPvAnyNetwork - IPvAnyAddress = str - IPvAnyNetwork = str - - # Fallback: ValidationError - class ValidationError(Exception): - """ - Pydantic ValidationError fallback when pydantic is not available. - """ - - def __init__(self, message="A custom error occurred."): - self.message = message - super().__init__(self.message) - - def __str__(self): - return f"ValidationError: {self.message}" - - # Fallback: model_validator decorator that does nothing - def model_validator(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic model_validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: validator decorator that does nothing - def validator(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - else: - HAS_PYDANTIC = True # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name - -# Set HAS_PYDANTIC for when TYPE_CHECKING is True -if TYPE_CHECKING: - HAS_PYDANTIC = True # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name - -__all__ = [ - "AfterValidator", - "BaseModel", - "BeforeValidator", - "ConfigDict", - "Field", - "HAS_PYDANTIC", - "PYDANTIC_IMPORT_ERROR", - "PydanticExperimentalWarning", - "StrictBool", - "SecretStr", - "ValidationError", - 
"field_serializer", - "model_serializer", - "field_validator", - "model_validator", - "validator", - "computed_field", -] From 7be317711b318c17cb6ff5fc602cad23368d11b3 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Thu, 12 Mar 2026 18:15:00 -0400 Subject: [PATCH 067/131] Normalize changes to upstream fork --- plugins/module_utils/common/exceptions.py | 2 +- plugins/module_utils/endpoints/base.py | 2 +- plugins/module_utils/endpoints/mixins.py | 2 +- plugins/modules/nd_local_user.py | 3 --- tests/integration/inventory.networking | 10 +++++----- 5 files changed, 8 insertions(+), 11 deletions(-) diff --git a/plugins/module_utils/common/exceptions.py b/plugins/module_utils/common/exceptions.py index f0ae4400..3730e5d5 100644 --- a/plugins/module_utils/common/exceptions.py +++ b/plugins/module_utils/common/exceptions.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index 25b8a5a9..e122b1f7 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -137,4 +137,4 @@ def verb(self) -> HttpVerbEnum: # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration def set_identifiers(self, identifier: IdentifierKey = None): - pass \ No newline at end of file + pass diff --git a/plugins/module_utils/endpoints/mixins.py b/plugins/module_utils/endpoints/mixins.py index ada6b6fe..22d9a2dc 100644 --- a/plugins/module_utils/endpoints/mixins.py +++ b/plugins/module_utils/endpoints/mixins.py @@ -84,4 +84,4 @@ class SwitchSerialNumberMixin(BaseModel): class VrfNameMixin(BaseModel): """Mixin for endpoints that require vrf_name parameter.""" - vrf_name: Optional[str] = Field(default=None, 
min_length=1, max_length=64, description="VRF name") \ No newline at end of file + vrf_name: Optional[str] = Field(default=None, min_length=1, max_length=64, description="VRF name") diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 56e59ad5..f5efea03 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -199,9 +199,6 @@ def main(): ) # Manage state - # TODO: return module output class object: - # output = nd_state_machine.manage_state() - # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) diff --git a/tests/integration/inventory.networking b/tests/integration/inventory.networking index 2aa818d7..6b37d8f3 100644 --- a/tests/integration/inventory.networking +++ b/tests/integration/inventory.networking @@ -1,15 +1,15 @@ [nd] -nd-test ansible_host=10.48.161.120 +nd ansible_host= [nd:vars] ansible_connection=ansible.netcommon.httpapi -ansible_python_interpreter=/usr/bin/python3.12 +ansible_python_interpreter=/usr/bin/python3.9 ansible_network_os=cisco.nd.nd ansible_httpapi_validate_certs=False ansible_httpapi_use_ssl=True ansible_httpapi_use_proxy=True -ansible_user=admin -ansible_password=C1sco123 +ansible_user=ansible_github_ci +ansible_password= insights_group= site_name= site_host= @@ -28,4 +28,4 @@ external_management_service_ip= external_data_service_ip= data_ip= data_gateway= -service_package_host=173.36.219.254 +service_package_host=173.36.219.254 From efb5a1755ccf6159432d4586e6d5bb26a4c692b1 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Thu, 12 Mar 2026 18:35:10 -0400 Subject: [PATCH 068/131] Change endpoint names to adhere to convention --- .../endpoints/v1/manage/manage_fabrics.py | 17 ++++++-------- .../orchestrators/manage_fabric.py | 22 +++++++++---------- plugins/modules/nd_manage_fabric_ibgp.py | 7 ++---- 3 files changed, 20 insertions(+), 26 deletions(-) diff --git 
a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py index c1cfc56f..45bf5e43 100644 --- a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py +++ b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py @@ -36,10 +36,7 @@ from typing import Literal, Optional, Final from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum - -# from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base_paths_manage import BasePath from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage.base_path import BasePath - from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import FabricNameMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.query_params import EndpointQueryParams @@ -77,7 +74,7 @@ class FabricsEndpointParams(EndpointQueryParams): ) -class _EpApiV1ManageFabricsBase(FabricNameMixin, BaseModel): +class _EpManageFabricsBase(FabricNameMixin, BaseModel): """ Base class for ND Manage Fabrics endpoints. 
@@ -92,7 +89,7 @@ class _EpApiV1ManageFabricsBase(FabricNameMixin, BaseModel): def set_identifiers(self, identifier: IdentifierKey = None): self.fabric_name = identifier -class EpApiV1ManageFabricsGet(_EpApiV1ManageFabricsBase): +class EpManageFabricsGet(_EpManageFabricsBase): """ # Summary @@ -235,7 +232,7 @@ class FabricsListEndpointParams(EndpointQueryParams): ) -class EpApiV1ManageFabricsListGet(_EpApiV1ManageFabricsBase): +class EpManageFabricsListGet(_EpManageFabricsBase): """ # Summary @@ -315,7 +312,7 @@ def verb(self) -> HttpVerbEnum: return HttpVerbEnum.GET -class EpApiV1ManageFabricsPost(BaseModel): +class EpManageFabricsPost(BaseModel): """ # Summary @@ -403,7 +400,7 @@ def verb(self) -> HttpVerbEnum: return HttpVerbEnum.POST -class EpApiV1ManageFabricsPut(_EpApiV1ManageFabricsBase): +class EpManageFabricsPut(_EpManageFabricsBase): """ # Summary @@ -487,7 +484,7 @@ def verb(self) -> HttpVerbEnum: return HttpVerbEnum.PUT -class EpApiV1ManageFabricsDelete(_EpApiV1ManageFabricsBase): +class EpManageFabricsDelete(_EpManageFabricsBase): """ # Summary @@ -561,7 +558,7 @@ def verb(self) -> HttpVerbEnum: return HttpVerbEnum.DELETE -class EpApiV1ManageFabricsSummaryGet(_EpApiV1ManageFabricsBase): +class EpManageFabricsSummaryGet(_EpManageFabricsBase): """ # Summary diff --git a/plugins/module_utils/orchestrators/manage_fabric.py b/plugins/module_utils/orchestrators/manage_fabric.py index f39f7690..3681c0ee 100644 --- a/plugins/module_utils/orchestrators/manage_fabric.py +++ b/plugins/module_utils/orchestrators/manage_fabric.py @@ -15,26 +15,26 @@ from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage.manage_fabrics import ( - EpApiV1ManageFabricsGet, - EpApiV1ManageFabricsListGet, - EpApiV1ManageFabricsPost, - EpApiV1ManageFabricsPut, - 
EpApiV1ManageFabricsDelete, + EpManageFabricsGet, + EpManageFabricsListGet, + EpManageFabricsPost, + EpManageFabricsPut, + EpManageFabricsDelete, ) class ManageFabricOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = FabricModel - create_endpoint: Type[NDEndpointBaseModel] = EpApiV1ManageFabricsPost - update_endpoint: Type[NDEndpointBaseModel] = EpApiV1ManageFabricsPut - delete_endpoint: Type[NDEndpointBaseModel] = EpApiV1ManageFabricsDelete - query_one_endpoint: Type[NDEndpointBaseModel] = EpApiV1ManageFabricsGet - query_all_endpoint: Type[NDEndpointBaseModel] = EpApiV1ManageFabricsListGet + create_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPost + update_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPut + delete_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsDelete + query_one_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsGet + query_all_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsListGet def query_all(self) -> ResponseType: """ - Custom query_all action to extract 'localusers' from response. + Custom query_all action to extract 'fabrics' from response. """ try: result = self.sender.query_obj(self.query_all_endpoint.base_path) diff --git a/plugins/modules/nd_manage_fabric_ibgp.py b/plugins/modules/nd_manage_fabric_ibgp.py index a3bf42e6..15d8460a 100644 --- a/plugins/modules/nd_manage_fabric_ibgp.py +++ b/plugins/modules/nd_manage_fabric_ibgp.py @@ -15,9 +15,9 @@ --- module: nd_manage_fabric_ibgp version_added: "1.4.0" -short_description: Manage iBGP VXLAN fabrics on Cisco Nexus Dashboard Fabric Controller +short_description: Manage iBGP VXLAN fabrics on Cisco Nexus Dashboard description: -- Manage iBGP VXLAN fabrics on Cisco Nexus Dashboard Fabric Controller (NDFC). +- Manage iBGP VXLAN fabrics on Cisco Nexus Dashboard (ND). - It supports creating, updating, replacing, and deleting iBGP VXLAN fabrics. 
author: - Mike Wiebe (@mwiebe) @@ -1379,9 +1379,6 @@ def main(): ) # Manage state - # TODO: return module output class object: - # output = nd_state_machine.manage_state() - # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) From 87c481f1012cf5d88b9f7e5a2f0b535df40a9f27 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Thu, 12 Mar 2026 19:39:41 -0400 Subject: [PATCH 069/131] Modify orchestrator to use path instead of base_path --- plugins/module_utils/orchestrators/manage_fabric.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/module_utils/orchestrators/manage_fabric.py b/plugins/module_utils/orchestrators/manage_fabric.py index 3681c0ee..5a57cb0c 100644 --- a/plugins/module_utils/orchestrators/manage_fabric.py +++ b/plugins/module_utils/orchestrators/manage_fabric.py @@ -37,7 +37,8 @@ def query_all(self) -> ResponseType: Custom query_all action to extract 'fabrics' from response. """ try: - result = self.sender.query_obj(self.query_all_endpoint.base_path) + api_endpoint = self.query_all_endpoint() + result = self.sender.query_obj(api_endpoint.path) return result.get("fabrics", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e From c7f5ea3e3d740eab14ce3de9298631e160d26f72 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Fri, 13 Mar 2026 10:41:04 -0400 Subject: [PATCH 070/131] Add initial integration tests --- .../nd_manage_fabric/tasks/fabric_ibgp.yaml | 1172 +++++++++++++++++ .../targets/nd_manage_fabric/tasks/main.yaml | 3 + .../targets/nd_manage_fabric/vars/main.yaml | 81 ++ 3 files changed, 1256 insertions(+) create mode 100644 tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml create mode 100644 tests/integration/targets/nd_manage_fabric/tasks/main.yaml create mode 100644 tests/integration/targets/nd_manage_fabric/vars/main.yaml diff --git a/tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml 
b/tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml new file mode 100644 index 00000000..e03294e1 --- /dev/null +++ b/tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml @@ -0,0 +1,1172 @@ +--- +# Test code for the ND modules +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +- name: Test that we have a Nexus Dashboard host, username and password + ansible.builtin.fail: + msg: 'Please define the following variables: ansible_host, ansible_user and ansible_password.' + when: ansible_host is not defined or ansible_user is not defined or ansible_password is not defined + +############################################################################# +# CLEANUP - Ensure clean state before tests +############################################################################# +- name: Clean up any existing test fabrics before starting tests + cisco.nd.nd_manage_fabric_ibgp: + state: deleted + config: + - name: "{{ test_fabric_merged }}" + - name: "{{ test_fabric_replaced }}" + - name: "{{ test_fabric_deleted }}" + tags: always + +############################################################################# +# TEST 1: STATE MERGED - Create fabric using merged state +############################################################################# +- name: "TEST 1a: Create fabric using state merged (first run)" + cisco.nd.nd_manage_fabric_ibgp: + state: merged + config: + - "{{ {'name': test_fabric_merged} | combine(common_fabric_config) }}" + register: merged_result_1 + tags: [test_merged, test_merged_create] + +- name: "TEST 1a: Verify fabric was created using merged state" + assert: + that: + - merged_result_1 is changed + - merged_result_1 is not failed + fail_msg: "Fabric creation with state merged failed" + success_msg: "Fabric successfully created with state merged" + tags: [test_merged, test_merged_create] + +- name: "TEST 1b: Create fabric using state 
merged (second run - idempotency test)" + cisco.nd.nd_manage_fabric_ibgp: + state: merged + config: + - "{{ {'name': test_fabric_merged} | combine(common_fabric_config) }}" + register: merged_result_2 + tags: [test_merged, test_merged_idempotent] + +- name: "TEST 1b: Verify merged state is idempotent" + assert: + that: + - merged_result_2 is not changed + - merged_result_2 is not failed + fail_msg: "Merged state is not idempotent - should not change when run twice with same config" + success_msg: "Merged state is idempotent - no changes on second run" + tags: [test_merged, test_merged_idempotent] + +# - name: "PAUSE: Review TEST 1b results before continuing" +# ansible.builtin.pause: +# prompt: "TEST 1b complete. Review results and press Enter to continue, or Ctrl+C then A to abort" +# tags: [test_merged, test_merged_update] + +- name: "TEST 1c: Update fabric using state merged (modify existing)" + cisco.nd.nd_manage_fabric_ibgp: + state: merged + config: + - name: "{{ test_fabric_merged }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65002" # Changed from 65001 + site_id: "65002" # Changed from 65001 + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00bb" # Changed from 00aa + performance_monitoring: true # Changed from false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + 
nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: true + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: enable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.2.0.0/22" + nve_loopback_ip_range: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.254.0/24" + intra_fabric_subnet_range: "10.4.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.33.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.5.0.0/22" + # per_vrf_loopback_auto_provision_ipv6: false + # per_vrf_loopback_ipv6_range: "fd00::a05:0/112" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + register: merged_result_3 + tags: [test_merged, test_merged_update] + +- name: "TEST 1c: Verify fabric was updated using merged state" + assert: + that: + - merged_result_3 is changed + - merged_result_3 is not failed + fail_msg: "Fabric update with state merged failed" + success_msg: "Fabric successfully updated with state merged" + tags: [test_merged, test_merged_update] + +############################################################################# +# VALIDATION: Query test_fabric_merged and 
validate expected changes +############################################################################# +# Get authentication token first +- name: "VALIDATION 1: Authenticate with ND to get token" + ansible.builtin.uri: + url: "https://{{ ansible_host }}:{{ ansible_httpapi_port | default(443) }}/login" + method: POST + headers: + Content-Type: "application/json" + body_format: json + body: + domain: "{{ ansible_httpapi_login_domain | default('local') }}" + userName: "{{ ansible_user }}" + userPasswd: "{{ ansible_password }}" + validate_certs: false + return_content: true + status_code: + - 200 + register: nd_auth_response + tags: [test_merged, test_merged_validation] + delegate_to: localhost + +- name: "VALIDATION 1: Query test_fabric_merged configuration from ND" + ansible.builtin.uri: + url: "https://{{ ansible_host }}:{{ ansible_httpapi_port | default(443) }}/api/v1/manage/fabrics/{{ test_fabric_merged }}" + method: GET + headers: + Authorization: "Bearer {{ nd_auth_response.json.jwttoken }}" + Content-Type: "application/json" + validate_certs: false + return_content: true + status_code: + - 200 + - 404 + register: merged_fabric_query + tags: [test_merged, test_merged_validation] + delegate_to: localhost + +# - debug: msg="{{ merged_fabric_query }}" +# - meta: end_play + +- name: "VALIDATION 1: Parse fabric configuration response" + set_fact: + merged_fabric_config: "{{ merged_fabric_query.json }}" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify BGP ASN was updated to 65002" + assert: + that: + - merged_fabric_config.management.bgpAsn == "65002" + fail_msg: "BGP ASN validation failed. Expected: 65002, Actual: {{ merged_fabric_config.management.bgpAsn }}" + success_msg: "✓ BGP ASN correctly updated to 65002" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify Site ID was updated to 65002" + assert: + that: + - merged_fabric_config.management.siteId == "65002" + fail_msg: "Site ID validation failed. 
Expected: 65002, Actual: {{ merged_fabric_config.management.siteId }}" + success_msg: "✓ Site ID correctly updated to 65002" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify Anycast Gateway MAC was updated to 2020.0000.00bb" + assert: + that: + - merged_fabric_config.management.anycastGatewayMac == "2020.0000.00bb" + fail_msg: "Anycast Gateway MAC validation failed. Expected: 2020.0000.00bb, Actual: {{ merged_fabric_config.management.anycastGatewayMac }}" + success_msg: "✓ Anycast Gateway MAC correctly updated to 2020.0000.00bb" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify Performance Monitoring was enabled" + assert: + that: + - merged_fabric_config.management.performanceMonitoring == true + fail_msg: "Performance Monitoring validation failed. Expected: true, Actual: {{ merged_fabric_config.management.performanceMonitoring }}" + success_msg: "✓ Performance Monitoring correctly enabled" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Display successful validation summary for test_fabric_merged" + debug: + msg: | + ======================================== + VALIDATION SUMMARY for test_fabric_merged: + ======================================== + ✓ BGP ASN: {{ merged_fabric_config.management.bgpAsn }} + ✓ Site ID: {{ merged_fabric_config.management.siteId }} + ✓ Anycast Gateway MAC: {{ merged_fabric_config.management.anycastGatewayMac }} + ✓ Performance Monitoring: {{ merged_fabric_config.management.performanceMonitoring }} + + All 4 expected changes validated successfully! + ======================================== + tags: [test_merged, test_merged_validation] + +# - name: "PAUSE: Review TEST 1c results before continuing" +# ansible.builtin.pause: +# prompt: "TEST 1c complete. 
Review results and press Enter to continue, or Ctrl+C then A to abort" +# tags: [test_merged, test_merged_update] + +############################################################################# +# TEST 2: STATE REPLACED - Create and manage fabric using replaced state +############################################################################# +- name: "TEST 2a: Create fabric using state replaced (first run)" + cisco.nd.nd_manage_fabric_ibgp: + state: replaced + config: + - name: "{{ test_fabric_replaced }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65004" # DIfferent from default ASN + site_id: "65004" # DIfferent from default site_id + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00dd" # DIfferent from default MAC + performance_monitoring: true # DIfferent from default to true + replication_mode: multicast + multicast_group_subnet: "239.1.3.0/25" # DIfferent from default subnet + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 3 # DIfferent from default count + rendezvous_point_loopback_id: 253 # DIfferent from default loopback + vpc_peer_link_vlan: "3700" # DIfferent from default VLAN + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 300 # DIfferent from default timer + vpc_delay_restore_timer: 120 # DIfferent from default timer + vpc_peer_link_port_channel_id: "600" # DIfferent from default port channel + vpc_ipv6_neighbor_discovery_sync: false # DIfferent from default to false + advertise_physical_ip: true # DIfferent from default to true + vpc_domain_id_range: "1-800" # DIfferent from default range + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: 
Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9000 # DIfferent from default MTU + l2_host_interface_mtu: 9000 # DIfferent from default MTU + tenant_dhcp: false # DIfferent from default to false + nxapi: false # DIfferent from default to false + nxapi_https_port: 443 + nxapi_http: true # DIfferent from default to true + nxapi_http_port: 80 + snmp_trap: false # DIfferent from default to false + anycast_border_gateway_advertise_physical_ip: true # DIfferent from default to true + greenfield_debug_flag: disable # DIfferent from default to disable + tcam_allocation: false # DIfferent from default to false + real_time_interface_statistics_collection: true # DIfferent from default to true + interface_statistics_load_interval: 30 # DIfferent from default interval + bgp_loopback_ip_range: "10.22.0.0/22" # DIfferent from default range + nve_loopback_ip_range: "10.23.0.0/22" # DIfferent from default range + anycast_rendezvous_point_ip_range: "10.254.252.0/24" # DIfferent from default range + intra_fabric_subnet_range: "10.24.0.0/16" # DIfferent from default range + l2_vni_range: "40000-59000" # DIfferent from default range + l3_vni_range: "60000-69000" # DIfferent from default range + network_vlan_range: "2400-3099" # DIfferent from default range + vrf_vlan_range: "2100-2399" # DIfferent from default range + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.53.0.0/16" # DIfferent from default range + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.25.0.0/22" # DIfferent from default range + per_vrf_loopback_auto_provision_ipv6: true + per_vrf_loopback_ipv6_range: "fd00::a25:0/112" # DIfferent from default range + banner: "^ Updated via replaced state ^" # Added banner + 
day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + management_ipv6_prefix: 64 + register: replaced_result_1 + tags: [test_replaced, test_replaced_create] + +- name: "TEST 2a: Verify fabric was created using replaced state" + assert: + that: + - replaced_result_1 is changed + - replaced_result_1 is not failed + fail_msg: "Fabric creation with state replaced failed" + success_msg: "Fabric successfully created with state replaced" + tags: [test_replaced, test_replaced_create] + +- name: "TEST 2b: Create fabric using state replaced (second run - idempotency test)" + cisco.nd.nd_manage_fabric_ibgp: + state: replaced + config: + - name: "{{ test_fabric_replaced }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65004" # DIfferent from default ASN + site_id: "65004" # DIfferent from default site_id + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00dd" # DIfferent from default MAC + performance_monitoring: true # DIfferent from default to true + replication_mode: multicast + multicast_group_subnet: "239.1.3.0/25" # DIfferent from default subnet + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 3 # DIfferent from default count + rendezvous_point_loopback_id: 253 # DIfferent from default loopback + vpc_peer_link_vlan: "3700" # DIfferent from default VLAN + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 300 # DIfferent from default timer + vpc_delay_restore_timer: 120 # DIfferent from default timer + vpc_peer_link_port_channel_id: "600" # DIfferent from default port channel + 
vpc_ipv6_neighbor_discovery_sync: false # DIfferent from default to false + advertise_physical_ip: true # DIfferent from default to true + vpc_domain_id_range: "1-800" # DIfferent from default range + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9000 # DIfferent from default MTU + l2_host_interface_mtu: 9000 # DIfferent from default MTU + tenant_dhcp: false # DIfferent from default to false + nxapi: false # DIfferent from default to false + nxapi_https_port: 443 + nxapi_http: true # DIfferent from default to true + nxapi_http_port: 80 + snmp_trap: false # DIfferent from default to false + anycast_border_gateway_advertise_physical_ip: true # DIfferent from default to true + greenfield_debug_flag: disable # DIfferent from default to disable + tcam_allocation: false # DIfferent from default to false + real_time_interface_statistics_collection: true # DIfferent from default to true + interface_statistics_load_interval: 30 # DIfferent from default interval + bgp_loopback_ip_range: "10.22.0.0/22" # DIfferent from default range + nve_loopback_ip_range: "10.23.0.0/22" # DIfferent from default range + anycast_rendezvous_point_ip_range: "10.254.252.0/24" # DIfferent from default range + intra_fabric_subnet_range: "10.24.0.0/16" # DIfferent from default range + l2_vni_range: "40000-59000" # DIfferent from default range + l3_vni_range: "60000-69000" # DIfferent from default range + network_vlan_range: "2400-3099" # DIfferent from default range + vrf_vlan_range: "2100-2399" # DIfferent from default range + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.53.0.0/16" # DIfferent from default range + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + 
per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.25.0.0/22" # DIfferent from default range + per_vrf_loopback_auto_provision_ipv6: true + per_vrf_loopback_ipv6_range: "fd00::a25:0/112" # DIfferent from default range + banner: "^ Updated via replaced state ^" # Added banner + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + management_ipv6_prefix: 64 + register: replaced_result_2 + tags: [test_replaced, test_replaced_idempotent] + +- name: "TEST 2b: Verify replaced state is idempotent" + assert: + that: + - replaced_result_2 is not changed + - replaced_result_2 is not failed + fail_msg: "Replaced state is not idempotent - should not change when run twice with same config" + success_msg: "Replaced state is idempotent - no changes on second run" + tags: [test_replaced, test_replaced_idempotent] + +# - name: "PAUSE: Review TEST 2b results before continuing" +# ansible.builtin.pause: +# prompt: "TEST 2b complete. 
Review results and press Enter to continue, or Ctrl+C then A to abort" +# tags: [test_replaced, test_replaced_idempotent] + +- name: "TEST 2c: Update fabric using state replaced (complete replacement)" + cisco.nd.nd_manage_fabric_ibgp: + state: replaced + config: + - name: "{{ test_fabric_replaced }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65004" # Changed ASN + site_id: "65004" # Changed site_id + banner: "^ Updated via replaced state ^" # Added banner + register: replaced_result_3 + tags: [test_replaced, test_replaced_update] + +- name: "TEST 2c: Verify fabric was completely replaced" + assert: + that: + - replaced_result_3 is changed + - replaced_result_3 is not failed + fail_msg: "Fabric replacement with state replaced failed" + success_msg: "Fabric successfully replaced with state replaced" + tags: [test_replaced, test_replaced_update] + +# ############################################################################# +# # VALIDATION: Query test_fabric_replaced and validate expected changes +# ############################################################################# +# Get authentication token first +- name: "VALIDATION 2: Authenticate with ND to get token" + ansible.builtin.uri: + url: "https://{{ ansible_host }}:{{ ansible_httpapi_port | default(443) }}/login" + method: POST + headers: + Content-Type: "application/json" + body_format: json + body: + domain: "{{ ansible_httpapi_login_domain | default('local') }}" + userName: "{{ ansible_user }}" + userPasswd: "{{ ansible_password }}" + validate_certs: false + return_content: true + status_code: + - 200 + register: nd_auth_response_2 + tags: [test_replaced, test_replaced_validation] + delegate_to: localhost + +- name: "VALIDATION 2: Query test_fabric_replaced configuration from ND" + ansible.builtin.uri: + url: "https://{{ 
ansible_host }}:{{ ansible_httpapi_port | default(443) }}/api/v1/manage/fabrics/{{ test_fabric_replaced }}" + method: GET + headers: + Authorization: "Bearer {{ nd_auth_response_2.json.jwttoken }}" + Content-Type: "application/json" + validate_certs: false + return_content: true + status_code: + - 200 + - 404 + register: replaced_fabric_query + tags: [test_replaced, test_replaced_validation] + delegate_to: localhost + +- name: "VALIDATION 2: Parse fabric configuration response" + set_fact: + replaced_fabric_config: "{{ replaced_fabric_query.json }}" + tags: [test_replaced, test_replaced_validation] + +# Network Range Validations +- name: "VALIDATION 2: Verify L3 VNI Range was standardized to 50000-59000" + assert: + that: + - replaced_fabric_config.management.l3VniRange == "50000-59000" + fail_msg: "L3 VNI Range validation failed. Expected: 50000-59000, Actual: {{ replaced_fabric_config.management.l3VniRange }}" + success_msg: "✓ L3 VNI Range correctly standardized to 50000-59000" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify L2 VNI Range was standardized to 30000-49000" + assert: + that: + - replaced_fabric_config.management.l2VniRange == "30000-49000" + fail_msg: "L2 VNI Range validation failed. Expected: 30000-49000, Actual: {{ replaced_fabric_config.management.l2VniRange }}" + success_msg: "✓ L2 VNI Range correctly standardized to 30000-49000" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify BGP Loopback IP Range was standardized to 10.2.0.0/22" + assert: + that: + - replaced_fabric_config.management.bgpLoopbackIpRange == "10.2.0.0/22" + fail_msg: "BGP Loopback IP Range validation failed. 
Expected: 10.2.0.0/22, Actual: {{ replaced_fabric_config.management.bgpLoopbackIpRange }}" + success_msg: "✓ BGP Loopback IP Range correctly standardized to 10.2.0.0/22" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify NVE Loopback IP Range was standardized to 10.3.0.0/22" + assert: + that: + - replaced_fabric_config.management.nveLoopbackIpRange == "10.3.0.0/22" + fail_msg: "NVE Loopback IP Range validation failed. Expected: 10.3.0.0/22, Actual: {{ replaced_fabric_config.management.nveLoopbackIpRange }}" + success_msg: "✓ NVE Loopback IP Range correctly standardized to 10.3.0.0/22" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Intra-Fabric Subnet Range was standardized to 10.4.0.0/16" + assert: + that: + - replaced_fabric_config.management.intraFabricSubnetRange == "10.4.0.0/16" + fail_msg: "Intra-Fabric Subnet Range validation failed. Expected: 10.4.0.0/16, Actual: {{ replaced_fabric_config.management.intraFabricSubnetRange }}" + success_msg: "✓ Intra-Fabric Subnet Range correctly standardized to 10.4.0.0/16" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VRF Lite Subnet Range was standardized to 10.33.0.0/16" + assert: + that: + - replaced_fabric_config.management.vrfLiteSubnetRange == "10.33.0.0/16" + fail_msg: "VRF Lite Subnet Range validation failed. Expected: 10.33.0.0/16, Actual: {{ replaced_fabric_config.management.vrfLiteSubnetRange }}" + success_msg: "✓ VRF Lite Subnet Range correctly standardized to 10.33.0.0/16" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Anycast RP IP Range was standardized to 10.254.254.0/24" + assert: + that: + - replaced_fabric_config.management.anycastRendezvousPointIpRange == "10.254.254.0/24" + fail_msg: "Anycast RP IP Range validation failed. 
Expected: 10.254.254.0/24, Actual: {{ replaced_fabric_config.management.anycastRendezvousPointIpRange }}" + success_msg: "✓ Anycast RP IP Range correctly standardized to 10.254.254.0/24" + tags: [test_replaced, test_replaced_validation] + +# VLAN Range Validations +- name: "VALIDATION 2: Verify Network VLAN Range was standardized to 2300-2999" + assert: + that: + - replaced_fabric_config.management.networkVlanRange == "2300-2999" + fail_msg: "Network VLAN Range validation failed. Expected: 2300-2999, Actual: {{ replaced_fabric_config.management.networkVlanRange }}" + success_msg: "✓ Network VLAN Range correctly standardized to 2300-2999" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VRF VLAN Range was standardized to 2000-2299" + assert: + that: + - replaced_fabric_config.management.vrfVlanRange == "2000-2299" + fail_msg: "VRF VLAN Range validation failed. Expected: 2000-2299, Actual: {{ replaced_fabric_config.management.vrfVlanRange }}" + success_msg: "✓ VRF VLAN Range correctly standardized to 2000-2299" + tags: [test_replaced, test_replaced_validation] + +# MTU Validations +- name: "VALIDATION 2: Verify Fabric MTU was increased to 9216" + assert: + that: + - replaced_fabric_config.management.fabricMtu == 9216 + fail_msg: "Fabric MTU validation failed. Expected: 9216, Actual: {{ replaced_fabric_config.management.fabricMtu }}" + success_msg: "✓ Fabric MTU correctly increased to 9216" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify L2 Host Interface MTU was increased to 9216" + assert: + that: + - replaced_fabric_config.management.l2HostInterfaceMtu == 9216 + fail_msg: "L2 Host Interface MTU validation failed. 
Expected: 9216, Actual: {{ replaced_fabric_config.management.l2HostInterfaceMtu }}" + success_msg: "✓ L2 Host Interface MTU correctly increased to 9216" + tags: [test_replaced, test_replaced_validation] + +# Gateway and Multicast Validations +- name: "VALIDATION 2: Verify Anycast Gateway MAC was standardized to 2020.0000.00aa" + assert: + that: + - replaced_fabric_config.management.anycastGatewayMac == "2020.0000.00aa" + fail_msg: "Anycast Gateway MAC validation failed. Expected: 2020.0000.00aa, Actual: {{ replaced_fabric_config.management.anycastGatewayMac }}" + success_msg: "✓ Anycast Gateway MAC correctly standardized to 2020.0000.00aa" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Multicast Group Subnet was standardized to 239.1.1.0/25" + assert: + that: + - replaced_fabric_config.management.multicastGroupSubnet == "239.1.1.0/25" + fail_msg: "Multicast Group Subnet validation failed. Expected: 239.1.1.0/25, Actual: {{ replaced_fabric_config.management.multicastGroupSubnet }}" + success_msg: "✓ Multicast Group Subnet correctly standardized to 239.1.1.0/25" + tags: [test_replaced, test_replaced_validation] + +# VPC Configuration Validations +- name: "VALIDATION 2: Verify VPC Auto Recovery Timer was standardized to 360" + assert: + that: + - replaced_fabric_config.management.vpcAutoRecoveryTimer == 360 + fail_msg: "VPC Auto Recovery Timer validation failed. Expected: 360, Actual: {{ replaced_fabric_config.management.vpcAutoRecoveryTimer }}" + success_msg: "✓ VPC Auto Recovery Timer correctly standardized to 360" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Delay Restore Timer was standardized to 150" + assert: + that: + - replaced_fabric_config.management.vpcDelayRestoreTimer == 150 + fail_msg: "VPC Delay Restore Timer validation failed. 
Expected: 150, Actual: {{ replaced_fabric_config.management.vpcDelayRestoreTimer }}" + success_msg: "✓ VPC Delay Restore Timer correctly standardized to 150" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Peer Link Port Channel ID was standardized to 500" + assert: + that: + - replaced_fabric_config.management.vpcPeerLinkPortChannelId == "500" + fail_msg: "VPC Peer Link Port Channel ID validation failed. Expected: 500, Actual: {{ replaced_fabric_config.management.vpcPeerLinkPortChannelId }}" + success_msg: "✓ VPC Peer Link Port Channel ID correctly standardized to 500" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Peer Link VLAN was standardized to 3600" + assert: + that: + - replaced_fabric_config.management.vpcPeerLinkVlan == "3600" + fail_msg: "VPC Peer Link VLAN validation failed. Expected: 3600, Actual: {{ replaced_fabric_config.management.vpcPeerLinkVlan }}" + success_msg: "✓ VPC Peer Link VLAN correctly standardized to 3600" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Domain ID Range was standardized to 1-1000" + assert: + that: + - replaced_fabric_config.management.vpcDomainIdRange == "1-1000" + fail_msg: "VPC Domain ID Range validation failed. Expected: 1-1000, Actual: {{ replaced_fabric_config.management.vpcDomainIdRange }}" + success_msg: "✓ VPC Domain ID Range correctly standardized to 1-1000" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC IPv6 Neighbor Discovery Sync was enabled" + assert: + that: + - replaced_fabric_config.management.vpcIpv6NeighborDiscoverySync == true + fail_msg: "VPC IPv6 Neighbor Discovery Sync validation failed. 
Expected: true, Actual: {{ replaced_fabric_config.management.vpcIpv6NeighborDiscoverySync }}" + success_msg: "✓ VPC IPv6 Neighbor Discovery Sync correctly enabled" + tags: [test_replaced, test_replaced_validation] + +# Multicast Settings Validations +- name: "VALIDATION 2: Verify Rendezvous Point Count was standardized to 2" + assert: + that: + - replaced_fabric_config.management.rendezvousPointCount == 2 + fail_msg: "Rendezvous Point Count validation failed. Expected: 2, Actual: {{ replaced_fabric_config.management.rendezvousPointCount }}" + success_msg: "✓ Rendezvous Point Count correctly standardized to 2" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Rendezvous Point Loopback ID was standardized to 254" + assert: + that: + - replaced_fabric_config.management.rendezvousPointLoopbackId == 254 + fail_msg: "Rendezvous Point Loopback ID validation failed. Expected: 254, Actual: {{ replaced_fabric_config.management.rendezvousPointLoopbackId }}" + success_msg: "✓ Rendezvous Point Loopback ID correctly standardized to 254" + tags: [test_replaced, test_replaced_validation] + +# Feature Flag Validations +- name: "VALIDATION 2: Verify TCAM Allocation was enabled" + assert: + that: + - replaced_fabric_config.management.tcamAllocation == true + fail_msg: "TCAM Allocation validation failed. Expected: true, Actual: {{ replaced_fabric_config.management.tcamAllocation }}" + success_msg: "✓ TCAM Allocation correctly enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Real Time Interface Statistics Collection was disabled" + assert: + that: + - replaced_fabric_config.management.realTimeInterfaceStatisticsCollection == false + fail_msg: "Real Time Interface Statistics Collection validation failed. 
Expected: false, Actual: {{ replaced_fabric_config.management.realTimeInterfaceStatisticsCollection }}" + success_msg: "✓ Real Time Interface Statistics Collection correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Performance Monitoring was disabled" + assert: + that: + - replaced_fabric_config.management.performanceMonitoring == false + fail_msg: "Performance Monitoring validation failed. Expected: false, Actual: {{ replaced_fabric_config.management.performanceMonitoring }}" + success_msg: "✓ Performance Monitoring correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Tenant DHCP was enabled" + assert: + that: + - replaced_fabric_config.management.tenantDhcp == true + fail_msg: "Tenant DHCP validation failed. Expected: true, Actual: {{ replaced_fabric_config.management.tenantDhcp }}" + success_msg: "✓ Tenant DHCP correctly enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify SNMP Trap was enabled" + assert: + that: + - replaced_fabric_config.management.snmpTrap == true + fail_msg: "SNMP Trap validation failed. Expected: true, Actual: {{ replaced_fabric_config.management.snmpTrap }}" + success_msg: "✓ SNMP Trap correctly enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Greenfield Debug Flag was enabled" + assert: + that: + - replaced_fabric_config.management.greenfieldDebugFlag == "enable" + fail_msg: "Greenfield Debug Flag validation failed. Expected: enable, Actual: {{ replaced_fabric_config.management.greenfieldDebugFlag }}" + success_msg: "✓ Greenfield Debug Flag correctly enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify NXAPI HTTP was enabled" + assert: + that: + - replaced_fabric_config.management.nxapiHttp == true + fail_msg: "NXAPI HTTP validation failed. 
Expected: true, Actual: {{ replaced_fabric_config.management.nxapiHttp }}" + success_msg: "✓ NXAPI HTTP correctly enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify NXAPI was disabled" + assert: + that: + - replaced_fabric_config.management.nxapi == false + fail_msg: "NXAPI validation failed. Expected: false, Actual: {{ replaced_fabric_config.management.nxapi }}" + success_msg: "✓ NXAPI correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Per VRF Loopback Auto Provision was disabled" + assert: + that: + - replaced_fabric_config.management.perVrfLoopbackAutoProvision == false + fail_msg: "Per VRF Loopback Auto Provision validation failed. Expected: false, Actual: {{ replaced_fabric_config.management.perVrfLoopbackAutoProvision }}" + success_msg: "✓ Per VRF Loopback Auto Provision correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Per VRF Loopback Auto Provision IPv6 was disabled" + assert: + that: + - replaced_fabric_config.management.perVrfLoopbackAutoProvisionIpv6 == false + fail_msg: "Per VRF Loopback Auto Provision IPv6 validation failed. Expected: false, Actual: {{ replaced_fabric_config.management.perVrfLoopbackAutoProvisionIpv6 }}" + success_msg: "✓ Per VRF Loopback Auto Provision IPv6 correctly disabled" + tags: [test_replaced, test_replaced_validation] + +# Verify banner was preserved +- name: "VALIDATION 2: Verify Banner was preserved" + assert: + that: + - replaced_fabric_config.management.banner == "^ Updated via replaced state ^" + fail_msg: "Banner validation failed. 
Expected: '^ Updated via replaced state ^', Actual: {{ replaced_fabric_config.management.banner }}" + success_msg: "✓ Banner correctly preserved: '{{ replaced_fabric_config.management.banner }}'" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Display successful validation summary for test_fabric_replaced" + debug: + msg: | + ======================================== + VALIDATION SUMMARY for test_fabric_replaced: + ======================================== + Network Ranges: + ✓ L3 VNI Range: {{ replaced_fabric_config.management.l3VniRange }} + ✓ L2 VNI Range: {{ replaced_fabric_config.management.l2VniRange }} + ✓ BGP Loopback IP Range: {{ replaced_fabric_config.management.bgpLoopbackIpRange }} + ✓ NVE Loopback IP Range: {{ replaced_fabric_config.management.nveLoopbackIpRange }} + ✓ Intra-Fabric Subnet Range: {{ replaced_fabric_config.management.intraFabricSubnetRange }} + ✓ VRF Lite Subnet Range: {{ replaced_fabric_config.management.vrfLiteSubnetRange }} + ✓ Anycast RP IP Range: {{ replaced_fabric_config.management.anycastRendezvousPointIpRange }} + + VLAN Ranges: + ✓ Network VLAN Range: {{ replaced_fabric_config.management.networkVlanRange }} + ✓ VRF VLAN Range: {{ replaced_fabric_config.management.vrfVlanRange }} + + MTU Settings: + ✓ Fabric MTU: {{ replaced_fabric_config.management.fabricMtu }} + ✓ L2 Host Interface MTU: {{ replaced_fabric_config.management.l2HostInterfaceMtu }} + + VPC Configuration: + ✓ VPC Auto Recovery Timer: {{ replaced_fabric_config.management.vpcAutoRecoveryTimer }} + ✓ VPC Delay Restore Timer: {{ replaced_fabric_config.management.vpcDelayRestoreTimer }} + ✓ VPC Peer Link Port Channel ID: {{ replaced_fabric_config.management.vpcPeerLinkPortChannelId }} + ✓ VPC Peer Link VLAN: {{ replaced_fabric_config.management.vpcPeerLinkVlan }} + ✓ VPC Domain ID Range: {{ replaced_fabric_config.management.vpcDomainIdRange }} + ✓ VPC IPv6 Neighbor Discovery Sync: {{ replaced_fabric_config.management.vpcIpv6NeighborDiscoverySync 
}} + + Gateway & Multicast: + ✓ Anycast Gateway MAC: {{ replaced_fabric_config.management.anycastGatewayMac }} + ✓ Multicast Group Subnet: {{ replaced_fabric_config.management.multicastGroupSubnet }} + ✓ Rendezvous Point Count: {{ replaced_fabric_config.management.rendezvousPointCount }} + ✓ Rendezvous Point Loopback ID: {{ replaced_fabric_config.management.rendezvousPointLoopbackId }} + + Feature Flags: + ✓ TCAM Allocation: {{ replaced_fabric_config.management.tcamAllocation }} + ✓ Real Time Interface Statistics Collection: {{ replaced_fabric_config.management.realTimeInterfaceStatisticsCollection }} + ✓ Performance Monitoring: {{ replaced_fabric_config.management.performanceMonitoring }} + ✓ Tenant DHCP: {{ replaced_fabric_config.management.tenantDhcp }} + ✓ SNMP Trap: {{ replaced_fabric_config.management.snmpTrap }} + ✓ Greenfield Debug Flag: {{ replaced_fabric_config.management.greenfieldDebugFlag }} + ✓ NXAPI HTTP: {{ replaced_fabric_config.management.nxapiHttp }} + ✓ NXAPI: {{ replaced_fabric_config.management.nxapi }} + + Auto-Provisioning: + ✓ Per VRF Loopback Auto Provision: {{ replaced_fabric_config.management.perVrfLoopbackAutoProvision }} + ✓ Per VRF Loopback Auto Provision IPv6: {{ replaced_fabric_config.management.perVrfLoopbackAutoProvisionIpv6 }} + + Preserved Settings: + ✓ Banner: "{{ replaced_fabric_config.management.banner }}" + + All 30+ expected changes validated successfully! + ======================================== + tags: [test_replaced, test_replaced_validation] + +# - name: "PAUSE: Review TEST 2c results before continuing" +# ansible.builtin.pause: +# prompt: "TEST 2c complete. 
Review results and press Enter to continue, or Ctrl+C then A to abort" +# tags: [test_replaced, test_replaced_idempotent] + +############################################################################# +# TEST 3: Demonstrate difference between merged and replaced states +############################################################################# +- name: "TEST 3: Create fabric for merged vs replaced comparison" + cisco.nd.nd_manage_fabric_ibgp: + state: replaced + config: + - "{{ {'name': test_fabric_deleted} | combine(common_fabric_config) }}" + register: comparison_fabric_creation + tags: [test_comparison] + +- name: "TEST 3a: Partial update using merged state (should merge changes)" + cisco.nd.nd_manage_fabric_ibgp: + state: merged + config: + - name: "{{ test_fabric_deleted }}" + category: fabric + management: + bgp_asn: "65099" # Only updating ASN + fabric_mtu: 8000 # Only updating MTU + register: merged_partial_result + tags: [test_comparison, test_merged_partial] + +- name: "TEST 3a: Verify merged state preserves existing configuration" + assert: + that: + - merged_partial_result is changed + - merged_partial_result is not failed + fail_msg: "Partial update with merged state failed" + success_msg: "Merged state successfully performed partial update" + tags: [test_comparison, test_merged_partial] + +- name: "TEST 3b: Partial update using replaced state (should replace entire config)" + cisco.nd.nd_manage_fabric_ibgp: + state: replaced + config: + - name: "{{ test_fabric_deleted }}" + category: fabric + management: + type: vxlanIbgp + bgp_asn: "65100" # Only specifying minimal config for replaced + target_subnet_mask: 30 + register: replaced_partial_result + tags: [test_comparison, test_replaced_partial] + +- name: "TEST 3b: Verify replaced state performs complete replacement" + assert: + that: + - replaced_partial_result is changed + - replaced_partial_result is not failed + fail_msg: "Partial replacement with replaced state failed" + success_msg: 
"Replaced state successfully performed complete replacement" + tags: [test_comparison, test_replaced_partial] + +############################################################################# +# TEST 4: STATE DELETED - Delete fabrics +############################################################################# +- name: "TEST 4a: Delete fabric using state deleted" + cisco.nd.nd_manage_fabric_ibgp: + state: deleted + config: + - name: "{{ test_fabric_deleted }}" + register: deleted_result_1 + tags: [test_deleted, test_deleted_delete] + +- name: "TEST 4a: Verify fabric was deleted" + assert: + that: + - deleted_result_1 is changed + - deleted_result_1 is not failed + fail_msg: "Fabric deletion with state deleted failed" + success_msg: "Fabric successfully deleted with state deleted" + tags: [test_deleted, test_deleted_delete] + +- name: "TEST 4b: Delete fabric using state deleted (second run - idempotency test)" + cisco.nd.nd_manage_fabric_ibgp: + state: deleted + config: + - name: "{{ test_fabric_deleted }}" + register: deleted_result_2 + tags: [test_deleted, test_deleted_idempotent] + +- name: "TEST 4b: Verify deleted state is idempotent" + assert: + that: + - deleted_result_2 is not changed + - deleted_result_2 is not failed + fail_msg: "Deleted state is not idempotent - should not change when deleting non-existent fabric" + success_msg: "Deleted state is idempotent - no changes when deleting non-existent fabric" + tags: [test_deleted, test_deleted_idempotent] + +############################################################################# +# TEST 5: Multiple fabric operations in single task +############################################################################# +- name: "TEST 5: Multiple fabric operations in single task" + cisco.nd.nd_manage_fabric_ibgp: + state: merged + config: + - name: "multi_fabric_1" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + 
telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65101" + site_id: "65101" + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0001" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + # vpc_ipv6_neighbor_discovery_sync: true + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: true + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: enable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.101.0.0/22" + nve_loopback_ip_range: "10.103.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.101.0/24" + intra_fabric_subnet_range: "10.104.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.133.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: 
"10.105.0.0/22" + # per_vrf_loopback_auto_provision_ipv6: false + # per_vrf_loopback_ipv6_range: "fd00::a105:0/112" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + # management_ipv6_prefix: 64 + - name: "multi_fabric_2" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65102" + site_id: "65102" + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0002" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + # vpc_ipv6_neighbor_discovery_sync: true + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: true + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: enable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.102.0.0/22" + 
nve_loopback_ip_range: "10.103.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.102.0/24" + intra_fabric_subnet_range: "10.104.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.134.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.106.0.0/22" + # per_vrf_loopback_auto_provision_ipv6: false + # per_vrf_loopback_ipv6_range: "fd00::a106:0/112" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + # management_ipv6_prefix: 64 + register: multi_fabric_result + tags: [test_multi, test_multi_create] + +- name: "TEST 5: Verify multiple fabrics were created" + assert: + that: + - multi_fabric_result is changed + - multi_fabric_result is not failed + fail_msg: "Multiple fabric creation failed" + success_msg: "Multiple fabrics successfully created" + tags: [test_multi, test_multi_create] + +############################################################################# +# FINAL CLEANUP - Clean up all test fabrics +############################################################################# +- name: "CLEANUP: Delete all test fabrics" + cisco.nd.nd_manage_fabric_ibgp: + state: deleted + config: + - name: "{{ test_fabric_merged }}" + - name: "{{ test_fabric_replaced }}" + - name: "{{ test_fabric_deleted }}" + - name: "multi_fabric_1" + - name: "multi_fabric_2" + ignore_errors: true + tags: [cleanup, always] + +############################################################################# +# TEST SUMMARY +############################################################################# +- name: "TEST SUMMARY: Display test results" + debug: + msg: | + 
======================================== + TEST SUMMARY for cisco.nd.nd_manage_fabric_ibgp module: + ======================================== + ✓ TEST 1: STATE MERGED + - Create fabric: {{ 'PASSED' if merged_result_1 is changed else 'FAILED' }} + - Idempotency: {{ 'PASSED' if merged_result_2 is not changed else 'FAILED' }} + - Update fabric: {{ 'PASSED' if merged_result_3 is changed else 'FAILED' }} + + ✓ TEST 2: STATE REPLACED + - Create fabric: {{ 'PASSED' if replaced_result_1 is changed else 'FAILED' }} + - Idempotency: {{ 'PASSED' if replaced_result_2 is not changed else 'FAILED' }} + - Replace fabric: {{ 'PASSED' if replaced_result_3 is changed else 'FAILED' }} + + ✓ TEST 3: MERGED vs REPLACED Comparison + - Merged partial: {{ 'PASSED' if merged_partial_result is changed else 'FAILED' }} + - Replaced partial: {{ 'PASSED' if replaced_partial_result is changed else 'FAILED' }} + + ✓ TEST 4: STATE DELETED + - Delete fabric: {{ 'PASSED' if deleted_result_1 is changed else 'FAILED' }} + - Idempotency: {{ 'PASSED' if deleted_result_2 is not changed else 'FAILED' }} + + ✓ TEST 5: MULTIPLE FABRICS + - Multi-create: {{ 'PASSED' if multi_fabric_result is changed else 'FAILED' }} + + All tests validate: + - State merged: Creates and updates fabrics by merging changes + - State replaced: Creates and completely replaces fabric configuration + - State deleted: Removes fabrics + - Idempotency: All operations are idempotent when run multiple times + - Difference: Merged preserves existing config, replaced overwrites completely + ======================================== + tags: [summary, always] \ No newline at end of file diff --git a/tests/integration/targets/nd_manage_fabric/tasks/main.yaml b/tests/integration/targets/nd_manage_fabric/tasks/main.yaml new file mode 100644 index 00000000..579ac793 --- /dev/null +++ b/tests/integration/targets/nd_manage_fabric/tasks/main.yaml @@ -0,0 +1,3 @@ +--- +- name: Run nd_manage_fabric iBGP tests + ansible.builtin.include_tasks: 
fabric_ibgp.yaml diff --git a/tests/integration/targets/nd_manage_fabric/vars/main.yaml b/tests/integration/targets/nd_manage_fabric/vars/main.yaml new file mode 100644 index 00000000..11db15a5 --- /dev/null +++ b/tests/integration/targets/nd_manage_fabric/vars/main.yaml @@ -0,0 +1,81 @@ +--- + +test_fabric_merged: "test_fabric_merged" +test_fabric_replaced: "test_fabric_replaced" +test_fabric_deleted: "test_fabric_deleted" + +# Common fabric configuration for all tests +common_fabric_config: + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanIbgp + bgp_asn: "65001" + site_id: "65001" + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00aa" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: loopback + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: true + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: enable + tcam_allocation: true + real_time_interface_statistics_collection: false + 
interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.2.0.0/22" + nve_loopback_ip_range: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.254.0/24" + intra_fabric_subnet_range: "10.4.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.33.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.5.0.0/22" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 From e20ff4d4d8cd34f9d9f26dbe5be7a02f344cd81e Mon Sep 17 00:00:00 2001 From: mwiebe Date: Fri, 13 Mar 2026 11:01:28 -0400 Subject: [PATCH 071/131] Refactor all endpoints to inherit from base class --- .../endpoints/v1/manage/manage_fabrics.py | 197 +++++------------- 1 file changed, 52 insertions(+), 145 deletions(-) diff --git a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py index 45bf5e43..af1c057b 100644 --- a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py +++ b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py @@ -33,7 +33,7 @@ __metaclass__ = type # pylint: enable=inFinal, valid-name -from typing import Literal, Optional, Final +from typing import ClassVar, Literal, Optional, Final from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage.base_path import BasePath @@ -80,15 +80,56 @@ class _EpManageFabricsBase(FabricNameMixin, BaseModel): Provides common functionality for all HTTP methods on the /api/v1/manage/fabrics endpoint. 
+ + Subclasses may override: + - ``_require_fabric_name``: set to ``False`` for collection-level endpoints + (list, create) that do not include a fabric name in the path. + - ``_path_suffix``: set to a non-empty string to append an extra segment + after the fabric name (e.g. ``"summary"``). Only used when + ``_require_fabric_name`` is ``True``. """ - # TODO: Remove it - # base_path: Final = BasePath.nd_manage_fabrics() - base_path: Final = BasePath.path("fabrics") + _require_fabric_name: ClassVar[bool] = True + _path_suffix: ClassVar[Optional[str]] = None + + endpoint_params: EndpointQueryParams = Field( + default_factory=EndpointQueryParams, description="Endpoint-specific query parameters" + ) def set_identifiers(self, identifier: IdentifierKey = None): self.fabric_name = identifier + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path with optional fabric name, path suffix, and + query string. + + ## Returns + + - Complete endpoint path string + + ## Raises + + - `ValueError` if `fabric_name` is required but not set + """ + if self._require_fabric_name and self.fabric_name is None: + raise ValueError( + f"{type(self).__name__}.path: fabric_name must be set before accessing path." + ) + segments = ["fabrics"] + if self.fabric_name is not None: + segments.append(self.fabric_name) + if self._path_suffix: + segments.append(self._path_suffix) + base_path = BasePath.path(*segments) + query_string = self.endpoint_params.to_query_string() + if query_string: + return f"{base_path}?{query_string}" + return base_path + class EpManageFabricsGet(_EpManageFabricsBase): """ # Summary @@ -144,30 +185,6 @@ class EpManageFabricsGet(_EpManageFabricsBase): default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" ) - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path with the fabric name and optional query string. 
- - ## Returns - - - Complete endpoint path string including fabric_name and optional query parameters - - ## Raises - - - `ValueError` if `fabric_name` is None - """ - if self.fabric_name is None: - raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") - # base_path = BasePath.nd_manage_fabrics(self.fabric_name) - base_path = BasePath.path("fabrics", self.fabric_name) - query_string = self.endpoint_params.to_query_string() - if query_string: - return f"{base_path}?{query_string}" - return base_path - @property def verb(self) -> HttpVerbEnum: """Return the HTTP verb for this endpoint.""" @@ -274,6 +291,8 @@ class EpManageFabricsListGet(_EpManageFabricsBase): ``` """ + _require_fabric_name: ClassVar[bool] = False + model_config = COMMON_CONFIG class_name: Literal["EpApiV1ManageFabricsListGet"] = Field( @@ -284,35 +303,13 @@ class EpManageFabricsListGet(_EpManageFabricsBase): default_factory=FabricsListEndpointParams, description="Endpoint-specific query parameters" ) - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path with optional query string. 
- - ## Returns - - - Complete endpoint path string including optional query parameters - - ## Raises - - - None - """ - # base_path = BasePath.nd_manage_fabrics() - base_path = BasePath.path("fabrics") - query_string = self.endpoint_params.to_query_string() - if query_string: - return f"{base_path}?{query_string}" - return base_path - @property def verb(self) -> HttpVerbEnum: """Return the HTTP verb for this endpoint.""" return HttpVerbEnum.GET -class EpManageFabricsPost(BaseModel): +class EpManageFabricsPost(_EpManageFabricsBase): """ # Summary @@ -362,6 +359,8 @@ class EpManageFabricsPost(BaseModel): ``` """ + _require_fabric_name: ClassVar[bool] = False + model_config = COMMON_CONFIG class_name: Literal["EpApiV1ManageFabricsPost"] = Field( @@ -372,28 +371,6 @@ class EpManageFabricsPost(BaseModel): default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" ) - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path with optional query string. - - ## Returns - - - Complete endpoint path string - - ## Raises - - - None - """ - # base_path = BasePath.nd_manage_fabrics() - base_path = BasePath.path("fabrics") - query_string = self.endpoint_params.to_query_string() - if query_string: - return f"{base_path}?{query_string}" - return base_path - @property def verb(self) -> HttpVerbEnum: """Return the HTTP verb for this endpoint.""" @@ -454,30 +431,6 @@ class EpManageFabricsPut(_EpManageFabricsBase): default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" ) - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path with the fabric name and optional query string. 
- - ## Returns - - - Complete endpoint path string - - ## Raises - - - `ValueError` if `fabric_name` is None - """ - if self.fabric_name is None: - raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") - # base_path = BasePath.nd_manage_fabrics(self.fabric_name) - base_path = BasePath.path("fabrics", self.fabric_name) - query_string = self.endpoint_params.to_query_string() - if query_string: - return f"{base_path}?{query_string}" - return base_path - @property def verb(self) -> HttpVerbEnum: """Return the HTTP verb for this endpoint.""" @@ -528,30 +481,6 @@ class EpManageFabricsDelete(_EpManageFabricsBase): default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" ) - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path with the fabric name and optional query string. - - ## Returns - - - Complete endpoint path string - - ## Raises - - - `ValueError` if `fabric_name` is None - """ - if self.fabric_name is None: - raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") - # base_path = BasePath.nd_manage_fabrics(self.fabric_name) - base_path = BasePath.path("fabrics", self.fabric_name) - query_string = self.endpoint_params.to_query_string() - if query_string: - return f"{base_path}?{query_string}" - return base_path - @property def verb(self) -> HttpVerbEnum: """Return the HTTP verb for this endpoint.""" @@ -599,34 +528,12 @@ class EpManageFabricsSummaryGet(_EpManageFabricsBase): default="EpApiV1ManageFabricsSummaryGet", description="Class name for backward compatibility" ) + _path_suffix: ClassVar[Optional[str]] = "summary" + endpoint_params: FabricsEndpointParams = Field( default_factory=FabricsEndpointParams, description="Endpoint-specific query parameters" ) - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path with the fabric name and optional query string. 
- - ## Returns - - - Complete endpoint path string - - ## Raises - - - `ValueError` if `fabric_name` is None - """ - if self.fabric_name is None: - raise ValueError(f"{self.class_name}.path: fabric_name must be set before accessing path.") - # base_path = BasePath.nd_manage_fabrics(self.fabric_name, "summary") - base_path = BasePath.path("fabrics", self.fabric_name, "summary") - query_string = self.endpoint_params.to_query_string() - if query_string: - return f"{base_path}?{query_string}" - return base_path - @property def verb(self) -> HttpVerbEnum: """Return the HTTP verb for this endpoint.""" From 2a790777bd7e707fa99c979f022a080ad0aa80d6 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Fri, 13 Mar 2026 11:11:49 -0400 Subject: [PATCH 072/131] Inherit from NDEndpointBaseModel --- .../endpoints/v1/manage/manage_fabrics.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py index af1c057b..5cb08213 100644 --- a/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py +++ b/plugins/module_utils/endpoints/v1/manage/manage_fabrics.py @@ -43,9 +43,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict, Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# Common config for basic validation -COMMON_CONFIG = ConfigDict(validate_assignment=True) - class FabricsEndpointParams(EndpointQueryParams): """ @@ -74,7 +71,7 @@ class FabricsEndpointParams(EndpointQueryParams): ) -class _EpManageFabricsBase(FabricNameMixin, BaseModel): +class _EpManageFabricsBase(FabricNameMixin, NDEndpointBaseModel): """ Base class for ND Manage Fabrics endpoints. 
@@ -175,8 +172,6 @@ class EpManageFabricsGet(_EpManageFabricsBase): ``` """ - model_config = COMMON_CONFIG - class_name: Literal["EpApiV1ManageFabricsGet"] = Field( default="EpApiV1ManageFabricsGet", description="Class name for backward compatibility" ) @@ -293,8 +288,6 @@ class EpManageFabricsListGet(_EpManageFabricsBase): _require_fabric_name: ClassVar[bool] = False - model_config = COMMON_CONFIG - class_name: Literal["EpApiV1ManageFabricsListGet"] = Field( default="EpApiV1ManageFabricsListGet", description="Class name for backward compatibility" ) @@ -361,8 +354,6 @@ class EpManageFabricsPost(_EpManageFabricsBase): _require_fabric_name: ClassVar[bool] = False - model_config = COMMON_CONFIG - class_name: Literal["EpApiV1ManageFabricsPost"] = Field( default="EpApiV1ManageFabricsPost", description="Class name for backward compatibility" ) @@ -421,8 +412,6 @@ class EpManageFabricsPut(_EpManageFabricsBase): ``` """ - model_config = COMMON_CONFIG - class_name: Literal["EpApiV1ManageFabricsPut"] = Field( default="EpApiV1ManageFabricsPut", description="Class name for backward compatibility" ) @@ -471,8 +460,6 @@ class EpManageFabricsDelete(_EpManageFabricsBase): ``` """ - model_config = COMMON_CONFIG - class_name: Literal["EpApiV1ManageFabricsDelete"] = Field( default="EpApiV1ManageFabricsDelete", description="Class name for backward compatibility" ) @@ -522,8 +509,6 @@ class EpManageFabricsSummaryGet(_EpManageFabricsBase): ``` """ - model_config = COMMON_CONFIG - class_name: Literal["EpApiV1ManageFabricsSummaryGet"] = Field( default="EpApiV1ManageFabricsSummaryGet", description="Class name for backward compatibility" ) From a0ea474b0b0e7686ebf34490258ade9acbfc1f91 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 19 Aug 2025 12:44:17 -0400 Subject: [PATCH 073/131] [minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher. 
--- plugins/module_utils/constants.py | 14 + plugins/module_utils/nd.py | 79 ++--- plugins/module_utils/nd_config_collection.py | 295 ++++++++++++++++++ plugins/module_utils/nd_network_resources.py | 202 ++++++++++++ plugins/module_utils/utils.py | 32 ++ plugins/modules/nd_local_user.py | 269 ++++++++++++++++ .../targets/nd_local_user/tasks/main.yml | 134 ++++++++ 7 files changed, 974 insertions(+), 51 deletions(-) create mode 100644 plugins/module_utils/nd_config_collection.py create mode 100644 plugins/module_utils/nd_network_resources.py create mode 100644 plugins/module_utils/utils.py create mode 100644 plugins/modules/nd_local_user.py create mode 100644 tests/integration/targets/nd_local_user/tasks/main.yml diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 10de9edf..cbba61b3 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -157,6 +157,11 @@ "restart", "delete", "update", + "merged", + "replaced", + "overridden", + "deleted", + "gathered", ) INTERFACE_FLOW_RULES_TYPES_MAPPING = {"port_channel": "PORTCHANNEL", "physical": "PHYSICAL", "l3out_sub_interface": "L3_SUBIF", "l3out_svi": "SVI"} @@ -170,3 +175,12 @@ ND_SETUP_NODE_DEPLOYMENT_TYPE = {"physical": "cimc", "virtual": "vnode"} BACKUP_TYPE = {"config_only": "config-only", None: "config-only", "": "config-only", "full": "full"} + +USER_ROLES_MAPPING = { + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", +} diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 03ffc85f..5f528bb8 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -18,7 +18,6 @@ from ansible.module_utils.basic import json from ansible.module_utils.basic import env_fallback from ansible.module_utils.six import PY3 -from ansible.module_utils.six.moves import filterfalse from 
ansible.module_utils.six.moves.urllib.parse import urlencode from ansible.module_utils._text import to_native, to_text from ansible.module_utils.connection import Connection @@ -73,53 +72,27 @@ def cmp(a, b): def issubset(subset, superset): - """Recurse through nested dictionary and compare entries""" + """Recurse through a nested dictionary and check if it is a subset of another.""" - # Both objects are the same object - if subset is superset: - return True - - # Both objects are identical - if subset == superset: - return True - - # Both objects have a different type - if isinstance(subset) is not isinstance(superset): + if type(subset) is not type(superset): return False + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + for key, value in subset.items(): - # Ignore empty values if value is None: - return True + continue - # Item from subset is missing from superset if key not in superset: return False - # Item has different types in subset and superset - if isinstance(superset.get(key)) is not isinstance(value): - return False + superset_value = superset.get(key) - # Compare if item values are subset - if isinstance(value, dict): - if not issubset(superset.get(key), value): - return False - elif isinstance(value, list): - try: - # NOTE: Fails for lists of dicts - if not set(value) <= set(superset.get(key)): - return False - except TypeError: - # Fall back to exact comparison for lists of dicts - diff = list(filterfalse(lambda i: i in value, superset.get(key))) + list(filterfalse(lambda j: j in superset.get(key), value)) - if diff: - return False - elif isinstance(value, set): - if not value <= superset.get(key): - return False - else: - if not value == superset.get(key): - return False + if not issubset(value, superset_value): + return False return True @@ -212,7 +185,7 @@ def __init__(self, module): self.previous = dict() self.proposed = dict() self.sent = dict() - 
self.stdout = None + self.stdout = "" # debug output self.has_modified = False @@ -266,8 +239,13 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: +<<<<<<< HEAD if data: info = self.connection.send_request(method, uri, json.dumps(data)) +======= + if data is not None: + info = conn.send_request(method, uri, json.dumps(data)) +>>>>>>> 7c967c3 ([minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher.) else: info = self.connection.send_request(method, uri) self.result["data"] = data @@ -324,6 +302,8 @@ def request( self.fail_json(msg="ND Error: {0}".format(self.error.get("message")), data=data, info=info) self.error = payload if "code" in payload: + if self.status == 404 and ignore_not_found_error: + return {} self.fail_json(msg="ND Error {code}: {message}".format(**payload), data=data, info=info, payload=payload) elif "messages" in payload and len(payload.get("messages")) > 0: self.fail_json(msg="ND Error {code} ({severity}): {message}".format(**payload["messages"][0]), data=data, info=info, payload=payload) @@ -520,30 +500,27 @@ def get_diff(self, unwanted=None): if not self.existing and self.sent: return True - existing = self.existing - sent = self.sent + existing = deepcopy(self.existing) + sent = deepcopy(self.sent) for key in unwanted: if isinstance(key, str): if key in existing: - try: - del existing[key] - except KeyError: - pass - try: - del sent[key] - except KeyError: - pass + del existing[key] + if key in sent: + del sent[key] elif isinstance(key, list): key_path, last = key[:-1], key[-1] try: existing_parent = reduce(dict.get, key_path, existing) - del existing_parent[last] + if existing_parent is not None: + del existing_parent[last] except KeyError: pass try: sent_parent = reduce(dict.get, key_path, sent) - del sent_parent[last] + if sent_parent is not None: + del sent_parent[last] except KeyError: pass return not 
issubset(sent, existing) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py new file mode 100644 index 00000000..1cf86756 --- /dev/null +++ b/plugins/module_utils/nd_config_collection.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import sys +from copy import deepcopy +from functools import reduce + +# Python 2 and 3 compatibility (To be removed in the future) +if sys.version_info[0] >= 3: + from collections.abc import MutableMapping + iteritems = lambda d: d.items() +else: + from collections import MutableMapping + iteritems = lambda d: d.iteritems() + +# NOTE: Single-Index Hybrid Collection for ND Network Resource Module +class NDConfigCollection(MutableMapping): + + def __init__(self, identifier_keys, data=None, use_composite_keys=False): + self.identifier_keys = identifier_keys + self.use_composite_keys = use_composite_keys + + # Dual Storage + self._list = [] + self._map = {} + + if data: + for item in data: + self.add(item) + + # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") + def _get_identifier_value(self, config): + """Generates the internal map key based on the selected mode.""" + if self.use_composite_keys: + # Mode: Composite (Tuple of ALL keys) + values = [] + for key in self.identifier_keys: + val = config.get(key) + if val is None: + return None # Missing a required part + values.append(val) + return tuple(values) + else: + # Mode: Priority (First available key) + for key in self.identifier_keys: + if key in config: + return config[key] + return None + + # Magic Methods + def __getitem__(self, key): + return self._map[key] + + def __setitem__(self, key, value): + if key in self._map: + old_ref = self._map[key] + try: + idx = 
self._list.index(old_ref) + self._list[idx] = value + self._map[key] = value + except ValueError: + pass + else: + # Add new + self._list.append(value) + self._map[key] = value + + def __delitem__(self, key): + if key in self._map: + obj_ref = self._map[key] + del self._map[key] + self._list.remove(obj_ref) + else: + raise KeyError(key) + + def __iter__(self): + return iter(self._map) + + def __len__(self): + return len(self._list) + + def __eq__(self, other): + if isinstance(other, NDConfigCollection): + return self._list == other._list + elif isinstance(other, list): + return self._list == other + elif isinstance(other, dict): + return self._map == other + return False + + def __ne__(self, other): + return not self.__eq__(other) + + def __repr__(self): + return str(self._list) + + # Helper Methods + def _filter_dict(self, data, ignore_keys): + return {k: v for k, v in iteritems(data) if k not in ignore_keys} + + def _issubset(self, subset, superset): + if type(subset) is not type(superset): + return False + + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + + for key, value in iteritems(subset): + if value is None: + continue + + if key not in superset: + return False + + superset_value = superset.get(key) + + if not self._issubset(value, superset_value): + return False + return True + + def _remove_unwanted_keys(self, data, unwanted_keys): + for key in unwanted_keys: + if isinstance(key, str): + if key in data: + del data[key] + elif isinstance(key, list) and len(key) > 0: + key_path, last = key[:-1], key[-1] + try: + parent = reduce(lambda d, k: d.get(k) if isinstance(d, dict) else None, key_path, data) + if isinstance(parent, dict) and last in parent: + del parent[last] + except (KeyError, TypeError): + pass + return data + + # Core Operations + def to_list(self): + return self._list + + def to_dict(self): + return self._map + + def copy(self): + return 
NDConfigCollection(self.identifier_keys, deepcopy(self._list), self.use_composite_keys) + + def add(self, config): + ident = self._get_identifier_value(config) + if ident is None: + mode = "Composite" if self.use_composite_keys else "Priority" + raise ValueError("[{0} Mode] Config missing required keys: {1}".format(mode, self.identifier_keys)) + + if ident in self._map: + self.__setitem__(ident, config) + else: + self._list.append(config) + self._map[ident] = config + + def merge(self, new_config): + ident = self._get_identifier_value(new_config) + if ident and ident in self._map: + self._map[ident].update(new_config) + else: + self.add(new_config) + + def replace(self, new_config): + ident = self._get_identifier_value(new_config) + if ident: + self[ident] = new_config + else: + self.add(new_config) + + def remove(self, identifiers): + # Try Map Removal + try: + target_key = self._get_identifier_value(identifiers) + if target_key and target_key in self._map: + self.__delitem__(target_key) + return + except Exception: + pass + + # Fallback: Linear Removal + to_remove = [] + for config in self._list: + match = True + for k, v in iteritems(identifiers): + if config.get(k) != v: + match = False + break + if match: + to_remove.append(self._get_identifier_value(config)) + + for ident in to_remove: + if ident in self._map: + self.__delitem__(ident) + + def get_by_key(self, key, default=None): + return self._map.get(key, default) + + def get_by_idenfiers(self, identifiers, default=None): + # Try Map Lookup + target_key = self._get_identifier_value(identifiers) + if target_key and target_key in self._map: + return self._map[target_key] + + # Fallback: Linear Lookup + valid_search_keys = [k for k in identifiers if k in self.identifier_keys] + if not valid_search_keys: + return default + + for config in self._list: + match = True + for k in valid_search_keys: + if config.get(k) != identifiers[k]: + match = False + break + if match: + return config + return default + + # Diff 
logic + def get_diff_config(self, new_config, unwanted_keys=None): + unwanted_keys = unwanted_keys or [] + + ident = self._get_identifier_value(new_config) + + if not ident or ident not in self._map: + return "new" + + existing = deepcopy(self._map[ident]) + sent = deepcopy(new_config) + + self._remove_unwanted_keys(existing, unwanted_keys) + self._remove_unwanted_keys(sent, unwanted_keys) + + is_subset = self._issubset(sent, existing) + + if is_subset: + return "no_diff" + else: + return "changed" + + def get_diff_collection(self, new_collection, unwanted_keys=None): + if not isinstance(new_collection, NDConfigCollection): + raise TypeError("Argument must be an NDConfigCollection") + + if len(self) != len(new_collection): + return True + + for item in new_collection.to_list(): + if self.get_diff_config(item, unwanted_keys) != "no_diff": + return True + + for ident in self._map: + if ident not in new_collection._map: + return True + + return False + + def get_diff_identifiers(self, new_collection): + current_identifiers = set(self._map.keys()) + other_identifiers = set(new_collection._map.keys()) + + return list(current_identifiers - other_identifiers) + + # Sanitize Operations + def sanitize(self, keys_to_remove=None, values_to_remove=None, remove_none_values=False): + keys_to_remove = keys_to_remove or [] + values_to_remove = values_to_remove or [] + + def recursive_clean(obj): + if isinstance(obj, dict): + keys = list(obj.keys()) + for k in keys: + v = obj[k] + if k in keys_to_remove or v in values_to_remove or (remove_none_values and v is None): + del obj[k] + continue + if isinstance(v, (dict, list)): + recursive_clean(v) + elif isinstance(obj, list): + for item in obj: + recursive_clean(item) + + for item in self._list: + recursive_clean(item) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py new file mode 100644 index 00000000..b73b24e7 --- /dev/null +++ 
b/plugins/module_utils/nd_network_resources.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from copy import deepcopy +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED + +# TODO: Make further enhancement to logs and outputs +# NOTE: ONLY works for new API endpoints introduced in ND v4.1.0 and later +class NDNetworkResourceModule(NDModule): + + def __init__(self, module, path, identifier_keys, use_composite_keys=False, actions_overwrite_map=None): + super().__init__(module) + + # Initial variables + self.path = path + self.actions_overwrite_map = actions_overwrite_map or {} + self.identifier_keys = identifier_keys + self.use_composite_keys = use_composite_keys + + # Initial data + self.init_all_data = self._query_all() + + # Info ouput + self.existing = NDConfigCollection(identifier_keys, data=self.init_all_data) + self.previous = NDConfigCollection(identifier_keys) + self.proposed = NDConfigCollection(identifier_keys) + self.sent = NDConfigCollection(identifier_keys) + + # Debug output + self.nd_logs = [] + + # Helper variables + self.current_identifier = "" + self.existing_config = {} + self.proposed_config = {} + + # Actions Operations + def actions_overwrite(action): + def decorator(func): + def wrapper(self, *args, **kwargs): + overwrite_action = self.actions_overwrite_map.get(action) + if callable(overwrite_action): + return overwrite_action(self) + else: + return func(self, *args, **kwargs) + return wrapper + return decorator + + @actions_overwrite("create") + def _create(self): + if 
not self.module.check_mode: + return self.request(path=self.path, method="POST", data=self.proposed_config) + + @actions_overwrite("update") + def _update(self): + if not self.module.check_mode: + object_path = "{0}/{1}".format(self.path, self.current_identifier) + return self.request(path=object_path, method="PUT", data=self.proposed_config) + + @actions_overwrite("delete") + def _delete(self): + if not self.module.check_mode: + object_path = "{0}/{1}".format(self.path, self.current_identifier) + self.request(path=object_path, method="DELETE") + + @actions_overwrite("query_all") + def _query_all(self): + return self.query_obj(self.path) + + def format_log(self, identifier, status, after_data, sent_payload_data=None): + item_result = { + "identifier": identifier, + "status": status, + "before": self.existing_config, + "after": deepcopy(after_data) if after_data is not None else self.existing_config, + "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {}, + } + + if not self.module.check_mode and self.url is not None: + item_result.update( + { + "method": self.method, + "response": self.response, + "status": self.status, + "url": self.url, + } + ) + + self.nd_logs.append(item_result) + + # Logs and Outputs formating Operations + def add_logs_and_ouputs(self): + if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + if self.params.get("output_level") in ("debug", "info"): + self.result["previous"] = self.previous.to_list() + if not self.has_modified and self.previous.get_diff_collection(self.existing): + self.result["changed"] = True + if self.stdout: + self.result["stdout"] = self.stdout + + if self.params.get("output_level") == "debug": + self.result["nd_logs"] = self.nd_logs + if self.url is not None: + self.result["httpapi_logs"] = self.httpapi_logs + + if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + self.result["sent"] = self.sent.to_list() + self.result["proposed"] = 
self.proposed.to_list() + + self.result["current"] = self.existing.to_list() + + # Manage State Operations + def manage_state(self, state, new_configs, unwanted_keys=None, override_exceptions=None): + unwanted_keys = unwanted_keys or [] + override_exceptions = override_exceptions or [] + + self.proposed = NDConfigCollection(self.identifier_keys, data=new_configs) + self.proposed.sanitize() + self.previous = self.existing.copy() + + if state in ["merged", "replaced", "overridden"]: + for identifier, config in self.proposed.items(): + + diff_config_info = self.existing.get_diff_config(config, unwanted_keys) + self.current_identifier = identifier + self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) + self.proposed_config = config + request_response = None + sent_payload = None + status = "no_change" + + if diff_config_info != "no_diff": + if state == "merged": + self.existing.merge(config) + self.proposed_config = self.existing[identifier] + else: + self.existing.replace(config) + + if diff_config_info == "changed": + request_response = self._update() + status = "updated" + else: + request_response = self._create() + status = "created" + + if not self.module.check_mode: + self.sent.add(self.proposed_config) + sent_payload = self.proposed_config + else: + request_response = self.proposed_config + + self.format_log(identifier, status, request_response, sent_payload) + + + if state == "overridden": + diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + for identifier in diff_identifiers: + if identifier not in override_exceptions: + self.current_identifier = identifier + self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) + self._delete() + del self.existing[identifier] + self.format_log(identifier, "deleted", after_data={}) + + + elif state == "deleted": + for identifier, config in self.proposed.items(): + if identifier in self.existing.keys(): + self.current_identifier = identifier + self.existing_config = 
deepcopy(self.existing.get_by_key(identifier, {})) + self.proposed_config = config + self._delete() + del self.existing[identifier] + self.format_log(identifier, "deleted", after_data={}) + + # Outputs Operations + def fail_json(self, msg, **kwargs): + self.add_logs_and_ouputs() + + self.result.update(**kwargs) + self.module.fail_json(msg=msg, **self.result) + + def exit_json(self, **kwargs): + self.add_logs_and_ouputs() + + if self.module._diff and self.result.get("changed") is True: + self.result["diff"] = dict( + before=self.previous.to_list(), + after=self.existing.to_list(), + ) + + self.result.update(**kwargs) + self.module.exit_json(**self.result) diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py new file mode 100644 index 00000000..5bf0a0f0 --- /dev/null +++ b/plugins/module_utils/utils.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from copy import deepcopy + + +def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): + if keys is None: + keys = [] + if values is None: + values = [] + + result = deepcopy(dict_to_sanitize) + for k, v in dict_to_sanitize.items(): + if k in keys: + del result[k] + elif v in values or (v is None and remove_none_values): + del result[k] + elif isinstance(v, dict) and recursive: + result[k] = sanitize_dict(v, keys, values) + elif isinstance(v, list) and recursive: + for index, item in enumerate(v): + if isinstance(item, dict): + result[k][index] = sanitize_dict(item, keys, values) + return result \ No newline at end of file diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py new file mode 100644 index 00000000..552df3b7 --- /dev/null +++ b/plugins/modules/nd_local_user.py @@ -0,0 +1,269 @@ 
+#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"} + +DOCUMENTATION = r""" +--- +module: nd_local_user +version_added: "1.4.0" +short_description: Manage local users on Cisco Nexus Dashboard +description: +- Manage local users on Cisco Nexus Dashboard (ND). +- It supports creating, updating, querying, and deleting local users. +author: +- Gaspard Micol (@gmicol) +options: + config: + description: + - The list of the local users to configure. + type: list + elements: dict + suboptions: + email: + description: + - The email address of the local user. + type: str + login_id: + description: + - The login ID of the local user. + - The O(config.login_id) must be defined when creating, updating or deleting a local user. + type: str + required: true + first_name: + description: + - The first name of the local user. + type: str + last_name: + description: + - The last name of the local user. + type: str + user_password: + description: + - The password of the local user. + - Password must have a minimum of 8 characters to a maximum of 64 characters. + - Password must have three of the following; one number, one lower case character, one upper case character, one special character. + - The O(config.user_password) must be defined when creating a new local_user. + type: str + reuse_limitation: + description: + - The number of different passwords a user must use before they can reuse a previous one. + - It defaults to C(0) when unset during creation. + type: int + time_interval_limitation: + description: + - The minimum time period that must pass before a previous password can be reused. + - It defaults to C(0) when unset during creation. 
+ type: int + security_domains: + description: + - The list of Security Domains and Roles for the local user. + - At least, one Security Domain must be defined when creating a new local user. + type: list + elements: dict + suboptions: + name: + description: + - The name of the Security Domain to which the local user is given access. + type: str + required: true + aliases: [ security_domain_name, domain_name ] + roles: + description: + - The Permission Roles of the local user within the Security Domain. + type: list + elements: str + choices: [ fabric_admin, observer, super_admin, support_engineer, approver, designer ] + aliases: [ domains ] + remote_id_claim: + description: + - The remote ID claim of the local user. + type: str + remote_user_authorization: + description: + - To enable/disable the Remote User Authorization of the local user. + - Remote User Authorization is used for signing into Nexus Dashboard when using identity providers that cannot provide authorization claims. + Once this attribute is enabled, the local user ID cannot be used to directly login to Nexus Dashboard. + - It defaults to C(false) when unset during creation. + type: bool + state: + description: + - The desired state of the network resources on the Cisco Nexus Dashboard. + - Use O(state=merged) to create new resources and updates existing ones as defined in your configuration. + Resources on ND that are not specified in the configuration will be left unchanged. + - Use O(state=replaced) to replace the resources specified in the configuration. + - Use O(state=overridden) to enforce the configuration as the single source of truth. + The resources on ND will be modified to exactly match the configuration. + Any resource existing on ND but not present in the configuration will be deleted. Use with extra caution. + - Use O(state=deleted) to remove the resources specified in the configuration from the Cisco Nexus Dashboard. 
+ type: str + default: merged + choices: [ merged, replaced, overridden, deleted ] +extends_documentation_fragment: +- cisco.nd.modules +- cisco.nd.check_mode +notes: +- This module is only supported on Nexus Dashboard having version 4.1.0 or higher. +- This module is not idempotent when creating or updating a local user object when O(config.user_password) is used. +""" + +EXAMPLES = r""" +- name: Create a new local user + cisco.nd.nd_local_user: + config: + - email: user@example.com + login_id: local_user + first_name: User first name + last_name: User last name + user_password: localUserPassword1% + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + name: all + roles: + - observer + - support_engineer + remote_id_claim: remote_user + remote_user_authorization: true + state: merged + register: result + +- name: Create local user with minimal configuration + cisco.nd.nd_local_user: + config: + - login_id: local_user_min + user_password: localUserMinuser_password + security_domain: all + state: merged + +- name: Update local user + cisco.nd.nd_local_user: + config: + - email: udpateduser@example.com + login_id: local_user + first_name: Updated user first name + last_name: Updated user last name + user_password: updatedLocalUserPassword1% + reuse_limitation: 25 + time_interval_limitation: 15 + security_domains: + - name: all + roles: super_admin + - name: ansible_domain + roles: observer + roles: super_admin + remote_id_claim: "" + remote_user_authorization: false + state: replaced + +- name: Delete a local user + cisco.nd.nd_local_user: + config: + - login_id: local_user + state: deleted +""" + +RETURN = r""" +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec, NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resources import NDNetworkResourceModule +from ansible_collections.cisco.nd.plugins.module_utils.constants import 
USER_ROLES_MAPPING + + +# Actions overwrite functions +def quey_all_local_users(nd): + return nd.query_obj(nd.path).get("localusers") + + +def main(): + argument_spec = nd_argument_spec() + argument_spec.update( + config=dict( + type="list", + elements="dict", + options=dict( + email=dict(type="str"), + login_id=dict(type="str", required=True), + first_name=dict(type="str"), + last_name=dict(type="str"), + user_password=dict(type="str", no_log=True), + reuse_limitation=dict(type="int"), + time_interval_limitation=dict(type="int"), + security_domains=dict( + type="list", + elements="dict", + options=dict( + name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), + roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + ), + aliases=["domains"], + ), + remote_id_claim=dict(type="str"), + remote_user_authorization=dict(type="bool"), + ), + ), + override_exceptions=dict(type="list", elements="str"), + state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), + ) + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + ) + + path = "/api/v1/infra/aaa/localUsers" + identifier_keys = ["loginID"] + actions_overwrite_map = {"query_all": quey_all_local_users} + + nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) + + state = nd.params.get("state") + config = nd.params.get("config") + override_exceptions = nd.params.get("override_exceptions") + new_config = [] + for object in config: + payload = { + "email": object.get("email"), + "firstName": object.get("first_name"), + "lastName": object.get("last_name"), + "loginID": object.get("login_id"), + "password": object.get("user_password"), + "remoteIDClaim": object.get("remote_id_claim"), + "xLaunch": object.get("remote_user_authorization"), + } + + if object.get("security_domains"): + payload["rbac"] = { + "domains": { + security_domain.get("name"): { + 
"roles": ( + [USER_ROLES_MAPPING.get(role) for role in security_domain["roles"]] if isinstance(security_domain.get("roles"), list) else [] + ) + } + for security_domain in object["security_domains"] + }, + } + if object.get("reuse_limitation") or object.get("time_interval_limitation"): + payload["passwordPolicy"] = { + "reuseLimitation": object.get("reuse_limitation"), + "timeIntervalLimitation": object.get("time_interval_limitation"), + } + new_config.append(payload) + + nd.manage_state(state=state, new_configs=new_config, unwanted_keys=[["passwordPolicy", "passwordChangeTime"], ["userID"]], override_exceptions=override_exceptions) + + nd.exit_json() + + +if __name__ == "__main__": + main() diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml new file mode 100644 index 00000000..77e55cd1 --- /dev/null +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -0,0 +1,134 @@ +# Test code for the ND modules +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +- name: Test that we have a Nexus Dashboard host, username and password + ansible.builtin.fail: + msg: 'Please define the following variables: ansible_host, ansible_user and ansible_password.' 
+ when: ansible_host is not defined or ansible_user is not defined or ansible_password is not defined + +- name: Set vars + ansible.builtin.set_fact: + nd_info: &nd_info + output_level: '{{ api_key_output_level | default("debug") }}' + +- name: Ensure local users do not exist before test starts + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: ansible_local_user + - login_id: ansible_local_user_2 + state: deleted + +# CREATE +- name: Create local users with full and minimum configuration (check mode) + cisco.nd.nd_local_user: &create_local_user + <<: *nd_info + config: + - email: ansibleuser@example.com + login_id: ansible_local_user + first_name: Ansible first name + last_name: Ansible last name + user_password: ansibleLocalUserPassword1%Test + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + - name: all + roles: + - observer + - support_engineer + remote_id_claim: ansible_remote_user + remote_user_authorization: true + - login_id: ansible_local_user_2 + user_password: ansibleLocalUser2Password1%Test + security_domains: + - name: all + state: merged + check_mode: true + register: cm_create_local_user + +- name: Create local users with full and minimum configuration (normal mode) + cisco.nd.nd_local_user: + <<: *create_local_user + register: nm_create_local_user + +# UPDATE +- name: Update all ansible_local_user's attributes (check mode) + cisco.nd.nd_local_user: &update_first_local_user + <<: *nd_info + config: + - email: updatedansibleuser@example.com + login_id: ansible_local_user + first_name: Updated Ansible first name + last_name: Updated Ansible last name + user_password: updatedAnsibleLocalUserPassword1% + reuse_limitation: 25 + time_interval_limitation: 15 + security_domains: + - name: all + roles: super_admin + remote_id_claim: "" + remote_user_authorization: false + state: replaced + check_mode: true + register: cm_update_local_user + +- name: Update local user (normal mode) + cisco.nd.nd_local_user: + <<: 
*update_first_local_user + register: nm_update_local_user + +- name: Update all ansible_local_user_2's attributes except password + cisco.nd.nd_local_user: &update_second_local_user + <<: *nd_info + config: + - email: secondansibleuser@example.com + login_id: ansible_local_user_2 + first_name: Second Ansible first name + last_name: Second Ansible last name + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + - name: all + roles: fabric_admin + remote_id_claim: ansible_remote_user_2 + remote_user_authorization: true + state: merged + register: nm_update_local_user_2 + +- name: Update all ansible_local_user_2's attributes except password again (idempotency) + cisco.nd.nd_local_user: + <<: *update_second_local_user + register: nm_update_local_user_2_again + + +# DELETE +- name: Delete local user by name (check mode) + cisco.nd.nd_local_user: &delete_local_user + <<: *nd_info + config: + - login_id: ansible_local_user + state: deleted + check_mode: true + register: cm_delete_local_user + +- name: Delete local user by name (normal mode) + cisco.nd.nd_local_user: + <<: *delete_local_user + register: nm_delete_local_user + +- name: Delete local user again (idempotency test) + cisco.nd.nd_local_user: + <<: *delete_local_user + register: nm_delete_local_user_again + + +# CLEAN UP +- name: Ensure local users do not exist + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: ansible_local_user + - login_id: ansible_local_user_2 + state: deleted From 576552a8e3205df8f5d706f73dc1121093198573 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 15 Jan 2026 11:47:32 -0500 Subject: [PATCH 074/131] [ignore] First Pydantic implementation: Add Pydantic Models for nd_local_user. 
--- .../module_utils/models/local_user_model.py | 142 ++++++++++++++++++ plugins/module_utils/nd_config_collection.py | 1 + plugins/module_utils/nd_network_resources.py | 2 + plugins/modules/nd_local_user.py | 5 +- 4 files changed, 148 insertions(+), 2 deletions(-) create mode 100644 plugins/module_utils/models/local_user_model.py diff --git a/plugins/module_utils/models/local_user_model.py b/plugins/module_utils/models/local_user_model.py new file mode 100644 index 00000000..f8de1f46 --- /dev/null +++ b/plugins/module_utils/models/local_user_model.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import json +from typing import List, Dict, Any, Optional +from pydantic import BaseModel, ConfigDict, Field, field_validator + +# TODO: Add Field validation methods +# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel +# TODO: Maybe define our own baseModel +# TODO: Look at ansible aliases +from pydantic import BaseModel, Field, ConfigDict +from typing import List, Dict, Any, Optional + +class SecurityDomainModel(BaseModel): + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + name: str = Field(alias="name") + roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") + + +class LocalUserModel(BaseModel): + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + email: str = Field(default="", alias="email") + login_id: str = Field(alias="loginID") + first_name: str = Field(default="", alias="firstName") + last_name: str = Field(default="", alias="lastName") + user_password: str = Field(alias="password") + 
reuse_limitation: int = Field(default=0, alias="reuseLimitation") + time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") + security_domains: List[SecurityDomainModel] = Field(default_factory=list, alias="domains") + remote_id_claim: str = Field(default="", alias="remoteIDClaim") + remote_user_authorization: bool = Field(default=False, alias="xLaunch") + + def to_api_payload(self, user_roles_mapping: Dict[str, str] = None) -> Dict[str, Any]: + """Convert the model to the specific API payload format required.""" + if user_roles_mapping is None: + user_roles_mapping = {} + + base_data = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + + payload = { + "email": base_data.get("email"), + "firstName": base_data.get("firstName"), + "lastName": base_data.get("lastName"), + "loginID": base_data.get("loginID"), + "password": base_data.get("password"), + "remoteIDClaim": base_data.get("remoteIDClaim"), + "xLaunch": base_data.get("xLaunch"), + } + + if self.security_domains: + payload["rbac"] = { + "domains": { + domain.name: { + "roles": [ + user_roles_mapping.get(role, role) for role in domain.roles + ] + } + for domain in self.security_domains + } + } + + if self.reuse_limitation or self.time_interval_limitation: + payload["passwordPolicy"] = { + "reuseLimitation": self.reuse_limitation, + "timeIntervalLimitation": self.time_interval_limitation, + } + + return payload + + @classmethod + def from_api_payload( + cls, + payload: Dict[str, Any], + reverse_user_roles_mapping: Optional[Dict[str, str]] = None + ) -> 'LocalUserModel': + + if reverse_user_roles_mapping is None: + reverse_user_roles_mapping = {} + + user_data = { + "email": payload.get("email", ""), + "loginID": payload.get("loginID", ""), + "firstName": payload.get("firstName", ""), + "lastName": payload.get("lastName", ""), + "password": payload.get("password", ""), + "remoteIDClaim": payload.get("remoteIDClaim", ""), + "xLaunch": 
payload.get("xLaunch", False), + } + + password_policy = payload.get("passwordPolicy", {}) + user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) + user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) + + domains_data = [] + rbac = payload.get("rbac", {}) + if rbac and "domains" in rbac: + for domain_name, domain_config in rbac["domains"].items(): + # Map API roles back to internal roles + api_roles = domain_config.get("roles", []) + internal_roles = [ + reverse_user_roles_mapping.get(role, role) for role in api_roles + ] + + domain_data = { + "name": domain_name, + "roles": internal_roles + } + domains_data.append(domain_data) + + user_data["domains"] = domains_data + + return cls(**user_data) + + # @classmethod + # def from_api_payload_json( + # cls, + # json_payload: str, + # reverse_user_roles_mapping: Optional[Dict[str, str]] = None + # ) -> 'LocalUserModel': + + # payload = json.loads(json_payload) + # return cls.from_api_payload(payload, reverse_user_roles_mapping) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1cf86756..8f0058bb 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -20,6 +20,7 @@ from collections import MutableMapping iteritems = lambda d: d.iteritems() +# TODO: Adapt to Pydantic Models # NOTE: Single-Index Hybrid Collection for ND Network Resource Module class NDConfigCollection(MutableMapping): diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index b73b24e7..3b549da1 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -14,6 +14,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED # TODO: Make further enhancement to logs and outputs +# TODO: Adapt to Pydantic Models # NOTE: ONLY works 
for new API endpoints introduced in ND v4.1.0 and later class NDNetworkResourceModule(NDModule): @@ -98,6 +99,7 @@ def format_log(self, identifier, status, after_data, sent_payload_data=None): self.nd_logs.append(item_result) # Logs and Outputs formating Operations + # TODO: Move it to different file def add_logs_and_ouputs(self): if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if self.params.get("output_level") in ("debug", "info"): diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 552df3b7..4a5f1ad2 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -181,10 +181,11 @@ # Actions overwrite functions -def quey_all_local_users(nd): +def query_all_local_users(nd): return nd.query_obj(nd.path).get("localusers") +# TODO: Adapt to Pydantic Model def main(): argument_spec = nd_argument_spec() argument_spec.update( @@ -223,7 +224,7 @@ def main(): path = "/api/v1/infra/aaa/localUsers" identifier_keys = ["loginID"] - actions_overwrite_map = {"query_all": quey_all_local_users} + actions_overwrite_map = {"query_all": query_all_local_users} nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) From 39d21334c32fc07aa8a49067ecb106443d0d9fe2 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 20 Jan 2026 13:17:35 -0500 Subject: [PATCH 075/131] [ignore] Second Pydantic Implementation: Create a NDBaseModel to be inherited from future class models. Modify class models for local_user. 
--- plugins/module_utils/models/base.py | 57 +++++++ plugins/module_utils/models/local_user.py | 116 ++++++++++++++ .../module_utils/models/local_user_model.py | 142 ------------------ 3 files changed, 173 insertions(+), 142 deletions(-) create mode 100644 plugins/module_utils/models/base.py create mode 100644 plugins/module_utils/models/local_user.py delete mode 100644 plugins/module_utils/models/local_user_model.py diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py new file mode 100644 index 00000000..e7301d14 --- /dev/null +++ b/plugins/module_utils/models/base.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from abc import ABC, abstractmethod +from pydantic import BaseModel, ConfigDict +from typing import List, Dict, Any, Optional, ClassVar + + +class NDBaseModel(BaseModel, ABC): + + model_config = ConfigDict( + str_strip_whitespace=True, + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + ) + + # TODO: find ways to redifine these var in every + identifiers: ClassVar[List[str]] = [] + use_composite_identifiers: ClassVar[bool] = False + + @abstractmethod + def to_payload(self) -> Dict[str, Any]: + pass + + @classmethod + @abstractmethod + def from_response(cls, response: Dict[str, Any]) -> 'NDBaseModel': + pass + + # TODO: Modify to make it more generic and Pydantic + # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") + def get_identifier_value(self) -> Any: + """Generates the internal map key based on the selected mode.""" + # if self.use_composite_keys: + # # Mode: Composite (Tuple of ALL keys) + # values = [] + # for key in self.identifier_keys: + # val = config.get(key) + # if val is None: + # return None # Missing a required part + # 
values.append(val) + # return tuple(values) + # else: + # # Mode: Priority (First available key) + # for key in self.identifier_keys: + # if key in config: + # return config[key] + # return None + pass diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py new file mode 100644 index 00000000..7877a5a5 --- /dev/null +++ b/plugins/module_utils/models/local_user.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from pydantic import Field, field_validator +from types import MappingProxyType +from typing import List, Dict, Any, Optional, ClassVar + +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel + +# TODO: Add Field validation methods +# TODO: define our own Field class for string versioning, ansible aliases +# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel +# TODO: Surclass BaseModel -> Priority +# TODO: Look at ansible aliases + +# TODO: use constants.py file in the future +user_roles_mapping = MappingProxyType({}) + + +class LocalUserSecurityDomainModel(NDBaseModel): + + name: str = Field(alias="name") + roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") + + def to_payload(self) -> Dict[str, Any]: + return { + self.name: { + "roles": [ + user_roles_mapping.get(role, role) for role in self.roles + ] + } + } + + @classmethod + def from_response(cls, name: str, domain_config: List[str]) -> 'NDBaseModel': + internal_roles = [user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + + domain_data = { + "name": name, + "roles": internal_roles + } + + return cls(**domain_data) + + +class LocalUserModel(NDBaseModel): + + # TODO: Define a way to generate it (look at NDBaseModel 
comments) + identifiers: ClassVar[List[str]] = ["login_id"] + + # TODO: Use Optinal to remove default values (get them from API response instead) + email: str = Field(default="", alias="email") + login_id: str = Field(alias="loginID") + first_name: str = Field(default="", alias="firstName") + last_name: str = Field(default="", alias="lastName") + user_password: str = Field(alias="password") + reuse_limitation: int = Field(default=0, alias="reuseLimitation") + time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") + security_domains: List[LocalUserSecurityDomainModel] = Field(default_factory=list, alias="domains") + remote_id_claim: str = Field(default="", alias="remoteIDClaim") + remote_user_authorization: bool = Field(default=False, alias="xLaunch") + + def to_payload(self) -> Dict[str, Any]: + """Convert the model to the specific API payload format required.""" + + payload = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + + if self.security_domains: + payload["rbac"] = {"domains": {}} + for domain in self.security_domains: + payload["rbac"]["domains"].update(domain.to_api_payload()) + + if self.reuse_limitation or self.time_interval_limitation: + payload["passwordPolicy"] = { + "reuseLimitation": self.reuse_limitation, + "timeIntervalLimitation": self.time_interval_limitation, + } + + return payload + + @classmethod + def from_response(cls, payload: Dict[str, Any]) -> 'LocalUserModel': + + if reverse_user_roles_mapping is None: + reverse_user_roles_mapping = {} + + user_data = { + "email": payload.get("email"), + "loginID": payload.get("loginID"), + "firstName": payload.get("firstName"), + "lastName": payload.get("lastName"), + "password": payload.get("password"), + "remoteIDClaim": payload.get("remoteIDClaim"), + "xLaunch": payload.get("xLaunch"), + } + + password_policy = payload.get("passwordPolicy", {}) + user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) + 
user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) + + domains_data = [] + rbac = payload.get("rbac", {}) + if rbac and "domains" in rbac: + for domain_name, domain_config in rbac["domains"].items(): + domains_data.append(LocalUserSecurityDomainModel.from_api_response(domain_name, domain_config)) + + user_data["domains"] = domains_data + + return cls(**user_data) diff --git a/plugins/module_utils/models/local_user_model.py b/plugins/module_utils/models/local_user_model.py deleted file mode 100644 index f8de1f46..00000000 --- a/plugins/module_utils/models/local_user_model.py +++ /dev/null @@ -1,142 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2025, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -import json -from typing import List, Dict, Any, Optional -from pydantic import BaseModel, ConfigDict, Field, field_validator - -# TODO: Add Field validation methods -# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel -# TODO: Maybe define our own baseModel -# TODO: Look at ansible aliases -from pydantic import BaseModel, Field, ConfigDict -from typing import List, Dict, Any, Optional - -class SecurityDomainModel(BaseModel): - model_config = ConfigDict( - str_strip_whitespace=True, - use_enum_values=True, - validate_assignment=True, - populate_by_name=True, - ) - - name: str = Field(alias="name") - roles: list[str] = Field(default_factory=lambda: ["observer"], alias="roles") - - -class LocalUserModel(BaseModel): - model_config = ConfigDict( - str_strip_whitespace=True, - use_enum_values=True, - validate_assignment=True, - populate_by_name=True, - ) - - email: str = Field(default="", alias="email") - login_id: str = Field(alias="loginID") - first_name: str = Field(default="", alias="firstName") - last_name: str = Field(default="", 
alias="lastName") - user_password: str = Field(alias="password") - reuse_limitation: int = Field(default=0, alias="reuseLimitation") - time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") - security_domains: List[SecurityDomainModel] = Field(default_factory=list, alias="domains") - remote_id_claim: str = Field(default="", alias="remoteIDClaim") - remote_user_authorization: bool = Field(default=False, alias="xLaunch") - - def to_api_payload(self, user_roles_mapping: Dict[str, str] = None) -> Dict[str, Any]: - """Convert the model to the specific API payload format required.""" - if user_roles_mapping is None: - user_roles_mapping = {} - - base_data = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) - - payload = { - "email": base_data.get("email"), - "firstName": base_data.get("firstName"), - "lastName": base_data.get("lastName"), - "loginID": base_data.get("loginID"), - "password": base_data.get("password"), - "remoteIDClaim": base_data.get("remoteIDClaim"), - "xLaunch": base_data.get("xLaunch"), - } - - if self.security_domains: - payload["rbac"] = { - "domains": { - domain.name: { - "roles": [ - user_roles_mapping.get(role, role) for role in domain.roles - ] - } - for domain in self.security_domains - } - } - - if self.reuse_limitation or self.time_interval_limitation: - payload["passwordPolicy"] = { - "reuseLimitation": self.reuse_limitation, - "timeIntervalLimitation": self.time_interval_limitation, - } - - return payload - - @classmethod - def from_api_payload( - cls, - payload: Dict[str, Any], - reverse_user_roles_mapping: Optional[Dict[str, str]] = None - ) -> 'LocalUserModel': - - if reverse_user_roles_mapping is None: - reverse_user_roles_mapping = {} - - user_data = { - "email": payload.get("email", ""), - "loginID": payload.get("loginID", ""), - "firstName": payload.get("firstName", ""), - "lastName": payload.get("lastName", ""), - "password": payload.get("password", ""), - 
"remoteIDClaim": payload.get("remoteIDClaim", ""), - "xLaunch": payload.get("xLaunch", False), - } - - password_policy = payload.get("passwordPolicy", {}) - user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) - user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) - - domains_data = [] - rbac = payload.get("rbac", {}) - if rbac and "domains" in rbac: - for domain_name, domain_config in rbac["domains"].items(): - # Map API roles back to internal roles - api_roles = domain_config.get("roles", []) - internal_roles = [ - reverse_user_roles_mapping.get(role, role) for role in api_roles - ] - - domain_data = { - "name": domain_name, - "roles": internal_roles - } - domains_data.append(domain_data) - - user_data["domains"] = domains_data - - return cls(**user_data) - - # @classmethod - # def from_api_payload_json( - # cls, - # json_payload: str, - # reverse_user_roles_mapping: Optional[Dict[str, str]] = None - # ) -> 'LocalUserModel': - - # payload = json.loads(json_payload) - # return cls.from_api_payload(payload, reverse_user_roles_mapping) From 10d1a3bf0b78271fc2cc6c7f6ca73d256daf3c1a Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 22 Jan 2026 01:04:05 -0500 Subject: [PATCH 076/131] [ignore] Pydantic Models: Modify and Clean both local_user.py and base.py based on comments. Add a get method and get_identifier_value function to NDBaseModel. 
--- plugins/module_utils/models/base.py | 43 ++++++------ plugins/module_utils/models/local_user.py | 82 ++++++++++------------- 2 files changed, 57 insertions(+), 68 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index e7301d14..bdd1b9c2 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -11,6 +11,7 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import List, Dict, Any, Optional, ClassVar +from typing_extensions import Self class NDBaseModel(BaseModel, ABC): @@ -22,7 +23,7 @@ class NDBaseModel(BaseModel, ABC): populate_by_name=True, ) - # TODO: find ways to redifine these var in every + # TODO: find ways to redifine these var in every future NDBaseModels identifiers: ClassVar[List[str]] = [] use_composite_identifiers: ClassVar[bool] = False @@ -32,26 +33,28 @@ def to_payload(self) -> Dict[str, Any]: @classmethod @abstractmethod - def from_response(cls, response: Dict[str, Any]) -> 'NDBaseModel': + def from_response(cls, response: Dict[str, Any]) -> Self: pass - # TODO: Modify to make it more generic and Pydantic + def get(self, field: str, default: Any = None) -> Any: + """Custom get method to mimic dictionary behavior.""" + return getattr(self, field, default) + + # TODO: Modify to make it more generic and Pydantic | might change and be moved in different Generic Class/Model # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") def get_identifier_value(self) -> Any: - """Generates the internal map key based on the selected mode.""" - # if self.use_composite_keys: - # # Mode: Composite (Tuple of ALL keys) - # values = [] - # for key in self.identifier_keys: - # val = config.get(key) - # if val is None: - # return None # Missing a required part - # values.append(val) - # return tuple(values) - # else: - # # Mode: Priority (First available key) - # for key in self.identifier_keys: - # if key in config: 
- # return config[key] - # return None - pass + """Generates the internal map key based on the selected mode.""" + if self.use_composite_identifiers: + # Mode: Composite (Tuple of ALL keys) + values = [] + for identifier in self.identifiers: + value = self.get(identifier) + if value is None: + return None # Missing a required part | Add Error Handling method here + values.append(value) + return tuple(values) + else: + # Mode: Priority (First available key) + for identifier in self.identifiers: + return self.get(identifier) + return None diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 7877a5a5..28cea27c 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,9 +8,10 @@ __metaclass__ = type -from pydantic import Field, field_validator +from pydantic import Field, field_validator, SecretStr from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar +from typing_extensions import Self from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel @@ -20,7 +21,7 @@ # TODO: Surclass BaseModel -> Priority # TODO: Look at ansible aliases -# TODO: use constants.py file in the future +# TODO: To be moved in constants.py file user_roles_mapping = MappingProxyType({}) @@ -39,15 +40,11 @@ def to_payload(self) -> Dict[str, Any]: } @classmethod - def from_response(cls, name: str, domain_config: List[str]) -> 'NDBaseModel': - internal_roles = [user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] - - domain_data = { - "name": name, - "roles": internal_roles - } - - return cls(**domain_data) + def from_response(cls, name: str, domain_config: List[str]) -> Self: + return cls( + name=name, + roles=[user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + ) class LocalUserModel(NDBaseModel): @@ -55,17 +52,17 @@ class LocalUserModel(NDBaseModel): # TODO: Define a way to 
generate it (look at NDBaseModel comments) identifiers: ClassVar[List[str]] = ["login_id"] - # TODO: Use Optinal to remove default values (get them from API response instead) - email: str = Field(default="", alias="email") + email: Optional[str] = Field(alias="email") login_id: str = Field(alias="loginID") - first_name: str = Field(default="", alias="firstName") - last_name: str = Field(default="", alias="lastName") - user_password: str = Field(alias="password") - reuse_limitation: int = Field(default=0, alias="reuseLimitation") - time_interval_limitation: int = Field(default=0, alias="timeIntervalLimitation") - security_domains: List[LocalUserSecurityDomainModel] = Field(default_factory=list, alias="domains") - remote_id_claim: str = Field(default="", alias="remoteIDClaim") - remote_user_authorization: bool = Field(default=False, alias="xLaunch") + first_name: Optional[str] = Field(default="", alias="firstName") + last_name: Optional[str] = Field(default="", alias="lastName") + # TODO: Check secrets manipulation when tracking changes while maintaining security + user_password: Optional[SecretStr] = Field(alias="password") + reuse_limitation: Optional[int] = Field(default=0, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=0, alias="timeIntervalLimitation") + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(alias="domains") + remote_id_claim: Optional[str] = Field(default="", alias="remoteIDClaim") + remote_user_authorization: Optional[bool] = Field(default=False, alias="xLaunch") def to_payload(self) -> Dict[str, Any]: """Convert the model to the specific API payload format required.""" @@ -86,31 +83,20 @@ def to_payload(self) -> Dict[str, Any]: return payload @classmethod - def from_response(cls, payload: Dict[str, Any]) -> 'LocalUserModel': + def from_response(cls, response: Dict[str, Any]) -> Self: - if reverse_user_roles_mapping is None: - reverse_user_roles_mapping = {} - - user_data = { - "email": 
payload.get("email"), - "loginID": payload.get("loginID"), - "firstName": payload.get("firstName"), - "lastName": payload.get("lastName"), - "password": payload.get("password"), - "remoteIDClaim": payload.get("remoteIDClaim"), - "xLaunch": payload.get("xLaunch"), - } - - password_policy = payload.get("passwordPolicy", {}) - user_data["reuseLimitation"] = password_policy.get("reuseLimitation", 0) - user_data["timeIntervalLimitation"] = password_policy.get("timeIntervalLimitation", 0) - - domains_data = [] - rbac = payload.get("rbac", {}) - if rbac and "domains" in rbac: - for domain_name, domain_config in rbac["domains"].items(): - domains_data.append(LocalUserSecurityDomainModel.from_api_response(domain_name, domain_config)) - - user_data["domains"] = domains_data - - return cls(**user_data) + return cls( + email=response.get("email"), + login_id=response.get("loginID"), + first_name=response.get("firstName"), + last_name=response.get("lastName"), + user_password=response.get("password"), + reuse_limitation=response.get("passwordPolicy", {}).get("reuseLimitation"), + time_interval_limitation=response.get("passwordPolicy", {}).get("timeIntervalLimitation"), + security_domains=[ + LocalUserSecurityDomainModel.from_response(name, domain_config) + for name, domain_config in response.get("rbac", {}).get("domains", {}).items() + ], + remote_id_claim=response.get("remoteIDClaim"), + remote_user_authorization=response.get("xLaunch"), + ) From a3c683390acffac7164bb5c34042f3921a7bae2f Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 00:56:49 -0500 Subject: [PATCH 077/131] [ignore] Pydantic ND base models and local_user models: Final proposition of core design adding new methods which will be used in NDConfigCollection and NDNetworkResourceModule classes as well as basic error handling and simple docstrings. 
--- plugins/module_utils/models/base.py | 124 ++++++++++++++---- plugins/module_utils/models/local_user.py | 146 ++++++++++++++-------- 2 files changed, 192 insertions(+), 78 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index bdd1b9c2..a7eabf17 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -10,51 +10,127 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import List, Dict, Any, Optional, ClassVar +from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal from typing_extensions import Self class NDBaseModel(BaseModel, ABC): - + """ + Base model for all Nexus Dashboard API objects. + + Supports three identifier strategies: + - single: One unique required field (e.g., ["login_id"]) + - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) + - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) + """ + model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, + extra='ignore' ) - - # TODO: find ways to redifine these var in every future NDBaseModels + + # Subclasses MUST define these identifiers: ClassVar[List[str]] = [] - use_composite_identifiers: ClassVar[bool] = False - + identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + + # Optional: fields to exclude from diffs (e.g., passwords) + exclude_from_diff: ClassVar[List[str]] = [] + @abstractmethod def to_payload(self) -> Dict[str, Any]: + """ + Convert model to API payload format. 
+ """ pass @classmethod @abstractmethod def from_response(cls, response: Dict[str, Any]) -> Self: + """ + Create model instance from API response. + """ pass - def get(self, field: str, default: Any = None) -> Any: - """Custom get method to mimic dictionary behavior.""" - return getattr(self, field, default) - - # TODO: Modify to make it more generic and Pydantic | might change and be moved in different Generic Class/Model - # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") - def get_identifier_value(self) -> Any: - """Generates the internal map key based on the selected mode.""" - if self.use_composite_identifiers: - # Mode: Composite (Tuple of ALL keys) + def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: + """ + Extract identifier value(s) from this instance: + - single identifier: Returns field value. + - composite identifiers: Returns tuple of all field values. + - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. + """ + if not self.identifiers: + raise ValueError(f"{self.__class__.__name__} has no identifiers defined") + + if self.identifier_strategy == "single": + value = getattr(self, self.identifiers[0], None) + if value is None: + raise ValueError( + f"Single identifier field '{self.identifiers[0]}' is None" + ) + return value + + elif self.identifier_strategy == "composite": values = [] - for identifier in self.identifiers: - value = self.get(identifier) + missing = [] + + for field in self.identifiers: + value = getattr(self, field, None) if value is None: - return None # Missing a required part | Add Error Handling method here + missing.append(field) values.append(value) + + # NOTE: might not be needed in the future with field_validator + if missing: + raise ValueError( + f"Composite identifier fields {missing} are None. 
" + f"All required: {self.identifiers}" + ) + return tuple(values) + + elif self.identifier_strategy == "hierarchical": + for field in self.identifiers: + value = getattr(self, field, None) + if value is not None: + return (field, value) + + raise ValueError( + f"No non-None value in hierarchical fields {self.identifiers}" + ) + else: - # Mode: Priority (First available key) - for identifier in self.identifiers: - return self.get(identifier) - return None + raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") + + def to_diff_dict(self) -> Dict[str, Any]: + """ + Export for diff comparison (excludes sensitive fields). + """ + return self.model_dump( + by_alias=True, + exclude_none=True, + exclude=set(self.exclude_from_diff) + ) + +# NOTE: Maybe make it a seperate BaseModel +class NDNestedModel(NDBaseModel): + """ + Base for nested models without identifiers. + """ + + identifiers: ClassVar[List[str]] = [] + + def to_payload(self) -> Dict[str, Any]: + """ + Convert model to API payload format. + """ + return self.model_dump(by_alias=True, exclude_none=True) + + @classmethod + def from_response(cls, response: Dict[str, Any]) -> Self: + """ + Create model instance from API response. 
+ """ + return cls.model_validate(response) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 28cea27c..b7069126 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -8,95 +8,133 @@ __metaclass__ = type -from pydantic import Field, field_validator, SecretStr +from pydantic import Field, SecretStr from types import MappingProxyType -from typing import List, Dict, Any, Optional, ClassVar +from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self -from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel +from models.base import NDBaseModel, NDNestedModel -# TODO: Add Field validation methods -# TODO: define our own Field class for string versioning, ansible aliases -# TODO: Add a method to get identifier(s) -> define a generic NDNetworkResourceModel -# TODO: Surclass BaseModel -> Priority -# TODO: Look at ansible aliases +# TODO: Move it to constants.py and import it +USER_ROLES_MAPPING = MappingProxyType({ + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", +}) -# TODO: To be moved in constants.py file -user_roles_mapping = MappingProxyType({}) +class LocalUserSecurityDomainModel(NDNestedModel): + """Security domain configuration for local user (nested model).""" -class LocalUserSecurityDomainModel(NDBaseModel): - - name: str = Field(alias="name") - roles: list[str] = Field(default_factory=lambda: 
["observer"], alias="roles") - + # Fields + name: str + roles: Optional[List[str]] = None + def to_payload(self) -> Dict[str, Any]: - return { + + return { self.name: { "roles": [ - user_roles_mapping.get(role, role) for role in self.roles + USER_ROLES_MAPPING.get(role, role) + for role in (self.roles or []) ] } } - + @classmethod - def from_response(cls, name: str, domain_config: List[str]) -> Self: + def from_response(cls, name: str, domain_config: Dict[str, Any]) -> Self: + + # NOTE: Maybe create a function from it to be moved to utils.py and to be imported + reverse_mapping = {value: key for key, value in USER_ROLES_MAPPING.items()} + return cls( name=name, - roles=[user_roles_mapping.get(role, role) for role in domain_config.get("roles", [])] + roles=[ + reverse_mapping.get(role, role) + for role in domain_config.get("roles", []) + ] ) class LocalUserModel(NDBaseModel): + """ + Local user configuration. - # TODO: Define a way to generate it (look at NDBaseModel comments) + Identifier: login_id (single field) + """ + + # Identifier configuration identifiers: ClassVar[List[str]] = ["login_id"] - - email: Optional[str] = Field(alias="email") - login_id: str = Field(alias="loginID") - first_name: Optional[str] = Field(default="", alias="firstName") - last_name: Optional[str] = Field(default="", alias="lastName") - # TODO: Check secrets manipulation when tracking changes while maintaining security - user_password: Optional[SecretStr] = Field(alias="password") - reuse_limitation: Optional[int] = Field(default=0, alias="reuseLimitation") - time_interval_limitation: Optional[int] = Field(default=0, alias="timeIntervalLimitation") - security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(alias="domains") - remote_id_claim: Optional[str] = Field(default="", alias="remoteIDClaim") - remote_user_authorization: Optional[bool] = Field(default=False, alias="xLaunch") - + identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + 
exclude_from_diff: ClassVar[List[str]] = ["user_password"] + + # Fields + login_id: str = Field(..., alias="loginID") + email: Optional[str] = None + first_name: Optional[str] = Field(default=None, alias="firstName") + last_name: Optional[str] = Field(default=None, alias="lastName") + user_password: Optional[SecretStr] = Field(default=None, alias="password") + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="domains") + remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") + remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") + def to_payload(self) -> Dict[str, Any]: - """Convert the model to the specific API payload format required.""" + payload = self.model_dump( + by_alias=True, + exclude={ + 'domains', + 'security_domains', + 'reuseLimitation', + 'reuse_limitation', + 'timeIntervalLimitation', + 'time_interval_limitation' + }, + exclude_none=True + ) - payload = self.model_dump(by_alias=True, exclude={'domains', 'reuseLimitation', 'timeIntervalLimitation'}) + if self.user_password: + payload["password"] = self.user_password.get_secret_value() if self.security_domains: payload["rbac"] = {"domains": {}} for domain in self.security_domains: - payload["rbac"]["domains"].update(domain.to_api_payload()) - - if self.reuse_limitation or self.time_interval_limitation: - payload["passwordPolicy"] = { - "reuseLimitation": self.reuse_limitation, - "timeIntervalLimitation": self.time_interval_limitation, - } - + payload["rbac"]["domains"].update(domain.to_payload()) + + if self.reuse_limitation is not None or self.time_interval_limitation is not None: + payload["passwordPolicy"] = {} + if self.reuse_limitation is not None: + payload["passwordPolicy"]["reuseLimitation"] = self.reuse_limitation + if 
self.time_interval_limitation is not None: + payload["passwordPolicy"]["timeIntervalLimitation"] = self.time_interval_limitation + return payload - + @classmethod def from_response(cls, response: Dict[str, Any]) -> Self: + password_policy = response.get("passwordPolicy", {}) + rbac = response.get("rbac", {}) + domains = rbac.get("domains", {}) + + security_domains = [ + LocalUserSecurityDomainModel.from_response(name, config) + for name, config in domains.items() + ] if domains else None return cls( - email=response.get("email"), login_id=response.get("loginID"), + email=response.get("email"), first_name=response.get("firstName"), last_name=response.get("lastName"), user_password=response.get("password"), - reuse_limitation=response.get("passwordPolicy", {}).get("reuseLimitation"), - time_interval_limitation=response.get("passwordPolicy", {}).get("timeIntervalLimitation"), - security_domains=[ - LocalUserSecurityDomainModel.from_response(name, domain_config) - for name, domain_config in response.get("rbac", {}).get("domains", {}).items() - ], + reuse_limitation=password_policy.get("reuseLimitation"), + time_interval_limitation=password_policy.get("timeIntervalLimitation"), + security_domains=security_domains, remote_id_claim=response.get("remoteIDClaim"), - remote_user_authorization=response.get("xLaunch"), + remote_user_authorization=response.get("xLaunch") ) From 24d5fe6b956407217137201268820b15ac5425eb Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 13:09:33 -0500 Subject: [PATCH 078/131] [ignore] Pydantic ND Config Collection: Final proposition of core design changing existing methods and adding new ones which will be used in NDNetworkResourceModule class as well as basic error handling and simple docstrings. 
--- plugins/module_utils/nd_config_collection.py | 515 ++++++++++--------- 1 file changed, 266 insertions(+), 249 deletions(-) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 8f0058bb..2f256d30 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -8,289 +8,306 @@ __metaclass__ = type -import sys +from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable from copy import deepcopy -from functools import reduce -# Python 2 and 3 compatibility (To be removed in the future) -if sys.version_info[0] >= 3: - from collections.abc import MutableMapping - iteritems = lambda d: d.items() -else: - from collections import MutableMapping - iteritems = lambda d: d.iteritems() +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from models.base import NDBaseModel -# TODO: Adapt to Pydantic Models -# NOTE: Single-Index Hybrid Collection for ND Network Resource Module -class NDConfigCollection(MutableMapping): +# Type aliases +# NOTE: Maybe add more type aliases in the future if needed +ModelType = TypeVar('ModelType', bound=NDBaseModel) +IdentifierKey = Union[str, int, Tuple[Any, ...]] - def __init__(self, identifier_keys, data=None, use_composite_keys=False): - self.identifier_keys = identifier_keys - self.use_composite_keys = use_composite_keys - - # Dual Storage - self._list = [] - self._map = {} + +class NDConfigCollection(Generic[ModelType]): + """ + Nexus Dashboard configuration collection for NDBaseModel instances. 
+ """ + + def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType]] = None): + """ + Initialize collection. + """ + self._model_class = model_class - if data: - for item in data: + # Dual storage + self._items: List[ModelType] = [] + self._index: Dict[IdentifierKey, int] = {} + + if items: + for item in items: self.add(item) - # TODO: add a method to get nested keys, ex: get("spec", {}).get("onboardUrl") - def _get_identifier_value(self, config): - """Generates the internal map key based on the selected mode.""" - if self.use_composite_keys: - # Mode: Composite (Tuple of ALL keys) - values = [] - for key in self.identifier_keys: - val = config.get(key) - if val is None: - return None # Missing a required part - values.append(val) - return tuple(values) - else: - # Mode: Priority (First available key) - for key in self.identifier_keys: - if key in config: - return config[key] - return None - - # Magic Methods - def __getitem__(self, key): - return self._map[key] - - def __setitem__(self, key, value): - if key in self._map: - old_ref = self._map[key] - try: - idx = self._list.index(old_ref) - self._list[idx] = value - self._map[key] = value - except ValueError: - pass - else: - # Add new - self._list.append(value) - self._map[key] = value - - def __delitem__(self, key): - if key in self._map: - obj_ref = self._map[key] - del self._map[key] - self._list.remove(obj_ref) + def _extract_key(self, item: ModelType) -> IdentifierKey: + """ + Extract identifier key from item. + """ + try: + return item.get_identifier_value() + except Exception as e: + raise ValueError(f"Failed to extract identifier: {e}") from e + + def _rebuild_index(self) -> None: + """Rebuild index from scratch (O(n) operation).""" + self._index.clear() + for index, item in enumerate(self._items): + key = self._extract_key(item) + self._index[key] = index + + # Core CRUD Operations + + def add(self, item: ModelType) -> IdentifierKey: + """ + Add item to collection (O(1) operation). 
+ """ + if not isinstance(item, self._model_class): + raise TypeError( + f"Item must be instance of {self._model_class.__name__}, " + f"got {type(item).__name__}" + ) + + key = self._extract_key(item) + + if key in self._index: + raise ValueError( + f"Item with identifier {key} already exists. Use replace() to update" + ) + + position = len(self._items) + self._items.append(item) + self._index[key] = position + + return key + + def get(self, key: IdentifierKey) -> Optional[ModelType]: + """ + Get item by identifier key (O(1) operation). + """ + index = self._index.get(key) + return self._items[index] if index is not None else None + + def replace(self, item: ModelType) -> bool: + """ + Replace existing item with same identifier (O(1) operation). + """ + if not isinstance(item, self._model_class): + raise TypeError( + f"Item must be instance of {self._model_class.__name__}, " + f"got {type(item).__name__}" + ) + + key = self._extract_key(item) + index = self._index.get(key) + + if index is None: + return False + + self._items[index] = item + return True + + def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[ModelType, ModelType], ModelType]] = None) -> ModelType: + """ + Merge item with existing, or add if not present. 
+ """ + key = self._extract_key(item) + existing = self.get(key) + + if existing is None: + self.add(item) + return item + + # Custom or default merge + if custom_merge_function: + merged = custom_merge_function(existing, item) else: - raise KeyError(key) - - def __iter__(self): - return iter(self._map) - - def __len__(self): - return len(self._list) + # Default merge + existing_data = existing.model_dump() + new_data = item.model_dump(exclude_unset=True) + merged_data = self._deep_merge(existing_data, new_data) + merged = self._model_class.model_validate(merged_data) + + self.replace(merged) + return merged - def __eq__(self, other): - if isinstance(other, NDConfigCollection): - return self._list == other._list - elif isinstance(other, list): - return self._list == other - elif isinstance(other, dict): - return self._map == other - return False + def _deep_merge(self, base: Dict, update: Dict) -> Dict: + """Recursively merge dictionaries.""" + result = base.copy() + + for key, value in update.items(): + if value is None: + continue + + if key in result and isinstance(result[key], dict) and isinstance(value, dict): + result[key] = self._deep_merge(result[key], value) + else: + result[key] = value + + return result + + def delete(self, key: IdentifierKey) -> bool: + """ + Delete item by identifier (O(n) operation due to index rebuild) + """ + index = self._index.get(key) + + if index is None: + return False + + del self._items[index] + self._rebuild_index() + + return True + + # Diff Operations + + def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: + """ + Compare single item against collection. 
+ """ + try: + key = self._extract_key(new_item) + except ValueError: + return "new" + + existing = self.get(key) + + if existing is None: + return "new" - def __ne__(self, other): - return not self.__eq__(other) + existing_data = existing.to_diff_dict() + new_data = new_item.to_diff_dict() + + if unwanted_keys: + existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) + new_data = self._remove_unwanted_keys(new_data, unwanted_keys) - def __repr__(self): - return str(self._list) + is_subset = self._issubset(new_data, existing_data) + + return "no_diff" if is_subset else "changed" + + def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> bool: + """ + Check if two collections differ. + """ + if not isinstance(other, NDConfigCollection): + raise TypeError("Argument must be NDConfigCollection") + + if len(self) != len(other): + return True - # Helper Methods - def _filter_dict(self, data, ignore_keys): - return {k: v for k, v in iteritems(data) if k not in ignore_keys} + for item in other: + if self.get_diff_config(item, unwanted_keys) != "no_diff": + return True - def _issubset(self, subset, superset): + for key in self.keys(): + if other.get(key) is None: + return True + + return False + + def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: + """ + Get identifiers in self but not in other. 
+ """ + current_keys = set(self.keys()) + other_keys = set(other.keys()) + return list(current_keys - other_keys) + + def _issubset(self, subset: Any, superset: Any) -> bool: + """Check if subset is contained in superset.""" if type(subset) is not type(superset): return False - + if not isinstance(subset, dict): if isinstance(subset, list): return all(item in superset for item in subset) return subset == superset - - for key, value in iteritems(subset): + + for key, value in subset.items(): if value is None: continue - + if key not in superset: return False - - superset_value = superset.get(key) - - if not self._issubset(value, superset_value): + + if not self._issubset(value, superset[key]): return False + return True - def _remove_unwanted_keys(self, data, unwanted_keys): + def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: + """Remove unwanted keys from dict (supports nested paths).""" + data = deepcopy(data) + for key in unwanted_keys: if isinstance(key, str): if key in data: del data[key] + elif isinstance(key, list) and len(key) > 0: - key_path, last = key[:-1], key[-1] try: - parent = reduce(lambda d, k: d.get(k) if isinstance(d, dict) else None, key_path, data) - if isinstance(parent, dict) and last in parent: - del parent[last] - except (KeyError, TypeError): + parent = data + for k in key[:-1]: + if isinstance(parent, dict) and k in parent: + parent = parent[k] + else: + break + else: + if isinstance(parent, dict) and key[-1] in parent: + del parent[key[-1]] + except (KeyError, TypeError, IndexError): pass + return data - - # Core Operations - def to_list(self): - return self._list + + # Collection Operations - def to_dict(self): - return self._map - - def copy(self): - return NDConfigCollection(self.identifier_keys, deepcopy(self._list), self.use_composite_keys) - - def add(self, config): - ident = self._get_identifier_value(config) - if ident is None: - mode = "Composite" if self.use_composite_keys else 
"Priority" - raise ValueError("[{0} Mode] Config missing required keys: {1}".format(mode, self.identifier_keys)) - - if ident in self._map: - self.__setitem__(ident, config) - else: - self._list.append(config) - self._map[ident] = config - - def merge(self, new_config): - ident = self._get_identifier_value(new_config) - if ident and ident in self._map: - self._map[ident].update(new_config) - else: - self.add(new_config) - - def replace(self, new_config): - ident = self._get_identifier_value(new_config) - if ident: - self[ident] = new_config - else: - self.add(new_config) - - def remove(self, identifiers): - # Try Map Removal - try: - target_key = self._get_identifier_value(identifiers) - if target_key and target_key in self._map: - self.__delitem__(target_key) - return - except Exception: - pass - - # Fallback: Linear Removal - to_remove = [] - for config in self._list: - match = True - for k, v in iteritems(identifiers): - if config.get(k) != v: - match = False - break - if match: - to_remove.append(self._get_identifier_value(config)) - - for ident in to_remove: - if ident in self._map: - self.__delitem__(ident) - - def get_by_key(self, key, default=None): - return self._map.get(key, default) - - def get_by_idenfiers(self, identifiers, default=None): - # Try Map Lookup - target_key = self._get_identifier_value(identifiers) - if target_key and target_key in self._map: - return self._map[target_key] - - # Fallback: Linear Lookup - valid_search_keys = [k for k in identifiers if k in self.identifier_keys] - if not valid_search_keys: - return default - - for config in self._list: - match = True - for k in valid_search_keys: - if config.get(k) != identifiers[k]: - match = False - break - if match: - return config - return default - - # Diff logic - def get_diff_config(self, new_config, unwanted_keys=None): - unwanted_keys = unwanted_keys or [] - - ident = self._get_identifier_value(new_config) - - if not ident or ident not in self._map: - return "new" - - existing = 
deepcopy(self._map[ident]) - sent = deepcopy(new_config) - - self._remove_unwanted_keys(existing, unwanted_keys) - self._remove_unwanted_keys(sent, unwanted_keys) - - is_subset = self._issubset(sent, existing) - - if is_subset: - return "no_diff" - else: - return "changed" - - def get_diff_collection(self, new_collection, unwanted_keys=None): - if not isinstance(new_collection, NDConfigCollection): - raise TypeError("Argument must be an NDConfigCollection") - - if len(self) != len(new_collection): - return True - - for item in new_collection.to_list(): - if self.get_diff_config(item, unwanted_keys) != "no_diff": - return True - - for ident in self._map: - if ident not in new_collection._map: - return True - - return False - - def get_diff_identifiers(self, new_collection): - current_identifiers = set(self.config_collection.keys()) - other_identifiers = set(new_collection.config_collection.keys()) - - return list(current_identifiers - other_identifiers) + def __len__(self) -> int: + """Return number of items.""" + return len(self._items) + + def __iter__(self): + """Iterate over items.""" + return iter(self._items) - # Sanitize Operations - def sanitize(self, keys_to_remove=None, values_to_remove=None, remove_none_values=False): - keys_to_remove = keys_to_remove or [] - values_to_remove = values_to_remove or [] + def keys(self) -> List[IdentifierKey]: + """Get all identifier keys.""" + return list(self._index.keys()) - def recursive_clean(obj): - if isinstance(obj, dict): - keys = list(obj.keys()) - for k in keys: - v = obj[k] - if k in keys_to_remove or v in values_to_remove or (remove_none_values and v is None): - del obj[k] - continue - if isinstance(v, (dict, list)): - recursive_clean(v) - elif isinstance(obj, list): - for item in obj: - recursive_clean(item) + def copy(self) -> "NDConfigCollection[ModelType]": + """Create deep copy of collection.""" + return NDConfigCollection( + model_class=self._model_class, + items=deepcopy(self._items) + ) - for item in 
self._list: - recursive_clean(item) + # Serialization + + def to_list(self, **kwargs) -> List[Dict]: + """ + Export as list of dicts (with aliases). + """ + return [item.model_dump(by_alias=True, exclude_none=True, **kwargs) for item in self._items] + + def to_payload_list(self) -> List[Dict[str, Any]]: + """ + Export as list of API payloads. + """ + return [item.to_payload() for item in self._items] + + @classmethod + def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + """ + Create collection from list of dicts. + """ + items = [model_class.model_validate(item_data) for item_data in data] + return cls(model_class=model_class, items=items) + + @classmethod + def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + """ + Create collection from API response. + """ + items = [model_class.from_response(item_data) for item_data in response_data] + return cls(model_class=model_class, items=items) From 958adef57e603fdd53ac58902759ef196fdd3324 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 13:51:54 -0500 Subject: [PATCH 079/131] [ignore] Pydantic Base ND Network Resource Module: Final proposition of core design changing existing methods and adding new ones which will be used in future as a based for ND network resource modules as well as basic error handling and simple docstrings. 
--- plugins/module_utils/nd_network_resources.py | 561 ++++++++++++++----- 1 file changed, 411 insertions(+), 150 deletions(-) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index 3b549da1..ab7df9e2 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -9,196 +9,457 @@ __metaclass__ = type from copy import deepcopy -from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from typing import Optional, List, Dict, Any, Callable, Literal +from pydantic import ValidationError -# TODO: Make further enhancement to logs and outputs -# TODO: Adapt to Pydantic Models -# NOTE: ONLY works for new API endpoints introduced in ND v4.1.0 and later -class NDNetworkResourceModule(NDModule): +# TODO: To be replaced with: +# from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +# from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +# from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from nd import NDModule +from nd_config_collection import NDConfigCollection +from models.base import NDBaseModel +from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED - def __init__(self, module, path, identifier_keys, use_composite_keys=False, actions_overwrite_map=None): - super().__init__(module) - # Initial variables +class NDNetworkResourceModule(NDModule): + """ + Generic Network Resource Module for Nexus Dashboard. 
+ """ + + def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_overwrite_map: Optional[Dict[str, Callable]] = None): + """ + Initialize the Network Resource Module. + """ + super().__init__(module) + + # Configuration self.path = path + self.model_class = model_class self.actions_overwrite_map = actions_overwrite_map or {} - self.identifier_keys = identifier_keys - self.use_composite_keys = use_composite_keys - - # Initial data - self.init_all_data = self._query_all() - - # Info ouput - self.existing = NDConfigCollection(identifier_keys, data=self.init_all_data) - self.previous = NDConfigCollection(identifier_keys) - self.proposed = NDConfigCollection(identifier_keys) - self.sent = NDConfigCollection(identifier_keys) - - # Debug output - self.nd_logs = [] - - # Helper variables - self.current_identifier = "" - self.existing_config = {} - self.proposed_config = {} - - # Actions Operations - def actions_overwrite(action): + + # Initialize collections + try: + init_all_data = self._query_all() + + self.existing = NDConfigCollection.from_api_response( + response_data=init_all_data, + model_class=model_class + ) + self.previous = NDConfigCollection(model_class=model_class) + self.proposed = NDConfigCollection(model_class=model_class) + self.sent = NDConfigCollection(model_class=model_class) + + except Exception as e: + self.fail_json( + msg=f"Initialization failed: {str(e)}", + error=str(e) + ) + + # Operation tracking + self.nd_logs: List[Dict[str, Any]] = [] + + # Current operation context + self.current_identifier = None + self.existing_config: Dict[str, Any] = {} + self.proposed_config: Dict[str, Any] = {} + + # Action Decorator + + @staticmethod + def actions_overwrite(action: str): + """ + Decorator to allow overriding default action operations. 
+ """ def decorator(func): def wrapper(self, *args, **kwargs): overwrite_action = self.actions_overwrite_map.get(action) if callable(overwrite_action): - return overwrite_action(self) + return overwrite_action(self, *args, **kwargs) else: return func(self, *args, **kwargs) return wrapper return decorator - + + # Action Operations + @actions_overwrite("create") - def _create(self): - if not self.module.check_mode: + def _create(self) -> Optional[Dict[str, Any]]: + """ + Create a new configuration object. + """ + if self.module.check_mode: + return self.proposed_config + + try: return self.request(path=self.path, method="POST", data=self.proposed_config) - + except Exception as e: + raise Exception(f"Create failed for {self.current_identifier}: {e}") from e + @actions_overwrite("update") - def _update(self): - if not self.module.check_mode: - object_path = "{0}/{1}".format(self.path, self.current_identifier) + def _update(self) -> Optional[Dict[str, Any]]: + """ + Update an existing configuration object. 
+ """ + if self.module.check_mode: + return self.proposed_config + + try: + object_path = f"{self.path}/{self.current_identifier}" return self.request(path=object_path, method="PUT", data=self.proposed_config) - + except Exception as e: + raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + @actions_overwrite("delete") - def _delete(self): - if not self.module.check_mode: - object_path = "{0}/{1}".format(self.path, self.current_identifier) + def _delete(self) -> None: + """Delete a configuration object.""" + if self.module.check_mode: + return + + try: + object_path = f"{self.path}/{self.current_identifier}" self.request(path=object_path, method="DELETE") + except Exception as e: + raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e @actions_overwrite("query_all") - def _query_all(self): - return self.query_obj(self.path) - - def format_log(self, identifier, status, after_data, sent_payload_data=None): - item_result = { + def _query_all(self) -> List[Dict[str, Any]]: + """ + Query all configuration objects from device. + """ + try: + result = self.query_obj(self.path) + return result or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e + + # Logging + + def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: + """ + Create and append a log entry. 
+ """ + log_entry = { "identifier": identifier, "status": status, - "before": self.existing_config, + "before": deepcopy(self.existing_config), "after": deepcopy(after_data) if after_data is not None else self.existing_config, - "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {}, + "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {} } - + + # Add HTTP details if not in check mode if not self.module.check_mode and self.url is not None: - item_result.update( - { - "method": self.method, - "response": self.response, - "status": self.status, - "url": self.url, - } + log_entry.update({ + "method": self.method, + "response": self.response, + "status": self.status, + "url": self.url + }) + + self.nd_logs.append(log_entry) + + # State Management + + def manage_state( + self, state: Literal["merged", "replaced", "overridden", "deleted"], new_configs: List[Dict[str, Any]], unwanted_keys: Optional[List] = None, override_exceptions: Optional[List] = None) -> None: + """ + Manage state according to desired configuration. 
+ """ + unwanted_keys = unwanted_keys or [] + override_exceptions = override_exceptions or [] + + # Parse and validate configs + try: + parsed_items = [] + for config in new_configs: + try: + # Parse config into model + item = self.model_class.model_validate(config) + parsed_items.append(item) + except ValidationError as e: + self.fail_json( + msg=f"Invalid configuration: {e}", + config=config, + validation_errors=e.errors() + ) + return + + # Create proposed collection + self.proposed = NDConfigCollection( + model_class=self.model_class, + items=parsed_items ) + + # Save previous state + self.previous = self.existing.copy() - self.nd_logs.append(item_result) - - # Logs and Outputs formating Operations - # TODO: Move it to different file - def add_logs_and_ouputs(self): - if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - if self.params.get("output_level") in ("debug", "info"): + except Exception as e: + self.fail_json( + msg=f"Failed to prepare configurations: {e}", + error=str(e) + ) + return + + # Execute state operations + if state in ["merged", "replaced", "overridden"]: + self._manage_create_update_state(state, unwanted_keys) + + if state == "overridden": + self._manage_override_deletions(override_exceptions) + + elif state == "deleted": + self._manage_delete_state() + + else: + self.fail_json(msg=f"Invalid state: {state}") + + def _manage_create_update_state(self,state: Literal["merged", "replaced", "overridden"], unwanted_keys: List) -> None: + """ + Handle merged/replaced/overridden states. 
+ """ + for proposed_item in self.proposed: + try: + # Extract identifier + identifier = proposed_item.get_identifier_value() + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + self.existing_config = ( + existing_item.model_dump(by_alias=True, exclude_none=True) + if existing_item + else {} + ) + + # Determine diff status + diff_status = self.existing.get_diff_config( + proposed_item, + unwanted_keys=unwanted_keys + ) + + # No changes needed + if diff_status == "no_diff": + self.format_log( + identifier=identifier, + status="no_change", + after_data=self.existing_config + ) + continue + + # Prepare final config based on state + if state == "merged" and existing_item: + # Merge with existing + merged_item = self.existing.merge(proposed_item) + final_item = merged_item + else: + # Replace or create + if existing_item: + self.existing.replace(proposed_item) + else: + self.existing.add(proposed_item) + final_item = proposed_item + + # Convert to API payload + self.proposed_config = final_item.to_payload() + + # Execute API operation + if diff_status == "changed": + response = self._update() + operation_status = "updated" + else: + response = self._create() + operation_status = "created" + + # Track sent payload + if not self.module.check_mode: + self.sent.add(final_item) + sent_payload = self.proposed_config + else: + sent_payload = None + + # Log operation + self.format_log( + identifier=identifier, + status=operation_status, + after_data=( + response if not self.module.check_mode + else final_item.model_dump(by_alias=True, exclude_none=True) + ), + sent_payload_data=sent_payload + ) + + except Exception as e: + error_msg = f"Failed to process {identifier}: {e}" + + self.format_log( + identifier=identifier, + status="no_change", + after_data=self.existing_config + ) + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + def 
_manage_override_deletions(self, override_exceptions: List) -> None: + """ + Delete items not in proposed config (for overridden state). + """ + diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + + for identifier in diff_identifiers: + if identifier in override_exceptions: + continue + + try: + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + if not existing_item: + continue + + self.existing_config = existing_item.model_dump( + by_alias=True, + exclude_none=True + ) + + # Execute delete + self._delete() + + # Remove from collection + self.existing.delete(identifier) + + # Log deletion + self.format_log( + identifier=identifier, + status="deleted", + after_data={} + ) + + except Exception as e: + error_msg = f"Failed to delete {identifier}: {e}" + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + def _manage_delete_state(self) -> None: + """Handle deleted state.""" + for proposed_item in self.proposed: + try: + identifier = proposed_item.get_identifier_value() + self.current_identifier = identifier + + existing_item = self.existing.get(identifier) + if not existing_item: + # Already deleted or doesn't exist + self.format_log( + identifier=identifier, + status="no_change", + after_data={} + ) + continue + + self.existing_config = existing_item.model_dump( + by_alias=True, + exclude_none=True + ) + + # Execute delete + self._delete() + + # Remove from collection + self.existing.delete(identifier) + + # Log deletion + self.format_log( + identifier=identifier, + status="deleted", + after_data={} + ) + + except Exception as e: + error_msg = f"Failed to delete {identifier}: {e}" + + if not self.module.params.get("ignore_errors", False): + self.fail_json( + msg=error_msg, + identifier=str(identifier), + error=str(e) + ) + return + + # Output Formatting + + def add_logs_and_outputs(self) -> None: + """Add logs 
and outputs to module result based on output_level.""" + output_level = self.params.get("output_level", "normal") + state = self.params.get("state") + + # Add previous state for certain states and output levels + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + if output_level in ("debug", "info"): self.result["previous"] = self.previous.to_list() + + # Check if there were changes if not self.has_modified and self.previous.get_diff_collection(self.existing): self.result["changed"] = True + + # Add stdout if present if self.stdout: self.result["stdout"] = self.stdout - - if self.params.get("output_level") == "debug": + + # Add debug information + if output_level == "debug": self.result["nd_logs"] = self.nd_logs + if self.url is not None: self.result["httpapi_logs"] = self.httpapi_logs - - if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - self.result["sent"] = self.sent.to_list() - self.result["proposed"] = self.proposed.to_list() - - self.result["current"] = self.existing.to_list() - - # Manage State Operations - def manage_state(self, state, new_configs, unwanted_keys=None, override_exceptions=None): - unwanted_keys = unwanted_keys or [] - override_exceptions = override_exceptions or [] - - self.proposed = NDConfigCollection(self.identifier_keys, data=new_configs) - self.proposed.sanitize() - self.previous = self.existing.copy() - - if state in ["merged", "replaced", "overidden"]: - for identifier, config in self.proposed.items(): - - diff_config_info = self.existing.get_diff_config(config, unwanted_keys) - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self.proposed_config = config - request_response = None - sent_payload = None - status = "no_change" - - if diff_config_info != "no_diff": - if state == "merged": - self.existing.merge(config) - self.proposed_config = self.existing[identifier] - else: - self.existing.replace(config) - - if diff_config_info == 
"changed": - request_response = self._update() - status = "updated" - else: - request_response = self._create() - status= "created" - - if not self.module.check_mode: - self.sent.add(self.proposed_config) - sent_payload = self.proposed_config - else: - request_response = self.proposed_config - - self.format_log(identifier, status, request_response, sent_payload) - - if state == "overidden": - diff_identifiers = self.previous.get_diff_identifiers(self.proposed) - for identifier in diff_identifiers: - if identifier not in override_exceptions: - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self._delete() - del self.existing[identifier] - self.format_log(identifier, "deleted", after_data={}) + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: + self.result["sent"] = self.sent.to_payload_list() + self.result["proposed"] = self.proposed.to_list() - - elif state == "deleted": - for identifier, config in self.proposed.items(): - if identifier in self.existing.keys(): - self.current_identifier = identifier - self.existing_config = deepcopy(self.existing.get_by_key(identifier, {})) - self.proposed_config = config - self._delete() - del self.existing[identifier] - self.format_log(identifier, "deleted", after_data={}) - - # Outputs Operations - def fail_json(self, msg, **kwargs): - self.add_logs_and_ouputs() - + # Always include current state + self.result["current"] = self.existing.to_list() + + # Module Exit Methods + + def fail_json(self, msg: str, **kwargs) -> None: + """ + Exit module with failure. + """ + self.add_logs_and_outputs() self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result) - - def exit_json(self, **kwargs): - self.add_logs_and_ouputs() - + + def exit_json(self, **kwargs) -> None: + """ + Exit module successfully. 
+ """ + self.add_logs_and_outputs() + + # Add diff if module supports it if self.module._diff and self.result.get("changed") is True: - self.result["diff"] = dict( - before=self.previous.to_list(), - after=self.existing.to_list(), - ) - + try: + # Use diff-safe dicts (excludes sensitive fields) + before = [item.to_diff_dict() for item in self.previous] + after = [item.to_diff_dict() for item in self.existing] + + self.result["diff"] = dict( + before=before, + after=after + ) + except Exception: + pass # Don't fail on diff generation + self.result.update(**kwargs) self.module.exit_json(**self.result) From ade11d4c8938b2332f891eb5fc59189e5082e772 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 23 Jan 2026 14:37:44 -0500 Subject: [PATCH 080/131] [ignore] Modify nd_local_user based on Pydantic implementation and changes added to NDNetworkResourceModule. --- plugins/modules/nd_local_user.py | 91 +++++++++++++++----------------- 1 file changed, 43 insertions(+), 48 deletions(-) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 4a5f1ad2..3dcaf1a4 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -175,23 +175,34 @@ """ from ansible.module_utils.basic import AnsibleModule -from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec, NDModule -from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resources import NDNetworkResourceModule -from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +# TODO: To be replaced with: +# from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +# from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule +# from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +# from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +from 
module_utils.nd import nd_argument_spec +from module_utils.nd_network_resources import NDNetworkResourceModule +from module_utils.models.local_user import LocalUserModel +from module_utils.constants import USER_ROLES_MAPPING -# Actions overwrite functions -def query_all_local_users(nd): - return nd.query_obj(nd.path).get("localusers") +# NOTE: Maybe Add the overwrite action in the LocalUserModel +def query_all_local_users(nd_module): + """ + Custom query_all action to extract 'localusers' from response. + """ + response = nd_module.query_obj(nd_module.path) + return response.get("localusers", []) -# TODO: Adapt to Pydantic Model +# NOTE: Maybe Add More aliases like in the LocalUserModel / Revisit the argmument_spec def main(): argument_spec = nd_argument_spec() argument_spec.update( config=dict( type="list", elements="dict", + required=True, options=dict( email=dict(type="str"), login_id=dict(type="str", required=True), @@ -221,49 +232,33 @@ def main(): argument_spec=argument_spec, supports_check_mode=True, ) - - path = "/api/v1/infra/aaa/localUsers" - identifier_keys = ["loginID"] - actions_overwrite_map = {"query_all": query_all_local_users} - - nd = NDNetworkResourceModule(module, path, identifier_keys, actions_overwrite_map=actions_overwrite_map) - - state = nd.params.get("state") - config = nd.params.get("config") - override_exceptions = nd.params.get("override_exceptions") - new_config = [] - for object in config: - payload = { - "email": object.get("email"), - "firstName": object.get("first_name"), - "lastName": object.get("last_name"), - "loginID": object.get("login_id"), - "password": object.get("user_password"), - "remoteIDClaim": object.get("remote_id_claim"), - "xLaunch": object.get("remote_user_authorization"), - } - - if object.get("security_domains"): - payload["rbac"] = { - "domains": { - security_domain.get("name"): { - "roles": ( - [USER_ROLES_MAPPING.get(role) for role in security_domain["roles"]] if isinstance(security_domain.get("roles"), list) 
else [] - ) - } - for security_domain in object["security_domains"] - }, - } - if object.get("reuse_limitation") or object.get("time_interval_limitation"): - payload["passwordPolicy"] = { - "reuseLimitation": object.get("reuse_limitation"), - "timeIntervalLimitation": object.get("time_interval_limitation"), + + try: + # Create NDNetworkResourceModule with LocalUserModel + nd_module = NDNetworkResourceModule( + module=module, + path="/api/v1/infra/aaa/localUsers", + model_class=LocalUserModel, + actions_overwrite_map={ + "query_all": query_all_local_users } - new_config.append(payload) - - nd.manage_state(state=state, new_configs=new_config, unwanted_keys=[["passwordPolicy", "passwordChangeTime"], ["userID"]], override_exceptions=override_exceptions) + ) + + # Manage state + nd_module.manage_state( + state=module.params["state"], + new_configs=module.params["config"], + unwanted_keys=[ + ["passwordPolicy", "passwordChangeTime"], # Nested path + ["userID"] # Simple key + ], + override_exceptions=module.params.get("override_exceptions") + ) - nd.exit_json() + nd_module.exit_json() + + except Exception as e: + module.fail_json(msg=f"Module execution failed: {str(e)}") if __name__ == "__main__": From 1a8161560b648af760a9beffb9ca34d69005b9ae Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 17 Feb 2026 13:46:10 -0500 Subject: [PATCH 081/131] [ignore] Add api_endpoints for configuring endpoints and orchestrators for orchestrating crud api operations with model instances and endpoints. 
--- plugins/module_utils/api_endpoints/base.py | 178 ++++++++++++++++++ plugins/module_utils/api_endpoints/enums.py | 46 +++++ .../module_utils/api_endpoints/local_user.py | 178 ++++++++++++++++++ plugins/module_utils/api_endpoints/mixins.py | 25 +++ plugins/module_utils/orchestrators/base.py | 79 ++++++++ .../module_utils/orchestrators/local_user.py | 42 +++++ 6 files changed, 548 insertions(+) create mode 100644 plugins/module_utils/api_endpoints/base.py create mode 100644 plugins/module_utils/api_endpoints/enums.py create mode 100644 plugins/module_utils/api_endpoints/local_user.py create mode 100644 plugins/module_utils/api_endpoints/mixins.py create mode 100644 plugins/module_utils/orchestrators/base.py create mode 100644 plugins/module_utils/orchestrators/local_user.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py new file mode 100644 index 00000000..1a9cd768 --- /dev/null +++ b/plugins/module_utils/api_endpoints/base.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from abc import ABC, abstractmethod +from pydantic import BaseModel, ConfigDict +from typing import Final, Union, Tuple, Any + +IdentifierKey = Union[str, int, Tuple[Any, ...], None] + +class NDBaseSmartEndpoint(BaseModel, ABC): + + # TODO: maybe to be modified in the future + model_config = ConfigDict(validate_assignment=True) + + base_path: str + + @abstractmethod + @property + def path(self) -> str: + pass + + @abstractmethod + @property + def verb(self) -> str: + pass + + # TODO: Maybe to be modifed to be more Pydantic + # TODO: Maybe change function's name + # NOTE: function to set mixins fields from identifiers + @abstractmethod + def 
set_identifiers(self, identifier: IdentifierKey = None): + pass + + +class NDBasePath: + """ + # Summary + + Centralized API Base Paths + + ## Description + + Provides centralized base path definitions for all ND API endpoints. + This allows API path changes to be managed in a single location. + + ## Usage + + ```python + # Get a complete base path + path = BasePath.control_fabrics("MyFabric", "config-deploy") + # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/MyFabric/config-deploy + + # Build custom paths + path = BasePath.v1("custom", "endpoint") + # Returns: /appcenter/cisco/ndfc/api/v1/custom/endpoint + ``` + + ## Design Notes + + - All base paths are defined as class constants for easy modification + - Helper methods compose paths from base constants + - Use these methods in Pydantic endpoint models to ensure consistency + - If NDFC changes base API paths, only this class needs updating + """ + + # Root API paths + NDFC_API: Final = "/appcenter/cisco/ndfc/api" + ND_INFRA_API: Final = "/api/v1/infra" + ONEMANAGE: Final = "/onemanage" + LOGIN: Final = "/login" + + @classmethod + def api(cls, *segments: str) -> str: + """ + # Summary + + Build path from NDFC API root. + + ## Parameters + + - segments: Path segments to append + + ## Returns + + - Complete path string + + ## Example + + ```python + path = BasePath.api("custom", "endpoint") + # Returns: /appcenter/cisco/ndfc/api/custom/endpoint + ``` + """ + if not segments: + return cls.NDFC_API + return f"{cls.NDFC_API}/{'/'.join(segments)}" + + @classmethod + def v1(cls, *segments: str) -> str: + """ + # Summary + + Build v1 API path. 
+ + ## Parameters + + - segments: Path segments to append after v1 + + ## Returns + + - Complete v1 API path + + ## Example + + ```python + path = BasePath.v1("lan-fabric", "rest") + # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest + ``` + """ + return cls.api("v1", *segments) + + @classmethod + def nd_infra(cls, *segments: str) -> str: + """ + # Summary + + Build ND infra API path. + + ## Parameters + + - segments: Path segments to append after /api/v1/infra + + ## Returns + + - Complete ND infra API path + + ## Example + + ```python + path = BasePath.nd_infra("aaa", "localUsers") + # Returns: /api/v1/infra/aaa/localUsers + ``` + """ + if not segments: + return cls.ND_INFRA_API + return f"{cls.ND_INFRA_API}/{'/'.join(segments)}" + + @classmethod + def nd_infra_aaa(cls, *segments: str) -> str: + """ + # Summary + + Build ND infra AAA API path. + + ## Parameters + + - segments: Path segments to append after aaa (e.g., "localUsers") + + ## Returns + + - Complete ND infra AAA path + + ## Example + + ```python + path = BasePath.nd_infra_aaa("localUsers") + # Returns: /api/v1/infra/aaa/localUsers + ``` + """ + return cls.nd_infra("aaa", *segments) diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/api_endpoints/enums.py new file mode 100644 index 00000000..afb4dd5c --- /dev/null +++ b/plugins/module_utils/api_endpoints/enums.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Enums used in api_endpoints. +""" +from enum import Enum + + +class VerbEnum(str, Enum): + """ + # Summary + + Enum for HTTP verb values used in endpoints. + + ## Members + + - GET: Represents the HTTP GET method. + - POST: Represents the HTTP POST method. + - PUT: Represents the HTTP PUT method. + - DELETE: Represents the HTTP DELETE method. 
+ """ + + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + + +class BooleanStringEnum(str, Enum): + """ + # Summary + + Enum for boolean string values used in query parameters. + + ## Members + + - TRUE: Represents the string "true". + - FALSE: Represents the string "false". + """ + + TRUE = "true" + FALSE = "false" \ No newline at end of file diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py new file mode 100644 index 00000000..de493e40 --- /dev/null +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +ND Infra AAA LocalUsers endpoint models. + +This module contains endpoint definitions for LocalUsers-related operations +in the ND Infra AAA API. +""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type # pylint: disable=invalid-name + +from typing import Literal, Union, Tuple, Any, Final +from mixins import LoginIdMixin +from enums import VerbEnum +from base import NDBaseSmartEndpoint, NDBasePath +from pydantic import Field + +IdentifierKey = Union[str, int, Tuple[Any, ...], None] + +class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): + """ + Base class for ND Infra AAA Local Users endpoints. + + Provides common functionality for all HTTP methods on the + /api/v1/infra/aaa/localUsers endpoint. + """ + + base_path: Final = NDBasePath.nd_infra_aaa("localUsers") + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path. 
+ + ## Returns + + - Complete endpoint path string, optionally including login_id + """ + if self.login_id is not None: + return NDBasePath.nd_infra_aaa("localUsers", self.login_id) + return self.base_path + + def set_identifiers(self, identifier: IdentifierKey = None): + self.login_id = identifier + + +class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users GET Endpoint + + ## Description + + Endpoint to retrieve local users from the ND Infra AAA service. + Optionally retrieve a specific local user by login_id. + + ## Path + + - /api/v1/infra/aaa/localUsers + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - GET + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersGet"] = Field( + default="EpApiV1InfraAaaLocalUsersGet", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.GET + + +class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users POST Endpoint + + ## Description + + Endpoint to create a local user in the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers + + ## Verb + + - POST + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( + default="EpApiV1InfraAaaLocalUsersPost", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.POST + + +class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users PUT Endpoint + + ## Description + + Endpoint to update a local user in the ND Infra AAA service. 
+ + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - PUT + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( + default="EpApiV1InfraAaaLocalUsersPut", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.PUT + + +class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users DELETE Endpoint + + ## Description + + Endpoint to delete a local user from the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - DELETE + """ + + class_name: Literal["EpApiV1InfraAaaLocalUsersDelete"] = Field( + default="EpApiV1InfraAaaLocalUsersDelete", + description="Class name for backward compatibility", + frozen=True, + ) + + @property + def verb(self) -> VerbEnum: + """Return the HTTP verb for this endpoint.""" + return VerbEnum.DELETE diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py new file mode 100644 index 00000000..8ff3218f --- /dev/null +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +Reusable mixin classes for endpoint models. + +This module provides mixin classes that can be composed to add common +fields to endpoint models without duplication. 
+""" + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type # pylint: disable=invalid-name + +from typing import TYPE_CHECKING, Optional +from pydantic import BaseModel, Field + + +class LoginIdMixin(BaseModel): + """Mixin for endpoints that require login_id parameter.""" + + login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py new file mode 100644 index 00000000..120ea475 --- /dev/null +++ b/plugins/module_utils/orchestrators/base.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from ..models.base import NDBaseModel +from ..nd import NDModule +from ..api_endpoints.base import NDBaseSmartEndpoint +from typing import Dict, List, Any, Union, ClassVar, Type +from pydantic import BaseModel + + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] + + +# TODO: Revisit naming them "Orchestrator" +class NDBaseOrchestrator(BaseModel): + + model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] + + # NOTE: if not defined by subclasses, return an error as they are required + post_endpoint: NDBaseSmartEndpoint + put_endpoint: NDBaseSmartEndpoint + delete_endpoint: NDBaseSmartEndpoint + get_endpoint: NDBaseSmartEndpoint + + # NOTE: Module Field is always required + # TODO: Replace it with future sender + module: NDModule + + # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") + # TODO: Explore how to make them even more general + def create(self, model_instance: NDBaseModel) -> ResponseType: + if self.module.check_mode: + return model_instance.model_dump() + + try: + return self.module.request(path=self.post_endpoint.base_path, method=self.post_endpoint.verb, data=model_instance.model_dump()) + except Exception as e: + raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e + + def update(self, model_instance: NDBaseModel) -> ResponseType: + if self.module.check_mode: + return model_instance.model_dump() + + try: + self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.model_dump()) + except Exception as e: + raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + + def delete(self, model_instance: NDBaseModel) -> ResponseType: + if self.module.check_mode: + return model_instance.model_dump() + + try: + self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=self.delete_endpoint.path, method=self.delete_endpoint.verb) + except Exception as e: + raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e + + def query_one(self, model_instance: NDBaseModel) -> ResponseType: + try: + self.get_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=self.get_endpoint.path, method=self.get_endpoint.verb) + except Exception as e: + raise Exception(f"Query failed for {self.current_identifier}: {e}") from e + + def query_all(self) -> ResponseType: + try: + result = self.module.query_obj(self.get_endpoint.path) + return result or [] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/local_user.py 
b/plugins/module_utils/orchestrators/local_user.py
new file mode 100644
index 00000000..b156512c
--- /dev/null
+++ b/plugins/module_utils/orchestrators/local_user.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+# Copyright: (c) 2026, Gaspard Micol (@gmicol)
+
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+from .base import NDBaseOrchestrator
+from ..models.local_user import LocalUserModel
+from typing import Dict, List, Any, Union, Type
+from ..api_endpoints.local_user import (
+    EpApiV1InfraAaaLocalUsersPost,
+    EpApiV1InfraAaaLocalUsersPut,
+    EpApiV1InfraAaaLocalUsersDelete,
+    EpApiV1InfraAaaLocalUsersGet,
+)
+
+
+ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None]
+
+class LocalUserOrchestrator(NDBaseOrchestrator):
+
+    model_class = Type[LocalUserModel]
+
+    post_endpoint = EpApiV1InfraAaaLocalUsersPost()
+    put_endpoint = EpApiV1InfraAaaLocalUsersPut()
+    delete_endpoint = EpApiV1InfraAaaLocalUsersDelete()
+    get_endpoint = EpApiV1InfraAaaLocalUsersGet()
+
+    def query_all(self):
+        """
+        Custom query_all action to extract 'localusers' from response.
+        """
+        try:
+            result = self.module.query_obj(self.get_endpoint.base_path)
+            return result.get("localusers", []) or []
+        except Exception as e:
+            raise Exception(f"Query all failed: {e}") from e
+
\ No newline at end of file

From fd7ff672af59d69757c0c3b4e41eee539d2ff0d8 Mon Sep 17 00:00:00 2001
From: Gaspard Micol
Date: Wed, 18 Feb 2026 01:23:37 -0500
Subject: [PATCH 082/131] [ignore] Modify models/local_user to take full
 advantage of Pydantic built-in functionalities. Slightly modify
 models/base.py to enforce identifiers definitions in NDBaseModel subclasses.
 Added multiple notes to assert next steps.
--- plugins/module_utils/models/base.py | 48 ++++- plugins/module_utils/models/local_user.py | 216 ++++++++++++++-------- 2 files changed, 183 insertions(+), 81 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index a7eabf17..5a64c7a9 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -10,10 +10,11 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal +from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional from typing_extensions import Self +# TODO: Revisit identifiers strategy (low priority) class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. @@ -22,8 +23,9 @@ class NDBaseModel(BaseModel, ABC): - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) + - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ - + # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, @@ -31,14 +33,38 @@ class NDBaseModel(BaseModel, ABC): populate_by_name=True, extra='ignore' ) - - # Subclasses MUST define these - identifiers: ClassVar[List[str]] = [] - identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + + # TODO: Revisit identifiers strategy (low priority) + identifiers: ClassVar[Optional[List[str]]] = None + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = None # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] + + # TODO: Revisit it with identifiers strategy (low priority) + def __init_subclass__(cls, **kwargs): + """ + Enforce 
configuration for identifiers definition. + """ + super().__init_subclass__(**kwargs) + + # Skip enforcement for nested models + # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) + if cls.__name__ in ['NDNestedModel']: + return + + if not hasattr(cls, "identifiers") or cls.identifiers is None: + raise ValueError( + f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." + f"Example: `identifiers: ClassVar[Optional[List[str]]] = ['login_id']`" + ) + if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: + raise ValueError( + f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." + f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'none']]] = 'single'`" + ) + # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) @abstractmethod def to_payload(self) -> Dict[str, Any]: """ @@ -54,6 +80,8 @@ def from_response(cls, response: Dict[str, Any]) -> Self: """ pass + # TODO: Revisit this function when revisiting identifier strategy (low priority) + # TODO: Add condition when there is no identifiers (high priority) def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: """ Extract identifier value(s) from this instance: @@ -82,7 +110,7 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: missing.append(field) values.append(value) - # NOTE: might not be needed in the future with field_validator + # NOTE: might be redefined with Pydantic (low priority) if missing: raise ValueError( f"Composite identifier fields {missing} are None. " @@ -104,6 +132,7 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") + def to_diff_dict(self) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). 
@@ -114,12 +143,13 @@ def to_diff_dict(self) -> Dict[str, Any]: exclude=set(self.exclude_from_diff) ) -# NOTE: Maybe make it a seperate BaseModel +# TODO: Make it a seperated BaseModel (low priority) class NDNestedModel(NDBaseModel): """ Base for nested models without identifiers. """ + # TODO: Configuration Fields to be clearly defined here (low priority) identifiers: ClassVar[List[str]] = [] def to_payload(self) -> Dict[str, Any]: @@ -133,4 +163,4 @@ def from_response(cls, response: Dict[str, Any]) -> Self: """ Create model instance from API response. """ - return cls.model_validate(response) + return cls.model_validate(response, by_alias=True) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index b7069126..4be05991 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,15 +8,15 @@ __metaclass__ = type -from pydantic import Field, SecretStr +from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel -from models.base import NDBaseModel, NDNestedModel +from .base import NDBaseModel, NDNestedModel -# TODO: Move it to constants.py and import it +# TODO: Move it to constants.py and make a reverse class Map for this USER_ROLES_MAPPING = MappingProxyType({ "fabric_admin": "fabric-admin", "observer": "observer", @@ -31,11 +31,13 @@ class LocalUserSecurityDomainModel(NDNestedModel): """Security domain configuration for local user (nested model).""" # Fields - name: str - roles: Optional[List[str]] = None - - def to_payload(self) -> Dict[str, Any]: + name: str = Field(..., alias="name", exclude=True) + roles: Optional[List[str]] = 
Field(default=None, alias="roles", exclude=True) + + # -- Serialization (Model instance -> API payload) -- + @model_serializer() + def serialize_model(self) -> Dict: return { self.name: { "roles": [ @@ -44,22 +46,12 @@ def to_payload(self) -> Dict[str, Any]: ] } } - - @classmethod - def from_response(cls, name: str, domain_config: Dict[str, Any]) -> Self: - # NOTE: Maybe create a function from it to be moved to utils.py and to be imported - reverse_mapping = {value: key for key, value in USER_ROLES_MAPPING.items()} - - return cls( - name=name, - roles=[ - reverse_mapping.get(role, role) - for role in domain_config.get("roles", []) - ] - ) + # -- Deserialization (API response / Ansible payload -> Model instance) -- + # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed +# TODO: Add field validation (e.g. me, le, choices, etc...) (medium priority) class LocalUserModel(NDBaseModel): """ Local user configuration. @@ -68,73 +60,153 @@ class LocalUserModel(NDBaseModel): """ # Identifier configuration - identifiers: ClassVar[List[str]] = ["login_id"] - identifier_strategy: ClassVar[Literal["single", "composite", "hierarchical"]] = "single" + # TODO: Revisit this identifiers strategy (low priority) + identifiers: ClassVar[Optional[List[str]]] = ["login_id"] + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "single" + + # Keys management configurations + # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] + unwanted_keys: ClassVar[List[List[str]]]= [ + ["passwordPolicy", "passwordChangeTime"], # Nested path + ["userID"] # Simple key + ] # Fields + # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec login_id: str = Field(..., alias="loginID") - email: Optional[str] = None + email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") user_password: Optional[SecretStr] = Field(default=None, alias="password") - reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") - time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") - security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="domains") + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation", exclude=True) + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation", exclude=True) + security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="rbac") remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") - + + # -- Serialization (Model instance -> API payload) -- + + @computed_field(alias="passwordPolicy") + @property + def password_policy(self) -> Optional[Dict[str, int]]: + """Computed nested structure for API payload.""" + if self.reuse_limitation is None and self.time_interval_limitation is None: + return None + + policy = {} + if self.reuse_limitation is not None: + policy["reuseLimitation"] = self.reuse_limitation + if self.time_interval_limitation is not None: + policy["timeIntervalLimitation"] = self.time_interval_limitation + return policy + + @field_serializer("user_password") + def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: + return value.get_secret_value() if value else None + + + @field_serializer("security_domains") + def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> 
Optional[Dict]: + # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) + if not value: + return None + + domains_dict = {} + for domain in value: + domains_dict.update(domain.to_payload()) + + return { + "domains": domains_dict + } + + def to_payload(self) -> Dict[str, Any]: - payload = self.model_dump( - by_alias=True, - exclude={ - 'domains', - 'security_domains', - 'reuseLimitation', - 'reuse_limitation', - 'timeIntervalLimitation', - 'time_interval_limitation' - }, - exclude_none=True - ) + return self.model_dump(by_alias=True, exclude_none=True) - if self.user_password: - payload["password"] = self.user_password.get_secret_value() + # -- Deserialization (API response / Ansible payload -> Model instance) -- - if self.security_domains: - payload["rbac"] = {"domains": {}} - for domain in self.security_domains: - payload["rbac"]["domains"].update(domain.to_payload()) + @model_validator(mode="before") + @classmethod + def deserialize_password_policy(cls, data: Any) -> Any: + if not isinstance(data, dict): + return data - if self.reuse_limitation is not None or self.time_interval_limitation is not None: - payload["passwordPolicy"] = {} - if self.reuse_limitation is not None: - payload["passwordPolicy"]["reuseLimitation"] = self.reuse_limitation - if self.time_interval_limitation is not None: - payload["passwordPolicy"]["timeIntervalLimitation"] = self.time_interval_limitation + password_policy = data.get("passwordPolicy") - return payload - + if password_policy and isinstance(password_policy, dict): + if "reuseLimitation" in password_policy: + data["reuse_limitation"] = password_policy["reuseLimitation"] + if "timeIntervalLimitation" in password_policy: + data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] + + # Remove the nested structure from data to avoid conflicts + # (since it's a computed field, not a real field) + data.pop("passwordPolicy", None) + + return data + + @field_validator("security_domains", 
mode="before") @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - password_policy = response.get("passwordPolicy", {}) - rbac = response.get("rbac", {}) - domains = rbac.get("domains", {}) + def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: + if value is None: + return None + + # If already in list format (Ansible module representation), return as-is + if isinstance(value, list): + return value + + # If in the nested dict format (API representation) + if isinstance(value, dict) and "domains" in value: + domains_dict = value["domains"] + domains_list = [] + + for domain_name, domain_data in domains_dict.items(): + domains_list.append({ + "name": domain_name, + "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])] + }) + + return domains_list - security_domains = [ - LocalUserSecurityDomainModel.from_response(name, config) - for name, config in domains.items() - ] if domains else None + return value + + # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) + @classmethod + def from_response(cls, response: Dict[str, Any]) -> Self: + return cls.model_validate(response, by_alias=True) - return cls( - login_id=response.get("loginID"), - email=response.get("email"), - first_name=response.get("firstName"), - last_name=response.get("lastName"), - user_password=response.get("password"), - reuse_limitation=password_policy.get("reuseLimitation"), - time_interval_limitation=password_policy.get("timeIntervalLimitation"), - security_domains=security_domains, - remote_id_claim=response.get("remoteIDClaim"), - remote_user_authorization=response.get("xLaunch") + + # -- Extra -- + + # TODO: to generate from Fields (low priority) + def get_argument_spec(self): + return dict( + config=dict( + type="list", + elements="dict", + required=True, + options=dict( + email=dict(type="str"), + login_id=dict(type="str", required=True), + first_name=dict(type="str"), + 
last_name=dict(type="str"), + user_password=dict(type="str", no_log=True), + reuse_limitation=dict(type="int"), + time_interval_limitation=dict(type="int"), + security_domains=dict( + type="list", + elements="dict", + options=dict( + name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), + roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + ), + aliases=["domains"], + ), + remote_id_claim=dict(type="str"), + remote_user_authorization=dict(type="bool"), + ), + ), + override_exceptions=dict(type="list", elements="str"), + state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), ) From 93079d326623bc73a7b07ed55c5168267c437a4b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 24 Feb 2026 12:57:37 -0500 Subject: [PATCH 083/131] [ignore] Adapt the Network Resource Module architecture for ND to smart endpoints and Pydantic models modification (works for merge and replace states). Add comments for next steps. 
--- plugins/module_utils/api_endpoints/base.py | 5 +- .../module_utils/api_endpoints/local_user.py | 1 + plugins/module_utils/models/base.py | 25 ++- plugins/module_utils/models/local_user.py | 12 +- plugins/module_utils/nd_config_collection.py | 76 ++------ plugins/module_utils/nd_network_resources.py | 163 ++++++------------ plugins/module_utils/orchestrators/base.py | 27 +-- .../module_utils/orchestrators/local_user.py | 12 +- plugins/module_utils/utils.py | 26 ++- plugins/modules/nd_local_user.py | 63 +------ 10 files changed, 159 insertions(+), 251 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 1a9cd768..747c3283 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -15,11 +15,14 @@ IdentifierKey = Union[str, int, Tuple[Any, ...], None] +# TODO: Rename it to APIEndpoint +# NOTE: This is a very minimalist endpoint package -> needs to be enhanced class NDBaseSmartEndpoint(BaseModel, ABC): # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) + # TODO: to remove base_path: str @abstractmethod @@ -34,7 +37,7 @@ def verb(self) -> str: # TODO: Maybe to be modifed to be more Pydantic # TODO: Maybe change function's name - # NOTE: function to set mixins fields from identifiers + # NOTE: function to set endpoints attribute fields from identifiers @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index de493e40..61f52ad8 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -31,6 +31,7 @@ class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): /api/v1/infra/aaa/localUsers endpoint. 
""" + # TODO: Remove it base_path: Final = NDBasePath.nd_infra_aaa("localUsers") @property diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 5a64c7a9..db7fd9ae 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -40,6 +40,7 @@ class NDBaseModel(BaseModel, ABC): # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] + unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) def __init_subclass__(cls, **kwargs): @@ -65,8 +66,9 @@ def __init_subclass__(cls, **kwargs): ) # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) + # NOTE: Should we use keyword arguments? @abstractmethod - def to_payload(self) -> Dict[str, Any]: + def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. """ @@ -74,7 +76,7 @@ def to_payload(self) -> Dict[str, Any]: @classmethod @abstractmethod - def from_response(cls, response: Dict[str, Any]) -> Self: + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: """ Create model instance from API response. """ @@ -142,6 +144,25 @@ def to_diff_dict(self) -> Dict[str, Any]: exclude_none=True, exclude=set(self.exclude_from_diff) ) + + # NOTE: initialize and return a deep copy of the instance? + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... 
+ def merge(self, other_model: "NDBaseModel") -> Self: + if not isinstance(other_model, type(self)): + # TODO: Change error message + return TypeError("models are not of the same type.") + + for field, value in other_model: + if value is None: + continue + + current_value = getattr(self, field) + if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): + setattr(self, field, current_value.merge(value)) + + else: + setattr(self, field, value) + return self # TODO: Make it a seperated BaseModel (low priority) class NDNestedModel(NDBaseModel): diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 4be05991..ea511097 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -67,14 +67,14 @@ class LocalUserModel(NDBaseModel): # Keys management configurations # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List[List[str]]]= [ + unwanted_keys: ClassVar[List]= [ ["passwordPolicy", "passwordChangeTime"], # Nested path ["userID"] # Simple key ] # Fields # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec - login_id: str = Field(..., alias="loginID") + login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") @@ -121,8 +121,8 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) } - def to_payload(self) -> Dict[str, Any]: - return self.model_dump(by_alias=True, exclude_none=True) + def to_payload(self, **kwargs) -> Dict[str, Any]: + return self.model_dump(by_alias=True, exclude_none=True, **kwargs) # -- Deserialization (API response / Ansible payload -> Model instance) -- @@ -173,8 +173,8 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - return cls.model_validate(response, by_alias=True) + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(response, by_alias=True, **kwargs) # -- Extra -- diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 2f256d30..a25287aa 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -12,24 +12,26 @@ from copy import deepcopy # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from models.base import NDBaseModel +from .models.base import NDBaseModel +from .utils import issubset # Type aliases # NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) +# TODO: Defined the same acros multiple files -> maybe move to constants.py IdentifierKey = Union[str, int, Tuple[Any, ...]] - +# TODO:Might make it a Pydantic RootModel 
(low priority but medium impact on NDNetworkResourceModule) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType]] = None): + def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): """ Initialize collection. """ - self._model_class = model_class + self._model_class: ModelType = model_class # Dual storage self._items: List[ModelType] = [] @@ -39,6 +41,7 @@ def __init__(self, model_class: type[ModelType], items: Optional[List[ModelType] for item in items: self.add(item) + # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ Extract identifier key from item. @@ -48,6 +51,7 @@ def _extract_key(self, item: ModelType) -> IdentifierKey: except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e + # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" self._index.clear() @@ -105,8 +109,8 @@ def replace(self, item: ModelType) -> bool: self._items[index] = item return True - - def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[ModelType, ModelType], ModelType]] = None) -> ModelType: + + def merge(self, item: ModelType) -> ModelType: """ Merge item with existing, or add if not present. 
""" @@ -116,35 +120,11 @@ def merge(self, item: ModelType, custom_merge_function: Optional[Callable[[Model if existing is None: self.add(item) return item - - # Custom or default merge - if custom_merge_function: - merged = custom_merge_function(existing, item) else: - # Default merge - existing_data = existing.model_dump() - new_data = item.model_dump(exclude_unset=True) - merged_data = self._deep_merge(existing_data, new_data) - merged = self._model_class.model_validate(merged_data) - + merged = existing.merge(item) self.replace(merged) return merged - - def _deep_merge(self, base: Dict, update: Dict) -> Dict: - """Recursively merge dictionaries.""" - result = base.copy() - - for key, value in update.items(): - if value is None: - continue - - if key in result and isinstance(result[key], dict) and isinstance(value, dict): - result[key] = self._deep_merge(result[key], value) - else: - result[key] = value - - return result - + def delete(self, key: IdentifierKey) -> bool: """ Delete item by identifier (O(n) operation due to index rebuild) @@ -161,6 +141,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations + # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
@@ -182,7 +163,7 @@ def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Unio existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) new_data = self._remove_unwanted_keys(new_data, unwanted_keys) - is_subset = self._issubset(new_data, existing_data) + is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" @@ -214,28 +195,7 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I other_keys = set(other.keys()) return list(current_keys - other_keys) - def _issubset(self, subset: Any, superset: Any) -> bool: - """Check if subset is contained in superset.""" - if type(subset) is not type(superset): - return False - - if not isinstance(subset, dict): - if isinstance(subset, list): - return all(item in superset for item in subset) - return subset == superset - - for key, value in subset.items(): - if value is None: - continue - - if key not in superset: - return False - - if not self._issubset(value, superset[key]): - return False - - return True - + # TODO: Maybe not necessary def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) @@ -282,8 +242,8 @@ def copy(self) -> "NDConfigCollection[ModelType]": items=deepcopy(self._items) ) - # Serialization - + # Collection Serialization + def to_list(self, **kwargs) -> List[Dict]: """ Export as list of dicts (with aliases). @@ -301,7 +261,7 @@ def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigC """ Create collection from list of dicts. 
""" - items = [model_class.model_validate(item_data) for item_data in data] + items = [model_class.model_validate(item_data, by_name=True) for item_data in data] return cls(model_class=model_class, items=items) @classmethod diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_network_resources.py index ab7df9e2..d52fb9de 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_network_resources.py @@ -9,8 +9,9 @@ __metaclass__ = type from copy import deepcopy -from typing import Optional, List, Dict, Any, Callable, Literal +from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError +from ansible.module_utils.basic import AnsibleModule # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule @@ -20,36 +21,48 @@ from nd import NDModule from nd_config_collection import NDConfigCollection from models.base import NDBaseModel +from .orchestrators.base import NDBaseOrchestrator from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED - +# TODO: replace path and verbs with smart Endpoint (Top priority) +# TODO: Rename it (low priority) +# TODO: Revisit Deserialization in every method (high priority) class NDNetworkResourceModule(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_overwrite_map: Optional[Dict[str, Callable]] = None): + def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ + # TODO: Revisit Module initialization and configuration (medium priority). e.g., use instead: + # nd_module = NDModule() super().__init__(module) # Configuration - self.path = path + # TODO: make sure `model_class` is the same as the one in `model_orchestrator`. 
if not, error out (high priority) self.model_class = model_class - self.actions_overwrite_map = actions_overwrite_map or {} + self.model_orchestrator = model_orchestrator(module=module) + # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) + self.state = self.params["state"] + self.ansible_config = self.params["config"] + # Initialize collections + # TODO: Revisit collections initialization especially `init_all_data` (medium priority) + # TODO: Revisit class variables `previous`, `existing`, etc... (medium priority) + self.nd_config_collection = NDConfigCollection[model_class] try: - init_all_data = self._query_all() + init_all_data = self.model_orchestrator.query_all() - self.existing = NDConfigCollection.from_api_response( + self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, model_class=model_class ) - self.previous = NDConfigCollection(model_class=model_class) - self.proposed = NDConfigCollection(model_class=model_class) - self.sent = NDConfigCollection(model_class=model_class) + self.previous = self.nd_config_collection(model_class=model_class) + self.proposed = self.nd_config_collection(model_class=model_class) + self.sent = self.nd_config_collection(model_class=model_class) except Exception as e: self.fail_json( @@ -59,83 +72,10 @@ def __init__(self, module, path: str, model_class: type[NDBaseModel], actions_ov # Operation tracking self.nd_logs: List[Dict[str, Any]] = [] - - # Current operation context - self.current_identifier = None - self.existing_config: Dict[str, Any] = {} - self.proposed_config: Dict[str, Any] = {} - - # Action Decorator - - @staticmethod - def actions_overwrite(action: str): - """ - Decorator to allow overriding default action operations. 
- """ - def decorator(func): - def wrapper(self, *args, **kwargs): - overwrite_action = self.actions_overwrite_map.get(action) - if callable(overwrite_action): - return overwrite_action(self, *args, **kwargs) - else: - return func(self, *args, **kwargs) - return wrapper - return decorator - - # Action Operations - - @actions_overwrite("create") - def _create(self) -> Optional[Dict[str, Any]]: - """ - Create a new configuration object. - """ - if self.module.check_mode: - return self.proposed_config - - try: - return self.request(path=self.path, method="POST", data=self.proposed_config) - except Exception as e: - raise Exception(f"Create failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("update") - def _update(self) -> Optional[Dict[str, Any]]: - """ - Update an existing configuration object. - """ - if self.module.check_mode: - return self.proposed_config - - try: - object_path = f"{self.path}/{self.current_identifier}" - return self.request(path=object_path, method="PUT", data=self.proposed_config) - except Exception as e: - raise Exception(f"Update failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("delete") - def _delete(self) -> None: - """Delete a configuration object.""" - if self.module.check_mode: - return - - try: - object_path = f"{self.path}/{self.current_identifier}" - self.request(path=object_path, method="DELETE") - except Exception as e: - raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e - - @actions_overwrite("query_all") - def _query_all(self) -> List[Dict[str, Any]]: - """ - Query all configuration objects from device. 
- """ - try: - result = self.query_obj(self.path) - return result or [] - except Exception as e: - raise Exception(f"Query all failed: {e}") from e - + # Logging - + # NOTE: format log placeholder + # TODO: use a proper logger (low priority) def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: """ Create and append a log entry. @@ -159,20 +99,20 @@ def format_log(self, identifier, status: Literal["created", "updated", "deleted" self.nd_logs.append(log_entry) - # State Management - - def manage_state( - self, state: Literal["merged", "replaced", "overridden", "deleted"], new_configs: List[Dict[str, Any]], unwanted_keys: Optional[List] = None, override_exceptions: Optional[List] = None) -> None: + # State Management (core function) + # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) + def manage_state(self) -> None: """ Manage state according to desired configuration. 
""" unwanted_keys = unwanted_keys or [] - override_exceptions = override_exceptions or [] # Parse and validate configs + # TODO: move it to init() (top priority) + # TODO: Modify it if NDConfigCollection becomes a Pydantic RootModel (low priority) try: parsed_items = [] - for config in new_configs: + for config in self.ansible_config: try: # Parse config into model item = self.model_class.model_validate(config) @@ -186,7 +126,7 @@ def manage_state( return # Create proposed collection - self.proposed = NDConfigCollection( + self.proposed = self.nd_config_collection( model_class=self.model_class, items=parsed_items ) @@ -202,27 +142,29 @@ def manage_state( return # Execute state operations - if state in ["merged", "replaced", "overridden"]: - self._manage_create_update_state(state, unwanted_keys) + if self.state in ["merged", "replaced", "overridden"]: + self._manage_create_update_state() - if state == "overridden": - self._manage_override_deletions(override_exceptions) + if self.state == "overridden": + self._manage_override_deletions() - elif state == "deleted": + elif self.state == "deleted": self._manage_delete_state() + # TODO: not needed with Ansible `argument_spec` validation. Keep it for now but needs to be removed (low priority) else: - self.fail_json(msg=f"Invalid state: {state}") + self.fail_json(msg=f"Invalid state: {self.state}") - def _manage_create_update_state(self,state: Literal["merged", "replaced", "overridden"], unwanted_keys: List) -> None: + + def _manage_create_update_state(self) -> None: """ Handle merged/replaced/overridden states. 
""" for proposed_item in self.proposed: try: # Extract identifier + # TODO: Remove self.current_identifier, get it directly into the action functions identifier = proposed_item.get_identifier_value() - self.current_identifier = identifier existing_item = self.existing.get(identifier) self.existing_config = ( @@ -232,10 +174,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr ) # Determine diff status - diff_status = self.existing.get_diff_config( - proposed_item, - unwanted_keys=unwanted_keys - ) + diff_status = self.existing.get_diff_config(proposed_item) # No changes needed if diff_status == "no_diff": @@ -247,7 +186,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr continue # Prepare final config based on state - if state == "merged" and existing_item: + if self.state == "merged" and existing_item: # Merge with existing merged_item = self.existing.merge(proposed_item) final_item = merged_item @@ -264,16 +203,16 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr # Execute API operation if diff_status == "changed": - response = self._update() + response = self.model_orchestrator.update(final_item) operation_status = "updated" else: - response = self._create() + response = self.model_orchestrator.create(final_item) operation_status = "created" # Track sent payload if not self.module.check_mode: self.sent.add(final_item) - sent_payload = self.proposed_config + sent_payload = final_item else: sent_payload = None @@ -297,7 +236,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr after_data=self.existing_config ) - if not self.module.params.get("ignore_errors", False): + if not self.params.get("ignore_errors", False): self.fail_json( msg=error_msg, identifier=str(identifier), @@ -305,6 +244,7 @@ def _manage_create_update_state(self,state: Literal["merged", "replaced", "overr ) return + # TODO: Refactor with orchestrator (Top priority) 
def _manage_override_deletions(self, override_exceptions: List) -> None: """ Delete items not in proposed config (for overridden state). @@ -351,6 +291,7 @@ def _manage_override_deletions(self, override_exceptions: List) -> None: ) return + # TODO: Refactor with orchestrator (Top priority) def _manage_delete_state(self) -> None: """Handle deleted state.""" for proposed_item in self.proposed: @@ -398,7 +339,7 @@ def _manage_delete_state(self) -> None: return # Output Formatting - + # TODO: move to separate Class (results) -> align it with rest_send PR def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.params.get("output_level", "normal") diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 120ea475..e2d9fa75 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -24,39 +24,43 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - post_endpoint: NDBaseSmartEndpoint - put_endpoint: NDBaseSmartEndpoint - delete_endpoint: NDBaseSmartEndpoint - get_endpoint: NDBaseSmartEndpoint + # TODO: change name from http method to crud (e.g. post -> create) + post_endpoint: Type[NDBaseSmartEndpoint] + put_endpoint: Type[NDBaseSmartEndpoint] + delete_endpoint: Type[NDBaseSmartEndpoint] + get_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender module: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore how to make them even more general + # TODO: Explore new ways to make them even more general + # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: - return self.module.request(path=self.post_endpoint.base_path, method=self.post_endpoint.verb, data=model_instance.model_dump()) + api_endpoint = self.post_endpoint() + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e + # TODO: Make the same changes as create() with local api_endpoint variable def update(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.model_dump()) + return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Update failed for {self.current_identifier}: {e}") from e def delete(self, model_instance: NDBaseModel) -> ResponseType: if self.module.check_mode: - return model_instance.model_dump() + return model_instance.to_payload() try: self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) @@ -71,7 +75,8 @@ def query_one(self, model_instance: NDBaseModel) -> ResponseType: except Exception as e: raise Exception(f"Query failed for {self.current_identifier}: {e}") from e - def query_all(self) -> ResponseType: + # TODO: Revisit the straegy around the query_all (see local_user's case) + def query_all(self, model_instance: 
NDBaseModel, **kwargs) -> ResponseType: try: result = self.module.query_obj(self.get_endpoint.path) return result or [] diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index b156512c..3810fa83 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -9,8 +9,10 @@ __metaclass__ = type from .base import NDBaseOrchestrator +from ..models.base import NDBaseModel from ..models.local_user import LocalUserModel from typing import Dict, List, Any, Union, Type +from ..api_endpoints.base import NDBaseSmartEndpoint from ..api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, @@ -23,12 +25,12 @@ class LocalUserOrchestrator(NDBaseOrchestrator): - model_class = Type[LocalUserModel] + model_class: Type[NDBaseModel] = LocalUserModel - post_endpoint = EpApiV1InfraAaaLocalUsersPost() - put_endpoint = EpApiV1InfraAaaLocalUsersPut() - delete_endpoint = EpApiV1InfraAaaLocalUsersDelete() - get_endpoint = EpApiV1InfraAaaLocalUsersGet() + post_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost + put_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete + get_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 5bf0a0f0..72ccbcd7 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -9,6 +9,7 @@ __metaclass__ = type from copy import deepcopy +from typing import Any def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): @@ -29,4 +30,27 @@ def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remo for index, item in enumerate(v): if isinstance(item, dict): result[k][index] = sanitize_dict(item, keys, 
values) - return result \ No newline at end of file + return result + + +def issubset(subset: Any, superset: Any) -> bool: + """Check if subset is contained in superset.""" + if type(subset) is not type(superset): + return False + + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + + for key, value in subset.items(): + if value is None: + continue + + if key not in superset: + return False + + if not issubset(value, superset[key]): + return False + + return True diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 3dcaf1a4..901549fb 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -180,53 +180,15 @@ # from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel # from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING -from module_utils.nd import nd_argument_spec -from module_utils.nd_network_resources import NDNetworkResourceModule -from module_utils.models.local_user import LocalUserModel -from module_utils.constants import USER_ROLES_MAPPING +from ..module_utils.nd import nd_argument_spec +from ..module_utils.nd_network_resources import NDNetworkResourceModule +from ..module_utils.models.local_user import LocalUserModel +from ..module_utils.orchestrators.local_user import LocalUserOrchestrator -# NOTE: Maybe Add the overwrite action in the LocalUserModel -def query_all_local_users(nd_module): - """ - Custom query_all action to extract 'localusers' from response. 
- """ - response = nd_module.query_obj(nd_module.path) - return response.get("localusers", []) - - -# NOTE: Maybe Add More aliases like in the LocalUserModel / Revisit the argmument_spec def main(): argument_spec = nd_argument_spec() - argument_spec.update( - config=dict( - type="list", - elements="dict", - required=True, - options=dict( - email=dict(type="str"), - login_id=dict(type="str", required=True), - first_name=dict(type="str"), - last_name=dict(type="str"), - user_password=dict(type="str", no_log=True), - reuse_limitation=dict(type="int"), - time_interval_limitation=dict(type="int"), - security_domains=dict( - type="list", - elements="dict", - options=dict( - name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), - ), - aliases=["domains"], - ), - remote_id_claim=dict(type="str"), - remote_user_authorization=dict(type="bool"), - ), - ), - override_exceptions=dict(type="list", elements="str"), - state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), - ) + argument_spec.update(LocalUserModel.get_argument_spec()) module = AnsibleModule( argument_spec=argument_spec, @@ -237,23 +199,12 @@ def main(): # Create NDNetworkResourceModule with LocalUserModel nd_module = NDNetworkResourceModule( module=module, - path="/api/v1/infra/aaa/localUsers", model_class=LocalUserModel, - actions_overwrite_map={ - "query_all": query_all_local_users - } + model_orchestrator=LocalUserOrchestrator, ) # Manage state - nd_module.manage_state( - state=module.params["state"], - new_configs=module.params["config"], - unwanted_keys=[ - ["passwordPolicy", "passwordChangeTime"], # Nested path - ["userID"] # Simple key - ], - override_exceptions=module.params.get("override_exceptions") - ) + nd_module.manage_state() nd_module.exit_json() From be8f3af597aa05a80036c32a27f79dae4e3a610e Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 25 
Feb 2026 08:24:28 -0500 Subject: [PATCH 084/131] [ignore] Default to none and update condition for regarding in models/base.py. --- plugins/module_utils/models/base.py | 8 +++++--- plugins/module_utils/models/local_user.py | 1 + 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index db7fd9ae..4ddeacd0 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,6 +15,7 @@ # TODO: Revisit identifiers strategy (low priority) +# TODO: add kwargs to every sub method class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. @@ -26,6 +27,7 @@ class NDBaseModel(BaseModel, ABC): - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ # TODO: revisit initial Model Configurations (low priority) + # TODO: enable extra model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, @@ -36,7 +38,7 @@ class NDBaseModel(BaseModel, ABC): # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = None + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "none" # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List[str]] = [] @@ -51,7 +53,7 @@ def __init_subclass__(cls, **kwargs): # Skip enforcement for nested models # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) - if cls.__name__ in ['NDNestedModel']: + if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return if not hasattr(cls, "identifiers") or cls.identifiers is None: @@ -146,7 +148,7 @@ def to_diff_dict(self) -> Dict[str, Any]: ) # NOTE: initialize and return a deep copy of the instance? 
- # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel") -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index ea511097..77307d07 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -74,6 +74,7 @@ class LocalUserModel(NDBaseModel): # Fields # NOTE: `alias` are NOT the ansible aliases. they are the equivalent attribute's names from the API spec + # TODO: use extra for generating argument_spec (low priority) login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") From 3107942e5569344cd6d3b262cb1dda1510369f24 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:38:50 -0500 Subject: [PATCH 085/131] [ignore] Add choice for when no identifier is needed. 
Add quick comments and changes to models/local_user.py and api_endpoints/base.py --- plugins/module_utils/api_endpoints/base.py | 6 ++--- plugins/module_utils/models/base.py | 29 +++++++++++----------- plugins/module_utils/models/local_user.py | 6 ++--- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 747c3283..90ef5c87 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -35,9 +35,9 @@ def path(self) -> str: def verb(self) -> str: pass - # TODO: Maybe to be modifed to be more Pydantic - # TODO: Maybe change function's name - # NOTE: function to set endpoints attribute fields from identifiers + # TODO: Maybe to be modifed to be more Pydantic (low priority) + # TODO: Maybe change function's name (low priority) + # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 4ddeacd0..159acb93 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,7 +15,6 @@ # TODO: Revisit identifiers strategy (low priority) -# TODO: add kwargs to every sub method class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
@@ -24,24 +23,24 @@ class NDBaseModel(BaseModel, ABC): - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - - none: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) + - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ # TODO: revisit initial Model Configurations (low priority) - # TODO: enable extra model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, - extra='ignore' + extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "none" + # TODO: Rvisit no identifiers strategy naming (`singleton`) (low priority) + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" # Optional: fields to exclude from diffs (e.g., passwords) - exclude_from_diff: ClassVar[List[str]] = [] + exclude_from_diff: ClassVar[List] = [] unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) @@ -52,7 +51,7 @@ def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) # Skip enforcement for nested models - # TODO: Remove if `NDNestedModel` is a separated BaseModel (low priority) + # TODO: Remove if `NDNestedModel` is a separated BaseModel (low conditional priority) if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -64,11 +63,10 @@ def __init_subclass__(cls, **kwargs): if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: raise 
ValueError( f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." - f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'none']]] = 'single'`" + f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" ) # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) - # NOTE: Should we use keyword arguments? @abstractmethod def to_payload(self, **kwargs) -> Dict[str, Any]: """ @@ -85,16 +83,15 @@ def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: pass # TODO: Revisit this function when revisiting identifier strategy (low priority) - # TODO: Add condition when there is no identifiers (high priority) - def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: + def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ Extract identifier value(s) from this instance: - single identifier: Returns field value. - composite identifiers: Returns tuple of all field values. - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. 
""" - if not self.identifiers: - raise ValueError(f"{self.__class__.__name__} has no identifiers defined") + if not self.identifiers and self.identifier_strategy != "singleton": + raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") if self.identifier_strategy == "single": value = getattr(self, self.identifiers[0], None) @@ -133,6 +130,10 @@ def get_identifier_value(self) -> Union[str, int, Tuple[Any, ...]]: f"No non-None value in hierarchical fields {self.identifiers}" ) + # TODO: Revisit condition when there is no identifiers (low priority) + elif self.identifier_strategy == "singleton": + return self.identifier_strategy + else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") @@ -166,7 +167,7 @@ def merge(self, other_model: "NDBaseModel") -> Self: setattr(self, field, value) return self -# TODO: Make it a seperated BaseModel (low priority) +# TODO: Make it a seperated BaseModel? (low conditional priority) class NDNestedModel(NDBaseModel): """ Base for nested models without identifiers. diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 77307d07..ed09666d 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -16,7 +16,7 @@ # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel from .base import NDBaseModel, NDNestedModel -# TODO: Move it to constants.py and make a reverse class Map for this +# TODO: Move it to constants.py and make a reverse class Map for this (low priority) USER_ROLES_MAPPING = MappingProxyType({ "fabric_admin": "fabric-admin", "observer": "observer", @@ -51,7 +51,7 @@ def serialize_model(self) -> Dict: # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed -# TODO: Add field validation (e.g. me, le, choices, etc...) 
(medium priority) +# TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) class LocalUserModel(NDBaseModel): """ Local user configuration. @@ -62,7 +62,7 @@ class LocalUserModel(NDBaseModel): # Identifier configuration # TODO: Revisit this identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = ["login_id"] - identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "none"]]] = "single" + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" # Keys management configurations # TODO: Revisit these configurations (low priority) From f57dfaa5da9c06ecb153c921778c76de8f9392d6 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:42:12 -0500 Subject: [PATCH 086/131] [ignore] Complete orchestrators/base.py by making simple CRUD operations methods that work for single_identifier strategy (meant to be overridden if needed). --- plugins/module_utils/orchestrators/base.py | 48 ++++++++++--------- .../module_utils/orchestrators/local_user.py | 9 ++-- 2 files changed, 29 insertions(+), 28 deletions(-) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index e2d9fa75..611f39a6 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -24,61 +24,63 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - # TODO: change name from http method to crud (e.g. 
post -> create) - post_endpoint: Type[NDBaseSmartEndpoint] - put_endpoint: Type[NDBaseSmartEndpoint] + create_endpoint: Type[NDBaseSmartEndpoint] + update_endpoint: Type[NDBaseSmartEndpoint] delete_endpoint: Type[NDBaseSmartEndpoint] - get_endpoint: Type[NDBaseSmartEndpoint] + query_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required - # TODO: Replace it with future sender + # TODO: Replace it with future sender (low priority) module: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. "api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore new ways to make them even more general + # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) # TODO: Revisit Deserialization - def create(self, model_instance: NDBaseModel) -> ResponseType: + def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - api_endpoint = self.post_endpoint() + api_endpoint = self.create_endpoint() return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e - # TODO: Make the same changes as create() with local api_endpoint variable - def update(self, model_instance: NDBaseModel) -> ResponseType: + def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - self.put_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.put_endpoint.path, method=self.put_endpoint.verb, data=model_instance.to_payload()) + api_endpoint = self.update_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, 
data=model_instance.to_payload()) except Exception as e: - raise Exception(f"Update failed for {self.current_identifier}: {e}") from e + raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e - def delete(self, model_instance: NDBaseModel) -> ResponseType: + def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: if self.module.check_mode: return model_instance.to_payload() try: - self.delete_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.delete_endpoint.path, method=self.delete_endpoint.verb) + api_endpoint = self.delete_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: - raise Exception(f"Delete failed for {self.current_identifier}: {e}") from e + raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e - def query_one(self, model_instance: NDBaseModel) -> ResponseType: + def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - self.get_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=self.get_endpoint.path, method=self.get_endpoint.verb) + api_endpoint = self.query_endpoint() + api_endpoint.set_identifiers(model_instance.get_identifier_value()) + self.query_endpoint.set_identifiers(model_instance.get_identifier_value()) + return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: - raise Exception(f"Query failed for {self.current_identifier}: {e}") from e + raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - result = 
self.module.query_obj(self.get_endpoint.path) + result = self.module.query_obj(self.query_endpoint.path) return result or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e \ No newline at end of file + raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 3810fa83..caacc5aa 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -27,18 +27,17 @@ class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - post_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost - put_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut + create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - get_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.module.query_obj(self.get_endpoint.base_path) + result = self.module.query_obj(self.query_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e - \ No newline at end of file From 43f3e1af0101c71e1af60857b23e1d99d0d2b0c1 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 10:44:23 -0500 Subject: [PATCH 087/131] [ignore] Fix and in nd_config_collections.py. Move to utils.py. 
--- plugins/module_utils/nd_config_collection.py | 42 +++----------------- plugins/module_utils/utils.py | 29 +++++++++++++- 2 files changed, 34 insertions(+), 37 deletions(-) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index a25287aa..fa6662c9 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -18,10 +18,10 @@ # Type aliases # NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) -# TODO: Defined the same acros multiple files -> maybe move to constants.py +# TODO: Defined the same acros multiple files -> maybe move to constants.py (low priority) IdentifierKey = Union[str, int, Tuple[Any, ...]] -# TODO:Might make it a Pydantic RootModel (low priority but medium impact on NDNetworkResourceModule) +# TODO: Make it a Pydantic RootModel? (low conditional priority but medium impact on NDNetworkResourceModule) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. @@ -59,7 +59,7 @@ def _rebuild_index(self) -> None: key = self._extract_key(item) self._index[key] = index - # Core CRUD Operations + # Core Operations def add(self, item: ModelType) -> IdentifierKey: """ @@ -142,7 +142,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) - def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> Literal["new", "no_diff", "changed"]: + def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
""" @@ -158,16 +158,12 @@ def get_diff_config(self, new_item: ModelType, unwanted_keys: Optional[List[Unio existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() - - if unwanted_keys: - existing_data = self._remove_unwanted_keys(existing_data, unwanted_keys) - new_data = self._remove_unwanted_keys(new_data, unwanted_keys) is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" - def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_keys: Optional[List[Union[str, List[str]]]] = None) -> bool: + def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: """ Check if two collections differ. """ @@ -178,7 +174,7 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]", unwanted_k return True for item in other: - if self.get_diff_config(item, unwanted_keys) != "no_diff": + if self.get_diff_config(item) != "no_diff": return True for key in self.keys(): @@ -195,32 +191,6 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I other_keys = set(other.keys()) return list(current_keys - other_keys) - # TODO: Maybe not necessary - def _remove_unwanted_keys(self, data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: - """Remove unwanted keys from dict (supports nested paths).""" - data = deepcopy(data) - - for key in unwanted_keys: - if isinstance(key, str): - if key in data: - del data[key] - - elif isinstance(key, list) and len(key) > 0: - try: - parent = data - for k in key[:-1]: - if isinstance(parent, dict) and k in parent: - parent = parent[k] - else: - break - else: - if isinstance(parent, dict) and key[-1] in parent: - del parent[key[-1]] - except (KeyError, TypeError, IndexError): - pass - - return data - # Collection Operations def __len__(self) -> int: diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 72ccbcd7..a7c1d3dc 100644 --- a/plugins/module_utils/utils.py +++ 
b/plugins/module_utils/utils.py @@ -9,7 +9,7 @@ __metaclass__ = type from copy import deepcopy -from typing import Any +from typing import Any, Dict, List, Union def sanitize_dict(dict_to_sanitize, keys=None, values=None, recursive=True, remove_none_values=True): @@ -54,3 +54,30 @@ def issubset(subset: Any, superset: Any) -> bool: return False return True + + +# TODO: Might not necessary with Pydantic validation and serialization built-in methods +def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: + """Remove unwanted keys from dict (supports nested paths).""" + data = deepcopy(data) + + for key in unwanted_keys: + if isinstance(key, str): + if key in data: + del data[key] + + elif isinstance(key, list) and len(key) > 0: + try: + parent = data + for k in key[:-1]: + if isinstance(parent, dict) and k in parent: + parent = parent[k] + else: + break + else: + if isinstance(parent, dict) and key[-1] in parent: + del parent[key[-1]] + except (KeyError, TypeError, IndexError): + pass + + return data From 2eb7775f7fbe467c5435b83541ef5abea9467819 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 14:01:24 -0500 Subject: [PATCH 088/131] [ignore] Rename NDNetworkResourceModule to NDStateMachine. Add file for NDNestedModel. Add types.file. Various Renaming and small Modifications across the repo. WIP. 
--- plugins/module_utils/api_endpoints/base.py | 2 +- .../module_utils/api_endpoints/local_user.py | 3 +- plugins/module_utils/constants.py | 21 ++++--- plugins/module_utils/models/base.py | 58 +++++++------------ plugins/module_utils/models/local_user.py | 28 ++++----- plugins/module_utils/models/nested.py | 22 +++++++ plugins/module_utils/nd.py | 5 -- plugins/module_utils/nd_config_collection.py | 28 +++++---- ...twork_resources.py => nd_state_machine.py} | 23 ++++---- plugins/module_utils/orchestrators/base.py | 8 +-- .../module_utils/orchestrators/local_user.py | 5 +- plugins/module_utils/types.py | 14 +++++ plugins/modules/nd_local_user.py | 7 +-- 13 files changed, 115 insertions(+), 109 deletions(-) create mode 100644 plugins/module_utils/models/nested.py rename plugins/module_utils/{nd_network_resources.py => nd_state_machine.py} (95%) create mode 100644 plugins/module_utils/types.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 90ef5c87..0355a1de 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -12,8 +12,8 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final, Union, Tuple, Any +from ..types import IdentifierKey -IdentifierKey = Union[str, int, Tuple[Any, ...], None] # TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 61f52ad8..666782ab 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -20,8 +20,7 @@ from enums import VerbEnum from base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field - -IdentifierKey = Union[str, int, Tuple[Any, ...], None] +from ..types import IdentifierKey class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, 
NDBaseSmartEndpoint): """ diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index cbba61b3..7bb7e95d 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -9,6 +9,18 @@ __metaclass__ = type +from typing import Dict +from types import MappingProxyType +from copy import deepcopy + +class NDConstantMapping(Dict): + + def __init__(self, data: Dict): + new_dict = deepcopy(data) + for k,v in data.items(): + new_dict[v] = k + return MappingProxyType(new_dict) + OBJECT_TYPES = { "tenant": "OST_TENANT", "vrf": "OST_VRF", @@ -175,12 +187,3 @@ ND_SETUP_NODE_DEPLOYMENT_TYPE = {"physical": "cimc", "virtual": "vnode"} BACKUP_TYPE = {"config_only": "config-only", None: "config-only", "": "config-only", "full": "full"} - -USER_ROLES_MAPPING = { - "fabric_admin": "fabric-admin", - "observer": "observer", - "super_admin": "super-admin", - "support_engineer": "support-engineer", - "approver": "approver", - "designer": "designer", -} diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 159acb93..ca672fd5 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -15,6 +15,7 @@ # TODO: Revisit identifiers strategy (low priority) +# NOTE: what about List of NestedModels? -> make it a separate Sub Model class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
@@ -36,11 +37,12 @@ class NDBaseModel(BaseModel, ABC): # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None - # TODO: Rvisit no identifiers strategy naming (`singleton`) (low priority) + # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List] = [] + # TODO: To be removed in the future (see local_user model) unwanted_keys: ClassVar[List] = [] # TODO: Revisit it with identifiers strategy (low priority) @@ -51,7 +53,6 @@ def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) # Skip enforcement for nested models - # TODO: Remove if `NDNestedModel` is a separated BaseModel (low conditional priority) if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -65,22 +66,26 @@ def __init_subclass__(cls, **kwargs): f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" ) - - # NOTE: Might not need to make them absractmethod because of the Pydantic built-in methods (low priority) - @abstractmethod + def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. """ - pass + return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - @classmethod - @abstractmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + def to_config(self, **kwargs) -> Dict[str, Any]: """ - Create model instance from API response. + Convert model to Ansible config format. 
""" - pass + return self.model_dump(by_name=True, exclude_none=True, **kwargs) + + @classmethod + def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(response, by_alias=True, **kwargs) + + @classmethod + def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: + return cls.model_validate(ansible_config, by_name=True, **kwargs) # TODO: Revisit this function when revisiting identifier strategy (low priority) def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: @@ -132,25 +137,26 @@ def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: # TODO: Revisit condition when there is no identifiers (low priority) elif self.identifier_strategy == "singleton": - return self.identifier_strategy + return None else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") - def to_diff_dict(self) -> Dict[str, Any]: + def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ return self.model_dump( by_alias=True, exclude_none=True, - exclude=set(self.exclude_from_diff) + exclude=set(self.exclude_from_diff), + **kwargs ) # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace - def merge(self, other_model: "NDBaseModel") -> Self: + def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") @@ -166,25 +172,3 @@ def merge(self, other_model: "NDBaseModel") -> Self: else: setattr(self, field, value) return self - -# TODO: Make it a seperated BaseModel? (low conditional priority) -class NDNestedModel(NDBaseModel): - """ - Base for nested models without identifiers. 
- """ - - # TODO: Configuration Fields to be clearly defined here (low priority) - identifiers: ClassVar[List[str]] = [] - - def to_payload(self) -> Dict[str, Any]: - """ - Convert model to API payload format. - """ - return self.model_dump(by_alias=True, exclude_none=True) - - @classmethod - def from_response(cls, response: Dict[str, Any]) -> Self: - """ - Create model instance from API response. - """ - return cls.model_validate(response, by_alias=True) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index ed09666d..dba35aee 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -13,11 +13,14 @@ from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel, NDNestedModel -from .base import NDBaseModel, NDNestedModel - -# TODO: Move it to constants.py and make a reverse class Map for this (low priority) -USER_ROLES_MAPPING = MappingProxyType({ +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from .base import NDBaseModel +from .nested import NDNestedModel +from ..constants import NDConstantMapping + +# Constant defined here as it is only used in this model +USER_ROLES_MAPPING = NDConstantMapping({ "fabric_admin": "fabric-admin", "observer": "observer", "super_admin": "super-admin", @@ -31,7 +34,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): """Security domain configuration for local user (nested model).""" # Fields - name: str = Field(..., alias="name", exclude=True) + name: str = Field(alias="name", exclude=True) roles: Optional[List[str]] = Field(default=None, alias="roles", exclude=True) # -- Serialization (Model 
instance -> API payload) -- @@ -47,8 +50,7 @@ def serialize_model(self) -> Dict: } } - # -- Deserialization (API response / Ansible payload -> Model instance) -- - # NOTE: Not needed as it already defined in `LocalUserModel` -> investigate if needed + # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity # TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) @@ -121,10 +123,6 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) "domains": domains_dict } - - def to_payload(self, **kwargs) -> Dict[str, Any]: - return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - # -- Deserialization (API response / Ansible payload -> Model instance) -- @model_validator(mode="before") @@ -172,12 +170,6 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: return value - # TODO: only works for api responses but NOT for Ansible configs -> needs to be fixed (high priority) - @classmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: - return cls.model_validate(response, by_alias=True, **kwargs) - - # -- Extra -- # TODO: to generate from Fields (low priority) diff --git a/plugins/module_utils/models/nested.py b/plugins/module_utils/models/nested.py new file mode 100644 index 00000000..f2560819 --- /dev/null +++ b/plugins/module_utils/models/nested.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import List, ClassVar +from .base import NDBaseModel + + +class NDNestedModel(NDBaseModel): + """ + Base for nested models without identifiers. 
+ """ + + # NOTE: model_config, ClassVar, and Fields can be overwritten here if needed + + identifiers: ClassVar[List[str]] = [] diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 5f528bb8..07af68e5 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -239,13 +239,8 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: -<<<<<<< HEAD if data: info = self.connection.send_request(method, uri, json.dumps(data)) -======= - if data is not None: - info = conn.send_request(method, uri, json.dumps(data)) ->>>>>>> 7c967c3 ([minor_change] Add nd_local_user as a new network resource module for Nexus Dashboard v4.1.0 and higher.) else: info = self.connection.send_request(method, uri) self.result["data"] = data diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index fa6662c9..364519b8 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -14,14 +14,12 @@ # TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from .models.base import NDBaseModel from .utils import issubset +from .types import IdentifierKey # Type aliases -# NOTE: Maybe add more type aliases in the future if needed ModelType = TypeVar('ModelType', bound=NDBaseModel) -# TODO: Defined the same acros multiple files -> maybe move to constants.py (low priority) -IdentifierKey = Union[str, int, Tuple[Any, ...]] -# TODO: Make it a Pydantic RootModel? (low conditional priority but medium impact on NDNetworkResourceModule) + class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. 
@@ -156,9 +154,9 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" + # TODO: make a diff class level method for NDBaseModel existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() - is_subset = issubset(new_data, existing_data) return "no_diff" if is_subset else "changed" @@ -214,30 +212,30 @@ def copy(self) -> "NDConfigCollection[ModelType]": # Collection Serialization - def to_list(self, **kwargs) -> List[Dict]: + def to_ansible_config(self, **kwargs) -> List[Dict]: """ - Export as list of dicts (with aliases). + Export as an Ansible config. """ - return [item.model_dump(by_alias=True, exclude_none=True, **kwargs) for item in self._items] + return [item.to_config(**kwargs) for item in self._items] - def to_payload_list(self) -> List[Dict[str, Any]]: + def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: """ Export as list of API payloads. """ - return [item.to_payload() for item in self._items] + return [item.to_payload(**kwargs) for item in self._items] @classmethod - def from_list(cls, data: List[Dict], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ - Create collection from list of dicts. + Create collection from Ansible config. """ - items = [model_class.model_validate(item_data, by_name=True) for item_data in data] + items = [model_class.from_config(item_data, **kwargs) for item_data in data] return cls(model_class=model_class, items=items) @classmethod - def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType]) -> "NDConfigCollection[ModelType]": + def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ Create collection from API response. 
""" - items = [model_class.from_response(item_data) for item_data in response_data] + items = [model_class.from_response(item_data, **kwargs) for item_data in response_data] return cls(model_class=model_class, items=items) diff --git a/plugins/module_utils/nd_network_resources.py b/plugins/module_utils/nd_state_machine.py similarity index 95% rename from plugins/module_utils/nd_network_resources.py rename to plugins/module_utils/nd_state_machine.py index d52fb9de..5306bfe8 100644 --- a/plugins/module_utils/nd_network_resources.py +++ b/plugins/module_utils/nd_state_machine.py @@ -24,26 +24,24 @@ from .orchestrators.base import NDBaseOrchestrator from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: replace path and verbs with smart Endpoint (Top priority) -# TODO: Rename it (low priority) + # TODO: Revisit Deserialization in every method (high priority) -class NDNetworkResourceModule(NDModule): +class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_orchestrator: Type[NDBaseOrchestrator]): + def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ # TODO: Revisit Module initialization and configuration (medium priority). e.g., use instead: # nd_module = NDModule() super().__init__(module) - + # Configuration - # TODO: make sure `model_class` is the same as the one in `model_orchestrator`. 
if not, error out (high priority) - self.model_class = model_class self.model_orchestrator = model_orchestrator(module=module) + self.model_class = self.model_orchestrator.model_class # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) self.state = self.params["state"] self.ansible_config = self.params["config"] @@ -52,17 +50,17 @@ def __init__(self, module: AnsibleModule, model_class: Type[NDBaseModel], model_ # Initialize collections # TODO: Revisit collections initialization especially `init_all_data` (medium priority) # TODO: Revisit class variables `previous`, `existing`, etc... (medium priority) - self.nd_config_collection = NDConfigCollection[model_class] + self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, - model_class=model_class + model_class=self.model_class ) - self.previous = self.nd_config_collection(model_class=model_class) - self.proposed = self.nd_config_collection(model_class=model_class) - self.sent = self.nd_config_collection(model_class=model_class) + self.previous = self.nd_config_collection(model_class=self.model_class) + self.proposed = self.nd_config_collection(model_class=self.model_class) + self.sent = self.nd_config_collection(model_class=self.model_class) except Exception as e: self.fail_json( @@ -340,6 +338,7 @@ def _manage_delete_state(self) -> None: # Output Formatting # TODO: move to separate Class (results) -> align it with rest_send PR + # TODO: return a defined ordered list of config (for integration test) def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.params.get("output_level", "normal") diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 611f39a6..db72b740 100644 --- 
a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -27,7 +27,8 @@ class NDBaseOrchestrator(BaseModel): create_endpoint: Type[NDBaseSmartEndpoint] update_endpoint: Type[NDBaseSmartEndpoint] delete_endpoint: Type[NDBaseSmartEndpoint] - query_endpoint: Type[NDBaseSmartEndpoint] + query_one_endpoint: Type[NDBaseSmartEndpoint] + query_all_endpoint: Type[NDBaseSmartEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) @@ -70,9 +71,8 @@ def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - api_endpoint = self.query_endpoint() + api_endpoint = self.query_one_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - self.query_endpoint.set_identifiers(model_instance.get_identifier_value()) return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e @@ -80,7 +80,7 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: - result = self.module.query_obj(self.query_endpoint.path) + result = self.module.query_obj(self.query_all_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index caacc5aa..ef2aa36a 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -30,14 +30,15 @@ class LocalUserOrchestrator(NDBaseOrchestrator): create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost update_endpoint: 
Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - query_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet def query_all(self): """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.module.query_obj(self.query_endpoint.base_path) + result = self.module.query_obj(self.query_all_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/types.py b/plugins/module_utils/types.py new file mode 100644 index 00000000..124aedd5 --- /dev/null +++ b/plugins/module_utils/types.py @@ -0,0 +1,14 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Any, Union, Tuple + + +IdentifierKey = Union[str, int, Tuple[Any, ...]] diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 901549fb..67fb3e80 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -181,7 +181,7 @@ # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel # from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING from ..module_utils.nd import nd_argument_spec -from ..module_utils.nd_network_resources import NDNetworkResourceModule +from ..module_utils.nd_state_machine import NDStateMachine from ..module_utils.models.local_user import LocalUserModel from ..module_utils.orchestrators.local_user import LocalUserOrchestrator @@ -194,12 +194,11 @@ def 
main(): argument_spec=argument_spec, supports_check_mode=True, ) - + try: # Create NDNetworkResourceModule with LocalUserModel - nd_module = NDNetworkResourceModule( + nd_module = NDStateMachine( module=module, - model_class=LocalUserModel, model_orchestrator=LocalUserOrchestrator, ) From 2baca0aeca54b0c80d37b7642ad2069abb263d5f Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 26 Feb 2026 14:09:18 -0500 Subject: [PATCH 089/131] [ignore] Make a small change to NDModule request function. --- plugins/module_utils/nd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 07af68e5..42b1b118 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -239,7 +239,7 @@ def request( if file is not None: info = self.connection.send_file_request(method, uri, file, data, None, file_key, file_ext) else: - if data: + if data is not None: info = self.connection.send_request(method, uri, json.dumps(data)) else: info = self.connection.send_request(method, uri) From cd60f77ee4256f2e15eb06d52b713c07cfb39153 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Mon, 2 Mar 2026 17:59:17 -0500 Subject: [PATCH 090/131] [ignore] Modify nd_state_machine to work with orchestrators/models/api_endpoints. Adapt api_endpoints, models, orchestrators accordingly. Integration Tests passing for nd_local_user module. Still WIP. 
--- plugins/module_utils/api_endpoints/base.py | 6 +- .../module_utils/api_endpoints/local_user.py | 6 +- plugins/module_utils/constants.py | 9 +- plugins/module_utils/models/base.py | 3 +- plugins/module_utils/models/local_user.py | 5 +- plugins/module_utils/nd_state_machine.py | 237 ++++++++---------- plugins/module_utils/orchestrators/base.py | 34 ++- .../module_utils/orchestrators/local_user.py | 2 +- plugins/modules/nd_local_user.py | 4 +- requirements.txt | 3 +- .../network-integration.requirements.txt | 3 +- 11 files changed, 140 insertions(+), 172 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 0355a1de..832476ed 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -25,13 +25,13 @@ class NDBaseSmartEndpoint(BaseModel, ABC): # TODO: to remove base_path: str - @abstractmethod @property + @abstractmethod def path(self) -> str: pass - - @abstractmethod + @property + @abstractmethod def verb(self) -> str: pass diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 666782ab..cae1326b 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -16,9 +16,9 @@ __metaclass__ = type # pylint: disable=invalid-name from typing import Literal, Union, Tuple, Any, Final -from mixins import LoginIdMixin -from enums import VerbEnum -from base import NDBaseSmartEndpoint, NDBasePath +from .mixins import LoginIdMixin +from .enums import VerbEnum +from .base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field from ..types import IdentifierKey diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 7bb7e95d..784a7f51 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -16,10 +16,13 @@ class NDConstantMapping(Dict): def __init__(self, data: Dict): - 
new_dict = deepcopy(data) + self.new_dict = deepcopy(data) for k,v in data.items(): - new_dict[v] = k - return MappingProxyType(new_dict) + self.new_dict[v] = k + self.new_dict = MappingProxyType(self.new_dict) + + def get_dict(self): + return self.new_dict OBJECT_TYPES = { "tenant": "OST_TENANT", diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index ca672fd5..7b569a58 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -32,6 +32,7 @@ class NDBaseModel(BaseModel, ABC): use_enum_values=True, validate_assignment=True, populate_by_name=True, + arbitrary_types_allowed=True, extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) @@ -77,7 +78,7 @@ def to_config(self, **kwargs) -> Dict[str, Any]: """ Convert model to Ansible config format. """ - return self.model_dump(by_name=True, exclude_none=True, **kwargs) + return self.model_dump(by_alias=False, exclude_none=True, **kwargs) @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index dba35aee..713d6040 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -27,7 +27,7 @@ "support_engineer": "support-engineer", "approver": "approver", "designer": "designer", -}) +}).get_dict() class LocalUserSecurityDomainModel(NDNestedModel): @@ -173,7 +173,8 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # -- Extra -- # TODO: to generate from Fields (low priority) - def get_argument_spec(self): + @classmethod + def get_argument_spec(cls) -> Dict: return dict( config=dict( type="list", diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 5306bfe8..5b1f770c 100644 --- a/plugins/module_utils/nd_state_machine.py +++ 
b/plugins/module_utils/nd_state_machine.py @@ -16,16 +16,16 @@ # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule # from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -# from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +# from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey # from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -from nd import NDModule -from nd_config_collection import NDConfigCollection -from models.base import NDBaseModel +from .nd import NDModule +from .nd_config_collection import NDConfigCollection from .orchestrators.base import NDBaseOrchestrator -from constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from .types import IdentifierKey +from .constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: Revisit Deserialization in every method (high priority) class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. @@ -35,16 +35,21 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ Initialize the Network Resource Module. """ - # TODO: Revisit Module initialization and configuration (medium priority). 
e.g., use instead: + # TODO: Revisit Module initialization and configuration # nd_module = NDModule() - super().__init__(module) + self.module = module + self.nd_module = NDModule(module) + + # Operation tracking + self.nd_logs: List[Dict[str, Any]] = [] + self.result: Dict[str, Any] = {"changed": False} # Configuration - self.model_orchestrator = model_orchestrator(module=module) + self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) - self.state = self.params["state"] - self.ansible_config = self.params["config"] + self.state = self.module.params["state"] + self.ansible_config = self.module.params.get("config", []) # Initialize collections @@ -53,46 +58,64 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() - + self.existing = self.nd_config_collection.from_api_response( response_data=init_all_data, model_class=self.model_class ) - self.previous = self.nd_config_collection(model_class=self.model_class) + # Save previous state + self.previous = self.existing.copy() self.proposed = self.nd_config_collection(model_class=self.model_class) self.sent = self.nd_config_collection(model_class=self.model_class) - + + for config in self.ansible_config: + try: + # Parse config into model + item = self.model_class.from_config(config) + self.proposed.add(item) + except ValidationError as e: + self.fail_json( + msg=f"Invalid configuration: {e}", + config=config, + validation_errors=e.errors() + ) + return + except Exception as e: self.fail_json( msg=f"Initialization failed: {str(e)}", error=str(e) ) - - # Operation tracking - self.nd_logs: List[Dict[str, Any]] = [] # Logging # NOTE: format log placeholder # TODO: use a proper logger (low priority) - 
def format_log(self, identifier, status: Literal["created", "updated", "deleted", "no_change"], after_data: Optional[Dict[str, Any]] = None, sent_payload_data: Optional[Dict[str, Any]] = None) -> None: + def format_log( + self, + identifier: IdentifierKey, + operation_status: Literal["no_change", "created", "updated", "deleted"], + before: Optional[Dict[str, Any]] = None, + after: Optional[Dict[str, Any]] = None, + payload: Optional[Dict[str, Any]] = None, + ) -> None: """ Create and append a log entry. """ log_entry = { "identifier": identifier, - "status": status, - "before": deepcopy(self.existing_config), - "after": deepcopy(after_data) if after_data is not None else self.existing_config, - "sent_payload": deepcopy(sent_payload_data) if sent_payload_data is not None else {} + "operation_status": operation_status, + "before": before, + "after": after, + "payload": payload, } # Add HTTP details if not in check mode - if not self.module.check_mode and self.url is not None: + if not self.module.check_mode and self.nd_module.url is not None: log_entry.update({ - "method": self.method, - "response": self.response, - "status": self.status, - "url": self.url + "method": self.nd_module.method, + "response": self.nd_module.response, + "status": self.nd_module.status, + "url": self.nd_module.url }) self.nd_logs.append(log_entry) @@ -103,42 +126,6 @@ def manage_state(self) -> None: """ Manage state according to desired configuration. 
""" - unwanted_keys = unwanted_keys or [] - - # Parse and validate configs - # TODO: move it to init() (top priority) - # TODO: Modify it if NDConfigCollection becomes a Pydantic RootModel (low priority) - try: - parsed_items = [] - for config in self.ansible_config: - try: - # Parse config into model - item = self.model_class.model_validate(config) - parsed_items.append(item) - except ValidationError as e: - self.fail_json( - msg=f"Invalid configuration: {e}", - config=config, - validation_errors=e.errors() - ) - return - - # Create proposed collection - self.proposed = self.nd_config_collection( - model_class=self.model_class, - items=parsed_items - ) - - # Save previous state - self.previous = self.existing.copy() - - except Exception as e: - self.fail_json( - msg=f"Failed to prepare configurations: {e}", - error=str(e) - ) - return - # Execute state operations if self.state in ["merged", "replaced", "overridden"]: self._manage_create_update_state() @@ -159,18 +146,10 @@ def _manage_create_update_state(self) -> None: Handle merged/replaced/overridden states. 
""" for proposed_item in self.proposed: + # Extract identifier + identifier = proposed_item.get_identifier_value() + existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: - # Extract identifier - # TODO: Remove self.current_identifier, get it directly into the action functions - identifier = proposed_item.get_identifier_value() - - existing_item = self.existing.get(identifier) - self.existing_config = ( - existing_item.model_dump(by_alias=True, exclude_none=True) - if existing_item - else {} - ) - # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) @@ -178,51 +157,44 @@ def _manage_create_update_state(self) -> None: if diff_status == "no_diff": self.format_log( identifier=identifier, - status="no_change", - after_data=self.existing_config + operation_status="no_change", + before=existing_config, + after=existing_config, ) continue # Prepare final config based on state - if self.state == "merged" and existing_item: + if self.state == "merged": # Merge with existing merged_item = self.existing.merge(proposed_item) final_item = merged_item else: # Replace or create - if existing_item: + if diff_status == "changed": self.existing.replace(proposed_item) else: self.existing.add(proposed_item) final_item = proposed_item - - # Convert to API payload - self.proposed_config = final_item.to_payload() - + # Execute API operation if diff_status == "changed": - response = self.model_orchestrator.update(final_item) + if not self.module.check_mode: + response = self.model_orchestrator.update(final_item) + self.sent.add(final_item) operation_status = "updated" - else: - response = self.model_orchestrator.create(final_item) + elif diff_status == "new": + if not self.module.check_mode: + response = self.model_orchestrator.create(final_item) + self.sent.add(final_item) operation_status = "created" - # Track sent payload - if not self.module.check_mode: - self.sent.add(final_item) - sent_payload = 
final_item - else: - sent_payload = None - # Log operation self.format_log( identifier=identifier, - status=operation_status, - after_data=( - response if not self.module.check_mode - else final_item.model_dump(by_alias=True, exclude_none=True) - ), - sent_payload_data=sent_payload + operation_status=operation_status, + before=existing_config, + after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), + payload=final_item.to_payload(), ) except Exception as e: @@ -230,11 +202,12 @@ def _manage_create_update_state(self) -> None: self.format_log( identifier=identifier, - status="no_change", - after_data=self.existing_config + operation_status="no_change", + before=existing_config, + after=existing_config, ) - if not self.params.get("ignore_errors", False): + if not self.module.params.get("ignore_errors", False): self.fail_json( msg=error_msg, identifier=str(identifier), @@ -243,30 +216,21 @@ def _manage_create_update_state(self) -> None: return # TODO: Refactor with orchestrator (Top priority) - def _manage_override_deletions(self, override_exceptions: List) -> None: + def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" diff_identifiers = self.previous.get_diff_identifiers(self.proposed) for identifier in diff_identifiers: - if identifier in override_exceptions: - continue - try: - self.current_identifier = identifier - existing_item = self.existing.get(identifier) if not existing_item: continue - - self.existing_config = existing_item.model_dump( - by_alias=True, - exclude_none=True - ) - + # Execute delete - self._delete() + if not self.module.check_mode: + response = self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) @@ -274,8 +238,10 @@ def _manage_override_deletions(self, override_exceptions: List) -> None: # Log deletion self.format_log( identifier=identifier, - status="deleted", - after_data={} + operation_status="deleted", + before=existing_item.to_config(), + after={}, + ) except Exception as e: @@ -295,25 +261,21 @@ def _manage_delete_state(self) -> None: for proposed_item in self.proposed: try: identifier = proposed_item.get_identifier_value() - self.current_identifier = identifier existing_item = self.existing.get(identifier) if not existing_item: # Already deleted or doesn't exist self.format_log( identifier=identifier, - status="no_change", - after_data={} + operation_status="no_change", + before={}, + after={}, ) continue - self.existing_config = existing_item.model_dump( - by_alias=True, - exclude_none=True - ) - # Execute delete - self._delete() + if not self.module.check_mode: + response = self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) @@ -321,8 +283,9 @@ def _manage_delete_state(self) -> None: # Log deletion self.format_log( identifier=identifier, - status="deleted", - after_data={} + operation_status="deleted", + before=existing_item.to_config(), + after={}, ) except Exception as e: @@ -341,35 +304,35 @@ def _manage_delete_state(self) -> None: # TODO: return a defined ordered list of config (for integration test) def add_logs_and_outputs(self) -> 
None: """Add logs and outputs to module result based on output_level.""" - output_level = self.params.get("output_level", "normal") - state = self.params.get("state") + output_level = self.module.params.get("output_level", "normal") + state = self.module.params.get("state") # Add previous state for certain states and output levels if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if output_level in ("debug", "info"): - self.result["previous"] = self.previous.to_list() + self.result["previous"] = self.previous.to_ansible_config() # Check if there were changes - if not self.has_modified and self.previous.get_diff_collection(self.existing): + if self.previous.get_diff_collection(self.existing): self.result["changed"] = True # Add stdout if present - if self.stdout: - self.result["stdout"] = self.stdout + if self.nd_module.stdout: + self.result["stdout"] = self.nd_module.stdout # Add debug information if output_level == "debug": self.result["nd_logs"] = self.nd_logs - if self.url is not None: - self.result["httpapi_logs"] = self.httpapi_logs + if self.nd_module.url is not None: + self.result["httpapi_logs"] = self.nd_module.httpapi_logs if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent.to_payload_list() - self.result["proposed"] = self.proposed.to_list() + self.result["proposed"] = self.proposed.to_ansible_config() # Always include current state - self.result["current"] = self.existing.to_list() + self.result["current"] = self.existing.to_ansible_config() # Module Exit Methods diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index db72b740..924ea4b0 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -11,8 +11,8 @@ from ..models.base import NDBaseModel from ..nd import NDModule from ..api_endpoints.base import NDBaseSmartEndpoint -from typing import Dict, List, Any, Union, ClassVar, Type -from pydantic import BaseModel 
+from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from pydantic import BaseModel, ConfigDict ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] @@ -21,6 +21,13 @@ # TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): + model_config = ConfigDict( + use_enum_values=True, + validate_assignment=True, + populate_by_name=True, + arbitrary_types_allowed=True, + ) + model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required @@ -32,40 +39,31 @@ class NDBaseOrchestrator(BaseModel): # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) - module: NDModule + sender: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. "api/v1/infra/aaa/LocalUsers/{loginID}") # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.create_endpoint() - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.update_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb, 
data=model_instance.to_payload()) except Exception as e: raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: - if self.module.check_mode: - return model_instance.to_payload() - try: api_endpoint = self.delete_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e @@ -73,14 +71,14 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.query_one_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) - return self.module.request(path=api_endpoint.path, method=api_endpoint.verb) + return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb) except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e # TODO: Revisit the straegy around the query_all (see local_user's case) - def query_all(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: + def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: - result = self.module.query_obj(self.query_all_endpoint.path) + result = self.sender.query_obj(self.query_all_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index ef2aa36a..46a4ea07 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -38,7 +38,7 @@ def query_all(self): Custom query_all action to extract 'localusers' from 
response. """ try: - result = self.module.query_obj(self.query_all_endpoint.base_path) + result = self.sender.query_obj(self.query_all_endpoint.base_path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 67fb3e80..b6acee72 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -177,9 +177,9 @@ from ansible.module_utils.basic import AnsibleModule # TODO: To be replaced with: # from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec -# from ansible_collections.cisco.nd.plugins.module_utils.nd_network_resource_module import NDNetworkResourceModule +# from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine # from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -# from ansible_collections.cisco.nd.plugins.module_utils.constants import USER_ROLES_MAPPING +# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator from ..module_utils.nd import nd_argument_spec from ..module_utils.nd_state_machine import NDStateMachine from ..module_utils.models.local_user import LocalUserModel diff --git a/requirements.txt b/requirements.txt index 514632d1..98907e9a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ requests_toolbelt jsonpath-ng -lxml \ No newline at end of file +lxml +pydantic==2.12.5 \ No newline at end of file diff --git a/tests/integration/network-integration.requirements.txt b/tests/integration/network-integration.requirements.txt index 514632d1..98907e9a 100644 --- a/tests/integration/network-integration.requirements.txt +++ b/tests/integration/network-integration.requirements.txt @@ -1,3 +1,4 @@ requests_toolbelt jsonpath-ng -lxml \ No newline at end of file +lxml +pydantic==2.12.5 \ No newline at end of file 
From 8be0d524281f2b476adcb0e22518339482db859b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 11:46:31 -0500 Subject: [PATCH 091/131] [ignore] Add proper path dependencies and Ran black formatting. --- plugins/module_utils/api_endpoints/base.py | 5 +- plugins/module_utils/api_endpoints/enums.py | 2 +- .../module_utils/api_endpoints/local_user.py | 13 +- plugins/module_utils/api_endpoints/mixins.py | 2 +- plugins/module_utils/constants.py | 7 +- plugins/module_utils/models/base.py | 60 +++--- plugins/module_utils/models/local_user.py | 75 +++----- plugins/module_utils/models/nested.py | 2 +- plugins/module_utils/nd_config_collection.py | 94 +++++----- plugins/module_utils/nd_state_machine.py | 171 +++++++----------- plugins/module_utils/orchestrators/base.py | 9 +- .../module_utils/orchestrators/local_user.py | 12 +- plugins/module_utils/utils.py | 16 +- plugins/modules/nd_api_key.py | 1 - plugins/modules/nd_local_user.py | 25 +-- 15 files changed, 204 insertions(+), 290 deletions(-) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 832476ed..954c1f6a 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -12,13 +12,12 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict from typing import Final, Union, Tuple, Any -from ..types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey # TODO: Rename it to APIEndpoint # NOTE: This is a very minimalist endpoint package -> needs to be enhanced class NDBaseSmartEndpoint(BaseModel, ABC): - # TODO: maybe to be modified in the future model_config = ConfigDict(validate_assignment=True) @@ -29,7 +28,7 @@ class NDBaseSmartEndpoint(BaseModel, ABC): @abstractmethod def path(self) -> str: pass - + @property @abstractmethod def verb(self) -> str: diff --git a/plugins/module_utils/api_endpoints/enums.py 
b/plugins/module_utils/api_endpoints/enums.py index afb4dd5c..ced62ba7 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/api_endpoints/enums.py @@ -43,4 +43,4 @@ class BooleanStringEnum(str, Enum): """ TRUE = "true" - FALSE = "false" \ No newline at end of file + FALSE = "false" diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index cae1326b..72639495 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -16,11 +16,12 @@ __metaclass__ = type # pylint: disable=invalid-name from typing import Literal, Union, Tuple, Any, Final -from .mixins import LoginIdMixin -from .enums import VerbEnum -from .base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field -from ..types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey + class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): """ @@ -105,7 +106,7 @@ class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): """ class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( - default="EpApiV1InfraAaaLocalUsersPost", + default="EpApiV1InfraAaaLocalUsersPost", description="Class name for backward compatibility", frozen=True, ) @@ -136,7 +137,7 @@ class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): """ class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( - default="EpApiV1InfraAaaLocalUsersPut", + default="EpApiV1InfraAaaLocalUsersPut", description="Class name for backward compatibility", frozen=True, ) diff --git 
a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py index 8ff3218f..9516c9ce 100644 --- a/plugins/module_utils/api_endpoints/mixins.py +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -22,4 +22,4 @@ class LoginIdMixin(BaseModel): """Mixin for endpoints that require login_id parameter.""" - login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") \ No newline at end of file + login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 784a7f51..afa0a2b0 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -13,17 +13,18 @@ from types import MappingProxyType from copy import deepcopy -class NDConstantMapping(Dict): +class NDConstantMapping(Dict): def __init__(self, data: Dict): self.new_dict = deepcopy(data) - for k,v in data.items(): + for k, v in data.items(): self.new_dict[v] = k self.new_dict = MappingProxyType(self.new_dict) - + def get_dict(self): return self.new_dict + OBJECT_TYPES = { "tenant": "OST_TENANT", "vrf": "OST_VRF", diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 7b569a58..94fb9cc5 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -19,13 +19,14 @@ class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
- + Supports three identifier strategies: - single: One unique required field (e.g., ["login_id"]) - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) """ + # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, @@ -33,14 +34,14 @@ class NDBaseModel(BaseModel, ABC): validate_assignment=True, populate_by_name=True, arbitrary_types_allowed=True, - extra='allow', # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs + extra="allow", # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs ) # TODO: Revisit identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = None # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" - + # Optional: fields to exclude from diffs (e.g., passwords) exclude_from_diff: ClassVar[List] = [] # TODO: To be removed in the future (see local_user model) @@ -52,7 +53,7 @@ def __init_subclass__(cls, **kwargs): Enforce configuration for identifiers definition. """ super().__init_subclass__(**kwargs) - + # Skip enforcement for nested models if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return @@ -73,7 +74,7 @@ def to_payload(self, **kwargs) -> Dict[str, Any]: Convert model to API payload format. """ return self.model_dump(by_alias=True, exclude_none=True, **kwargs) - + def to_config(self, **kwargs) -> Dict[str, Any]: """ Convert model to Ansible config format. 
@@ -83,11 +84,11 @@ def to_config(self, **kwargs) -> Dict[str, Any]: @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: return cls.model_validate(response, by_alias=True, **kwargs) - + @classmethod def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: return cls.model_validate(ansible_config, by_name=True, **kwargs) - + # TODO: Revisit this function when revisiting identifier strategy (low priority) def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ @@ -98,74 +99,61 @@ def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: """ if not self.identifiers and self.identifier_strategy != "singleton": raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") - + if self.identifier_strategy == "single": value = getattr(self, self.identifiers[0], None) if value is None: - raise ValueError( - f"Single identifier field '{self.identifiers[0]}' is None" - ) + raise ValueError(f"Single identifier field '{self.identifiers[0]}' is None") return value - + elif self.identifier_strategy == "composite": values = [] missing = [] - + for field in self.identifiers: value = getattr(self, field, None) if value is None: missing.append(field) values.append(value) - + # NOTE: might be redefined with Pydantic (low priority) if missing: - raise ValueError( - f"Composite identifier fields {missing} are None. " - f"All required: {self.identifiers}" - ) - + raise ValueError(f"Composite identifier fields {missing} are None. 
" f"All required: {self.identifiers}") + return tuple(values) - + elif self.identifier_strategy == "hierarchical": for field in self.identifiers: value = getattr(self, field, None) if value is not None: return (field, value) - - raise ValueError( - f"No non-None value in hierarchical fields {self.identifiers}" - ) - + + raise ValueError(f"No non-None value in hierarchical fields {self.identifiers}") + # TODO: Revisit condition when there is no identifiers (low priority) elif self.identifier_strategy == "singleton": return None - + else: raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") - def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ - return self.model_dump( - by_alias=True, - exclude_none=True, - exclude=set(self.exclude_from_diff), - **kwargs - ) - + return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) + # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... 
-> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") - + for field, value in other_model: if value is None: continue - + current_value = getattr(self, field) if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): setattr(self, field, current_value.merge(value)) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 713d6040..e759a6fb 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,26 +8,25 @@ __metaclass__ = type -from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal from typing_extensions import Self - -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel -from .base import NDBaseModel -from .nested import NDNestedModel -from ..constants import NDConstantMapping +from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping # Constant defined here as it is only used in this model -USER_ROLES_MAPPING = NDConstantMapping({ - "fabric_admin": "fabric-admin", - "observer": "observer", - "super_admin": "super-admin", - "support_engineer": "support-engineer", - "approver": "approver", - 
"designer": "designer", -}).get_dict() +USER_ROLES_MAPPING = NDConstantMapping( + { + "fabric_admin": "fabric-admin", + "observer": "observer", + "super_admin": "super-admin", + "support_engineer": "support-engineer", + "approver": "approver", + "designer": "designer", + } +).get_dict() class LocalUserSecurityDomainModel(NDNestedModel): @@ -41,14 +40,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): @model_serializer() def serialize_model(self) -> Dict: - return { - self.name: { - "roles": [ - USER_ROLES_MAPPING.get(role, role) - for role in (self.roles or []) - ] - } - } + return {self.name: {"roles": [USER_ROLES_MAPPING.get(role, role) for role in (self.roles or [])]}} # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity @@ -60,7 +52,7 @@ class LocalUserModel(NDBaseModel): Identifier: login_id (single field) """ - + # Identifier configuration # TODO: Revisit this identifiers strategy (low priority) identifiers: ClassVar[Optional[List[str]]] = ["login_id"] @@ -69,11 +61,8 @@ class LocalUserModel(NDBaseModel): # Keys management configurations # TODO: Revisit these configurations (low priority) exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List]= [ - ["passwordPolicy", "passwordChangeTime"], # Nested path - ["userID"] # Simple key - ] - + unwanted_keys: ClassVar[List] = [["passwordPolicy", "passwordChangeTime"], ["userID"]] # Nested path # Simple key + # Fields # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec # TODO: use extra for generating argument_spec (low priority) @@ -96,7 +85,7 @@ def password_policy(self) -> Optional[Dict[str, int]]: """Computed nested structure for API payload.""" if self.reuse_limitation is None and self.time_interval_limitation is None: return None - + policy = {} if self.reuse_limitation is not None: policy["reuseLimitation"] = self.reuse_limitation @@ -108,7 +97,6 @@ def password_policy(self) -> Optional[Dict[str, int]]: def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: return value.get_secret_value() if value else None - @field_serializer("security_domains") def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> Optional[Dict]: # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) @@ -119,9 +107,7 @@ def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) for domain in value: domains_dict.update(domain.to_payload()) - return { - "domains": domains_dict - } + return {"domains": domains_dict} # -- Deserialization (API response / Ansible payload -> Model instance) -- @@ -132,17 +118,17 @@ def deserialize_password_policy(cls, data: Any) -> Any: return data password_policy = data.get("passwordPolicy") - + if password_policy and isinstance(password_policy, dict): if "reuseLimitation" in password_policy: data["reuse_limitation"] = password_policy["reuseLimitation"] if "timeIntervalLimitation" in password_policy: data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] - + # Remove the nested structure from data to avoid conflicts # (since it's a computed field, not a real field) data.pop("passwordPolicy", None) - + return data @field_validator("security_domains", mode="before") @@ -150,24 +136,21 @@ def deserialize_password_policy(cls, data: Any) -> Any: def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: if value is None: return None - + # If already 
in list format (Ansible module representation), return as-is if isinstance(value, list): return value - + # If in the nested dict format (API representation) if isinstance(value, dict) and "domains" in value: domains_dict = value["domains"] domains_list = [] - + for domain_name, domain_data in domains_dict.items(): - domains_list.append({ - "name": domain_name, - "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])] - }) - + domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])]}) + return domains_list - + return value # -- Extra -- diff --git a/plugins/module_utils/models/nested.py b/plugins/module_utils/models/nested.py index f2560819..0573e5f8 100644 --- a/plugins/module_utils/models/nested.py +++ b/plugins/module_utils/models/nested.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import List, ClassVar -from .base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel class NDNestedModel(NDBaseModel): diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 364519b8..1aa0e2ec 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -10,27 +10,26 @@ from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable from copy import deepcopy +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# TODO: To be replaced with: from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from .models.base import NDBaseModel -from .utils import issubset -from .types import IdentifierKey # Type aliases -ModelType = TypeVar('ModelType', bound=NDBaseModel) 
+ModelType = TypeVar("ModelType", bound=NDBaseModel) class NDConfigCollection(Generic[ModelType]): """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - + def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): """ Initialize collection. """ self._model_class: ModelType = model_class - + # Dual storage self._items: List[ModelType] = [] self._index: Dict[IdentifierKey, int] = {} @@ -38,7 +37,7 @@ def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = No if items: for item in items: self.add(item) - + # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ @@ -48,7 +47,7 @@ def _extract_key(self, item: ModelType) -> IdentifierKey: return item.get_identifier_value() except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e - + # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" @@ -56,55 +55,47 @@ def _rebuild_index(self) -> None: for index, item in enumerate(self._items): key = self._extract_key(item) self._index[key] = index - + # Core Operations - + def add(self, item: ModelType) -> IdentifierKey: """ Add item to collection (O(1) operation). """ if not isinstance(item, self._model_class): - raise TypeError( - f"Item must be instance of {self._model_class.__name__}, " - f"got {type(item).__name__}" - ) - + raise TypeError(f"Item must be instance of {self._model_class.__name__}, " f"got {type(item).__name__}") + key = self._extract_key(item) - + if key in self._index: - raise ValueError( - f"Item with identifier {key} already exists. Use replace() to update" - ) - + raise ValueError(f"Item with identifier {key} already exists. 
Use replace() to update") + position = len(self._items) self._items.append(item) self._index[key] = position - + return key - + def get(self, key: IdentifierKey) -> Optional[ModelType]: """ Get item by identifier key (O(1) operation). """ index = self._index.get(key) return self._items[index] if index is not None else None - + def replace(self, item: ModelType) -> bool: """ Replace existing item with same identifier (O(1) operation). """ if not isinstance(item, self._model_class): - raise TypeError( - f"Item must be instance of {self._model_class.__name__}, " - f"got {type(item).__name__}" - ) - + raise TypeError(f"Item must be instance of {self._model_class.__name__}, " f"got {type(item).__name__}") + key = self._extract_key(item) index = self._index.get(key) - + if index is None: return False - + self._items[index] = item return True @@ -114,7 +105,7 @@ def merge(self, item: ModelType) -> ModelType: """ key = self._extract_key(item) existing = self.get(key) - + if existing is None: self.add(item) return item @@ -128,17 +119,17 @@ def delete(self, key: IdentifierKey) -> bool: Delete item by identifier (O(n) operation due to index rebuild) """ index = self._index.get(key) - + if index is None: return False - + del self._items[index] self._rebuild_index() - + return True - + # Diff Operations - + # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) 
def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: """ @@ -148,9 +139,9 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha key = self._extract_key(new_item) except ValueError: return "new" - + existing = self.get(key) - + if existing is None: return "new" @@ -158,16 +149,16 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() is_subset = issubset(new_data, existing_data) - + return "no_diff" if is_subset else "changed" - + def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: """ Check if two collections differ. """ if not isinstance(other, NDConfigCollection): raise TypeError("Argument must be NDConfigCollection") - + if len(self) != len(other): return True @@ -178,9 +169,9 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: for key in self.keys(): if other.get(key) is None: return True - + return False - + def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: """ Get identifiers in self but not in other. @@ -190,11 +181,11 @@ def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[I return list(current_keys - other_keys) # Collection Operations - + def __len__(self) -> int: """Return number of items.""" return len(self._items) - + def __iter__(self): """Iterate over items.""" return iter(self._items) @@ -205,10 +196,7 @@ def keys(self) -> List[IdentifierKey]: def copy(self) -> "NDConfigCollection[ModelType]": """Create deep copy of collection.""" - return NDConfigCollection( - model_class=self._model_class, - items=deepcopy(self._items) - ) + return NDConfigCollection(model_class=self._model_class, items=deepcopy(self._items)) # Collection Serialization @@ -217,13 +205,13 @@ def to_ansible_config(self, **kwargs) -> List[Dict]: Export as an Ansible config. 
""" return [item.to_config(**kwargs) for item in self._items] - + def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: """ Export as list of API payloads. """ return [item.to_payload(**kwargs) for item in self._items] - + @classmethod def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ @@ -231,7 +219,7 @@ def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **k """ items = [model_class.from_config(item_data, **kwargs) for item_data in data] return cls(model_class=model_class, items=items) - + @classmethod def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": """ diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 5b1f770c..be5849d4 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -12,31 +12,25 @@ from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError from ansible.module_utils.basic import AnsibleModule - -# TODO: To be replaced with: -# from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -# from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection -# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator -# from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -from .nd import NDModule -from .nd_config_collection import NDConfigCollection -from .orchestrators.base import NDBaseOrchestrator -from .types import IdentifierKey -from .constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from 
ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED +# TODO: Revisit StateMachine when there are more arguments than config (e.g., "fabric" and "config" for switches config) +# TODO: class NDStateMachine(NDModule): """ Generic Network Resource Module for Nexus Dashboard. """ - + def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchestrator]): """ Initialize the Network Resource Module. """ - # TODO: Revisit Module initialization and configuration - # nd_module = NDModule() + # TODO: Revisit Module initialization and configuration with rest_send self.module = module self.nd_module = NDModule(module) @@ -51,18 +45,13 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.state = self.module.params["state"] self.ansible_config = self.module.params.get("config", []) - # Initialize collections - # TODO: Revisit collections initialization especially `init_all_data` (medium priority) # TODO: Revisit class variables `previous`, `existing`, etc... 
(medium priority) self.nd_config_collection = NDConfigCollection[self.model_class] try: init_all_data = self.model_orchestrator.query_all() - self.existing = self.nd_config_collection.from_api_response( - response_data=init_all_data, - model_class=self.model_class - ) + self.existing = self.nd_config_collection.from_api_response(response_data=init_all_data, model_class=self.model_class) # Save previous state self.previous = self.existing.copy() self.proposed = self.nd_config_collection(model_class=self.model_class) @@ -74,30 +63,23 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest item = self.model_class.from_config(config) self.proposed.add(item) except ValidationError as e: - self.fail_json( - msg=f"Invalid configuration: {e}", - config=config, - validation_errors=e.errors() - ) + self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) return except Exception as e: - self.fail_json( - msg=f"Initialization failed: {str(e)}", - error=str(e) - ) + self.fail_json(msg=f"Initialization failed: {str(e)}", error=str(e)) # Logging # NOTE: format log placeholder # TODO: use a proper logger (low priority) def format_log( - self, - identifier: IdentifierKey, - operation_status: Literal["no_change", "created", "updated", "deleted"], - before: Optional[Dict[str, Any]] = None, - after: Optional[Dict[str, Any]] = None, - payload: Optional[Dict[str, Any]] = None, - ) -> None: + self, + identifier: IdentifierKey, + operation_status: Literal["no_change", "created", "updated", "deleted"], + before: Optional[Dict[str, Any]] = None, + after: Optional[Dict[str, Any]] = None, + payload: Optional[Dict[str, Any]] = None, + ) -> None: """ Create and append a log entry. 
""" @@ -108,18 +90,15 @@ def format_log( "after": after, "payload": payload, } - + # Add HTTP details if not in check mode if not self.module.check_mode and self.nd_module.url is not None: - log_entry.update({ - "method": self.nd_module.method, - "response": self.nd_module.response, - "status": self.nd_module.status, - "url": self.nd_module.url - }) - + log_entry.update( + {"method": self.nd_module.method, "response": self.nd_module.response, "status": self.nd_module.status, "url": self.nd_module.url} + ) + self.nd_logs.append(log_entry) - + # State Management (core function) # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) def manage_state(self) -> None: @@ -129,17 +108,17 @@ def manage_state(self) -> None: # Execute state operations if self.state in ["merged", "replaced", "overridden"]: self._manage_create_update_state() - + if self.state == "overridden": self._manage_override_deletions() - + elif self.state == "deleted": self._manage_delete_state() - + # TODO: not needed with Ansible `argument_spec` validation. 
Keep it for now but needs to be removed (low priority) + # TODO: boil down an Exception instead of using `fail_json` method else: self.fail_json(msg=f"Invalid state: {self.state}") - def _manage_create_update_state(self) -> None: """ @@ -152,7 +131,7 @@ def _manage_create_update_state(self) -> None: try: # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) - + # No changes needed if diff_status == "no_diff": self.format_log( @@ -162,7 +141,7 @@ def _manage_create_update_state(self) -> None: after=existing_config, ) continue - + # Prepare final config based on state if self.state == "merged": # Merge with existing @@ -187,7 +166,7 @@ def _manage_create_update_state(self) -> None: response = self.model_orchestrator.create(final_item) self.sent.add(final_item) operation_status = "created" - + # Log operation self.format_log( identifier=identifier, @@ -196,32 +175,27 @@ def _manage_create_update_state(self) -> None: after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), payload=final_item.to_payload(), ) - + except Exception as e: error_msg = f"Failed to process {identifier}: {e}" - + self.format_log( identifier=identifier, operation_status="no_change", before=existing_config, after=existing_config, ) - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - - # TODO: Refactor with orchestrator (Top priority) + def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" diff_identifiers = self.previous.get_diff_identifiers(self.proposed) - + for identifier in diff_identifiers: try: existing_item = self.existing.get(identifier) @@ -231,37 +205,31 @@ def _manage_override_deletions(self) -> None: # Execute delete if not self.module.check_mode: response = self.model_orchestrator.delete(existing_item) - + # Remove from collection self.existing.delete(identifier) - + # Log deletion self.format_log( identifier=identifier, operation_status="deleted", before=existing_item.to_config(), after={}, - ) - + except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - - # TODO: Refactor with orchestrator (Top priority) + def _manage_delete_state(self) -> None: """Handle deleted state.""" for proposed_item in self.proposed: try: identifier = proposed_item.get_identifier_value() - + existing_item = self.existing.get(identifier) if not existing_item: # Already deleted or doesn't exist @@ -272,14 +240,14 @@ def _manage_delete_state(self) -> None: after={}, ) continue - + # Execute delete if not self.module.check_mode: response = self.model_orchestrator.delete(existing_item) - + # Remove from collection self.existing.delete(identifier) - + # Log deletion self.format_log( identifier=identifier, @@ -287,18 +255,14 @@ def _manage_delete_state(self) -> None: before=existing_item.to_config(), after={}, ) - + except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - + if not self.module.params.get("ignore_errors", False): - self.fail_json( - msg=error_msg, - identifier=str(identifier), - error=str(e) - ) + self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - + # Output Formatting # TODO: move to separate Class (results) -> align it with rest_send PR # TODO: return a defined 
ordered list of config (for integration test) @@ -306,36 +270,36 @@ def add_logs_and_outputs(self) -> None: """Add logs and outputs to module result based on output_level.""" output_level = self.module.params.get("output_level", "normal") state = self.module.params.get("state") - + # Add previous state for certain states and output levels if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if output_level in ("debug", "info"): self.result["previous"] = self.previous.to_ansible_config() - + # Check if there were changes if self.previous.get_diff_collection(self.existing): self.result["changed"] = True - + # Add stdout if present if self.nd_module.stdout: self.result["stdout"] = self.nd_module.stdout - + # Add debug information if output_level == "debug": self.result["nd_logs"] = self.nd_logs - + if self.nd_module.url is not None: self.result["httpapi_logs"] = self.nd_module.httpapi_logs - + if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent.to_payload_list() self.result["proposed"] = self.proposed.to_ansible_config() - + # Always include current state self.result["current"] = self.existing.to_ansible_config() - + # Module Exit Methods - + def fail_json(self, msg: str, **kwargs) -> None: """ Exit module with failure. @@ -343,26 +307,23 @@ def fail_json(self, msg: str, **kwargs) -> None: self.add_logs_and_outputs() self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result) - + def exit_json(self, **kwargs) -> None: """ Exit module successfully. 
""" self.add_logs_and_outputs() - + # Add diff if module supports it if self.module._diff and self.result.get("changed") is True: try: # Use diff-safe dicts (excludes sensitive fields) before = [item.to_diff_dict() for item in self.previous] after = [item.to_diff_dict() for item in self.existing] - - self.result["diff"] = dict( - before=before, - after=after - ) + + self.result["diff"] = dict(before=before, after=after) except Exception: pass # Don't fail on diff generation - + self.result.update(**kwargs) self.module.exit_json(**self.result) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 924ea4b0..f9a63fa1 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,11 +8,11 @@ __metaclass__ = type -from ..models.base import NDBaseModel -from ..nd import NDModule -from ..api_endpoints.base import NDBaseSmartEndpoint -from typing import Dict, List, Any, Union, ClassVar, Type, Optional from pydantic import BaseModel, ConfigDict +from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] @@ -20,7 +20,6 @@ # TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): - model_config = ConfigDict( use_enum_values=True, validate_assignment=True, diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 46a4ea07..04f7707f 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,12 +8,12 @@ __metaclass__ = type -from .base import NDBaseOrchestrator -from ..models.base import NDBaseModel 
-from ..models.local_user import LocalUserModel from typing import Dict, List, Any, Union, Type -from ..api_endpoints.base import NDBaseSmartEndpoint -from ..api_endpoints.local_user import ( +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, EpApiV1InfraAaaLocalUsersDelete, @@ -23,8 +23,8 @@ ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] -class LocalUserOrchestrator(NDBaseOrchestrator): +class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index a7c1d3dc..0bf7cfc8 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -37,22 +37,22 @@ def issubset(subset: Any, superset: Any) -> bool: """Check if subset is contained in superset.""" if type(subset) is not type(superset): return False - + if not isinstance(subset, dict): if isinstance(subset, list): return all(item in superset for item in subset) return subset == superset - + for key, value in subset.items(): if value is None: continue - + if key not in superset: return False - + if not issubset(value, superset[key]): return False - + return True @@ -60,12 +60,12 @@ def issubset(subset: Any, superset: Any) -> bool: def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) - + for key in 
unwanted_keys: if isinstance(key, str): if key in data: del data[key] - + elif isinstance(key, list) and len(key) > 0: try: parent = data @@ -79,5 +79,5 @@ def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) del parent[key[-1]] except (KeyError, TypeError, IndexError): pass - + return data diff --git a/plugins/modules/nd_api_key.py b/plugins/modules/nd_api_key.py index c00428a9..1a3e4823 100644 --- a/plugins/modules/nd_api_key.py +++ b/plugins/modules/nd_api_key.py @@ -146,7 +146,6 @@ def main(): nd.existing = nd.previous = nd.query_objs(path, key="apiKeys") if state == "present": - if len(api_key_name) > 32 or len(api_key_name) < 1: nd.fail_json("A length of 1 to 32 characters is allowed.") elif re.search(r"[^a-zA-Z0-9_.-]", api_key_name): diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index b6acee72..a6972c07 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -175,15 +175,10 @@ """ from ansible.module_utils.basic import AnsibleModule -# TODO: To be replaced with: -# from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec -# from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine -# from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -# from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator -from ..module_utils.nd import nd_argument_spec -from ..module_utils.nd_state_machine import NDStateMachine -from ..module_utils.models.local_user import LocalUserModel -from ..module_utils.orchestrators.local_user import LocalUserOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from 
ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator def main(): @@ -196,17 +191,17 @@ def main(): ) try: - # Create NDNetworkResourceModule with LocalUserModel - nd_module = NDStateMachine( + # Initialize StateMachine + nd_state_machine = NDStateMachine( module=module, model_orchestrator=LocalUserOrchestrator, ) - + # Manage state - nd_module.manage_state() + nd_state_machine.manage_state() + + nd_state_machine.exit_json() - nd_module.exit_json() - except Exception as e: module.fail_json(msg=f"Module execution failed: {str(e)}") From 427947b527314b77fae72fb0609911ed6eb2cb2b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:02:02 -0500 Subject: [PATCH 092/131] [ignore] Clean code for sanity purposes (except Pydantic import checks. --- plugins/module_utils/api_endpoints/base.py | 2 +- plugins/module_utils/api_endpoints/enums.py | 5 +++++ plugins/module_utils/api_endpoints/local_user.py | 4 ++-- plugins/module_utils/api_endpoints/mixins.py | 2 +- plugins/module_utils/models/base.py | 5 +++-- plugins/module_utils/models/local_user.py | 2 -- plugins/module_utils/nd_config_collection.py | 3 +-- plugins/module_utils/nd_state_machine.py | 1 - plugins/module_utils/orchestrators/base.py | 6 ++---- plugins/module_utils/orchestrators/local_user.py | 8 +++----- plugins/module_utils/orchestrators/types.py | 13 +++++++++++++ plugins/module_utils/types.py | 1 - 12 files changed, 31 insertions(+), 21 deletions(-) create mode 100644 plugins/module_utils/orchestrators/types.py diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py index 954c1f6a..8428ffe8 100644 --- a/plugins/module_utils/api_endpoints/base.py +++ b/plugins/module_utils/api_endpoints/base.py @@ -11,7 +11,7 @@ from abc import ABC, abstractmethod from pydantic import BaseModel, ConfigDict -from typing import Final, Union, Tuple, Any +from typing import Final from 
ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/api_endpoints/enums.py index ced62ba7..18a7f5eb 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/api_endpoints/enums.py @@ -7,6 +7,11 @@ """ Enums used in api_endpoints. """ + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + from enum import Enum diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/api_endpoints/local_user.py index 72639495..890b38e7 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/api_endpoints/local_user.py @@ -13,9 +13,9 @@ from __future__ import absolute_import, division, print_function -__metaclass__ = type # pylint: disable=invalid-name +__metaclass__ = type -from typing import Literal, Union, Tuple, Any, Final +from typing import Literal, Final from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py index 9516c9ce..56cdcfc5 100644 --- a/plugins/module_utils/api_endpoints/mixins.py +++ b/plugins/module_utils/api_endpoints/mixins.py @@ -15,7 +15,7 @@ __metaclass__ = type # pylint: disable=invalid-name -from typing import TYPE_CHECKING, Optional +from typing import Optional from pydantic import BaseModel, Field diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 94fb9cc5..8cdcc765 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from abc import ABC, abstractmethod 
+from abc import ABC from pydantic import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional from typing_extensions import Self @@ -144,7 +144,8 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) # NOTE: initialize and return a deep copy of the instance? - # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? -> similar to NDCOnfigCollection... -> add argument to make it optional either replace + # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? + # -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: if not isinstance(other_model, type(self)): # TODO: Change error message diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index e759a6fb..fe2f2bb5 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -8,9 +8,7 @@ __metaclass__ = type -from types import MappingProxyType from typing import List, Dict, Any, Optional, ClassVar, Literal -from typing_extensions import Self from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1aa0e2ec..5fd9886d 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -8,13 +8,12 @@ __metaclass__ = type -from typing import TypeVar, Generic, Optional, List, Dict, Any, Union, Tuple, Literal, Callable +from typing import TypeVar, 
Generic, Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - # Type aliases ModelType = TypeVar("ModelType", bound=NDBaseModel) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index be5849d4..923f0b69 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,7 +8,6 @@ __metaclass__ = type -from copy import deepcopy from typing import Optional, List, Dict, Any, Literal, Type from pydantic import ValidationError from ansible.module_utils.basic import AnsibleModule diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index f9a63fa1..4df0797d 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -9,13 +9,11 @@ __metaclass__ = type from pydantic import BaseModel, ConfigDict -from typing import Dict, List, Any, Union, ClassVar, Type, Optional +from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint - - -ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType # TODO: Revisit naming them "Orchestrator" diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 04f7707f..d30b29f8 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,11 +8,12 @@ 
__metaclass__ = type -from typing import Dict, List, Any, Union, Type +from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( EpApiV1InfraAaaLocalUsersPost, EpApiV1InfraAaaLocalUsersPut, @@ -21,9 +22,6 @@ ) -ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] - - class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel @@ -33,7 +31,7 @@ class LocalUserOrchestrator(NDBaseOrchestrator): query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet - def query_all(self): + def query_all(self) -> ResponseType: """ Custom query_all action to extract 'localusers' from response. 
""" diff --git a/plugins/module_utils/orchestrators/types.py b/plugins/module_utils/orchestrators/types.py new file mode 100644 index 00000000..b721c65b --- /dev/null +++ b/plugins/module_utils/orchestrators/types.py @@ -0,0 +1,13 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Any, Union, List, Dict + +ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] diff --git a/plugins/module_utils/types.py b/plugins/module_utils/types.py index 124aedd5..3111a095 100644 --- a/plugins/module_utils/types.py +++ b/plugins/module_utils/types.py @@ -10,5 +10,4 @@ from typing import Any, Union, Tuple - IdentifierKey = Union[str, int, Tuple[Any, ...]] From 99458390b9ac15b0c3c88846ad4633ddf1873d99 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:18:35 -0500 Subject: [PATCH 093/131] [ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues. 
--- plugins/module_utils/api_endpoints/base.py | 180 ------------------ plugins/module_utils/api_endpoints/mixins.py | 25 --- plugins/module_utils/endpoints/base.py | 7 + .../{api_endpoints => endpoints}/enums.py | 2 +- plugins/module_utils/endpoints/mixins.py | 3 +- .../v1/infra_aaa_local_users.py} | 32 ++-- plugins/module_utils/nd_state_machine.py | 1 + plugins/module_utils/orchestrators/base.py | 4 +- .../module_utils/orchestrators/local_user.py | 24 +-- 9 files changed, 41 insertions(+), 237 deletions(-) delete mode 100644 plugins/module_utils/api_endpoints/base.py delete mode 100644 plugins/module_utils/api_endpoints/mixins.py rename plugins/module_utils/{api_endpoints => endpoints}/enums.py (97%) rename plugins/module_utils/{api_endpoints/local_user.py => endpoints/v1/infra_aaa_local_users.py} (74%) diff --git a/plugins/module_utils/api_endpoints/base.py b/plugins/module_utils/api_endpoints/base.py deleted file mode 100644 index 8428ffe8..00000000 --- a/plugins/module_utils/api_endpoints/base.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@allenrobel) -# Copyright: (c) 2026, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -from abc import ABC, abstractmethod -from pydantic import BaseModel, ConfigDict -from typing import Final -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import IdentifierKey - - -# TODO: Rename it to APIEndpoint -# NOTE: This is a very minimalist endpoint package -> needs to be enhanced -class NDBaseSmartEndpoint(BaseModel, ABC): - # TODO: maybe to be modified in the future - model_config = ConfigDict(validate_assignment=True) - - # TODO: to remove - base_path: str - - @property - @abstractmethod - def path(self) -> str: - pass - - @property - @abstractmethod - def verb(self) -> str: - pass - - # 
TODO: Maybe to be modifed to be more Pydantic (low priority) - # TODO: Maybe change function's name (low priority) - # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration - @abstractmethod - def set_identifiers(self, identifier: IdentifierKey = None): - pass - - -class NDBasePath: - """ - # Summary - - Centralized API Base Paths - - ## Description - - Provides centralized base path definitions for all ND API endpoints. - This allows API path changes to be managed in a single location. - - ## Usage - - ```python - # Get a complete base path - path = BasePath.control_fabrics("MyFabric", "config-deploy") - # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/MyFabric/config-deploy - - # Build custom paths - path = BasePath.v1("custom", "endpoint") - # Returns: /appcenter/cisco/ndfc/api/v1/custom/endpoint - ``` - - ## Design Notes - - - All base paths are defined as class constants for easy modification - - Helper methods compose paths from base constants - - Use these methods in Pydantic endpoint models to ensure consistency - - If NDFC changes base API paths, only this class needs updating - """ - - # Root API paths - NDFC_API: Final = "/appcenter/cisco/ndfc/api" - ND_INFRA_API: Final = "/api/v1/infra" - ONEMANAGE: Final = "/onemanage" - LOGIN: Final = "/login" - - @classmethod - def api(cls, *segments: str) -> str: - """ - # Summary - - Build path from NDFC API root. - - ## Parameters - - - segments: Path segments to append - - ## Returns - - - Complete path string - - ## Example - - ```python - path = BasePath.api("custom", "endpoint") - # Returns: /appcenter/cisco/ndfc/api/custom/endpoint - ``` - """ - if not segments: - return cls.NDFC_API - return f"{cls.NDFC_API}/{'/'.join(segments)}" - - @classmethod - def v1(cls, *segments: str) -> str: - """ - # Summary - - Build v1 API path. 
- - ## Parameters - - - segments: Path segments to append after v1 - - ## Returns - - - Complete v1 API path - - ## Example - - ```python - path = BasePath.v1("lan-fabric", "rest") - # Returns: /appcenter/cisco/ndfc/api/v1/lan-fabric/rest - ``` - """ - return cls.api("v1", *segments) - - @classmethod - def nd_infra(cls, *segments: str) -> str: - """ - # Summary - - Build ND infra API path. - - ## Parameters - - - segments: Path segments to append after /api/v1/infra - - ## Returns - - - Complete ND infra API path - - ## Example - - ```python - path = BasePath.nd_infra("aaa", "localUsers") - # Returns: /api/v1/infra/aaa/localUsers - ``` - """ - if not segments: - return cls.ND_INFRA_API - return f"{cls.ND_INFRA_API}/{'/'.join(segments)}" - - @classmethod - def nd_infra_aaa(cls, *segments: str) -> str: - """ - # Summary - - Build ND infra AAA API path. - - ## Parameters - - - segments: Path segments to append after aaa (e.g., "localUsers") - - ## Returns - - - Complete ND infra AAA path - - ## Example - - ```python - path = BasePath.nd_infra_aaa("localUsers") - # Returns: /api/v1/infra/aaa/localUsers - ``` - """ - return cls.nd_infra("aaa", *segments) diff --git a/plugins/module_utils/api_endpoints/mixins.py b/plugins/module_utils/api_endpoints/mixins.py deleted file mode 100644 index 56cdcfc5..00000000 --- a/plugins/module_utils/api_endpoints/mixins.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@allenrobel) -# Copyright: (c) 2026, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -Reusable mixin classes for endpoint models. - -This module provides mixin classes that can be composed to add common -fields to endpoint models without duplication. 
-""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type # pylint: disable=invalid-name - -from typing import Optional -from pydantic import BaseModel, Field - - -class LoginIdMixin(BaseModel): - """Mixin for endpoints that require login_id parameter.""" - - login_id: Optional[str] = Field(default=None, min_length=1, description="Login ID") diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index 3ccdff1c..bfd59ee1 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -1,4 +1,5 @@ # Copyright: (c) 2026, Allen Robel (@arobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ @@ -22,6 +23,7 @@ Field, ) from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import IdentifierKey class NDEndpointBaseModel(BaseModel, ABC): @@ -129,3 +131,8 @@ def verb(self) -> HttpVerbEnum: None """ + + # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration + @abstractmethod + def set_identifiers(self, identifier: IdentifierKey = None): + pass diff --git a/plugins/module_utils/api_endpoints/enums.py b/plugins/module_utils/endpoints/enums.py similarity index 97% rename from plugins/module_utils/api_endpoints/enums.py rename to plugins/module_utils/endpoints/enums.py index 18a7f5eb..802b8fe8 100644 --- a/plugins/module_utils/api_endpoints/enums.py +++ b/plugins/module_utils/endpoints/enums.py @@ -5,7 +5,7 @@ # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ -Enums used in api_endpoints. +Enums used in endpoints. 
""" from __future__ import absolute_import, division, print_function diff --git a/plugins/module_utils/endpoints/mixins.py b/plugins/module_utils/endpoints/mixins.py index 47695611..22d9a2dc 100644 --- a/plugins/module_utils/endpoints/mixins.py +++ b/plugins/module_utils/endpoints/mixins.py @@ -1,4 +1,5 @@ -# Copyright: (c) 2026, Allen Robel (@arobel) +# Copyright: (c) 2026, Allen Robel (@allenrobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ diff --git a/plugins/module_utils/api_endpoints/local_user.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py similarity index 74% rename from plugins/module_utils/api_endpoints/local_user.py rename to plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 890b38e7..1e1d7823 100644 --- a/plugins/module_utils/api_endpoints/local_user.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -16,14 +16,14 @@ __metaclass__ = type from typing import Literal, Final -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.mixins import LoginIdMixin -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint, NDBasePath from pydantic import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -class _EpApiV1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): +class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): """ Base class for ND Infra AAA Local Users endpoints. 
@@ -53,7 +53,7 @@ def set_identifiers(self, identifier: IdentifierKey = None): self.login_id = identifier -class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersGet(_V1InfraAaaLocalUsersBase): """ # Summary @@ -74,8 +74,8 @@ class EpApiV1InfraAaaLocalUsersGet(_EpApiV1InfraAaaLocalUsersBase): - GET """ - class_name: Literal["EpApiV1InfraAaaLocalUsersGet"] = Field( - default="EpApiV1InfraAaaLocalUsersGet", + class_name: Literal["V1InfraAaaLocalUsersGet"] = Field( + default="V1InfraAaaLocalUsersGet", description="Class name for backward compatibility", frozen=True, ) @@ -86,7 +86,7 @@ def verb(self) -> VerbEnum: return VerbEnum.GET -class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersPost(_V1InfraAaaLocalUsersBase): """ # Summary @@ -105,8 +105,8 @@ class EpApiV1InfraAaaLocalUsersPost(_EpApiV1InfraAaaLocalUsersBase): - POST """ - class_name: Literal["EpApiV1InfraAaaLocalUsersPost"] = Field( - default="EpApiV1InfraAaaLocalUsersPost", + class_name: Literal["V1InfraAaaLocalUsersPost"] = Field( + default="V1InfraAaaLocalUsersPost", description="Class name for backward compatibility", frozen=True, ) @@ -117,7 +117,7 @@ def verb(self) -> VerbEnum: return VerbEnum.POST -class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersPut(_V1InfraAaaLocalUsersBase): """ # Summary @@ -136,8 +136,8 @@ class EpApiV1InfraAaaLocalUsersPut(_EpApiV1InfraAaaLocalUsersBase): - PUT """ - class_name: Literal["EpApiV1InfraAaaLocalUsersPut"] = Field( - default="EpApiV1InfraAaaLocalUsersPut", + class_name: Literal["V1InfraAaaLocalUsersPut"] = Field( + default="V1InfraAaaLocalUsersPut", description="Class name for backward compatibility", frozen=True, ) @@ -148,7 +148,7 @@ def verb(self) -> VerbEnum: return VerbEnum.PUT -class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): +class V1InfraAaaLocalUsersDelete(_V1InfraAaaLocalUsersBase): """ # Summary @@ -167,8 
+167,8 @@ class EpApiV1InfraAaaLocalUsersDelete(_EpApiV1InfraAaaLocalUsersBase): - DELETE """ - class_name: Literal["EpApiV1InfraAaaLocalUsersDelete"] = Field( - default="EpApiV1InfraAaaLocalUsersDelete", + class_name: Literal["V1InfraAaaLocalUsersDelete"] = Field( + default="V1InfraAaaLocalUsersDelete", description="Class name for backward compatibility", frozen=True, ) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 923f0b69..ae0a67ce 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -125,6 +125,7 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier + response = {} identifier = proposed_item.get_identifier_value() existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 4df0797d..8c84de8e 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,8 +12,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType # TODO: Revisit naming them "Orchestrator" diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index d30b29f8..bea4a486 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ 
b/plugins/module_utils/orchestrators/local_user.py @@ -12,24 +12,24 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.api_endpoints.local_user import ( - EpApiV1InfraAaaLocalUsersPost, - EpApiV1InfraAaaLocalUsersPut, - EpApiV1InfraAaaLocalUsersDelete, - EpApiV1InfraAaaLocalUsersGet, +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( + V1InfraAaaLocalUsersPost, + V1InfraAaaLocalUsersPut, + V1InfraAaaLocalUsersDelete, + V1InfraAaaLocalUsersGet, ) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = EpApiV1InfraAaaLocalUsersGet + create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet + query_all_endpoint: 
Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ From 2dec365437b6fbfa81c94e5fcbfe3ddec541fdff Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 15:06:04 -0500 Subject: [PATCH 094/131] [ignore] Remove NDModule inheritence from NDStateMachine. Add first iteration of (Mock Pydantic objects/methods) to pass sanity checks for Pydantic importation. --- plugins/module_utils/nd_state_machine.py | 6 +- plugins/module_utils/pydantic_compat.py | 200 +++++++++++++++++++++++ 2 files changed, 203 insertions(+), 3 deletions(-) create mode 100644 plugins/module_utils/pydantic_compat.py diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index ae0a67ce..e68010fb 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -19,8 +19,8 @@ # TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) -# TODO: -class NDStateMachine(NDModule): +# TODO: Remove inheritence from NDModule (Top Priority) +class NDStateMachine: """ Generic Network Resource Module for Nexus Dashboard. """ @@ -31,7 +31,7 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ # TODO: Revisit Module initialization and configuration with rest_send self.module = module - self.nd_module = NDModule(module) + self.nd_module = NDModule(self.module) # Operation tracking self.nd_logs: List[Dict[str, Any]] = [] diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py new file mode 100644 index 00000000..f1d90fe3 --- /dev/null +++ b/plugins/module_utils/pydantic_compat.py @@ -0,0 +1,200 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +# pylint: disable=too-few-public-methods +""" +Pydantic compatibility layer. 
+ +This module provides a single location for Pydantic imports with fallback +implementations when Pydantic is not available. This ensures consistent +behavior across all modules and follows the DRY principle. +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import traceback +from typing import TYPE_CHECKING, Any, Callable, Union + +if TYPE_CHECKING: + # Type checkers always see the real Pydantic types + from pydantic import ( + AfterValidator, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + PydanticExperimentalWarning, + StrictBool, + ValidationError, + field_serializer, + model_serializer, + field_validator, + model_validator, + validator, + ) +else: + # Runtime: try to import, with fallback + try: + from pydantic import ( + AfterValidator, + BaseModel, + BeforeValidator, + ConfigDict, + Field, + PydanticExperimentalWarning, + StrictBool, + ValidationError, + field_serializer, + model_serializer, + field_validator, + model_validator, + validator, + ) + except ImportError: + HAS_PYDANTIC = False # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR: Union[str, None] = traceback.format_exc() # pylint: disable=invalid-name + + # Fallback: Minimal BaseModel replacement + class BaseModel: + """Fallback BaseModel when pydantic is not available.""" + + model_config = {"validate_assignment": False, "use_enum_values": False} + + def __init__(self, **kwargs): + """Accept keyword arguments and set them as attributes.""" + for key, value in kwargs.items(): + setattr(self, key, value) + + def model_dump(self, exclude_none: bool = False, exclude_defaults: bool = False) -> dict: # pylint: disable=unused-argument + """Return a dictionary of field names and values. 
+ + Args: + exclude_none: If True, exclude fields with None values + exclude_defaults: Accepted for API compatibility but not implemented in fallback + """ + result = {} + for key, value in self.__dict__.items(): + if exclude_none and value is None: + continue + result[key] = value + return result + + # Fallback: ConfigDict that does nothing + def ConfigDict(**kwargs) -> dict: # pylint: disable=unused-argument,invalid-name + """Pydantic ConfigDict fallback when pydantic is not available.""" + return kwargs + + # Fallback: Field that does nothing + def Field(**kwargs) -> Any: # pylint: disable=unused-argument,invalid-name + """Pydantic Field fallback when pydantic is not available.""" + if "default_factory" in kwargs: + return kwargs["default_factory"]() + return kwargs.get("default") + + # Fallback: field_serializer decorator that does nothing + def field_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic field_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: model_serializer decorator that does nothing + def model_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: field_validator decorator that does nothing + def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name + """Pydantic field_validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: AfterValidator that returns the function unchanged + def AfterValidator(func): # pylint: disable=invalid-name + """Pydantic AfterValidator fallback when pydantic is not available.""" + return func + + # Fallback: BeforeValidator that returns the function unchanged + def BeforeValidator(func): # pylint: disable=invalid-name + """Pydantic 
BeforeValidator fallback when pydantic is not available.""" + return func + + # Fallback: PydanticExperimentalWarning + PydanticExperimentalWarning = Warning + + # Fallback: StrictBool + StrictBool = bool + + # Fallback: ValidationError + class ValidationError(Exception): + """ + Pydantic ValidationError fallback when pydantic is not available. + """ + + def __init__(self, message="A custom error occurred."): + self.message = message + super().__init__(self.message) + + def __str__(self): + return f"ValidationError: {self.message}" + + # Fallback: model_validator decorator that does nothing + def model_validator(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + # Fallback: validator decorator that does nothing + def validator(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic validator fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + + else: + HAS_PYDANTIC = True # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name + +# Set HAS_PYDANTIC for when TYPE_CHECKING is True +if TYPE_CHECKING: + HAS_PYDANTIC = True # pylint: disable=invalid-name + PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name + +__all__ = [ + "AfterValidator", + "BaseModel", + "BeforeValidator", + "ConfigDict", + "Field", + "HAS_PYDANTIC", + "PYDANTIC_IMPORT_ERROR", + "PydanticExperimentalWarning", + "StrictBool", + "ValidationError", + "field_serializer", + "model_serializer", + "field_validator", + "model_validator", + "validator", +] From 244a5477985143ff4d6b538110f8ef9d7169ccef Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 17:19:47 -0500 Subject: [PATCH 095/131] [ignore] Rename NDBaseSmartEndpoint to NDBaseEndpoint. Fix importation issues. 
--- plugins/module_utils/endpoints/base.py | 4 +++- .../endpoints/v1/infra_aaa_local_users.py | 4 ++-- plugins/module_utils/orchestrators/base.py | 14 +++++++------- plugins/module_utils/orchestrators/local_user.py | 14 +++++++------- plugins/modules/nd_local_user.py | 5 ++++- 5 files changed, 23 insertions(+), 18 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index bfd59ee1..2d214878 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -23,7 +23,7 @@ Field, ) from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import IdentifierKey +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey class NDEndpointBaseModel(BaseModel, ABC): @@ -132,6 +132,8 @@ def verb(self) -> HttpVerbEnum: None """ + # TODO: Maybe to be modified to be more Pydantic (low priority) + # TODO: Maybe change function's name (low priority) # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 1e1d7823..0008b188 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -18,12 +18,12 @@ from typing import Literal, Final from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint, NDBasePath +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import
NDBaseEndpoint, NDBasePath from pydantic import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseSmartEndpoint): +class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): """ Base class for ND Infra AAA Local Users endpoints. diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 8c84de8e..b0e34b61 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,8 +12,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType # TODO: Revisit naming them "Orchestrator" @@ -28,11 +28,11 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - create_endpoint: Type[NDBaseSmartEndpoint] - update_endpoint: Type[NDBaseSmartEndpoint] - delete_endpoint: Type[NDBaseSmartEndpoint] - query_one_endpoint: Type[NDBaseSmartEndpoint] - query_all_endpoint: Type[NDBaseSmartEndpoint] + create_endpoint: Type[NDBaseEndpoint] + update_endpoint: Type[NDBaseEndpoint] + delete_endpoint: Type[NDBaseEndpoint] + query_one_endpoint: Type[NDBaseEndpoint] + query_all_endpoint: Type[NDBaseEndpoint] # NOTE: Module Field is always required # TODO: Replace it with future sender (low priority) diff --git a/plugins/module_utils/orchestrators/local_user.py 
b/plugins/module_utils/orchestrators/local_user.py index bea4a486..5e52a00b 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -12,8 +12,8 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( V1InfraAaaLocalUsersPost, V1InfraAaaLocalUsersPut, @@ -25,11 +25,11 @@ class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet + create_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index a6972c07..6f296065 100644 
--- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -198,8 +198,11 @@ def main(): ) # Manage state + # TODO: return module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) nd_state_machine.manage_state() - + nd_state_machine.exit_json() except Exception as e: From 903a5e935e49e601e7f3906198de1fbcb41efe7e Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 4 Mar 2026 11:12:27 -0500 Subject: [PATCH 096/131] [ignore] Replace all pydantic imports with pydantic_compat. Fix sanity issues. --- plugins/module_utils/constants.py | 4 ++++ .../endpoints/v1/infra_aaa_local_users.py | 2 +- plugins/module_utils/models/base.py | 9 ++++----- plugins/module_utils/models/local_user.py | 19 ++++++++++++------ plugins/module_utils/nd_state_machine.py | 2 +- plugins/module_utils/orchestrators/base.py | 2 +- plugins/module_utils/pydantic_compat.py | 20 ++++++++++++++++++- plugins/modules/nd_local_user.py | 3 ++- 8 files changed, 45 insertions(+), 16 deletions(-) diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index afa0a2b0..563041a0 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -16,6 +16,7 @@ class NDConstantMapping(Dict): def __init__(self, data: Dict): + self.data = data self.new_dict = deepcopy(data) for k, v in data.items(): self.new_dict[v] = k @@ -24,6 +25,9 @@ def __init__(self, data: Dict): def get_dict(self): return self.new_dict + def get_original_data(self): + return list(self.data.keys()) + OBJECT_TYPES = { "tenant": "OST_TENANT", diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index 0008b188..d1013e24 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -19,7 +19,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import 
LoginIdMixin from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath -from pydantic import Field +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import Field from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 8cdcc765..67ce5de0 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -9,9 +9,8 @@ __metaclass__ = type from abc import ABC -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional -from typing_extensions import Self # TODO: Revisit identifiers strategy (low priority) @@ -82,11 +81,11 @@ def to_config(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=False, exclude_none=True, **kwargs) @classmethod - def from_response(cls, response: Dict[str, Any], **kwargs) -> Self: + def from_response(cls, response: Dict[str, Any], **kwargs) -> "NDBaseModel": return cls.model_validate(response, by_alias=True, **kwargs) @classmethod - def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> Self: + def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> "NDBaseModel": return cls.model_validate(ansible_config, by_name=True, **kwargs) # TODO: Revisit this function when revisiting identifier strategy (low priority) @@ -146,7 +145,7 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: # NOTE: initialize and return a deep copy of the instance? # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? # -> similar to NDCOnfigCollection... 
-> add argument to make it optional either replace - def merge(self, other_model: "NDBaseModel", **kwargs) -> Self: + def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": if not isinstance(other_model, type(self)): # TODO: Change error message return TypeError("models are not of the same type.") diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index fe2f2bb5..0575c1be 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -9,7 +9,15 @@ __metaclass__ = type from typing import List, Dict, Any, Optional, ClassVar, Literal -from pydantic import Field, SecretStr, model_serializer, field_serializer, field_validator, model_validator, computed_field +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ( + Field, + SecretStr, + model_serializer, + field_serializer, + field_validator, + model_validator, + computed_field, +) from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping @@ -24,7 +32,7 @@ "approver": "approver", "designer": "designer", } -).get_dict() +) class LocalUserSecurityDomainModel(NDNestedModel): @@ -38,7 +46,7 @@ class LocalUserSecurityDomainModel(NDNestedModel): @model_serializer() def serialize_model(self) -> Dict: - return {self.name: {"roles": [USER_ROLES_MAPPING.get(role, role) for role in (self.roles or [])]}} + return {self.name: {"roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in (self.roles or [])]}} # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity @@ -145,7 +153,7 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: domains_list = [] for domain_name, domain_data in domains_dict.items(): - domains_list.append({"name": domain_name, 
"roles": [USER_ROLES_MAPPING.get(role, role) for role in domain_data.get("roles", [])]}) + domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in domain_data.get("roles", [])]}) return domains_list @@ -174,7 +182,7 @@ def get_argument_spec(cls) -> Dict: elements="dict", options=dict( name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=list(USER_ROLES_MAPPING)), + roles=dict(type="list", elements="str", choices=USER_ROLES_MAPPING.get_original_data()), ), aliases=["domains"], ), @@ -182,6 +190,5 @@ def get_argument_spec(cls) -> Dict: remote_user_authorization=dict(type="bool"), ), ), - override_exceptions=dict(type="list", elements="str"), state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), ) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index e68010fb..81d6a966 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import Optional, List, Dict, Any, Literal, Type -from pydantic import ValidationError +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index b0e34b61..1a3b1921 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing 
import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index f1d90fe3..e8924cd2 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -32,12 +32,14 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, model_serializer, field_validator, model_validator, validator, + computed_field, ) else: # Runtime: try to import, with fallback @@ -50,12 +52,14 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, model_serializer, field_validator, model_validator, validator, + computed_field, ) except ImportError: HAS_PYDANTIC = False # pylint: disable=invalid-name @@ -106,7 +110,7 @@ def decorator(func): return func return decorator - + # Fallback: model_serializer decorator that does nothing def model_serializer(*args, **kwargs): # pylint: disable=unused-argument """Pydantic model_serializer fallback when pydantic is not available.""" @@ -125,6 +129,15 @@ def decorator(func): return decorator + # Fallback: computed_field decorator that does nothing + def computed_field(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic computed_field fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + # Fallback: AfterValidator that returns the function unchanged def AfterValidator(func): # pylint: disable=invalid-name """Pydantic AfterValidator fallback when pydantic is not available.""" @@ -141,6 +154,9 @@ def BeforeValidator(func): # pylint: disable=invalid-name # Fallback: StrictBool StrictBool = bool + # Fallback: SecretStr + SecretStr = str + # Fallback: ValidationError class ValidationError(Exception): """ @@ -191,10 +207,12 @@ def decorator(func): 
"PYDANTIC_IMPORT_ERROR", "PydanticExperimentalWarning", "StrictBool", + "SecretStr", "ValidationError", "field_serializer", "model_serializer", "field_validator", "model_validator", "validator", + "computed_field", ] diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 6f296065..65f2e464 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -27,6 +27,7 @@ - The list of the local users to configure. type: list elements: dict + required: True suboptions: email: description: @@ -202,7 +203,7 @@ def main(): # output = nd_state_machine.manage_state() # module.exit_json(**output) nd_state_machine.manage_state() - + nd_state_machine.exit_json() except Exception as e: From dd935419f8d65538bdd913aeaf1093af538b58bf Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 6 Mar 2026 13:35:38 -0500 Subject: [PATCH 097/131] [ignore] Add NDOutput class. Modify NDStateMachine and nd_local_user accordingly --- plugins/module_utils/models/base.py | 7 +- plugins/module_utils/models/local_user.py | 3 +- plugins/module_utils/nd_config_collection.py | 3 +- plugins/module_utils/nd_output.py | 70 +++++++ plugins/module_utils/nd_state_machine.py | 186 +++---------------- plugins/module_utils/orchestrators/base.py | 3 - plugins/module_utils/utils.py | 2 +- plugins/modules/nd_local_user.py | 12 +- 8 files changed, 107 insertions(+), 179 deletions(-) create mode 100644 plugins/module_utils/nd_output.py diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 67ce5de0..14c04945 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -143,12 +143,11 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) # NOTE: initialize and return a deep copy of the instance? - # TODO: Might be missing a proper merge on fields of type `List[NDNestedModel]`? 
- # -> similar to NDCOnfigCollection... -> add argument to make it optional either replace def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": if not isinstance(other_model, type(self)): - # TODO: Change error message - return TypeError("models are not of the same type.") + return TypeError( + f"NDBaseModel.merge method requires models of the same type. self of type {type(self)} and other_model of type {type(other_model)}" + ) for field, value in other_model: if value is None: diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 0575c1be..e2e7faf8 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -71,7 +71,6 @@ class LocalUserModel(NDBaseModel): # Fields # NOTE: `alias` are NOT the ansible aliases. they are the equivalent attribute's names from the API spec - # TODO: use extra for generating argument_spec (low priority) login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") @@ -161,7 +160,7 @@ def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: # -- Extra -- - # TODO: to generate from Fields (low priority) + # TODO: to generate from Fields: use extra for generating argument_spec (low priority) @classmethod def get_argument_spec(cls) -> Dict: return dict( diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 5fd9886d..1f751822 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -37,7 +37,6 @@ def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = No for item in items: self.add(item) - # TODO: might not be necessary def _extract_key(self, item: ModelType) -> IdentifierKey: """ Extract identifier key from item. 
@@ -144,7 +143,7 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" - # TODO: make a diff class level method for NDBaseModel + # TODO: make a diff class level method for NDBaseModel (high priority) existing_data = existing.to_diff_dict() new_data = new_item.to_diff_dict() is_subset = issubset(new_data, existing_data) diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py new file mode 100644 index 00000000..027592df --- /dev/null +++ b/plugins/module_utils/nd_output.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Gaspard Micol (@gmicol) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Dict, Any, Optional, List, Union +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection + + +class NDOutput: + def __init__(self, module: AnsibleModule): + self._output_level: str = module.params.get("output_level", "normal") + self._changed: bool = False + self._before: Union[NDConfigCollection, List] = [] + self._after: Union[NDConfigCollection, List] = [] + self._diff: Union[NDConfigCollection, List] = [] + self._proposed: Union[NDConfigCollection, List] = [] + self._logs: List = [] + self._extra: Dict[str, Any] = {} + + def format(self, **kwargs): + if isinstance(self._before, NDConfigCollection) and isinstance(self._after, NDConfigCollection) and self._before.get_diff_collection(self._after): + self._changed = True + + output = { + "output_level": self._output_level, + "changed": self._changed, + "after": self._after.to_ansible_config() if isinstance(self._after, NDConfigCollection) else self._after, + "before": self._before.to_ansible_config() if isinstance(self._before, NDConfigCollection) else 
self._before, + "diff": self._diff.to_ansible_config() if isinstance(self._diff, NDConfigCollection) else self._diff, + } + + if self._output_level in ("debug", "info"): + output["proposed"] = self._proposed.to_ansible_config() if isinstance(self._proposed, NDConfigCollection) else self._proposed + if self._output_level == "debug": + output["logs"] = "Not yet implemented" + + if self._extra: + output.update(self._extra) + + output.update(**kwargs) + + return output + + def assign( + self, + after: Optional[NDConfigCollection] = None, + before: Optional[NDConfigCollection] = None, + diff: Optional[NDConfigCollection] = None, + proposed: Optional[NDConfigCollection] = None, + logs: Optional[List] = None, + **kwargs + ) -> None: + if isinstance(after, NDConfigCollection): + self._after = after + if isinstance(before, NDConfigCollection): + self._before = before + if isinstance(diff, NDConfigCollection): + self._diff = diff + if isinstance(proposed, NDConfigCollection): + self._proposed = proposed + if isinstance(logs, List): + self._logs = logs + self._extra.update(**kwargs) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 81d6a966..4146926e 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,18 +8,15 @@ __metaclass__ = type -from typing import Optional, List, Dict, Any, Literal, Type +from typing import List, Dict, Any, Type from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator -from 
ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -from ansible_collections.cisco.nd.plugins.module_utils.constants import ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED -# TODO: Revisit StateMachine when there is more arguments than config (e.g., "fabric" and "config" for switches config) -# TODO: Remove inheritence from NDModule (Top Priority) class NDStateMachine: """ Generic Network Resource Module for Nexus Dashboard. @@ -34,29 +31,27 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_module = NDModule(self.module) # Operation tracking - self.nd_logs: List[Dict[str, Any]] = [] - self.result: Dict[str, Any] = {"changed": False} + self.output = NDOutput(self.module) # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class - # TODO: Revisit these class variables when udpating Module intialization and configuration (medium priority) + # TODO: Revisit these class variables when udpating Module intialization and configuration (low priority) self.state = self.module.params["state"] - self.ansible_config = self.module.params.get("config", []) # Initialize collections - # TODO: Revisit class variables `previous`, `existing`, etc... 
(medium priority) self.nd_config_collection = NDConfigCollection[self.model_class] try: - init_all_data = self.model_orchestrator.query_all() - - self.existing = self.nd_config_collection.from_api_response(response_data=init_all_data, model_class=self.model_class) - # Save previous state - self.previous = self.existing.copy() - self.proposed = self.nd_config_collection(model_class=self.model_class) + response_data = self.model_orchestrator.query_all() + # State of configuration objects in ND before change execution + self.before = self.nd_config_collection.from_api_response(response_data=response_data, model_class=self.model_class) + # State of current configuration objects in ND during change execution + self.existing = self.before.copy() + # Ongoing collection of configuration objects that were changed self.sent = self.nd_config_collection(model_class=self.model_class) - - for config in self.ansible_config: + # Collection of configuration objects given by user + self.proposed = self.nd_config_collection(model_class=self.model_class) + for config in self.module.params.get("config", []): try: # Parse config into model item = self.model_class.from_config(config) @@ -64,42 +59,11 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest except ValidationError as e: self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) return - + self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) except Exception as e: - self.fail_json(msg=f"Initialization failed: {str(e)}", error=str(e)) - - # Logging - # NOTE: format log placeholder - # TODO: use a proper logger (low priority) - def format_log( - self, - identifier: IdentifierKey, - operation_status: Literal["no_change", "created", "updated", "deleted"], - before: Optional[Dict[str, Any]] = None, - after: Optional[Dict[str, Any]] = None, - payload: Optional[Dict[str, Any]] = None, - ) -> None: - """ - Create and append a log entry. 
- """ - log_entry = { - "identifier": identifier, - "operation_status": operation_status, - "before": before, - "after": after, - "payload": payload, - } - - # Add HTTP details if not in check mode - if not self.module.check_mode and self.nd_module.url is not None: - log_entry.update( - {"method": self.nd_module.method, "response": self.nd_module.response, "status": self.nd_module.status, "url": self.nd_module.url} - ) - - self.nd_logs.append(log_entry) + self.fail_json(msg=f"NDStateMachine initialization failed: {str(e)}", error=str(e)) # State Management (core function) - # TODO: adapt all `manage` functions to endpoint/orchestrator strategies (Top priority) def manage_state(self) -> None: """ Manage state according to desired configuration. @@ -114,7 +78,6 @@ def manage_state(self) -> None: elif self.state == "deleted": self._manage_delete_state() - # TODO: not needed with Ansible `argument_spec` validation. Keep it for now but needs to be removed (low priority) # TODO: boil down an Exception instead of using `fail_json` method else: self.fail_json(msg=f"Invalid state: {self.state}") @@ -125,28 +88,19 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier - response = {} identifier = proposed_item.get_identifier_value() - existing_config = self.existing.get(identifier).to_config() if self.existing.get(identifier) else {} try: # Determine diff status diff_status = self.existing.get_diff_config(proposed_item) # No changes needed if diff_status == "no_diff": - self.format_log( - identifier=identifier, - operation_status="no_change", - before=existing_config, - after=existing_config, - ) continue # Prepare final config based on state if self.state == "merged": # Merge with existing - merged_item = self.existing.merge(proposed_item) - final_item = merged_item + final_item = self.existing.merge(proposed_item) else: # Replace or create if diff_status == "changed": @@ -158,34 +112,18 @@ def 
_manage_create_update_state(self) -> None: # Execute API operation if diff_status == "changed": if not self.module.check_mode: - response = self.model_orchestrator.update(final_item) + self.model_orchestrator.update(final_item) self.sent.add(final_item) - operation_status = "updated" elif diff_status == "new": if not self.module.check_mode: - response = self.model_orchestrator.create(final_item) + self.model_orchestrator.create(final_item) self.sent.add(final_item) - operation_status = "created" # Log operation - self.format_log( - identifier=identifier, - operation_status=operation_status, - before=existing_config, - after=self.model_class.model_validate(response).to_config() if not self.module.check_mode else final_item.to_config(), - payload=final_item.to_payload(), - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to process {identifier}: {e}" - - self.format_log( - identifier=identifier, - operation_status="no_change", - before=existing_config, - after=existing_config, - ) - if not self.module.params.get("ignore_errors", False): self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return @@ -194,7 +132,7 @@ def _manage_override_deletions(self) -> None: """ Delete items not in proposed config (for overridden state). 
""" - diff_identifiers = self.previous.get_diff_identifiers(self.proposed) + diff_identifiers = self.before.get_diff_identifiers(self.proposed) for identifier in diff_identifiers: try: @@ -204,18 +142,13 @@ def _manage_override_deletions(self) -> None: # Execute delete if not self.module.check_mode: - response = self.model_orchestrator.delete(existing_item) + self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) # Log deletion - self.format_log( - identifier=identifier, - operation_status="deleted", - before=existing_item.to_config(), - after={}, - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" @@ -232,29 +165,17 @@ def _manage_delete_state(self) -> None: existing_item = self.existing.get(identifier) if not existing_item: - # Already deleted or doesn't exist - self.format_log( - identifier=identifier, - operation_status="no_change", - before={}, - after={}, - ) continue # Execute delete if not self.module.check_mode: - response = self.model_orchestrator.delete(existing_item) + self.model_orchestrator.delete(existing_item) # Remove from collection self.existing.delete(identifier) # Log deletion - self.format_log( - identifier=identifier, - operation_status="deleted", - before=existing_item.to_config(), - after={}, - ) + self.output.assign(after=self.existing) except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" @@ -263,67 +184,10 @@ def _manage_delete_state(self) -> None: self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) return - # Output Formatting - # TODO: move to separate Class (results) -> align it with rest_send PR - # TODO: return a defined ordered list of config (for integration test) - def add_logs_and_outputs(self) -> None: - """Add logs and outputs to module result based on output_level.""" - output_level = self.module.params.get("output_level", "normal") - state = self.module.params.get("state") 
- - # Add previous state for certain states and output levels - if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - if output_level in ("debug", "info"): - self.result["previous"] = self.previous.to_ansible_config() - - # Check if there were changes - if self.previous.get_diff_collection(self.existing): - self.result["changed"] = True - - # Add stdout if present - if self.nd_module.stdout: - self.result["stdout"] = self.nd_module.stdout - - # Add debug information - if output_level == "debug": - self.result["nd_logs"] = self.nd_logs - - if self.nd_module.url is not None: - self.result["httpapi_logs"] = self.nd_module.httpapi_logs - - if state in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: - self.result["sent"] = self.sent.to_payload_list() - self.result["proposed"] = self.proposed.to_ansible_config() - - # Always include current state - self.result["current"] = self.existing.to_ansible_config() - # Module Exit Methods def fail_json(self, msg: str, **kwargs) -> None: """ Exit module with failure. """ - self.add_logs_and_outputs() - self.result.update(**kwargs) - self.module.fail_json(msg=msg, **self.result) - - def exit_json(self, **kwargs) -> None: - """ - Exit module successfully. 
- """ - self.add_logs_and_outputs() - - # Add diff if module supports it - if self.module._diff and self.result.get("changed") is True: - try: - # Use diff-safe dicts (excludes sensitive fields) - before = [item.to_diff_dict() for item in self.previous] - after = [item.to_diff_dict() for item in self.existing] - - self.result["diff"] = dict(before=before, after=after) - except Exception: - pass # Don't fail on diff generation - - self.result.update(**kwargs) - self.module.exit_json(**self.result) + self.module.fail_json(msg=msg) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 1a3b1921..1a8b4f10 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -16,7 +16,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -# TODO: Revisit naming them "Orchestrator" class NDBaseOrchestrator(BaseModel): model_config = ConfigDict( use_enum_values=True, @@ -40,7 +39,6 @@ class NDBaseOrchestrator(BaseModel): # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) - # TODO: Revisit Deserialization def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.create_endpoint() @@ -72,7 +70,6 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e - # TODO: Revisit the straegy around the query_all (see local_user's case) def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: result = self.sender.query_obj(self.query_all_endpoint.path) diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 0bf7cfc8..e09bd499 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -56,7 +56,7 @@ def issubset(subset: Any, superset: Any) -> bool: return True -# TODO: Might not necessary with Pydantic validation and serialization built-in methods +# TODO: Might not necessary with Pydantic validation and serialization built-in methods (see models/local_user) def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 65f2e464..d1d871fe 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -128,10 +128,10 @@ reuse_limitation: 20 time_interval_limitation: 10 security_domains: - name: all - roles: - - observer - - support_engineer + - name: all + roles: + - observer + - support_engineer remote_id_claim: remote_user remote_user_authorization: true state: merged @@ -204,10 +204,10 @@ def main(): # module.exit_json(**output) nd_state_machine.manage_state() - nd_state_machine.exit_json() + 
module.exit_json(**nd_state_machine.output.format()) except Exception as e: - module.fail_json(msg=f"Module execution failed: {str(e)}") + module.fail_json(msg=f"Module execution failed: {str(e)}", **nd_state_machine.output.format()) if __name__ == "__main__": From 344fe197005fd3b03c3c09fc74f510dc51cc31f7 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 10 Mar 2026 13:36:50 -0400 Subject: [PATCH 098/131] [ignore] Update NDOutput class. Remove all fail_json dependencies in NDStateMachineand add custom Exception for it in common/exceptions dir. Set json mode for to_diff_dict method in NDBaseModel. --- plugins/module_utils/common/exceptions.py | 11 ++++++-- plugins/module_utils/models/base.py | 4 +-- plugins/module_utils/nd_output.py | 7 +++-- plugins/module_utils/nd_state_machine.py | 32 +++++++---------------- 4 files changed, 23 insertions(+), 31 deletions(-) diff --git a/plugins/module_utils/common/exceptions.py b/plugins/module_utils/common/exceptions.py index 16e31ac6..0d7b7bcc 100644 --- a/plugins/module_utils/common/exceptions.py +++ b/plugins/module_utils/common/exceptions.py @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) """ @@ -144,3 +143,11 @@ def to_dict(self) -> dict[str, Any]: - None """ return self.error_data.model_dump(exclude_none=True) + + +class NDStateMachineError(Exception): + """ + Raised when NDStateMachine is failing. + """ + + pass diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 14c04945..30e5de5e 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -72,7 +72,7 @@ def to_payload(self, **kwargs) -> Dict[str, Any]: """ Convert model to API payload format. 
""" - return self.model_dump(by_alias=True, exclude_none=True, **kwargs) + return self.model_dump(by_alias=True, exclude_none=True, mode="json", **kwargs) def to_config(self, **kwargs) -> Dict[str, Any]: """ @@ -140,7 +140,7 @@ def to_diff_dict(self, **kwargs) -> Dict[str, Any]: """ Export for diff comparison (excludes sensitive fields). """ - return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), **kwargs) + return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), mode="json", **kwargs) # NOTE: initialize and return a deep copy of the instance? def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py index 027592df..dbfc2cd2 100644 --- a/plugins/module_utils/nd_output.py +++ b/plugins/module_utils/nd_output.py @@ -9,13 +9,12 @@ __metaclass__ = type from typing import Dict, Any, Optional, List, Union -from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection class NDOutput: - def __init__(self, module: AnsibleModule): - self._output_level: str = module.params.get("output_level", "normal") + def __init__(self, output_level: str): + self._output_level: str = output_level self._changed: bool = False self._before: Union[NDConfigCollection, List] = [] self._after: Union[NDConfigCollection, List] = [] @@ -24,7 +23,7 @@ def __init__(self, module: AnsibleModule): self._logs: List = [] self._extra: Dict[str, Any] = {} - def format(self, **kwargs): + def format(self, **kwargs) -> Dict[str, Any]: if isinstance(self._before, NDConfigCollection) and isinstance(self._after, NDConfigCollection) and self._before.get_diff_collection(self._after): self._changed = True diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 4146926e..bd86da3c 100644 --- 
a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -8,13 +8,14 @@ __metaclass__ = type -from typing import List, Dict, Any, Type +from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError class NDStateMachine: @@ -31,7 +32,7 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.nd_module = NDModule(self.module) # Operation tracking - self.output = NDOutput(self.module) + self.output = NDOutput(output_level=module.params.get("output_level", "normal")) # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) @@ -57,11 +58,10 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest item = self.model_class.from_config(config) self.proposed.add(item) except ValidationError as e: - self.fail_json(msg=f"Invalid configuration: {e}", config=config, validation_errors=e.errors()) - return + raise NDStateMachineError(f"Invalid configuration. 
for config {config}: {str(e)}") self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) except Exception as e: - self.fail_json(msg=f"NDStateMachine initialization failed: {str(e)}", error=str(e)) + raise NDStateMachineError(f"Initialization failed: {str(e)}") # State Management (core function) def manage_state(self) -> None: @@ -78,9 +78,8 @@ def manage_state(self) -> None: elif self.state == "deleted": self._manage_delete_state() - # TODO: boil down an Exception instead of using `fail_json` method else: - self.fail_json(msg=f"Invalid state: {self.state}") + raise NDStateMachineError(f"Invalid state: {self.state}") def _manage_create_update_state(self) -> None: """ @@ -125,8 +124,7 @@ def _manage_create_update_state(self) -> None: except Exception as e: error_msg = f"Failed to process {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return + raise NDStateMachineError(error_msg) def _manage_override_deletions(self) -> None: """ @@ -152,10 +150,8 @@ def _manage_override_deletions(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return + raise NDStateMachineError(error_msg) def _manage_delete_state(self) -> None: """Handle deleted state.""" @@ -179,15 +175,5 @@ def _manage_delete_state(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" - if not self.module.params.get("ignore_errors", False): - self.fail_json(msg=error_msg, identifier=str(identifier), error=str(e)) - return - - # Module Exit Methods - - def fail_json(self, msg: str, **kwargs) -> None: - """ - Exit module with failure. 
- """ - self.module.fail_json(msg=msg) + raise NDStateMachineError(error_msg) From b0f04bcd60d5e17219c7f1d68eb974a0d0e63ab0 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 11 Mar 2026 11:48:38 -0400 Subject: [PATCH 099/131] [ignore] Fix serialization of model with minimal changes to base.py and local_user.py. Add method to NDBaseModel and apply relevant changes to nd_config_collection. --- plugins/module_utils/models/base.py | 211 ++++++++++++------- plugins/module_utils/models/local_user.py | 180 ++++++++++------ plugins/module_utils/nd_config_collection.py | 5 +- plugins/module_utils/pydantic_compat.py | 6 + 4 files changed, 256 insertions(+), 146 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 30e5de5e..79f9ec80 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -9,154 +9,221 @@ __metaclass__ = type from abc import ABC -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict -from typing import List, Dict, Any, ClassVar, Tuple, Union, Literal, Optional +from pydantic import BaseModel, ConfigDict +from typing import List, Dict, Any, ClassVar, Set, Tuple, Union, Literal, Optional +from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset -# TODO: Revisit identifiers strategy (low priority) -# NOTE: what about List of NestedModels? -> make it a separate Sub Model class NDBaseModel(BaseModel, ABC): """ Base model for all Nexus Dashboard API objects. 
- Supports three identifier strategies: - - single: One unique required field (e.g., ["login_id"]) - - composite: Multiple required fields as tuple (e.g., ["device", "interface"]) - - hierarchical: Priority-ordered fields (e.g., ["uuid", "name"]) - - singleton: no identifiers required (e.g., only a single instance can exist in Nexus Dasboard) + Class-level configuration attributes: + identifiers: List of field names used to uniquely identify this object. + identifier_strategy: How identifiers are interpreted. + exclude_from_diff: Fields excluded from diff comparisons. + unwanted_keys: Keys to strip from API responses before processing. + payload_nested_fields: Mapping of {payload_key: [field_names]} for fields + that should be grouped under a nested key in payload mode but remain + flat in config mode. + payload_exclude_fields: Fields to exclude from payload output + (e.g., because they are restructured into nested keys). + config_exclude_fields: Fields to exclude from config output + (e.g., computed payload-only structures). 
""" - # TODO: revisit initial Model Configurations (low priority) model_config = ConfigDict( str_strip_whitespace=True, use_enum_values=True, validate_assignment=True, populate_by_name=True, arbitrary_types_allowed=True, - extra="allow", # NOTE: enabled extra: allows to add extra Field infos for generating Ansible argument_spec and Module Docs + extra="ignore", ) - # TODO: Revisit identifiers strategy (low priority) + # --- Identifier Configuration --- + identifiers: ClassVar[Optional[List[str]]] = None - # TODO: Revisit no identifiers strategy naming (`singleton` -> `unique`, `unnamed`) (low priority) identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "singleton" - # Optional: fields to exclude from diffs (e.g., passwords) - exclude_from_diff: ClassVar[List] = [] - # TODO: To be removed in the future (see local_user model) + # --- Serialization Configuration --- + + exclude_from_diff: ClassVar[Set[str]] = set() unwanted_keys: ClassVar[List] = [] - # TODO: Revisit it with identifiers strategy (low priority) + # Declarative nested-field grouping for payload mode + # e.g., {"passwordPolicy": ["reuse_limitation", "time_interval_limitation"]} + # means: in payload mode, remove these fields from top level and nest them + # under "passwordPolicy" with their alias names. + payload_nested_fields: ClassVar[Dict[str, List[str]]] = {} + + # Fields to explicitly exclude per mode + payload_exclude_fields: ClassVar[Set[str]] = set() + config_exclude_fields: ClassVar[Set[str]] = set() + + # --- Subclass Validation --- + def __init_subclass__(cls, **kwargs): - """ - Enforce configuration for identifiers definition. 
- """ super().__init_subclass__(**kwargs) # Skip enforcement for nested models - if cls.__name__ in ["NDNestedModel"] or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): + if cls.__name__ == "NDNestedModel" or any(base.__name__ == "NDNestedModel" for base in cls.__mro__): return if not hasattr(cls, "identifiers") or cls.identifiers is None: - raise ValueError( - f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." - f"Example: `identifiers: ClassVar[Optional[List[str]]] = ['login_id']`" - ) + raise ValueError(f"Class {cls.__name__} must define 'identifiers'. " f"Example: identifiers: ClassVar[Optional[List[str]]] = ['login_id']") if not hasattr(cls, "identifier_strategy") or cls.identifier_strategy is None: - raise ValueError( - f"Class {cls.__name__} must define 'identifiers' and 'identifier_strategy'." - f"Example: `identifier_strategy: ClassVar[Optional[Literal['single', 'composite', 'hierarchical', 'singleton']]] = 'single'`" - ) + raise ValueError(f"Class {cls.__name__} must define 'identifier_strategy'. " f"Example: identifier_strategy: ClassVar[...] = 'single'") - def to_payload(self, **kwargs) -> Dict[str, Any]: + # --- Core Serialization --- + + def _build_payload_nested(self, data: Dict[str, Any]) -> Dict[str, Any]: """ - Convert model to API payload format. + Apply payload_nested_fields: pull specified fields out of the top-level + dict and group them under their declared parent key. 
""" - return self.model_dump(by_alias=True, exclude_none=True, mode="json", **kwargs) + if not self.payload_nested_fields: + return data + + result = dict(data) + + for nested_key, field_names in self.payload_nested_fields.items(): + nested_dict = {} + for field_name in field_names: + # Resolve the alias for this field + field_info = self.__class__.model_fields.get(field_name) + if field_info is None: + continue + + alias = field_info.alias or field_name + + # Pull value from the serialized data (which uses aliases in payload mode) + if alias in result: + nested_dict[alias] = result.pop(alias) + + if nested_dict: + result[nested_key] = nested_dict + + return result + + def to_payload(self, **kwargs) -> Dict[str, Any]: + """Convert model to API payload format (aliased keys, nested structures).""" + data = self.model_dump( + by_alias=True, + exclude_none=True, + mode="json", + context={"mode": "payload"}, + exclude=self.payload_exclude_fields or None, + **kwargs, + ) + return self._build_payload_nested(data) def to_config(self, **kwargs) -> Dict[str, Any]: - """ - Convert model to Ansible config format. 
- """ - return self.model_dump(by_alias=False, exclude_none=True, **kwargs) + """Convert model to Ansible config format (Python field names, flat structure).""" + return self.model_dump( + by_alias=False, + exclude_none=True, + context={"mode": "config"}, + exclude=self.config_exclude_fields or None, + **kwargs, + ) + + # --- Core Deserialization --- @classmethod def from_response(cls, response: Dict[str, Any], **kwargs) -> "NDBaseModel": + """Create model instance from API response dict.""" return cls.model_validate(response, by_alias=True, **kwargs) @classmethod def from_config(cls, ansible_config: Dict[str, Any], **kwargs) -> "NDBaseModel": + """Create model instance from Ansible config dict.""" return cls.model_validate(ansible_config, by_name=True, **kwargs) - # TODO: Revisit this function when revisiting identifier strategy (low priority) - def get_identifier_value(self, **kwargs) -> Union[str, int, Tuple[Any, ...]]: + # --- Identifier Access --- + + def get_identifier_value(self) -> Optional[Union[str, int, Tuple[Any, ...]]]: """ - Extract identifier value(s) from this instance: - - single identifier: Returns field value. - - composite identifiers: Returns tuple of all field values. - - hierarchical identifiers: Returns tuple of (field_name, value) for first non-None field. + Extract identifier value(s) based on the configured strategy. 
+ + Returns: + - single: The field value + - composite: Tuple of all field values + - hierarchical: Tuple of (field_name, value) for first non-None field + - singleton: None """ - if not self.identifiers and self.identifier_strategy != "singleton": - raise ValueError(f"{self.__class__.__name__} must have identifiers defined with its current identifier strategy: `{self.identifier_strategy}`") + strategy = self.identifier_strategy - if self.identifier_strategy == "single": + if strategy == "singleton": + return None + + if not self.identifiers: + raise ValueError(f"{self.__class__.__name__} has strategy '{strategy}' but no identifiers defined.") + + if strategy == "single": value = getattr(self, self.identifiers[0], None) if value is None: raise ValueError(f"Single identifier field '{self.identifiers[0]}' is None") return value - elif self.identifier_strategy == "composite": + elif strategy == "composite": values = [] missing = [] - for field in self.identifiers: value = getattr(self, field, None) if value is None: missing.append(field) values.append(value) - - # NOTE: might be redefined with Pydantic (low priority) if missing: raise ValueError(f"Composite identifier fields {missing} are None. 
" f"All required: {self.identifiers}") - return tuple(values) - elif self.identifier_strategy == "hierarchical": + elif strategy == "hierarchical": for field in self.identifiers: value = getattr(self, field, None) if value is not None: return (field, value) - raise ValueError(f"No non-None value in hierarchical fields {self.identifiers}") - # TODO: Revisit condition when there is no identifiers (low priority) - elif self.identifier_strategy == "singleton": - return None - else: - raise ValueError(f"Unknown identifier strategy: {self.identifier_strategy}") + raise ValueError(f"Unknown identifier strategy: {strategy}") + + # --- Diff & Merge --- def to_diff_dict(self, **kwargs) -> Dict[str, Any]: + """Export for diff comparison, excluding sensitive fields.""" + return self.model_dump( + by_alias=True, + exclude_none=True, + exclude=self.exclude_from_diff or None, + mode="json", + **kwargs, + ) + + def get_diff(self, other: "NDBaseModel") -> bool: + """Diff comparison.""" + self_data = self.to_diff_dict() + other_data = other.to_diff_dict() + return issubset(other_data, self_data) + + def merge(self, other: "NDBaseModel") -> "NDBaseModel": """ - Export for diff comparison (excludes sensitive fields). - """ - return self.model_dump(by_alias=True, exclude_none=True, exclude=set(self.exclude_from_diff), mode="json", **kwargs) + Merge another model's non-None values into this instance. + Recursively merges nested NDBaseModel fields. - # NOTE: initialize and return a deep copy of the instance? - def merge(self, other_model: "NDBaseModel", **kwargs) -> "NDBaseModel": - if not isinstance(other_model, type(self)): - return TypeError( - f"NDBaseModel.merge method requires models of the same type. self of type {type(self)} and other_model of type {type(other_model)}" - ) + Returns self for chaining. + """ + if not isinstance(other, type(self)): + raise TypeError(f"Cannot merge {type(other).__name__} into {type(self).__name__}. 
" f"Both must be the same type.") - for field, value in other_model: + for field_name, value in other: if value is None: continue - current_value = getattr(self, field) - if isinstance(current_value, NDBaseModel) and isinstance(value, NDBaseModel): - setattr(self, field, current_value.merge(value)) - + current = getattr(self, field_name) + if isinstance(current, NDBaseModel) and isinstance(value, NDBaseModel): + current.merge(value) else: - setattr(self, field, value) + setattr(self, field_name, value) + return self diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index e2e7faf8..0320d3c1 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -16,13 +16,14 @@ field_serializer, field_validator, model_validator, - computed_field, + FieldSerializationInfo, + SerializationInfo, ) from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping -# Constant defined here as it is only used in this model + USER_ROLES_MAPPING = NDConstantMapping( { "fabric_admin": "fabric-admin", @@ -36,131 +37,155 @@ class LocalUserSecurityDomainModel(NDNestedModel): - """Security domain configuration for local user (nested model).""" + """ + Security domain with assigned roles for a local user. 
- # Fields - name: str = Field(alias="name", exclude=True) - roles: Optional[List[str]] = Field(default=None, alias="roles", exclude=True) + Canonical form (config): {"name": "all", "roles": ["observer", "support_engineer"]} + API payload form: {"all": {"roles": ["observer", "support-engineer"]}} + """ - # -- Serialization (Model instance -> API payload) -- + name: str = Field(alias="name") + roles: Optional[List[str]] = Field(default=None, alias="roles") @model_serializer() - def serialize_model(self) -> Dict: - return {self.name: {"roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in (self.roles or [])]}} + def serialize(self, info: SerializationInfo) -> Any: + mode = (info.context or {}).get("mode", "payload") - # NOTE: Deserialization defined in `LocalUserModel` due to API response complexity + if mode == "config": + result = {"name": self.name} + if self.roles is not None: + result["roles"] = list(self.roles) + return result + + # Payload mode: nested dict with API role names + api_roles = [USER_ROLES_MAPPING.get_dict().get(role, role) for role in (self.roles or [])] + return {self.name: {"roles": api_roles}} -# TODO: Add field validation (e.g. me, le, choices, etc...) (low priority) class LocalUserModel(NDBaseModel): """ - Local user configuration. + Local user configuration for Nexus Dashboard. + + Identifier: login_id (single) - Identifier: login_id (single field) + Serialization notes: + - In payload mode, `reuse_limitation` and `time_interval_limitation` + are nested under `passwordPolicy` (handled by base class via + `payload_nested_fields`). + - In config mode, they remain as flat top-level fields. + - `security_domains` serializes as a nested dict in payload mode + and a flat list of dicts in config mode. 
""" - # Identifier configuration - # TODO: Revisit this identifiers strategy (low priority) + # --- Identifier Configuration --- + identifiers: ClassVar[Optional[List[str]]] = ["login_id"] identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" - # Keys management configurations - # TODO: Revisit these configurations (low priority) - exclude_from_diff: ClassVar[List[str]] = ["user_password"] - unwanted_keys: ClassVar[List] = [["passwordPolicy", "passwordChangeTime"], ["userID"]] # Nested path # Simple key + # --- Serialization Configuration --- + + exclude_from_diff: ClassVar[set] = {"user_password"} + unwanted_keys: ClassVar[List] = [ + ["passwordPolicy", "passwordChangeTime"], + ["userID"], + ] + + # In payload mode, nest these fields under "passwordPolicy" + payload_nested_fields: ClassVar[Dict[str, List[str]]] = { + "passwordPolicy": ["reuse_limitation", "time_interval_limitation"], + } + + # --- Fields --- - # Fields - # NOTE: `alias` are NOT the ansible aliases. 
they are the equivalent attribute's names from the API spec login_id: str = Field(alias="loginID") email: Optional[str] = Field(default=None, alias="email") first_name: Optional[str] = Field(default=None, alias="firstName") last_name: Optional[str] = Field(default=None, alias="lastName") user_password: Optional[SecretStr] = Field(default=None, alias="password") - reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation", exclude=True) - time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation", exclude=True) + reuse_limitation: Optional[int] = Field(default=None, alias="reuseLimitation") + time_interval_limitation: Optional[int] = Field(default=None, alias="timeIntervalLimitation") security_domains: Optional[List[LocalUserSecurityDomainModel]] = Field(default=None, alias="rbac") remote_id_claim: Optional[str] = Field(default=None, alias="remoteIDClaim") remote_user_authorization: Optional[bool] = Field(default=None, alias="xLaunch") - # -- Serialization (Model instance -> API payload) -- - - @computed_field(alias="passwordPolicy") - @property - def password_policy(self) -> Optional[Dict[str, int]]: - """Computed nested structure for API payload.""" - if self.reuse_limitation is None and self.time_interval_limitation is None: - return None - - policy = {} - if self.reuse_limitation is not None: - policy["reuseLimitation"] = self.reuse_limitation - if self.time_interval_limitation is not None: - policy["timeIntervalLimitation"] = self.time_interval_limitation - return policy + # --- Serializers --- @field_serializer("user_password") def serialize_password(self, value: Optional[SecretStr]) -> Optional[str]: return value.get_secret_value() if value else None @field_serializer("security_domains") - def serialize_domains(self, value: Optional[List[LocalUserSecurityDomainModel]]) -> Optional[Dict]: - # NOTE: exclude `None` values and empty list (-> should we exclude empty list?) 
+ def serialize_security_domains( + self, + value: Optional[List[LocalUserSecurityDomainModel]], + info: FieldSerializationInfo, + ) -> Any: if not value: return None + mode = (info.context or {}).get("mode", "payload") + + if mode == "config": + return [domain.model_dump(context=info.context) for domain in value] + + # Payload mode: merge all domain dicts into {"domains": {...}} domains_dict = {} for domain in value: - domains_dict.update(domain.to_payload()) - + domains_dict.update(domain.model_dump(context=info.context)) return {"domains": domains_dict} - # -- Deserialization (API response / Ansible payload -> Model instance) -- + # --- Validators (Deserialization) --- @model_validator(mode="before") @classmethod - def deserialize_password_policy(cls, data: Any) -> Any: + def flatten_password_policy(cls, data: Any) -> Any: + """ + Flatten nested passwordPolicy from API response into top-level fields. + This is the inverse of the payload_nested_fields nesting. + """ if not isinstance(data, dict): return data - password_policy = data.get("passwordPolicy") - - if password_policy and isinstance(password_policy, dict): - if "reuseLimitation" in password_policy: - data["reuse_limitation"] = password_policy["reuseLimitation"] - if "timeIntervalLimitation" in password_policy: - data["time_interval_limitation"] = password_policy["timeIntervalLimitation"] - - # Remove the nested structure from data to avoid conflicts - # (since it's a computed field, not a real field) - data.pop("passwordPolicy", None) + policy = data.pop("passwordPolicy", None) + if isinstance(policy, dict): + if "reuseLimitation" in policy: + data.setdefault("reuseLimitation", policy["reuseLimitation"]) + if "timeIntervalLimitation" in policy: + data.setdefault("timeIntervalLimitation", policy["timeIntervalLimitation"]) return data @field_validator("security_domains", mode="before") @classmethod - def deserialize_domains(cls, value: Any) -> Optional[List[Dict]]: + def normalize_security_domains(cls, 
value: Any) -> Optional[List[Dict]]: + """ + Accept security_domains in either format: + - List of dicts (Ansible config): [{"name": "all", "roles": [...]}] + - Nested dict (API response): {"domains": {"all": {"roles": [...]}}} + Always normalizes to the list-of-dicts form for model storage. + """ if value is None: return None - # If already in list format (Ansible module representation), return as-is + # Already normalized (from Ansible config) if isinstance(value, list): return value - # If in the nested dict format (API representation) + # API response format if isinstance(value, dict) and "domains" in value: - domains_dict = value["domains"] - domains_list = [] - - for domain_name, domain_data in domains_dict.items(): - domains_list.append({"name": domain_name, "roles": [USER_ROLES_MAPPING.get_dict().get(role, role) for role in domain_data.get("roles", [])]}) - - return domains_list + reverse_mapping = {v: k for k, v in USER_ROLES_MAPPING.get_dict().items()} + return [ + { + "name": domain_name, + "roles": [reverse_mapping.get(role, role) for role in domain_data.get("roles", [])], + } + for domain_name, domain_data in value["domains"].items() + ] return value - # -- Extra -- + # --- Argument Spec --- - # TODO: to generate from Fields: use extra for generating argument_spec (low priority) @classmethod def get_argument_spec(cls) -> Dict: return dict( @@ -180,8 +205,19 @@ def get_argument_spec(cls) -> Dict: type="list", elements="dict", options=dict( - name=dict(type="str", required=True, aliases=["security_domain_name", "domain_name"]), - roles=dict(type="list", elements="str", choices=USER_ROLES_MAPPING.get_original_data()), + name=dict( + type="str", + required=True, + aliases=[ + "security_domain_name", + "domain_name", + ], + ), + roles=dict( + type="list", + elements="str", + choices=USER_ROLES_MAPPING.get_original_data(), + ), ), aliases=["domains"], ), @@ -189,5 +225,9 @@ def get_argument_spec(cls) -> Dict: remote_user_authorization=dict(type="bool"), ), 
), - state=dict(type="str", default="merged", choices=["merged", "replaced", "overridden", "deleted"]), + state=dict( + type="str", + default="merged", + choices=["merged", "replaced", "overridden", "deleted"], + ), ) diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 1f751822..364b8a8f 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -143,10 +143,7 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha if existing is None: return "new" - # TODO: make a diff class level method for NDBaseModel (high priority) - existing_data = existing.to_diff_dict() - new_data = new_item.to_diff_dict() - is_subset = issubset(new_data, existing_data) + is_subset = existing.get_diff(new_item) return "no_diff" if is_subset else "changed" diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index e8924cd2..4456018a 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -40,6 +40,8 @@ model_validator, validator, computed_field, + FieldSerializationInfo, + SerializationInfo, ) else: # Runtime: try to import, with fallback @@ -60,6 +62,8 @@ model_validator, validator, computed_field, + FieldSerializationInfo, + SerializationInfo, ) except ImportError: HAS_PYDANTIC = False # pylint: disable=invalid-name @@ -215,4 +219,6 @@ def decorator(func): "model_validator", "validator", "computed_field", + "FieldSerializationInfo", + "SerializationInfo", ] From 595e514d9eadfae8880e8de1b190a4889afac86d Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 11 Mar 2026 13:56:54 -0400 Subject: [PATCH 100/131] [ignore] Complete nd_local_user integration test for creation and update asserts. 
--- .../targets/nd_local_user/tasks/main.yml | 296 +++++++++++++++++- 1 file changed, 288 insertions(+), 8 deletions(-) diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml index 77e55cd1..de8ad5ed 100644 --- a/tests/integration/targets/nd_local_user/tasks/main.yml +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -46,15 +46,125 @@ - name: all state: merged check_mode: true - register: cm_create_local_user + register: cm_create_local_users - name: Create local users with full and minimum configuration (normal mode) cisco.nd.nd_local_user: <<: *create_local_user - register: nm_create_local_user + register: nm_create_local_users + +- name: Asserts for local users creation tasks + ansible.builtin.assert: + that: + - cm_create_local_users is changed + - cm_create_local_users.after | length == 3 + - cm_create_local_users.after.0.login_id == "admin" + - cm_create_local_users.after.0.first_name == "admin" + - cm_create_local_users.after.0.remote_user_authorization == false + - cm_create_local_users.after.0.reuse_limitation == 0 + - cm_create_local_users.after.0.security_domains | length == 1 + - cm_create_local_users.after.0.security_domains.0.name == "all" + - cm_create_local_users.after.0.security_domains.0.roles | length == 1 + - cm_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - cm_create_local_users.after.0.time_interval_limitation == 0 + - cm_create_local_users.after.1.email == "ansibleuser@example.com" + - cm_create_local_users.after.1.first_name == "Ansible first name" + - cm_create_local_users.after.1.last_name == "Ansible last name" + - cm_create_local_users.after.1.login_id == "ansible_local_user" + - cm_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - cm_create_local_users.after.1.remote_user_authorization == true + - cm_create_local_users.after.1.reuse_limitation == 20 + - cm_create_local_users.after.1.security_domains | 
length == 1 + - cm_create_local_users.after.1.security_domains.0.name == "all" + - cm_create_local_users.after.1.security_domains.0.roles | length == 2 + - cm_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - cm_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - cm_create_local_users.after.1.time_interval_limitation == 10 + - cm_create_local_users.after.2.login_id == "ansible_local_user_2" + - cm_create_local_users.after.2.security_domains | length == 1 + - cm_create_local_users.after.2.security_domains.0.name == "all" + - cm_create_local_users.before | length == 1 + - cm_create_local_users.before.0.login_id == "admin" + - cm_create_local_users.before.0.first_name == "admin" + - cm_create_local_users.before.0.remote_user_authorization == false + - cm_create_local_users.before.0.reuse_limitation == 0 + - cm_create_local_users.before.0.security_domains | length == 1 + - cm_create_local_users.before.0.security_domains.0.name == "all" + - cm_create_local_users.before.0.security_domains.0.roles | length == 1 + - cm_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - cm_create_local_users.before.0.time_interval_limitation == 0 + - cm_create_local_users.diff == [] + - cm_create_local_users.proposed.0.email == "ansibleuser@example.com" + - cm_create_local_users.proposed.0.first_name == "Ansible first name" + - cm_create_local_users.proposed.0.last_name == "Ansible last name" + - cm_create_local_users.proposed.0.login_id == "ansible_local_user" + - cm_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - cm_create_local_users.proposed.0.remote_user_authorization == true + - cm_create_local_users.proposed.0.reuse_limitation == 20 + - cm_create_local_users.proposed.0.security_domains | length == 1 + - cm_create_local_users.proposed.0.security_domains.0.name == "all" + - cm_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - 
cm_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - cm_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - cm_create_local_users.proposed.0.time_interval_limitation == 10 + - cm_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - cm_create_local_users.proposed.1.security_domains | length == 1 + - cm_create_local_users.proposed.1.security_domains.0.name == "all" + - nm_create_local_users is changed + - nm_create_local_users.after.0.first_name == "admin" + - nm_create_local_users.after.0.remote_user_authorization == false + - nm_create_local_users.after.0.reuse_limitation == 0 + - nm_create_local_users.after.0.security_domains | length == 1 + - nm_create_local_users.after.0.security_domains.0.name == "all" + - nm_create_local_users.after.0.security_domains.0.roles | length == 1 + - nm_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - nm_create_local_users.after.0.time_interval_limitation == 0 + - nm_create_local_users.after.1.email == "ansibleuser@example.com" + - nm_create_local_users.after.1.first_name == "Ansible first name" + - nm_create_local_users.after.1.last_name == "Ansible last name" + - nm_create_local_users.after.1.login_id == "ansible_local_user" + - nm_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - nm_create_local_users.after.1.remote_user_authorization == true + - nm_create_local_users.after.1.reuse_limitation == 20 + - nm_create_local_users.after.1.security_domains | length == 1 + - nm_create_local_users.after.1.security_domains.0.name == "all" + - nm_create_local_users.after.1.security_domains.0.roles | length == 2 + - nm_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - nm_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - nm_create_local_users.after.1.time_interval_limitation == 10 + - nm_create_local_users.after.2.login_id == "ansible_local_user_2" + - 
nm_create_local_users.after.2.security_domains | length == 1 + - nm_create_local_users.after.2.security_domains.0.name == "all" + - nm_create_local_users.before | length == 1 + - nm_create_local_users.before.0.login_id == "admin" + - nm_create_local_users.before.0.first_name == "admin" + - nm_create_local_users.before.0.remote_user_authorization == false + - nm_create_local_users.before.0.reuse_limitation == 0 + - nm_create_local_users.before.0.security_domains | length == 1 + - nm_create_local_users.before.0.security_domains.0.name == "all" + - nm_create_local_users.before.0.security_domains.0.roles | length == 1 + - nm_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - nm_create_local_users.before.0.time_interval_limitation == 0 + - nm_create_local_users.diff == [] + - nm_create_local_users.proposed.0.email == "ansibleuser@example.com" + - nm_create_local_users.proposed.0.first_name == "Ansible first name" + - nm_create_local_users.proposed.0.last_name == "Ansible last name" + - nm_create_local_users.proposed.0.login_id == "ansible_local_user" + - nm_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - nm_create_local_users.proposed.0.remote_user_authorization == true + - nm_create_local_users.proposed.0.reuse_limitation == 20 + - nm_create_local_users.proposed.0.security_domains | length == 1 + - nm_create_local_users.proposed.0.security_domains.0.name == "all" + - nm_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - nm_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - nm_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - nm_create_local_users.proposed.0.time_interval_limitation == 10 + - nm_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - nm_create_local_users.proposed.1.security_domains | length == 1 + - nm_create_local_users.proposed.1.security_domains.0.name == "all" # UPDATE -- name: Update all 
ansible_local_user's attributes (check mode) +- name: Replace all ansible_local_user's attributes (check mode) cisco.nd.nd_local_user: &update_first_local_user <<: *nd_info config: @@ -72,12 +182,12 @@ remote_user_authorization: false state: replaced check_mode: true - register: cm_update_local_user + register: cm_replace_local_user -- name: Update local user (normal mode) +- name: Replace all ansible_local_user's attributes (normal mode) cisco.nd.nd_local_user: <<: *update_first_local_user - register: nm_update_local_user + register: nm_replace_local_user - name: Update all ansible_local_user_2's attributes except password cisco.nd.nd_local_user: &update_second_local_user @@ -95,12 +205,178 @@ remote_id_claim: ansible_remote_user_2 remote_user_authorization: true state: merged - register: nm_update_local_user_2 + register: nm_merge_local_user_2 - name: Update all ansible_local_user_2's attributes except password again (idempotency) cisco.nd.nd_local_user: <<: *update_second_local_user - register: nm_update_local_user_2_again + register: nm_merge_local_user_2_again + + +- name: Override local users with minimum configuration + cisco.nd.nd_local_user: + <<: *nd_info + config: + - login_id: admin + first_name: admin + remote_user_authorization: false + reuse_limitation: 0 + time_interval_limitation: 0 + security_domains: + - name: all + roles: + - super_admin + - email: overrideansibleuser@example.com + login_id: ansible_local_user + first_name: Overridden Ansible first name + last_name: Overriden Ansible last name + user_password: overideansibleLocalUserPassword1% + reuse_limitation: 15 + time_interval_limitation: 5 + security_domains: + - name: all + roles: + - observer + remote_id_claim: ansible_remote_user + remote_user_authorization: true + - login_id: ansible_local_user_3 + user_password: ansibleLocalUser3Password1%Test + security_domains: + - name: all + state: overridden + register: nm_override_local_users + +- name: Asserts for local users update tasks + 
ansible.builtin.assert: + that: + - cm_replace_local_user is changed + - cm_replace_local_user.after | length == 3 + - cm_replace_local_user.after.0.login_id == "ansible_local_user_2" + - cm_replace_local_user.after.0.security_domains | length == 1 + - cm_replace_local_user.after.0.security_domains.0.name == "all" + - cm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.after.1.first_name == "Updated Ansible first name" + - cm_replace_local_user.after.1.last_name == "Updated Ansible last name" + - cm_replace_local_user.after.1.login_id == "ansible_local_user" + - cm_replace_local_user.after.1.remote_id_claim == "" + - cm_replace_local_user.after.1.remote_user_authorization == false + - cm_replace_local_user.after.1.reuse_limitation == 25 + - cm_replace_local_user.after.1.security_domains | length == 1 + - cm_replace_local_user.after.1.security_domains.0.name == "all" + - cm_replace_local_user.after.1.security_domains.0.roles | length == 1 + - cm_replace_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.after.1.time_interval_limitation == 15 + - cm_replace_local_user.after.2.login_id == "admin" + - cm_replace_local_user.after.2.first_name == "admin" + - cm_replace_local_user.after.2.remote_user_authorization == false + - cm_replace_local_user.after.2.reuse_limitation == 0 + - cm_replace_local_user.after.2.security_domains | length == 1 + - cm_replace_local_user.after.2.security_domains.0.name == "all" + - cm_replace_local_user.after.2.security_domains.0.roles | length == 1 + - cm_replace_local_user.after.2.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.after.2.time_interval_limitation == 0 + - cm_replace_local_user.before | length == 3 + - cm_replace_local_user.before.2.first_name == "admin" + - cm_replace_local_user.before.2.remote_user_authorization == false + - cm_replace_local_user.before.2.reuse_limitation == 0 + - 
cm_replace_local_user.before.2.security_domains | length == 1 + - cm_replace_local_user.before.2.security_domains.0.name == "all" + - cm_replace_local_user.before.2.security_domains.0.roles | length == 1 + - cm_replace_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.before.2.time_interval_limitation == 0 + - cm_replace_local_user.before.1.email == "ansibleuser@example.com" + - cm_replace_local_user.before.1.first_name == "Ansible first name" + - cm_replace_local_user.before.1.last_name == "Ansible last name" + - cm_replace_local_user.before.1.login_id == "ansible_local_user" + - cm_replace_local_user.before.1.remote_id_claim == "ansible_remote_user" + - cm_replace_local_user.before.1.remote_user_authorization == true + - cm_replace_local_user.before.1.reuse_limitation == 20 + - cm_replace_local_user.before.1.security_domains | length == 1 + - cm_replace_local_user.before.1.security_domains.0.name == "all" + - cm_replace_local_user.before.1.security_domains.0.roles | length == 2 + - cm_replace_local_user.before.1.security_domains.0.roles.0 == "observer" + - cm_replace_local_user.before.1.security_domains.0.roles.1 == "support_engineer" + - cm_replace_local_user.before.1.time_interval_limitation == 10 + - cm_replace_local_user.before.0.login_id == "ansible_local_user_2" + - cm_replace_local_user.before.0.security_domains | length == 1 + - cm_replace_local_user.before.0.security_domains.0.name == "all" + - cm_replace_local_user.diff == [] + - cm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" + - cm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" + - cm_replace_local_user.proposed.0.login_id == "ansible_local_user" + - cm_replace_local_user.proposed.0.remote_id_claim == "" + - cm_replace_local_user.proposed.0.remote_user_authorization == false + - cm_replace_local_user.proposed.0.reuse_limitation == 
25 + - cm_replace_local_user.proposed.0.security_domains | length == 1 + - cm_replace_local_user.proposed.0.security_domains.0.name == "all" + - cm_replace_local_user.proposed.0.security_domains.0.roles | length == 1 + - cm_replace_local_user.proposed.0.security_domains.0.roles.0 == "super_admin" + - cm_replace_local_user.proposed.0.time_interval_limitation == 15 + - nm_replace_local_user is changed + - nm_replace_local_user.after | length == 3 + - nm_replace_local_user.after.0.login_id == "ansible_local_user_2" + - nm_replace_local_user.after.0.security_domains | length == 1 + - nm_replace_local_user.after.0.security_domains.0.name == "all" + - nm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.after.1.first_name == "Updated Ansible first name" + - nm_replace_local_user.after.1.last_name == "Updated Ansible last name" + - nm_replace_local_user.after.1.login_id == "ansible_local_user" + - nm_replace_local_user.after.1.remote_id_claim == "" + - nm_replace_local_user.after.1.remote_user_authorization == false + - nm_replace_local_user.after.1.reuse_limitation == 25 + - nm_replace_local_user.after.1.security_domains | length == 1 + - nm_replace_local_user.after.1.security_domains.0.name == "all" + - nm_replace_local_user.after.1.security_domains.0.roles | length == 1 + - nm_replace_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - nm_replace_local_user.after.1.time_interval_limitation == 15 + - nm_replace_local_user.after.2.login_id == "admin" + - nm_replace_local_user.after.2.first_name == "admin" + - nm_replace_local_user.after.2.remote_user_authorization == false + - nm_replace_local_user.after.2.reuse_limitation == 0 + - nm_replace_local_user.after.2.security_domains | length == 1 + - nm_replace_local_user.after.2.security_domains.0.name == "all" + - nm_replace_local_user.after.2.security_domains.0.roles | length == 1 + - nm_replace_local_user.after.2.security_domains.0.roles.0 == "super_admin" + - 
nm_replace_local_user.after.2.time_interval_limitation == 0 + - nm_replace_local_user.before | length == 3 + - nm_replace_local_user.before.2.first_name == "admin" + - nm_replace_local_user.before.2.remote_user_authorization == false + - nm_replace_local_user.before.2.reuse_limitation == 0 + - nm_replace_local_user.before.2.security_domains | length == 1 + - nm_replace_local_user.before.2.security_domains.0.name == "all" + - nm_replace_local_user.before.2.security_domains.0.roles | length == 1 + - nm_replace_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - nm_replace_local_user.before.2.time_interval_limitation == 0 + - nm_replace_local_user.before.1.email == "ansibleuser@example.com" + - nm_replace_local_user.before.1.first_name == "Ansible first name" + - nm_replace_local_user.before.1.last_name == "Ansible last name" + - nm_replace_local_user.before.1.login_id == "ansible_local_user" + - nm_replace_local_user.before.1.remote_id_claim == "ansible_remote_user" + - nm_replace_local_user.before.1.remote_user_authorization == true + - nm_replace_local_user.before.1.reuse_limitation == 20 + - nm_replace_local_user.before.1.security_domains | length == 1 + - nm_replace_local_user.before.1.security_domains.0.name == "all" + - nm_replace_local_user.before.1.security_domains.0.roles | length == 2 + - nm_replace_local_user.before.1.security_domains.0.roles.0 == "observer" + - nm_replace_local_user.before.1.security_domains.0.roles.1 == "support_engineer" + - nm_replace_local_user.before.1.time_interval_limitation == 10 + - nm_replace_local_user.before.0.login_id == "ansible_local_user_2" + - nm_replace_local_user.before.0.security_domains | length == 1 + - nm_replace_local_user.before.0.security_domains.0.name == "all" + - nm_replace_local_user.diff == [] + - nm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" + - 
nm_replace_local_user.proposed.0.last_name == "Updated Ansible last name"
+      - nm_replace_local_user.proposed.0.login_id == "ansible_local_user"
+      - nm_replace_local_user.proposed.0.remote_id_claim == ""
+      - nm_replace_local_user.proposed.0.remote_user_authorization == false
+      - nm_replace_local_user.proposed.0.reuse_limitation == 25
+      - nm_replace_local_user.proposed.0.security_domains | length == 1
+      - nm_replace_local_user.proposed.0.security_domains.0.name == "all"
+      - nm_replace_local_user.proposed.0.security_domains.0.roles | length == 1
+      - nm_replace_local_user.proposed.0.security_domains.0.roles.0 == "super_admin"
+      - nm_replace_local_user.proposed.0.time_interval_limitation == 15
 
 # DELETE
 
@@ -123,6 +399,9 @@
     <<: *delete_local_user
   register: nm_delete_local_user_again
 
+- name: Asserts for local users deletion tasks
+  ansible.builtin.assert:
+    that:
 # CLEAN UP
 
 - name: Ensure local users do not exist
@@ -131,4 +410,5 @@
     config:
       - login_id: ansible_local_user
       - login_id: ansible_local_user_2
+      - login_id: ansible_local_user_3
     state: deleted

From 85852154a00ab795c27b6fc52de8bba97868ed62 Mon Sep 17 00:00:00 2001
From: Gaspard Micol
Date: Thu, 12 Mar 2026 11:45:29 -0400
Subject: [PATCH 101/131] [ignore] Finish integration test file for
 nd_local_user module. Remove Generic Class inheritance from
 NDConfigCollection. Clean Pydantic imports.
--- plugins/module_utils/models/base.py | 2 +- plugins/module_utils/nd_config_collection.py | 33 +-- plugins/module_utils/nd_output.py | 2 +- plugins/module_utils/nd_state_machine.py | 9 +- plugins/module_utils/utils.py | 2 +- plugins/modules/nd_local_user.py | 2 +- .../targets/nd_local_user/tasks/main.yml | 267 +++++++++++++++++- 7 files changed, 275 insertions(+), 42 deletions(-) diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 79f9ec80..21fb983e 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -9,7 +9,7 @@ __metaclass__ = type from abc import ABC -from pydantic import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Set, Tuple, Union, Literal, Optional from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 364b8a8f..d34ca462 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -11,33 +11,30 @@ from typing import TypeVar, Generic, Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey -# Type aliases -ModelType = TypeVar("ModelType", bound=NDBaseModel) -class NDConfigCollection(Generic[ModelType]): +class NDConfigCollection: """ Nexus Dashboard configuration collection for NDBaseModel instances. """ - def __init__(self, model_class: ModelType, items: Optional[List[ModelType]] = None): + def __init__(self, model_class: NDBaseModel, items: Optional[List[NDBaseModel]] = None): """ Initialize collection. 
""" - self._model_class: ModelType = model_class + self._model_class: NDBaseModel = model_class # Dual storage - self._items: List[ModelType] = [] + self._items: List[NDBaseModel] = [] self._index: Dict[IdentifierKey, int] = {} if items: for item in items: self.add(item) - def _extract_key(self, item: ModelType) -> IdentifierKey: + def _extract_key(self, item: NDBaseModel) -> IdentifierKey: """ Extract identifier key from item. """ @@ -56,7 +53,7 @@ def _rebuild_index(self) -> None: # Core Operations - def add(self, item: ModelType) -> IdentifierKey: + def add(self, item: NDBaseModel) -> IdentifierKey: """ Add item to collection (O(1) operation). """ @@ -74,14 +71,14 @@ def add(self, item: ModelType) -> IdentifierKey: return key - def get(self, key: IdentifierKey) -> Optional[ModelType]: + def get(self, key: IdentifierKey) -> Optional[NDBaseModel]: """ Get item by identifier key (O(1) operation). """ index = self._index.get(key) return self._items[index] if index is not None else None - def replace(self, item: ModelType) -> bool: + def replace(self, item: NDBaseModel) -> bool: """ Replace existing item with same identifier (O(1) operation). """ @@ -97,7 +94,7 @@ def replace(self, item: ModelType) -> bool: self._items[index] = item return True - def merge(self, item: ModelType) -> ModelType: + def merge(self, item: NDBaseModel) -> NDBaseModel: """ Merge item with existing, or add if not present. """ @@ -129,7 +126,7 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) - def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "changed"]: + def get_diff_config(self, new_item: NDBaseModel) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
""" @@ -147,7 +144,7 @@ def get_diff_config(self, new_item: ModelType) -> Literal["new", "no_diff", "cha return "no_diff" if is_subset else "changed" - def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: + def get_diff_collection(self, other: "NDConfigCollection") -> bool: """ Check if two collections differ. """ @@ -167,7 +164,7 @@ def get_diff_collection(self, other: "NDConfigCollection[ModelType]") -> bool: return False - def get_diff_identifiers(self, other: "NDConfigCollection[ModelType]") -> List[IdentifierKey]: + def get_diff_identifiers(self, other: "NDConfigCollection") -> List[IdentifierKey]: """ Get identifiers in self but not in other. """ @@ -189,7 +186,7 @@ def keys(self) -> List[IdentifierKey]: """Get all identifier keys.""" return list(self._index.keys()) - def copy(self) -> "NDConfigCollection[ModelType]": + def copy(self) -> "NDConfigCollection": """Create deep copy of collection.""" return NDConfigCollection(model_class=self._model_class, items=deepcopy(self._items)) @@ -208,7 +205,7 @@ def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: return [item.to_payload(**kwargs) for item in self._items] @classmethod - def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": + def from_ansible_config(cls, data: List[Dict], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": """ Create collection from Ansible config. """ @@ -216,7 +213,7 @@ def from_ansible_config(cls, data: List[Dict], model_class: type[ModelType], **k return cls(model_class=model_class, items=items) @classmethod - def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[ModelType], **kwargs) -> "NDConfigCollection[ModelType]": + def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": """ Create collection from API response. 
""" diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py index dbfc2cd2..0e5ed6ef 100644 --- a/plugins/module_utils/nd_output.py +++ b/plugins/module_utils/nd_output.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index bd86da3c..3b6c891c 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -41,17 +41,16 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.state = self.module.params["state"] # Initialize collections - self.nd_config_collection = NDConfigCollection[self.model_class] try: response_data = self.model_orchestrator.query_all() # State of configuration objects in ND before change execution - self.before = self.nd_config_collection.from_api_response(response_data=response_data, model_class=self.model_class) + self.before = NDConfigCollection.from_api_response(response_data=response_data, model_class=self.model_class) # State of current configuration objects in ND during change execution self.existing = self.before.copy() # Ongoing collection of configuration objects that were changed - self.sent = self.nd_config_collection(model_class=self.model_class) + self.sent = NDConfigCollection(model_class=self.model_class) # Collection of configuration objects given by user - self.proposed = self.nd_config_collection(model_class=self.model_class) + self.proposed = NDConfigCollection(model_class=self.model_class) for config in 
self.module.params.get("config", []): try: # Parse config into model diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index e09bd499..76e936bb 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index d1d871fe..56e59ad5 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -1,7 +1,7 @@ #!/usr/bin/python # -*- coding: utf-8 -*- -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml index de8ad5ed..b7f205ae 100644 --- a/tests/integration/targets/nd_local_user/tasks/main.yml +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -1,5 +1,5 @@ # Test code for the ND modules -# Copyright: (c) 2025, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -19,6 +19,7 @@ config: - login_id: ansible_local_user - login_id: ansible_local_user_2 + - login_id: ansible_local_user_3 state: deleted # CREATE @@ -217,19 +218,10 @@ cisco.nd.nd_local_user: <<: *nd_info config: - - login_id: admin - first_name: admin - remote_user_authorization: false - reuse_limitation: 0 - time_interval_limitation: 0 - security_domains: - - name: all - roles: - - super_admin - email: overrideansibleuser@example.com login_id: ansible_local_user first_name: Overridden Ansible first name - last_name: Overriden Ansible last name + last_name: 
Overridden Ansible last name user_password: overideansibleLocalUserPassword1% reuse_limitation: 15 time_interval_limitation: 5 @@ -239,6 +231,15 @@ - observer remote_id_claim: ansible_remote_user remote_user_authorization: true + - login_id: admin + first_name: admin + remote_user_authorization: false + reuse_limitation: 0 + time_interval_limitation: 0 + security_domains: + - name: all + roles: + - super_admin - login_id: ansible_local_user_3 user_password: ansibleLocalUser3Password1%Test security_domains: @@ -254,7 +255,7 @@ - cm_replace_local_user.after.0.login_id == "ansible_local_user_2" - cm_replace_local_user.after.0.security_domains | length == 1 - cm_replace_local_user.after.0.security_domains.0.name == "all" - - cm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.after.1.email == "updatedansibleuser@example.com" - cm_replace_local_user.after.1.first_name == "Updated Ansible first name" - cm_replace_local_user.after.1.last_name == "Updated Ansible last name" - cm_replace_local_user.after.1.login_id == "ansible_local_user" @@ -301,7 +302,7 @@ - cm_replace_local_user.before.0.security_domains | length == 1 - cm_replace_local_user.before.0.security_domains.0.name == "all" - cm_replace_local_user.diff == [] - - cm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - cm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com" - cm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" - cm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" - cm_replace_local_user.proposed.0.login_id == "ansible_local_user" @@ -318,7 +319,7 @@ - nm_replace_local_user.after.0.login_id == "ansible_local_user_2" - nm_replace_local_user.after.0.security_domains | length == 1 - nm_replace_local_user.after.0.security_domains.0.name == "all" - - nm_replace_local_user.after.1.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.after.1.email == 
"updatedansibleuser@example.com" - nm_replace_local_user.after.1.first_name == "Updated Ansible first name" - nm_replace_local_user.after.1.last_name == "Updated Ansible last name" - nm_replace_local_user.after.1.login_id == "ansible_local_user" @@ -365,7 +366,7 @@ - nm_replace_local_user.before.0.security_domains | length == 1 - nm_replace_local_user.before.0.security_domains.0.name == "all" - nm_replace_local_user.diff == [] - - nm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com"" + - nm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com" - nm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" - nm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" - nm_replace_local_user.proposed.0.login_id == "ansible_local_user" @@ -377,6 +378,161 @@ - nm_replace_local_user.proposed.0.security_domains.0.roles | length == 1 - nm_replace_local_user.proposed.0.security_domains.0.roles.0 == "super_admin" - nm_replace_local_user.proposed.0.time_interval_limitation == 15 + - nm_merge_local_user_2 is changed + - nm_merge_local_user_2.after | length == 3 + - nm_merge_local_user_2.after.0.email == "secondansibleuser@example.com" + - nm_merge_local_user_2.after.0.first_name == "Second Ansible first name" + - nm_merge_local_user_2.after.0.last_name == "Second Ansible last name" + - nm_merge_local_user_2.after.0.login_id == "ansible_local_user_2" + - nm_merge_local_user_2.after.0.remote_id_claim == "ansible_remote_user_2" + - nm_merge_local_user_2.after.0.remote_user_authorization == true + - nm_merge_local_user_2.after.0.reuse_limitation == 20 + - nm_merge_local_user_2.after.0.security_domains | length == 1 + - nm_merge_local_user_2.after.0.security_domains.0.name == "all" + - nm_merge_local_user_2.after.0.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.after.0.security_domains.0.roles.0 == "fabric_admin" + - nm_merge_local_user_2.after.0.time_interval_limitation == 10 + - 
nm_merge_local_user_2.after.1.email == "updatedansibleuser@example.com" + - nm_merge_local_user_2.after.1.first_name == "Updated Ansible first name" + - nm_merge_local_user_2.after.1.last_name == "Updated Ansible last name" + - nm_merge_local_user_2.after.1.login_id == "ansible_local_user" + - nm_merge_local_user_2.after.1.remote_user_authorization == false + - nm_merge_local_user_2.after.1.reuse_limitation == 25 + - nm_merge_local_user_2.after.1.security_domains | length == 1 + - nm_merge_local_user_2.after.1.security_domains.0.name == "all" + - nm_merge_local_user_2.after.1.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.after.1.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.after.1.time_interval_limitation == 15 + - nm_merge_local_user_2.after.2.login_id == "admin" + - nm_merge_local_user_2.after.2.first_name == "admin" + - nm_merge_local_user_2.after.2.remote_user_authorization == false + - nm_merge_local_user_2.after.2.reuse_limitation == 0 + - nm_merge_local_user_2.after.2.security_domains | length == 1 + - nm_merge_local_user_2.after.2.security_domains.0.name == "all" + - nm_merge_local_user_2.after.2.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.after.2.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.after.2.time_interval_limitation == 0 + - nm_merge_local_user_2.before | length == 3 + - nm_merge_local_user_2.before.2.first_name == "admin" + - nm_merge_local_user_2.before.2.remote_user_authorization == false + - nm_merge_local_user_2.before.2.reuse_limitation == 0 + - nm_merge_local_user_2.before.2.security_domains | length == 1 + - nm_merge_local_user_2.before.2.security_domains.0.name == "all" + - nm_merge_local_user_2.before.2.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.before.2.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.before.2.time_interval_limitation == 0 + - nm_merge_local_user_2.before.1.email == 
"updatedansibleuser@example.com" + - nm_merge_local_user_2.before.1.first_name == "Updated Ansible first name" + - nm_merge_local_user_2.before.1.last_name == "Updated Ansible last name" + - nm_merge_local_user_2.before.1.login_id == "ansible_local_user" + - nm_merge_local_user_2.before.1.remote_user_authorization == false + - nm_merge_local_user_2.before.1.reuse_limitation == 25 + - nm_merge_local_user_2.before.1.security_domains | length == 1 + - nm_merge_local_user_2.before.1.security_domains.0.name == "all" + - nm_merge_local_user_2.before.1.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.before.1.security_domains.0.roles.0 == "super_admin" + - nm_merge_local_user_2.before.1.time_interval_limitation == 15 + - nm_merge_local_user_2.before.0.login_id == "ansible_local_user_2" + - nm_merge_local_user_2.before.0.security_domains | length == 1 + - nm_merge_local_user_2.before.0.security_domains.0.name == "all" + - nm_merge_local_user_2.diff == [] + - nm_merge_local_user_2.proposed.0.email == "secondansibleuser@example.com" + - nm_merge_local_user_2.proposed.0.first_name == "Second Ansible first name" + - nm_merge_local_user_2.proposed.0.last_name == "Second Ansible last name" + - nm_merge_local_user_2.proposed.0.login_id == "ansible_local_user_2" + - nm_merge_local_user_2.proposed.0.remote_id_claim == "ansible_remote_user_2" + - nm_merge_local_user_2.proposed.0.remote_user_authorization == true + - nm_merge_local_user_2.proposed.0.reuse_limitation == 20 + - nm_merge_local_user_2.proposed.0.security_domains | length == 1 + - nm_merge_local_user_2.proposed.0.security_domains.0.name == "all" + - nm_merge_local_user_2.proposed.0.security_domains.0.roles | length == 1 + - nm_merge_local_user_2.proposed.0.security_domains.0.roles.0 == "fabric_admin" + - nm_merge_local_user_2.proposed.0.time_interval_limitation == 10 + - nm_merge_local_user_2_again is not changed + - nm_merge_local_user_2_again.after == nm_merge_local_user_2.after + - 
nm_merge_local_user_2_again.diff == [] + - nm_merge_local_user_2_again.proposed == nm_merge_local_user_2.proposed + - nm_override_local_users is changed + - nm_override_local_users.after | length == 3 + - nm_override_local_users.after.0.email == "overrideansibleuser@example.com" + - nm_override_local_users.after.0.first_name == "Overridden Ansible first name" + - nm_override_local_users.after.0.last_name == "Overridden Ansible last name" + - nm_override_local_users.after.0.login_id == "ansible_local_user" + - nm_override_local_users.after.0.remote_id_claim == "ansible_remote_user" + - nm_override_local_users.after.0.remote_user_authorization == true + - nm_override_local_users.after.0.reuse_limitation == 15 + - nm_override_local_users.after.0.security_domains | length == 1 + - nm_override_local_users.after.0.security_domains.0.name == "all" + - nm_override_local_users.after.0.security_domains.0.roles | length == 1 + - nm_override_local_users.after.0.security_domains.0.roles.0 == "observer" + - nm_override_local_users.after.0.time_interval_limitation == 5 + - nm_override_local_users.after.1.login_id == "admin" + - nm_override_local_users.after.1.first_name == "admin" + - nm_override_local_users.after.1.remote_user_authorization == false + - nm_override_local_users.after.1.reuse_limitation == 0 + - nm_override_local_users.after.1.security_domains | length == 1 + - nm_override_local_users.after.1.security_domains.0.name == "all" + - nm_override_local_users.after.1.security_domains.0.roles | length == 1 + - nm_override_local_users.after.1.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.after.1.time_interval_limitation == 0 + - nm_override_local_users.after.2.login_id == "ansible_local_user_3" + - nm_override_local_users.after.2.security_domains.0.name == "all" + - nm_override_local_users.before | length == 3 + - nm_override_local_users.before.2.first_name == "admin" + - nm_override_local_users.before.2.remote_user_authorization == false + - 
nm_override_local_users.before.2.reuse_limitation == 0 + - nm_override_local_users.before.2.security_domains | length == 1 + - nm_override_local_users.before.2.security_domains.0.name == "all" + - nm_override_local_users.before.2.security_domains.0.roles | length == 1 + - nm_override_local_users.before.2.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.before.2.time_interval_limitation == 0 + - nm_override_local_users.before.1.email == "updatedansibleuser@example.com" + - nm_override_local_users.before.1.first_name == "Updated Ansible first name" + - nm_override_local_users.before.1.last_name == "Updated Ansible last name" + - nm_override_local_users.before.1.login_id == "ansible_local_user" + - nm_override_local_users.before.1.remote_user_authorization == false + - nm_override_local_users.before.1.reuse_limitation == 25 + - nm_override_local_users.before.1.security_domains | length == 1 + - nm_override_local_users.before.1.security_domains.0.name == "all" + - nm_override_local_users.before.1.security_domains.0.roles | length == 1 + - nm_override_local_users.before.1.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.before.1.time_interval_limitation == 15 + - nm_override_local_users.before.0.email == "secondansibleuser@example.com" + - nm_override_local_users.before.0.first_name == "Second Ansible first name" + - nm_override_local_users.before.0.last_name == "Second Ansible last name" + - nm_override_local_users.before.0.login_id == "ansible_local_user_2" + - nm_override_local_users.before.0.remote_id_claim == "ansible_remote_user_2" + - nm_override_local_users.before.0.remote_user_authorization == true + - nm_override_local_users.before.0.reuse_limitation == 20 + - nm_override_local_users.before.0.security_domains | length == 1 + - nm_override_local_users.before.0.security_domains.0.name == "all" + - nm_override_local_users.before.0.security_domains.0.roles | length == 1 + - 
nm_override_local_users.before.0.security_domains.0.roles.0 == "fabric_admin" + - nm_override_local_users.before.0.time_interval_limitation == 10 + - nm_override_local_users.diff == [] + - nm_override_local_users.proposed.0.email == "overrideansibleuser@example.com" + - nm_override_local_users.proposed.0.first_name == "Overridden Ansible first name" + - nm_override_local_users.proposed.0.last_name == "Overridden Ansible last name" + - nm_override_local_users.proposed.0.login_id == "ansible_local_user" + - nm_override_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - nm_override_local_users.proposed.0.remote_user_authorization == true + - nm_override_local_users.proposed.0.reuse_limitation == 15 + - nm_override_local_users.proposed.0.security_domains | length == 1 + - nm_override_local_users.proposed.0.security_domains.0.name == "all" + - nm_override_local_users.proposed.0.security_domains.0.roles | length == 1 + - nm_override_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - nm_override_local_users.proposed.0.time_interval_limitation == 5 + - nm_override_local_users.proposed.1.login_id == "admin" + - nm_override_local_users.proposed.1.first_name == "admin" + - nm_override_local_users.proposed.1.remote_user_authorization == false + - nm_override_local_users.proposed.1.reuse_limitation == 0 + - nm_override_local_users.proposed.1.security_domains | length == 1 + - nm_override_local_users.proposed.1.security_domains.0.name == "all" + - nm_override_local_users.proposed.1.security_domains.0.roles | length == 1 + - nm_override_local_users.proposed.1.security_domains.0.roles.0 == "super_admin" + - nm_override_local_users.proposed.1.time_interval_limitation == 0 + - nm_override_local_users.proposed.2.login_id == "ansible_local_user_3" + - nm_override_local_users.proposed.2.security_domains.0.name == "all" # DELETE @@ -402,6 +558,87 @@ - name: Asserts for local users deletion tasks ansible.builtin.assert: that: + - cm_delete_local_user is 
changed + - cm_delete_local_user.after | length == 2 + - cm_delete_local_user.after.0.login_id == "ansible_local_user_3" + - cm_delete_local_user.after.0.security_domains.0.name == "all" + - cm_delete_local_user.after.1.login_id == "admin" + - cm_delete_local_user.after.1.first_name == "admin" + - cm_delete_local_user.after.1.remote_user_authorization == false + - cm_delete_local_user.after.1.reuse_limitation == 0 + - cm_delete_local_user.after.1.security_domains | length == 1 + - cm_delete_local_user.after.1.security_domains.0.name == "all" + - cm_delete_local_user.after.1.security_domains.0.roles | length == 1 + - cm_delete_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - cm_delete_local_user.after.1.time_interval_limitation == 0 + - cm_delete_local_user.before | length == 3 + - cm_delete_local_user.before.0.email == "overrideansibleuser@example.com" + - cm_delete_local_user.before.0.first_name == "Overridden Ansible first name" + - cm_delete_local_user.before.0.last_name == "Overridden Ansible last name" + - cm_delete_local_user.before.0.login_id == "ansible_local_user" + - cm_delete_local_user.before.0.remote_id_claim == "ansible_remote_user" + - cm_delete_local_user.before.0.remote_user_authorization == true + - cm_delete_local_user.before.0.reuse_limitation == 15 + - cm_delete_local_user.before.0.security_domains | length == 1 + - cm_delete_local_user.before.0.security_domains.0.name == "all" + - cm_delete_local_user.before.0.security_domains.0.roles | length == 1 + - cm_delete_local_user.before.0.security_domains.0.roles.0 == "observer" + - cm_delete_local_user.before.0.time_interval_limitation == 5 + - cm_delete_local_user.before.1.login_id == "ansible_local_user_3" + - cm_delete_local_user.before.1.security_domains.0.name == "all" + - cm_delete_local_user.before.2.first_name == "admin" + - cm_delete_local_user.before.2.remote_user_authorization == false + - cm_delete_local_user.before.2.reuse_limitation == 0 + - 
cm_delete_local_user.before.2.security_domains | length == 1 + - cm_delete_local_user.before.2.security_domains.0.name == "all" + - cm_delete_local_user.before.2.security_domains.0.roles | length == 1 + - cm_delete_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - cm_delete_local_user.before.2.time_interval_limitation == 0 + - cm_delete_local_user.diff == [] + - cm_delete_local_user.proposed.0.login_id == "ansible_local_user" + - nm_delete_local_user is changed + - nm_delete_local_user.after | length == 2 + - nm_delete_local_user.after.0.login_id == "ansible_local_user_3" + - nm_delete_local_user.after.0.security_domains.0.name == "all" + - nm_delete_local_user.after.1.login_id == "admin" + - nm_delete_local_user.after.1.first_name == "admin" + - nm_delete_local_user.after.1.remote_user_authorization == false + - nm_delete_local_user.after.1.reuse_limitation == 0 + - nm_delete_local_user.after.1.security_domains | length == 1 + - nm_delete_local_user.after.1.security_domains.0.name == "all" + - nm_delete_local_user.after.1.security_domains.0.roles | length == 1 + - nm_delete_local_user.after.1.security_domains.0.roles.0 == "super_admin" + - nm_delete_local_user.after.1.time_interval_limitation == 0 + - nm_delete_local_user.before | length == 3 + - nm_delete_local_user.before.0.email == "overrideansibleuser@example.com" + - nm_delete_local_user.before.0.first_name == "Overridden Ansible first name" + - nm_delete_local_user.before.0.last_name == "Overridden Ansible last name" + - nm_delete_local_user.before.0.login_id == "ansible_local_user" + - nm_delete_local_user.before.0.remote_id_claim == "ansible_remote_user" + - nm_delete_local_user.before.0.remote_user_authorization == true + - nm_delete_local_user.before.0.reuse_limitation == 15 + - nm_delete_local_user.before.0.security_domains | length == 1 + - nm_delete_local_user.before.0.security_domains.0.name == "all" + - nm_delete_local_user.before.0.security_domains.0.roles | length == 1 + - 
nm_delete_local_user.before.0.security_domains.0.roles.0 == "observer" + - nm_delete_local_user.before.0.time_interval_limitation == 5 + - nm_delete_local_user.before.1.login_id == "ansible_local_user_3" + - nm_delete_local_user.before.1.security_domains.0.name == "all" + - nm_delete_local_user.before.2.first_name == "admin" + - nm_delete_local_user.before.2.remote_user_authorization == false + - nm_delete_local_user.before.2.reuse_limitation == 0 + - nm_delete_local_user.before.2.security_domains | length == 1 + - nm_delete_local_user.before.2.security_domains.0.name == "all" + - nm_delete_local_user.before.2.security_domains.0.roles | length == 1 + - nm_delete_local_user.before.2.security_domains.0.roles.0 == "super_admin" + - nm_delete_local_user.before.2.time_interval_limitation == 0 + - nm_delete_local_user.diff == [] + - nm_delete_local_user.proposed.0.login_id == "ansible_local_user" + - nm_delete_local_user_again is not changed + - nm_delete_local_user_again.after == nm_delete_local_user.after + - nm_delete_local_user_again.before == nm_delete_local_user.after + - nm_delete_local_user_again.diff == [] + - nm_delete_local_user_again.proposed == nm_delete_local_user.proposed # CLEAN UP - name: Ensure local users do not exist From de87bb188ba36a8c1467af4bee6afbe831acd8a9 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 12:33:23 -0400 Subject: [PATCH 102/131] [ignore] Fix sanity issues by enhancing pydantic_compat.py. Fix Black formatting. 
--- plugins/module_utils/models/local_user.py | 1 - plugins/module_utils/nd_config_collection.py | 3 +-- plugins/module_utils/pydantic_compat.py | 14 ++++++++++++++ 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 0320d3c1..38f2b5d2 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -23,7 +23,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel from ansible_collections.cisco.nd.plugins.module_utils.constants import NDConstantMapping - USER_ROLES_MAPPING = NDConstantMapping( { "fabric_admin": "fabric-admin", diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index d34ca462..fa574ca2 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -8,13 +8,12 @@ __metaclass__ = type -from typing import TypeVar, Generic, Optional, List, Dict, Any, Literal +from typing import Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - class NDConfigCollection: """ Nexus Dashboard configuration collection for NDBaseModel instances. 
diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py index 4456018a..2596d852 100644 --- a/plugins/module_utils/pydantic_compat.py +++ b/plugins/module_utils/pydantic_compat.py @@ -192,6 +192,20 @@ def decorator(func): return decorator + # Fallback: FieldSerializationInfo placeholder class that does nothing + class FieldSerializationInfo: + """Pydantic FieldSerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + + # Fallback: SerializationInfo placeholder class that does nothing + class SerializationInfo: + """Pydantic SerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + else: HAS_PYDANTIC = True # pylint: disable=invalid-name PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name From d78bae4a4552c4d9c8be0cdb77fb7cfe2ffaa30c Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 12:48:36 -0400 Subject: [PATCH 103/131] [ignore] Remove all TODO comments. 
--- plugins/module_utils/endpoints/base.py | 4 +--- plugins/module_utils/endpoints/v1/infra_aaa_local_users.py | 1 - plugins/module_utils/nd_config_collection.py | 1 - plugins/module_utils/nd_state_machine.py | 2 -- plugins/module_utils/orchestrators/base.py | 2 -- plugins/module_utils/utils.py | 1 - plugins/modules/nd_local_user.py | 3 --- 7 files changed, 1 insertion(+), 13 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index 2d214878..e5eb8c72 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -131,9 +131,7 @@ def verb(self) -> HttpVerbEnum: None """ - - # TODO: Maybe to be modifed to be more Pydantic (low priority) - # TODO: Maybe change function's name (low priority) + # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py index d1013e24..9235afb6 100644 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py @@ -31,7 +31,6 @@ class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): /api/v1/infra/aaa/localUsers endpoint. 
""" - # TODO: Remove it base_path: Final = NDBasePath.nd_infra_aaa("localUsers") @property diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index fa574ca2..abcfc0f7 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -42,7 +42,6 @@ def _extract_key(self, item: NDBaseModel) -> IdentifierKey: except Exception as e: raise ValueError(f"Failed to extract identifier: {e}") from e - # TODO: optimize it -> only needed for delete method (low priority) def _rebuild_index(self) -> None: """Rebuild index from scratch (O(n) operation).""" self._index.clear() diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 3b6c891c..3840b360 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -27,7 +27,6 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest """ Initialize the Network Resource Module. 
""" - # TODO: Revisit Module initialization and configuration with rest_send self.module = module self.nd_module = NDModule(self.module) @@ -37,7 +36,6 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest # Configuration self.model_orchestrator = model_orchestrator(sender=self.nd_module) self.model_class = self.model_orchestrator.model_class - # TODO: Revisit these class variables when udpating Module intialization and configuration (low priority) self.state = self.module.params["state"] # Initialize collections diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 1a8b4f10..ddcb7569 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -34,11 +34,9 @@ class NDBaseOrchestrator(BaseModel): query_all_endpoint: Type[NDBaseEndpoint] # NOTE: Module Field is always required - # TODO: Replace it with future sender (low priority) sender: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") - # TODO: Explore new ways to make them even more general -> e.g., create a general API operation function (low priority) def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: try: api_endpoint = self.create_endpoint() diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 76e936bb..2e62c6eb 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -56,7 +56,6 @@ def issubset(subset: Any, superset: Any) -> bool: return True -# TODO: Might not necessary with Pydantic validation and serialization built-in methods (see models/local_user) def remove_unwanted_keys(data: Dict, unwanted_keys: List[Union[str, List[str]]]) -> Dict: """Remove unwanted keys from dict (supports nested paths).""" data = deepcopy(data) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 56e59ad5..f5efea03 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -199,9 +199,6 @@ def main(): ) # Manage state - # TODO: return module output class object: - # output = nd_state_machine.manage_state() - # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) From 109ff9e96a74c1a06b8c95efd94f39f3d26348f9 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 13:47:23 -0400 Subject: [PATCH 104/131] [ignore] Update endpoints to match latest nd42_integration branch. Update orchestrators accordingly. 
--- .../endpoints/v1/infra/aaa_local_users.py | 209 +++++++++ .../endpoints/v1/infra_aaa_local_users.py | 178 ------- plugins/module_utils/orchestrators/base.py | 14 +- .../module_utils/orchestrators/local_user.py | 24 +- ..._endpoints_api_v1_infra_aaa_local_users.py | 437 ++++++++++++++++++ 5 files changed, 665 insertions(+), 197 deletions(-) create mode 100644 plugins/module_utils/endpoints/v1/infra/aaa_local_users.py delete mode 100644 plugins/module_utils/endpoints/v1/infra_aaa_local_users.py create mode 100644 tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py diff --git a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py new file mode 100644 index 00000000..925c5548 --- /dev/null +++ b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py @@ -0,0 +1,209 @@ +# Copyright: (c) 2026, Gaspard Micol (@gmicol) +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) +""" +ND Infra AAA Local Users endpoint models. + +This module contains endpoint definitions for AAA Local Users operations in the ND Infra API. 
+""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Literal + +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import Field +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.base_path import BasePath + +from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum +from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey + + +class _EpInfraAaaLocalUsersBase(LoginIdMixin, NDEndpointBaseModel): + """ + Base class for ND Infra AAA Local Users endpoints. + + Provides common functionality for all HTTP methods on the /api/v1/infra/aaa/localUsers endpoint. + """ + + @property + def path(self) -> str: + """ + # Summary + + Build the endpoint path. + + ## Returns + + - Complete endpoint path string, optionally including login_id + """ + if self.login_id is not None: + return BasePath.path("aaa", "localUsers", self.login_id) + return BasePath.path("aaa", "localUsers") + + def set_identifiers(self, identifier: IdentifierKey = None): + self.login_id = identifier + + +class EpInfraAaaLocalUsersGet(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users GET Endpoint + + ## Description + + Endpoint to retrieve local users from the ND Infra AAA service. + Optionally retrieve a specific local user by login_id. 
+ + ## Path + + - /api/v1/infra/aaa/localUsers + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - GET + + ## Usage + + ```python + # Get all local users + request = EpInfraAaaLocalUsersGet() + path = request.path + verb = request.verb + + # Get specific local user + request = EpInfraAaaLocalUsersGet() + request.login_id = "admin" + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersGet"] = Field(default="EpInfraAaaLocalUsersGet", frozen=True, description="Class name for backward compatibility") + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.GET + + +class EpInfraAaaLocalUsersPost(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users POST Endpoint + + ## Description + + Endpoint to create a local user in the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers + + ## Verb + + - POST + + ## Usage + + ```python + request = EpInfraAaaLocalUsersPost() + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersPost"] = Field( + default="EpInfraAaaLocalUsersPost", frozen=True, description="Class name for backward compatibility" + ) + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.POST + + +class EpInfraAaaLocalUsersPut(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users PUT Endpoint + + ## Description + + Endpoint to update a local user in the ND Infra AAA service.
+ + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - PUT + + ## Usage + + ```python + request = EpInfraAaaLocalUsersPut() + request.login_id = "admin" + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersPut"] = Field(default="EpInfraAaaLocalUsersPut", frozen=True, description="Class name for backward compatibility") + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.PUT + + +class EpInfraAaaLocalUsersDelete(_EpInfraAaaLocalUsersBase): + """ + # Summary + + ND Infra AAA Local Users DELETE Endpoint + + ## Description + + Endpoint to delete a local user from the ND Infra AAA service. + + ## Path + + - /api/v1/infra/aaa/localUsers/{login_id} + + ## Verb + + - DELETE + + ## Usage + + ```python + request = EpInfraAaaLocalUsersDelete() + request.login_id = "admin" + path = request.path + verb = request.verb + ``` + """ + + class_name: Literal["EpInfraAaaLocalUsersDelete"] = Field( + default="EpInfraAaaLocalUsersDelete", frozen=True, description="Class name for backward compatibility" + ) + + @property + def verb(self) -> HttpVerbEnum: + """Return the HTTP verb for this endpoint.""" + return HttpVerbEnum.DELETE diff --git a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py deleted file mode 100644 index 9235afb6..00000000 --- a/plugins/module_utils/endpoints/v1/infra_aaa_local_users.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@allenrobel) -# Copyright: (c) 2026, Gaspard Micol (@gmicol) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) -""" -ND Infra AAA LocalUsers endpoint models. - -This module contains endpoint definitions for LocalUsers-related operations -in the ND Infra AAA API.
-""" - -from __future__ import absolute_import, division, print_function - -__metaclass__ = type - -from typing import Literal, Final -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.enums import VerbEnum -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint, NDBasePath -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import Field -from ansible_collections.cisco.nd.plugins.module_utils.types import IdentifierKey - - -class _V1InfraAaaLocalUsersBase(LoginIdMixin, NDBaseEndpoint): - """ - Base class for ND Infra AAA Local Users endpoints. - - Provides common functionality for all HTTP methods on the - /api/v1/infra/aaa/localUsers endpoint. - """ - - base_path: Final = NDBasePath.nd_infra_aaa("localUsers") - - @property - def path(self) -> str: - """ - # Summary - - Build the endpoint path. - - ## Returns - - - Complete endpoint path string, optionally including login_id - """ - if self.login_id is not None: - return NDBasePath.nd_infra_aaa("localUsers", self.login_id) - return self.base_path - - def set_identifiers(self, identifier: IdentifierKey = None): - self.login_id = identifier - - -class V1InfraAaaLocalUsersGet(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users GET Endpoint - - ## Description - - Endpoint to retrieve local users from the ND Infra AAA service. - Optionally retrieve a specific local user by login_id. 
- - ## Path - - - /api/v1/infra/aaa/localUsers - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - GET - """ - - class_name: Literal["V1InfraAaaLocalUsersGet"] = Field( - default="V1InfraAaaLocalUsersGet", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.GET - - -class V1InfraAaaLocalUsersPost(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users POST Endpoint - - ## Description - - Endpoint to create a local user in the ND Infra AAA service. - - ## Path - - - /api/v1/infra/aaa/localUsers - - ## Verb - - - POST - """ - - class_name: Literal["V1InfraAaaLocalUsersPost"] = Field( - default="V1InfraAaaLocalUsersPost", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.POST - - -class V1InfraAaaLocalUsersPut(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users PUT Endpoint - - ## Description - - Endpoint to update a local user in the ND Infra AAA service. - - ## Path - - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - PUT - """ - - class_name: Literal["V1InfraAaaLocalUsersPut"] = Field( - default="V1InfraAaaLocalUsersPut", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.PUT - - -class V1InfraAaaLocalUsersDelete(_V1InfraAaaLocalUsersBase): - """ - # Summary - - ND Infra AAA Local Users DELETE Endpoint - - ## Description - - Endpoint to delete a local user from the ND Infra AAA service. 
- - ## Path - - - /api/v1/infra/aaa/localUsers/{login_id} - - ## Verb - - - DELETE - """ - - class_name: Literal["V1InfraAaaLocalUsersDelete"] = Field( - default="V1InfraAaaLocalUsersDelete", - description="Class name for backward compatibility", - frozen=True, - ) - - @property - def verb(self) -> VerbEnum: - """Return the HTTP verb for this endpoint.""" - return VerbEnum.DELETE diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index ddcb7569..651a9d30 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -12,7 +12,7 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType @@ -27,11 +27,11 @@ class NDBaseOrchestrator(BaseModel): model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] # NOTE: if not defined by subclasses, return an error as they are required - create_endpoint: Type[NDBaseEndpoint] - update_endpoint: Type[NDBaseEndpoint] - delete_endpoint: Type[NDBaseEndpoint] - query_one_endpoint: Type[NDBaseEndpoint] - query_all_endpoint: Type[NDBaseEndpoint] + create_endpoint: Type[NDEndpointBaseModel] + update_endpoint: Type[NDEndpointBaseModel] + delete_endpoint: Type[NDEndpointBaseModel] + query_one_endpoint: Type[NDEndpointBaseModel] + query_all_endpoint: Type[NDEndpointBaseModel] # NOTE: Module Field is always required sender: NDModule @@ -70,7 +70,7 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: 
try: - result = self.sender.query_obj(self.query_all_endpoint.path) + result = self.sender.query_obj(self.query_all_endpoint().path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 5e52a00b..db7bbfdc 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -12,31 +12,31 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( - V1InfraAaaLocalUsersPost, - V1InfraAaaLocalUsersPut, - V1InfraAaaLocalUsersDelete, - V1InfraAaaLocalUsersGet, +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( + EpInfraAaaLocalUsersPost, + EpInfraAaaLocalUsersPut, + EpInfraAaaLocalUsersDelete, + EpInfraAaaLocalUsersGet, ) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel - create_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseEndpoint] = V1InfraAaaLocalUsersGet + create_endpoint: Type[NDEndpointBaseModel] = 
EpInfraAaaLocalUsersPost + update_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPut + delete_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersDelete + query_one_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet + query_all_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet def query_all(self) -> ResponseType: """ Custom query_all action to extract 'localusers' from response. """ try: - result = self.sender.query_obj(self.query_all_endpoint.base_path) + result = self.sender.query_obj(self.query_all_endpoint().path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py b/tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py new file mode 100644 index 00000000..71cfd9b6 --- /dev/null +++ b/tests/unit/module_utils/endpoints/test_endpoints_api_v1_infra_aaa_local_users.py @@ -0,0 +1,437 @@ +# Copyright: (c) 2026, Allen Robel (@arobel) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +""" +Unit tests for infra_aaa_local_users.py + +Tests the ND Infra AAA endpoint classes +""" + +from __future__ import absolute_import, annotations, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import pytest # pylint: disable=unused-import +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( + EpInfraAaaLocalUsersDelete, + EpInfraAaaLocalUsersGet, + EpInfraAaaLocalUsersPost, + EpInfraAaaLocalUsersPut, +) +from ansible_collections.cisco.nd.plugins.module_utils.enums import HttpVerbEnum +from ansible_collections.cisco.nd.tests.unit.module_utils.common_utils import ( + does_not_raise, +) + +# ============================================================================= +# Test: EpInfraAaaLocalUsersGet +# 
============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00010(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet basic instantiation + + ## Test + + - Instance can be created + - class_name is set correctly + - verb is GET + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + - EpInfraAaaLocalUsersGet.verb + - EpInfraAaaLocalUsersGet.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + assert instance.class_name == "EpInfraAaaLocalUsersGet" + assert instance.verb == HttpVerbEnum.GET + + +def test_endpoints_api_v1_infra_aaa_00020(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet path without login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers" when login_id is None + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers" + + +def test_endpoints_api_v1_infra_aaa_00030(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.path + - EpInfraAaaLocalUsersGet.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +def test_endpoints_api_v1_infra_aaa_00040(): + """ + # Summary + + Verify EpInfraAaaLocalUsersGet login_id can be set at instantiation + + ## Test + + - login_id can be provided during instantiation + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet(login_id="testuser") + assert instance.login_id == "testuser" + assert instance.path == "/api/v1/infra/aaa/localUsers/testuser" + + +# 
============================================================================= +# Test: EpInfraAaaLocalUsersPost +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00100(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPost basic instantiation + + ## Test + + - Instance can be created + - class_name is set correctly + - verb is POST + + ## Classes and Methods + + - EpInfraAaaLocalUsersPost.__init__() + - EpInfraAaaLocalUsersPost.verb + - EpInfraAaaLocalUsersPost.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPost() + assert instance.class_name == "EpInfraAaaLocalUsersPost" + assert instance.verb == HttpVerbEnum.POST + + +def test_endpoints_api_v1_infra_aaa_00110(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPost path + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers" for POST + + ## Classes and Methods + + - EpInfraAaaLocalUsersPost.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPost() + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers" + + +def test_endpoints_api_v1_infra_aaa_00120(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPost path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersPost.path + - EpInfraAaaLocalUsersPost.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPost() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +# ============================================================================= +# Test: EpInfraAaaLocalUsersPut +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00200(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPut basic instantiation + + ## Test + + - Instance can be created + - class_name is set 
correctly + - verb is PUT + + ## Classes and Methods + + - EpInfraAaaLocalUsersPut.__init__() + - EpInfraAaaLocalUsersPut.verb + - EpInfraAaaLocalUsersPut.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPut() + assert instance.class_name == "EpInfraAaaLocalUsersPut" + assert instance.verb == HttpVerbEnum.PUT + + +def test_endpoints_api_v1_infra_aaa_00210(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPut path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersPut.path + - EpInfraAaaLocalUsersPut.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPut() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +def test_endpoints_api_v1_infra_aaa_00220(): + """ + # Summary + + Verify EpInfraAaaLocalUsersPut with complex login_id + + ## Test + + - login_id with special characters is handled correctly + + ## Classes and Methods + + - EpInfraAaaLocalUsersPut.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersPut(login_id="user-name_123") + assert instance.path == "/api/v1/infra/aaa/localUsers/user-name_123" + + +# ============================================================================= +# Test: EpInfraAaaLocalUsersDelete +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00300(): + """ + # Summary + + Verify EpInfraAaaLocalUsersDelete basic instantiation + + ## Test + + - Instance can be created + - class_name is set correctly + - verb is DELETE + + ## Classes and Methods + + - EpInfraAaaLocalUsersDelete.__init__() + - EpInfraAaaLocalUsersDelete.verb + - EpInfraAaaLocalUsersDelete.class_name + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersDelete() + assert instance.class_name == "EpInfraAaaLocalUsersDelete" + assert instance.verb == 
HttpVerbEnum.DELETE + + +def test_endpoints_api_v1_infra_aaa_00310(): + """ + # Summary + + Verify EpInfraAaaLocalUsersDelete path with login_id + + ## Test + + - path returns "/api/v1/infra/aaa/localUsers/admin" when login_id is set + + ## Classes and Methods + + - EpInfraAaaLocalUsersDelete.path + - EpInfraAaaLocalUsersDelete.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersDelete() + instance.login_id = "admin" + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers/admin" + + +def test_endpoints_api_v1_infra_aaa_00320(): + """ + # Summary + + Verify EpInfraAaaLocalUsersDelete without login_id + + ## Test + + - path returns base path when login_id is None + + ## Classes and Methods + + - EpInfraAaaLocalUsersDelete.path + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersDelete() + result = instance.path + assert result == "/api/v1/infra/aaa/localUsers" + + +# ============================================================================= +# Test: All HTTP methods on same endpoint +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00400(): + """ + # Summary + + Verify all HTTP methods work correctly on same resource + + ## Test + + - GET, POST, PUT, DELETE all return correct paths for same login_id + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet + - EpInfraAaaLocalUsersPost + - EpInfraAaaLocalUsersPut + - EpInfraAaaLocalUsersDelete + """ + login_id = "testuser" + + with does_not_raise(): + get_ep = EpInfraAaaLocalUsersGet(login_id=login_id) + post_ep = EpInfraAaaLocalUsersPost(login_id=login_id) + put_ep = EpInfraAaaLocalUsersPut(login_id=login_id) + delete_ep = EpInfraAaaLocalUsersDelete(login_id=login_id) + + # All should have same path when login_id is set + expected_path = "/api/v1/infra/aaa/localUsers/testuser" + assert get_ep.path == expected_path + assert post_ep.path == expected_path + assert put_ep.path == expected_path + 
assert delete_ep.path == expected_path + + # But different verbs + assert get_ep.verb == HttpVerbEnum.GET + assert post_ep.verb == HttpVerbEnum.POST + assert put_ep.verb == HttpVerbEnum.PUT + assert delete_ep.verb == HttpVerbEnum.DELETE + + +# ============================================================================= +# Test: Pydantic validation +# ============================================================================= + + +def test_endpoints_api_v1_infra_aaa_00500(): + """ + # Summary + + Verify Pydantic validation for login_id + + ## Test + + - Empty string is rejected for login_id (min_length=1) + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + """ + with pytest.raises(ValueError): + EpInfraAaaLocalUsersGet(login_id="") + + +def test_endpoints_api_v1_infra_aaa_00510(): + """ + # Summary + + Verify login_id can be None + + ## Test + + - login_id accepts None as valid value + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.__init__() + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet(login_id=None) + assert instance.login_id is None + + +def test_endpoints_api_v1_infra_aaa_00520(): + """ + # Summary + + Verify login_id can be modified after instantiation + + ## Test + + - login_id can be changed after object creation + + ## Classes and Methods + + - EpInfraAaaLocalUsersGet.login_id + """ + with does_not_raise(): + instance = EpInfraAaaLocalUsersGet() + assert instance.login_id is None + instance.login_id = "newuser" + assert instance.login_id == "newuser" + assert instance.path == "/api/v1/infra/aaa/localUsers/newuser" From 028a123905359a3a56deaeb20e005a82f74b761d Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 12 Mar 2026 14:33:06 -0400 Subject: [PATCH 105/131] [ignore] Update pydantic_compat.py to support extra Pydantic methods and classes. 
--- .../module_utils/common/pydantic_compat.py | 57 ++++- plugins/module_utils/endpoints/base.py | 1 - .../endpoints/v1/infra/aaa_local_users.py | 12 +- plugins/module_utils/models/base.py | 2 +- plugins/module_utils/models/local_user.py | 2 +- plugins/module_utils/nd_state_machine.py | 2 +- plugins/module_utils/orchestrators/base.py | 5 +- .../module_utils/orchestrators/local_user.py | 3 +- plugins/module_utils/pydantic_compat.py | 238 ------------------ .../module_utils/endpoints/test_base_model.py | 5 - 10 files changed, 61 insertions(+), 266 deletions(-) delete mode 100644 plugins/module_utils/pydantic_compat.py diff --git a/plugins/module_utils/common/pydantic_compat.py b/plugins/module_utils/common/pydantic_compat.py index e1550a18..b26559d2 100644 --- a/plugins/module_utils/common/pydantic_compat.py +++ b/plugins/module_utils/common/pydantic_compat.py @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) +# Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -34,10 +33,6 @@ # fmt: on # isort: on -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - import traceback from typing import TYPE_CHECKING, Any, Callable, Union @@ -51,11 +46,16 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, + model_serializer, field_validator, model_validator, validator, + computed_field, + FieldSerializationInfo, + SerializationInfo, ) HAS_PYDANTIC = True # pylint: disable=invalid-name @@ -71,11 +71,16 @@ Field, PydanticExperimentalWarning, StrictBool, + SecretStr, ValidationError, field_serializer, + model_serializer, field_validator, model_validator, validator, + computed_field, + FieldSerializationInfo, + SerializationInfo, ) except ImportError: HAS_PYDANTIC = False # pylint: disable=invalid-name @@ -127,6 +132,15 @@ def decorator(func): return decorator + # Fallback: 
model_serializer decorator that does nothing + def model_serializer(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic model_serializer fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + # Fallback: field_validator decorator that does nothing def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name """Pydantic field_validator fallback when pydantic is not available.""" @@ -136,6 +150,15 @@ def decorator(func): return decorator + # Fallback: computed_field decorator that does nothing + def computed_field(*args, **kwargs): # pylint: disable=unused-argument + """Pydantic computed_field fallback when pydantic is not available.""" + + def decorator(func): + return func + + return decorator + # Fallback: AfterValidator that returns the function unchanged def AfterValidator(func): # pylint: disable=invalid-name """Pydantic AfterValidator fallback when pydantic is not available.""" @@ -152,6 +175,9 @@ def BeforeValidator(func): # pylint: disable=invalid-name # Fallback: StrictBool StrictBool = bool + # Fallback: SecretStr + SecretStr = str + # Fallback: ValidationError class ValidationError(Exception): """ @@ -183,6 +209,20 @@ def decorator(func): return decorator + # Fallback: FieldSerializationInfo placeholder class that does nothing + class FieldSerializationInfo: + """Pydantic FieldSerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + + # Fallback: SerializationInfo placeholder class that does nothing + class SerializationInfo: + """Pydantic SerializationInfo fallback when pydantic is not available.""" + + def __init__(self, **kwargs): + pass + else: HAS_PYDANTIC = True # pylint: disable=invalid-name PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name @@ -234,10 +274,15 @@ def main(): "PYDANTIC_IMPORT_ERROR", "PydanticExperimentalWarning", "StrictBool", + "SecretStr", "ValidationError", 
"field_serializer", + "model_serializer", "field_validator", "model_validator", "require_pydantic", "validator", + "computed_field", + "FieldSerializationInfo", + "SerializationInfo", ] diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index e5eb8c72..c3d7f4e1 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -133,6 +133,5 @@ def verb(self) -> HttpVerbEnum: """ # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration - @abstractmethod def set_identifiers(self, identifier: IdentifierKey = None): pass diff --git a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py index 925c5548..26660622 100644 --- a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py @@ -10,15 +10,7 @@ from __future__ import absolute_import, annotations, division, print_function -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from typing import Literal - +from typing import Literal from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import Field from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.mixins import LoginIdMixin @@ -49,7 +41,7 @@ def path(self) -> str: if self.login_id is not None: return BasePath.path("aaa", "localUsers", self.login_id) return BasePath.path("aaa", "localUsers") - + def set_identifiers(self, identifier: IdentifierKey = None): self.login_id = identifier diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 21fb983e..07b6ee28 100644 --- a/plugins/module_utils/models/base.py +++ 
b/plugins/module_utils/models/base.py @@ -9,7 +9,7 @@ __metaclass__ = type from abc import ABC -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Set, Tuple, Union, Literal, Optional from ansible_collections.cisco.nd.plugins.module_utils.utils import issubset diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index 38f2b5d2..a47a4a0a 100644 --- a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import List, Dict, Any, Optional, ClassVar, Literal -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ( +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( Field, SecretStr, model_serializer, diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 3840b360..efed3517 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -9,7 +9,7 @@ __metaclass__ = type from typing import Type -from ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import ValidationError +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 651a9d30..1f4e3e69 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,7 +8,7 @@ __metaclass__ = type -from 
ansible_collections.cisco.nd.plugins.module_utils.pydantic_compat import BaseModel, ConfigDict +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule @@ -70,7 +70,8 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: try: - result = self.sender.query_obj(self.query_all_endpoint().path) + api_endpoint = self.query_all_endpoint() + result = self.sender.query_obj(api_endpoint.path) return result or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index db7bbfdc..689ba9dc 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -36,7 +36,8 @@ def query_all(self) -> ResponseType: Custom query_all action to extract 'localusers' from response. """ try: - result = self.sender.query_obj(self.query_all_endpoint().path) + api_endpoint = self.query_all_endpoint() + result = self.sender.query_obj(api_endpoint.path) return result.get("localusers", []) or [] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/pydantic_compat.py b/plugins/module_utils/pydantic_compat.py deleted file mode 100644 index 2596d852..00000000 --- a/plugins/module_utils/pydantic_compat.py +++ /dev/null @@ -1,238 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright: (c) 2026, Allen Robel (@arobel) - -# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) - -# pylint: disable=too-few-public-methods -""" -Pydantic compatibility layer. 
- -This module provides a single location for Pydantic imports with fallback -implementations when Pydantic is not available. This ensures consistent -behavior across all modules and follows the DRY principle. -""" - -from __future__ import absolute_import, annotations, division, print_function - -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - -import traceback -from typing import TYPE_CHECKING, Any, Callable, Union - -if TYPE_CHECKING: - # Type checkers always see the real Pydantic types - from pydantic import ( - AfterValidator, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - PydanticExperimentalWarning, - StrictBool, - SecretStr, - ValidationError, - field_serializer, - model_serializer, - field_validator, - model_validator, - validator, - computed_field, - FieldSerializationInfo, - SerializationInfo, - ) -else: - # Runtime: try to import, with fallback - try: - from pydantic import ( - AfterValidator, - BaseModel, - BeforeValidator, - ConfigDict, - Field, - PydanticExperimentalWarning, - StrictBool, - SecretStr, - ValidationError, - field_serializer, - model_serializer, - field_validator, - model_validator, - validator, - computed_field, - FieldSerializationInfo, - SerializationInfo, - ) - except ImportError: - HAS_PYDANTIC = False # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR: Union[str, None] = traceback.format_exc() # pylint: disable=invalid-name - - # Fallback: Minimal BaseModel replacement - class BaseModel: - """Fallback BaseModel when pydantic is not available.""" - - model_config = {"validate_assignment": False, "use_enum_values": False} - - def __init__(self, **kwargs): - """Accept keyword arguments and set them as attributes.""" - for key, value in kwargs.items(): - setattr(self, key, value) - - def model_dump(self, exclude_none: bool = False, exclude_defaults: bool = False) -> dict: # pylint: disable=unused-argument - """Return a dictionary of field names and values. 
- - Args: - exclude_none: If True, exclude fields with None values - exclude_defaults: Accepted for API compatibility but not implemented in fallback - """ - result = {} - for key, value in self.__dict__.items(): - if exclude_none and value is None: - continue - result[key] = value - return result - - # Fallback: ConfigDict that does nothing - def ConfigDict(**kwargs) -> dict: # pylint: disable=unused-argument,invalid-name - """Pydantic ConfigDict fallback when pydantic is not available.""" - return kwargs - - # Fallback: Field that does nothing - def Field(**kwargs) -> Any: # pylint: disable=unused-argument,invalid-name - """Pydantic Field fallback when pydantic is not available.""" - if "default_factory" in kwargs: - return kwargs["default_factory"]() - return kwargs.get("default") - - # Fallback: field_serializer decorator that does nothing - def field_serializer(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic field_serializer fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: model_serializer decorator that does nothing - def model_serializer(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic model_serializer fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: field_validator decorator that does nothing - def field_validator(*args, **kwargs) -> Callable[..., Any]: # pylint: disable=unused-argument,invalid-name - """Pydantic field_validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: computed_field decorator that does nothing - def computed_field(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic computed_field fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: AfterValidator that returns the function unchanged - def AfterValidator(func): 
# pylint: disable=invalid-name - """Pydantic AfterValidator fallback when pydantic is not available.""" - return func - - # Fallback: BeforeValidator that returns the function unchanged - def BeforeValidator(func): # pylint: disable=invalid-name - """Pydantic BeforeValidator fallback when pydantic is not available.""" - return func - - # Fallback: PydanticExperimentalWarning - PydanticExperimentalWarning = Warning - - # Fallback: StrictBool - StrictBool = bool - - # Fallback: SecretStr - SecretStr = str - - # Fallback: ValidationError - class ValidationError(Exception): - """ - Pydantic ValidationError fallback when pydantic is not available. - """ - - def __init__(self, message="A custom error occurred."): - self.message = message - super().__init__(self.message) - - def __str__(self): - return f"ValidationError: {self.message}" - - # Fallback: model_validator decorator that does nothing - def model_validator(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic model_validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: validator decorator that does nothing - def validator(*args, **kwargs): # pylint: disable=unused-argument - """Pydantic validator fallback when pydantic is not available.""" - - def decorator(func): - return func - - return decorator - - # Fallback: FieldSerializationInfo placeholder class that does nothing - class FieldSerializationInfo: - """Pydantic FieldSerializationInfo fallback when pydantic is not available.""" - - def __init__(self, **kwargs): - pass - - # Fallback: SerializationInfo placeholder class that does nothing - class SerializationInfo: - """Pydantic SerializationInfo fallback when pydantic is not available.""" - - def __init__(self, **kwargs): - pass - - else: - HAS_PYDANTIC = True # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name - -# Set HAS_PYDANTIC for when TYPE_CHECKING is True -if TYPE_CHECKING: 
- HAS_PYDANTIC = True # pylint: disable=invalid-name - PYDANTIC_IMPORT_ERROR = None # pylint: disable=invalid-name - -__all__ = [ - "AfterValidator", - "BaseModel", - "BeforeValidator", - "ConfigDict", - "Field", - "HAS_PYDANTIC", - "PYDANTIC_IMPORT_ERROR", - "PydanticExperimentalWarning", - "StrictBool", - "SecretStr", - "ValidationError", - "field_serializer", - "model_serializer", - "field_validator", - "model_validator", - "validator", - "computed_field", - "FieldSerializationInfo", - "SerializationInfo", -] diff --git a/tests/unit/module_utils/endpoints/test_base_model.py b/tests/unit/module_utils/endpoints/test_base_model.py index a14da9d8..ce9d1e8d 100644 --- a/tests/unit/module_utils/endpoints/test_base_model.py +++ b/tests/unit/module_utils/endpoints/test_base_model.py @@ -99,7 +99,6 @@ def test_base_model_00200(): with pytest.raises(TypeError, match=match): class _BadEndpoint(NDEndpointBaseModel): - @property def path(self) -> str: return "/api/v1/test/bad" @@ -132,7 +131,6 @@ def test_base_model_00300(): """ class _MiddleABC(NDEndpointBaseModel, ABC): - @property @abstractmethod def extra(self) -> str: @@ -182,7 +180,6 @@ def test_base_model_00310(): """ class _MiddleABC2(NDEndpointBaseModel, ABC): - @property @abstractmethod def extra(self) -> str: @@ -192,7 +189,6 @@ def extra(self) -> str: with pytest.raises(TypeError, match=match): class _BadConcreteFromMiddle(_MiddleABC2): - @property def path(self) -> str: return "/api/v1/test/bad-middle" @@ -229,7 +225,6 @@ def test_base_model_00400(): with pytest.raises(TypeError, match=r'Literal\["_ExampleEndpoint"\]') as exc_info: class _ExampleEndpoint(NDEndpointBaseModel): - @property def path(self) -> str: return "/api/v1/test/example" From 5985f50d922beec12118d441b3cebd36284dcdc1 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Fri, 13 Mar 2026 13:52:45 -0400 Subject: [PATCH 106/131] Model updates for bgp_asn and site_id --- .../nd_manage_fabric/manage_fabric_ibgp.py | 49 ++++++++++++++----- 1 file changed, 36 
insertions(+), 13 deletions(-) diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py index c4d93e27..a0baa78d 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py @@ -73,6 +73,13 @@ ``` """ +# Regex from OpenAPI schema: bgpAsn accepts plain integers (1-4294967295) and +# dotted four-byte ASN notation (1-65535).(0-65535) +_BGP_ASN_RE = re.compile( + r"^(([1-9]{1}[0-9]{0,8}|[1-3]{1}[0-9]{1,9}|[4]{1}([0-1]{1}[0-9]{8}|[2]{1}([0-8]{1}[0-9]{7}|[9]{1}([0-3]{1}[0-9]{6}|[4]{1}([0-8]{1}[0-9]{5}|[9]{1}([0-5]{1}[0-9]{4}|[6]{1}([0-6]{1}[0-9]{3}|[7]{1}([0-1]{1}[0-9]{2}|[2]{1}([0-8]{1}[0-9]{1}|[9]{1}[0-5]{1})))))))))|([1-5]\d{4}|[1-9]\d{0,3}|6[0-4]\d{3}|65[0-4]\d{2}|655[0-2]\d|6553[0-5])(\.([1-5]\d{4}|[1-9]\d{0,3}|6[0-4]\d{3}|65[0-4]\d{2}|655[0-2]\d|6553[0-5]|0))?)$" +) + + class LocationModel(NDNestedModel): """ # Summary @@ -535,7 +542,7 @@ class VxlanIbgpManagementModel(NDNestedModel): type: Literal[FabricTypeEnum.VXLAN_IBGP] = Field(description="Fabric management type", default=FabricTypeEnum.VXLAN_IBGP) # Core iBGP Configuration - bgp_asn: str = Field(alias="bgpAsn", description="BGP Autonomous System Number") + bgp_asn: str = Field(alias="bgpAsn", description="BGP Autonomous System Number 1-4294967295 | 1-65535[.0-65535]") site_id: Optional[str] = Field(alias="siteId", description="Site identifier for the fabric", default="") # Missing Fields @@ -1134,17 +1141,21 @@ def validate_bgp_asn(cls, value: str) -> str: Validate BGP ASN format and range. 
- ## Raises + ## Description - - `ValueError` - If ASN is not numeric or outside valid range (1-4294967295) - """ - if not value.isdigit(): - raise ValueError(f"BGP ASN must be numeric, got: {value}") + Accepts either a plain integer ASN (1-4294967295) or dotted four-byte + ASN notation in the form ``MMMM.NNNN`` where both parts are in the + range 1-65535 / 0-65535 respectively. - asn_int = int(value) - if not (1 <= asn_int <= 4294967295): - raise ValueError(f"BGP ASN must be between 1 and 4294967295, got: {asn_int}") + ## Raises + - `ValueError` - If the value does not match the expected ASN format + """ + if not _BGP_ASN_RE.match(value): + raise ValueError( + f"Invalid BGP ASN '{value}'. " + "Expected a plain integer (1-4294967295) or dotted notation (1-65535.0-65535)." + ) return value @field_validator("site_id") @@ -1159,12 +1170,17 @@ def validate_site_id(cls, value: str) -> str: - `ValueError` - If site ID is not numeric or outside valid range """ + + # If value is empty string (default), skip validation (will be set to BGP ASN later if still empty) + if value == "": + return value + if not value.isdigit(): raise ValueError(f"Site ID must be numeric, got: {value}") site_id_int = int(value) - if not (1 <= site_id_int <= 65535): - raise ValueError(f"Site ID must be between 1 and 65535, got: {site_id_int}") + if not (1 <= site_id_int <= 281474976710655): + raise ValueError(f"Site ID must be between 1 and 281474976710655, got: {site_id_int}") return value @@ -1278,9 +1294,16 @@ def validate_fabric_consistency(self) -> 'FabricModel': if self.management is not None: self.management.name = self.name - # Propgate BGP ASN to Site ID management model if not set + # Propagate BGP ASN to Site ID management model if not set if self.management is not None and self.management.site_id == "": - self.management.site_id = self.management.bgp_asn # Default site ID to BGP ASN if not provided + bgp_asn = self.management.bgp_asn + if "." 
in bgp_asn: + # asdot notation (High.Low) → convert to asplain decimal: (High × 65536) + Low + high, low = bgp_asn.split(".") + self.management.site_id = str(int(high) * 65536 + int(low)) + else: + # Already plain decimal + self.management.site_id = bgp_asn # Validate telemetry consistency if self.telemetry_collection and self.telemetry_settings is None: From 58f78dff6fa9ac3b60d88d1bf8324915ae33cfed Mon Sep 17 00:00:00 2001 From: mwiebe Date: Fri, 13 Mar 2026 15:06:30 -0400 Subject: [PATCH 107/131] Update model defaults, enums and tests --- .../models/nd_manage_fabric/enums.py | 87 +++++++++++++++++++ .../nd_manage_fabric/manage_fabric_ibgp.py | 49 ++++++++--- .../nd_manage_fabric/tasks/fabric_ibgp.yaml | 12 +-- .../targets/nd_manage_fabric/vars/main.yaml | 2 +- 4 files changed, 132 insertions(+), 18 deletions(-) diff --git a/plugins/module_utils/models/nd_manage_fabric/enums.py b/plugins/module_utils/models/nd_manage_fabric/enums.py index aa084802..1e19ee92 100644 --- a/plugins/module_utils/models/nd_manage_fabric/enums.py +++ b/plugins/module_utils/models/nd_manage_fabric/enums.py @@ -114,3 +114,90 @@ class LinkStateRoutingProtocolEnum(str, Enum): OSPF = "ospf" ISIS = "isis" + + +class CoppPolicyEnum(str, Enum): + """ + # Summary + + Enumeration for CoPP policy options. + """ + + DENSE = "dense" + LENIENT = "lenient" + MODERATE = "moderate" + STRICT = "strict" + MANUAL = "manual" + + +class FabricInterfaceTypeEnum(str, Enum): + """ + # Summary + + Enumeration for fabric interface types. + """ + + P2P = "p2p" + UNNUMBERED = "unNumbered" + + +class GreenfieldDebugFlagEnum(str, Enum): + """ + # Summary + + Enumeration for greenfield debug flag. + """ + + ENABLE = "enable" + DISABLE = "disable" + + +class IsisLevelEnum(str, Enum): + """ + # Summary + + Enumeration for IS-IS levels. + """ + + LEVEL_1 = "level-1" + LEVEL_2 = "level-2" + + +class SecurityGroupStatusEnum(str, Enum): + """ + # Summary + + Enumeration for security group status. 
+ """ + + ENABLED = "enabled" + ENABLED_STRICT = "enabledStrict" + ENABLED_LOOSE = "enabledLoose" + ENABLE_PENDING = "enablePending" + ENABLE_PENDING_STRICT = "enablePendingStrict" + ENABLE_PENDING_LOOSE = "enablePendingLoose" + DISABLE_PENDING = "disablePending" + DISABLED = "disabled" + + +class StpRootOptionEnum(str, Enum): + """ + # Summary + + Enumeration for STP root options. + """ + + RPVST_PLUS = "rpvst+" + MST = "mst" + UNMANAGED = "unmanaged" + + +class VpcPeerKeepAliveOptionEnum(str, Enum): + """ + # Summary + + Enumeration for vPC peer keep-alive options. + """ + + LOOPBACK = "loopback" + MANAGEMENT = "management" diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py index a0baa78d..7a43ef90 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py @@ -31,6 +31,13 @@ OverlayModeEnum, ReplicationModeEnum, LinkStateRoutingProtocolEnum, + CoppPolicyEnum, + FabricInterfaceTypeEnum, + GreenfieldDebugFlagEnum, + IsisLevelEnum, + SecurityGroupStatusEnum, + StpRootOptionEnum, + VpcPeerKeepAliveOptionEnum, ) @@ -621,7 +628,7 @@ class VxlanIbgpManagementModel(NDNestedModel): default=LinkStateRoutingProtocolEnum.OSPF ) ospf_area_id: str = Field(alias="ospfAreaId", description="OSPF area ID", default="0.0.0.0") - fabric_interface_type: str = Field(alias="fabricInterfaceType", description="Fabric interface type", default="p2p") + fabric_interface_type: FabricInterfaceTypeEnum = Field(alias="fabricInterfaceType", description="Fabric interface type", default=FabricInterfaceTypeEnum.P2P) # Advanced Features target_subnet_mask: int = Field(alias="targetSubnetMask", description="Target subnet mask", ge=24, le=31, default=30) @@ -647,10 +654,10 @@ class VxlanIbgpManagementModel(NDNestedModel): description="Enable native VLAN on vPC peer link", default=False ) - 
vpc_peer_keep_alive_option: str = Field( + vpc_peer_keep_alive_option: VpcPeerKeepAliveOptionEnum = Field( alias="vpcPeerKeepAliveOption", description="vPC peer keep-alive option", - default="loopback" + default=VpcPeerKeepAliveOptionEnum.MANAGEMENT ) vpc_auto_recovery_timer: int = Field( alias="vpcAutoRecoveryTimer", @@ -714,7 +721,7 @@ class VxlanIbgpManagementModel(NDNestedModel): # Management Settings nxapi: bool = Field(description="Enable NX-API", default=False) - nxapi_http: bool = Field(alias="nxapiHttp", description="Enable NX-API HTTP", default=True) + nxapi_http: bool = Field(alias="nxapiHttp", description="Enable NX-API HTTP", default=False) nxapi_https_port: int = Field(alias="nxapiHttpsPort", description="NX-API HTTPS port", ge=1, le=65535, default=443) nxapi_http_port: int = Field(alias="nxapiHttpPort", description="NX-API HTTP port", ge=1, le=65535, default=80) @@ -906,7 +913,7 @@ class VxlanIbgpManagementModel(NDNestedModel): ospf_authentication_key: str = Field(alias="ospfAuthenticationKey", description="OSPF authentication key", default="") # IS-IS - isis_level: str = Field(alias="isisLevel", description="IS-IS level", default="level-2") + isis_level: IsisLevelEnum = Field(alias="isisLevel", description="IS-IS level", default=IsisLevelEnum.LEVEL_2) isis_area_number: str = Field(alias="isisAreaNumber", description="IS-IS area number", default="0001") isis_point_to_point: bool = Field(alias="isisPointToPoint", description="IS-IS point-to-point", default=True) isis_authentication: bool = Field(alias="isisAuthentication", description="Enable IS-IS authentication", default=False) @@ -985,7 +992,7 @@ class VxlanIbgpManagementModel(NDNestedModel): security_group_tag_preprovision: bool = Field( alias="securityGroupTagPreprovision", description="Enable SGT preprovision", default=False ) - security_group_status: str = Field(alias="securityGroupStatus", description="Security group status", default="enabled") + security_group_status: SecurityGroupStatusEnum 
= Field(alias="securityGroupStatus", description="Security group status", default=SecurityGroupStatusEnum.DISABLED) # Queuing / QoS default_queuing_policy: bool = Field(alias="defaultQueuingPolicy", description="Enable default queuing policy", default=False) @@ -1025,9 +1032,9 @@ class VxlanIbgpManagementModel(NDNestedModel): ptp_vlan_id: int = Field(alias="ptpVlanId", description="PTP VLAN ID", default=2) # STP - stp_root_option: str = Field(alias="stpRootOption", description="STP root option", default="mst") - stp_vlan_range: str = Field(alias="stpVlanRange", description="STP VLAN range", default="") - mst_instance_range: str = Field(alias="mstInstanceRange", description="MST instance range", default="0-3,5,7-9") + stp_root_option: StpRootOptionEnum = Field(alias="stpRootOption", description="STP root option", default=StpRootOptionEnum.UNMANAGED) + stp_vlan_range: str = Field(alias="stpVlanRange", description="STP VLAN range", default="1-3967") + mst_instance_range: str = Field(alias="mstInstanceRange", description="MST instance range", default="0") stp_bridge_priority: int = Field(alias="stpBridgePriority", description="STP bridge priority", default=0) # MPLS Handoff @@ -1097,7 +1104,7 @@ class VxlanIbgpManagementModel(NDNestedModel): anycast_border_gateway_advertise_physical_ip: bool = Field( alias="anycastBorderGatewayAdvertisePhysicalIp", description="Anycast border gateway advertise physical IP", default=False ) - greenfield_debug_flag: str = Field(alias="greenfieldDebugFlag", description="Greenfield debug flag", default="enable") + greenfield_debug_flag: GreenfieldDebugFlagEnum = Field(alias="greenfieldDebugFlag", description="Greenfield debug flag", default=GreenfieldDebugFlagEnum.DISABLE) interface_statistics_load_interval: int = Field( alias="interfaceStatisticsLoadInterval", description="Interface statistics load interval", default=10 ) @@ -1116,7 +1123,7 @@ class VxlanIbgpManagementModel(NDNestedModel): alias="strictConfigComplianceMode", 
description="Enable strict config compliance mode", default=False ) advanced_ssh_option: bool = Field(alias="advancedSshOption", description="Enable advanced SSH option", default=False) - copp_policy: str = Field(alias="coppPolicy", description="CoPP policy", default="dense") + copp_policy: CoppPolicyEnum = Field(alias="coppPolicy", description="CoPP policy", default=CoppPolicyEnum.STRICT) power_redundancy_mode: str = Field(alias="powerRedundancyMode", description="Power redundancy mode", default="redundant") host_interface_admin_state: bool = Field( alias="hostInterfaceAdminState", description="Host interface admin state", default=True @@ -1133,6 +1140,26 @@ class VxlanIbgpManagementModel(NDNestedModel): alias="allowSmartSwitchOnboarding", description="Allow smart switch onboarding", default=False ) + # Hypershield / Connectivity + connectivity_domain_name: Optional[str] = Field( + alias="connectivityDomainName", description="Domain name to connect to Hypershield", default=None + ) + hypershield_connectivity_proxy_server: Optional[str] = Field( + alias="hypershieldConnectivityProxyServer", + description="IPv4 address, IPv6 address, or DNS name of the proxy server for Hypershield communication", + default=None + ) + hypershield_connectivity_proxy_server_port: Optional[int] = Field( + alias="hypershieldConnectivityProxyServerPort", + description="Proxy port number for communication with Hypershield", + default=None + ) + hypershield_connectivity_source_intf: Optional[str] = Field( + alias="hypershieldConnectivitySourceIntf", + description="Loopback interface on smart switch for communication with Hypershield", + default=None + ) + @field_validator("bgp_asn") @classmethod def validate_bgp_asn(cls, value: str) -> str: diff --git a/tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml b/tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml index e03294e1..30b77c59 100644 --- a/tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml +++ 
b/tests/integration/targets/nd_manage_fabric/tasks/fabric_ibgp.yaml @@ -739,12 +739,12 @@ success_msg: "✓ SNMP Trap correctly enabled" tags: [test_replaced, test_replaced_validation] -- name: "VALIDATION 2: Verify Greenfield Debug Flag was enabled" +- name: "VALIDATION 2: Verify Greenfield Debug Flag was disabled" assert: that: - - replaced_fabric_config.management.greenfieldDebugFlag == "enable" - fail_msg: "Greenfield Debug Flag validation failed. Expected: enable, Actual: {{ replaced_fabric_config.management.greenfieldDebugFlag }}" - success_msg: "✓ Greenfield Debug Flag correctly enabled" + - replaced_fabric_config.management.greenfieldDebugFlag == "disable" + fail_msg: "Greenfield Debug Flag validation failed. Expected: disable, Actual: {{ replaced_fabric_config.management.greenfieldDebugFlag }}" + success_msg: "✓ Greenfield Debug Flag correctly disabled" tags: [test_replaced, test_replaced_validation] - name: "VALIDATION 2: Verify NXAPI HTTP was enabled" @@ -1138,9 +1138,9 @@ - name: "TEST SUMMARY: Display test results" debug: msg: | - ======================================== + ======================================================== TEST SUMMARY for cisco.nd.nd_manage_fabric_ibgp module: - ======================================== + ======================================================== ✓ TEST 1: STATE MERGED - Create fabric: {{ 'PASSED' if merged_result_1 is changed else 'FAILED' }} - Idempotency: {{ 'PASSED' if merged_result_2 is not changed else 'FAILED' }} diff --git a/tests/integration/targets/nd_manage_fabric/vars/main.yaml b/tests/integration/targets/nd_manage_fabric/vars/main.yaml index 11db15a5..d673cb7d 100644 --- a/tests/integration/targets/nd_manage_fabric/vars/main.yaml +++ b/tests/integration/targets/nd_manage_fabric/vars/main.yaml @@ -16,7 +16,7 @@ common_fabric_config: telemetry_collection: false management: type: vxlanIbgp - bgp_asn: "65001" + bgp_asn: "65001.55" site_id: "65001" target_subnet_mask: 30 anycast_gateway_mac: "2020.0000.00aa" 
From 39c443e490616be945b9978891f2ab5d54204390 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Fri, 13 Mar 2026 17:34:11 -0400 Subject: [PATCH 108/131] Add ebgp and external fabric models --- .../models/nd_manage_fabric/enums.py | 48 + .../nd_manage_fabric/manage_fabric_ebgp.py | 829 ++++++++++++++++++ .../manage_fabric_external.py | 522 +++++++++++ .../nd_manage_fabric/manage_fabric_ibgp.py | 119 +-- 4 files changed, 1407 insertions(+), 111 deletions(-) create mode 100644 plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py create mode 100644 plugins/module_utils/models/nd_manage_fabric/manage_fabric_external.py diff --git a/plugins/module_utils/models/nd_manage_fabric/enums.py b/plugins/module_utils/models/nd_manage_fabric/enums.py index 1e19ee92..4004b2fb 100644 --- a/plugins/module_utils/models/nd_manage_fabric/enums.py +++ b/plugins/module_utils/models/nd_manage_fabric/enums.py @@ -31,9 +31,12 @@ class FabricTypeEnum(str, Enum): ## Values - `VXLAN_IBGP` - VXLAN fabric with iBGP overlay + - `VXLAN_EBGP` - VXLAN fabric with eBGP overlay """ VXLAN_IBGP = "vxlanIbgp" + VXLAN_EBGP = "vxlanEbgp" + VXLAN_EXTERNAL = "vxlanExternal" class AlertSuspendEnum(str, Enum): @@ -201,3 +204,48 @@ class VpcPeerKeepAliveOptionEnum(str, Enum): LOOPBACK = "loopback" MANAGEMENT = "management" + + +class DhcpProtocolVersionEnum(str, Enum): + """ + # Summary + + Enumeration for DHCP protocol version options. + """ + + DHCPV4 = "dhcpv4" + DHCPV6 = "dhcpv6" + + +class PowerRedundancyModeEnum(str, Enum): + """ + # Summary + + Enumeration for power redundancy mode options. + """ + + REDUNDANT = "redundant" + COMBINED = "combined" + INPUT_SRC_REDUNDANT = "inputSrcRedundant" + + +class BgpAsModeEnum(str, Enum): + """ + # Summary + + Enumeration for eBGP BGP AS mode options. + """ + + MULTI_AS = "multiAS" + SAME_TIER_AS = "sameTierAS" + + +class FirstHopRedundancyProtocolEnum(str, Enum): + """ + # Summary + + Enumeration for first-hop redundancy protocol options. 
+ """ + + HSRP = "hsrp" + VRRP = "vrrp" diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py new file mode 100644 index 00000000..7098f656 --- /dev/null +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py @@ -0,0 +1,829 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import re +from typing import List, Dict, Any, Optional, ClassVar, Literal + +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( + BaseModel, + ConfigDict, + Field, + field_validator, + model_validator, +) +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.enums import ( + FabricTypeEnum, + AlertSuspendEnum, + LicenseTierEnum, + OverlayModeEnum, + ReplicationModeEnum, + CoppPolicyEnum, + GreenfieldDebugFlagEnum, + VpcPeerKeepAliveOptionEnum, + BgpAsModeEnum, + FirstHopRedundancyProtocolEnum, +) +# Re-use shared nested models from the iBGP module +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import ( + LocationModel, + NetflowExporterModel, + NetflowRecordModel, + NetflowMonitorModel, + NetflowSettingsModel, + BootstrapSubnetModel, + TelemetryFlowCollectionModel, + TelemetryMicroburstModel, + TelemetryAnalysisSettingsModel, + TelemetryEnergyManagementModel, + TelemetryNasExportSettingsModel, + TelemetryNasModel, + TelemetrySettingsModel, + ExternalStreamingSettingsModel, +) + + +""" +# Comprehensive Pydantic models for 
eBGP VXLAN fabric management via Nexus Dashboard + +This module provides Pydantic models for creating, updating, and deleting +eBGP VXLAN fabrics through the Nexus Dashboard Fabric Controller (NDFC) API. + +## Models Overview + +- `VxlanEbgpManagementModel` - eBGP VXLAN specific management settings +- `FabricEbgpModel` - Complete fabric creation model for eBGP fabrics +- `FabricEbgpDeleteModel` - Fabric deletion model + +## Usage + +```python +# Create a new eBGP VXLAN fabric +fabric_data = { + "name": "MyEbgpFabric", + "management": { + "type": "vxlanEbgp", + "bgpAsnAutoAllocation": True, + "bgpAsnRange": "65000-65535" + } +} +fabric = FabricEbgpModel(**fabric_data) +``` +""" + +# Regex from OpenAPI schema: bgpAsn accepts plain integers (1-4294967295) and +# dotted four-byte ASN notation (1-65535).(0-65535) +_BGP_ASN_RE = re.compile( + r"^(([1-9]{1}[0-9]{0,8}|[1-3]{1}[0-9]{1,9}|[4]{1}([0-1]{1}[0-9]{8}|[2]{1}([0-8]{1}[0-9]{7}|[9]{1}([0-3]{1}[0-9]{6}|[4]{1}([0-8]{1}[0-9]{5}|[9]{1}([0-5]{1}[0-9]{4}|[6]{1}([0-6]{1}[0-9]{3}|[7]{1}([0-1]{1}[0-9]{2}|[2]{1}([0-8]{1}[0-9]{1}|[9]{1}[0-5]{1})))))))))|([1-5]\d{4}|[1-9]\d{0,3}|6[0-4]\d{3}|65[0-4]\d{2}|655[0-2]\d|6553[0-5])(\.([1-5]\d{4}|[1-9]\d{0,3}|6[0-4]\d{3}|65[0-4]\d{2}|655[0-2]\d|6553[0-5]|0))?)$" +) + + +class VxlanEbgpManagementModel(NDNestedModel): + """ + # Summary + + Comprehensive eBGP VXLAN fabric management configuration. + + This model contains all settings specific to eBGP VXLAN fabric types including + overlay configuration, BGP AS allocation, multicast settings, and advanced features. 
+ + ## Raises + + - `ValueError` - If BGP ASN, VLAN ranges, or IP ranges are invalid + - `TypeError` - If required string fields are not provided + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + # Fabric Type (required for discriminated union) + type: Literal[FabricTypeEnum.VXLAN_EBGP] = Field(description="Fabric management type", default=FabricTypeEnum.VXLAN_EBGP) + + # Core eBGP Configuration + bgp_asn: Optional[str] = Field( + alias="bgpAsn", + description="BGP Autonomous System Number 1-4294967295 | 1-65535[.0-65535]. Optional when bgpAsnAutoAllocation is True.", + default=None + ) + site_id: Optional[str] = Field(alias="siteId", description="Site identifier for the fabric. Defaults to Fabric ASN.", default="") + bgp_as_mode: BgpAsModeEnum = Field( + alias="bgpAsMode", + description="BGP AS mode: multiAS assigns unique AS per leaf tier, sameTierAS assigns same AS within a tier", + default=BgpAsModeEnum.MULTI_AS + ) + bgp_asn_auto_allocation: bool = Field( + alias="bgpAsnAutoAllocation", + description="Enable automatic BGP ASN allocation from bgpAsnRange", + default=True + ) + bgp_asn_range: Optional[str] = Field( + alias="bgpAsnRange", + description="BGP ASN range for automatic allocation (e.g., '65000-65535')", + default=None + ) + bgp_allow_as_in_num: int = Field( + alias="bgpAllowAsInNum", + description="Number of times BGP allows AS-path that contains local AS", + default=1 + ) + bgp_max_path: int = Field(alias="bgpMaxPath", description="Maximum number of BGP equal-cost paths", default=4) + bgp_underlay_failure_protect: bool = Field( + alias="bgpUnderlayFailureProtect", + description="Enable BGP underlay failure protection", + default=False + ) + auto_configure_ebgp_evpn_peering: bool = Field( + alias="autoConfigureEbgpEvpnPeering", + description="Automatically configure eBGP EVPN peering between spine and leaf", + default=True + ) + allow_leaf_same_as: bool = 
Field( + alias="allowLeafSameAs", + description="Allow leaf switches to have the same BGP AS number", + default=False + ) + assign_ipv4_to_loopback0: bool = Field( + alias="assignIpv4ToLoopback0", + description="Assign IPv4 address to loopback0 interface", + default=True + ) + evpn: bool = Field(description="Enable EVPN control plane", default=True) + route_map_tag: int = Field(alias="routeMapTag", description="Route map tag for redistribution", default=12345) + disable_route_map_tag: bool = Field( + alias="disableRouteMapTag", + description="Disable route map tag usage", + default=False + ) + leaf_bgp_as: Optional[str] = Field( + alias="leafBgpAs", + description="BGP AS number for leaf switches (used with sameTierAS mode)", + default=None + ) + border_bgp_as: Optional[str] = Field( + alias="borderBgpAs", + description="BGP AS number for border switches", + default=None + ) + super_spine_bgp_as: Optional[str] = Field( + alias="superSpineBgpAs", + description="BGP AS number for super-spine switches", + default=None + ) + + # Propagated from FabricEbgpModel + name: Optional[str] = Field(description="Fabric name", min_length=1, max_length=64, default="") + + # Network Addressing + bgp_loopback_id: int = Field(alias="bgpLoopbackId", description="BGP loopback interface ID", ge=0, le=1023, default=0) + bgp_loopback_ip_range: str = Field(alias="bgpLoopbackIpRange", description="BGP loopback IP range", default="10.2.0.0/22") + bgp_loopback_ipv6_range: str = Field(alias="bgpLoopbackIpv6Range", description="BGP loopback IPv6 range", default="fd00::a02:0/119") + nve_loopback_id: int = Field(alias="nveLoopbackId", description="NVE loopback interface ID", ge=0, le=1023, default=1) + nve_loopback_ip_range: str = Field(alias="nveLoopbackIpRange", description="NVE loopback IP range", default="10.3.0.0/22") + nve_loopback_ipv6_range: str = Field(alias="nveLoopbackIpv6Range", description="NVE loopback IPv6 range", default="fd00::a03:0/118") + anycast_loopback_id: int = 
Field(alias="anycastLoopbackId", description="Anycast loopback ID", default=10) + anycast_rendezvous_point_ip_range: str = Field( + alias="anycastRendezvousPointIpRange", + description="Anycast RP IP range", + default="10.254.254.0/24" + ) + ipv6_anycast_rendezvous_point_ip_range: str = Field( + alias="ipv6AnycastRendezvousPointIpRange", + description="IPv6 anycast RP IP range", + default="fd00::254:254:0/118" + ) + intra_fabric_subnet_range: str = Field( + alias="intraFabricSubnetRange", + description="Intra-fabric subnet range", + default="10.4.0.0/16" + ) + + # VLAN and VNI Ranges + l2_vni_range: str = Field(alias="l2VniRange", description="Layer 2 VNI range", default="30000-49000") + l3_vni_range: str = Field(alias="l3VniRange", description="Layer 3 VNI range", default="50000-59000") + network_vlan_range: str = Field(alias="networkVlanRange", description="Network VLAN range", default="2300-2999") + vrf_vlan_range: str = Field(alias="vrfVlanRange", description="VRF VLAN range", default="2000-2299") + + # Overlay Configuration + overlay_mode: OverlayModeEnum = Field(alias="overlayMode", description="Overlay configuration mode", default=OverlayModeEnum.CLI) + replication_mode: ReplicationModeEnum = Field( + alias="replicationMode", + description="Multicast replication mode", + default=ReplicationModeEnum.MULTICAST + ) + multicast_group_subnet: str = Field(alias="multicastGroupSubnet", description="Multicast group subnet", default="239.1.1.0/25") + auto_generate_multicast_group_address: bool = Field( + alias="autoGenerateMulticastGroupAddress", + description="Auto-generate multicast group addresses", + default=False + ) + underlay_multicast_group_address_limit: int = Field( + alias="underlayMulticastGroupAddressLimit", + description="Underlay multicast group address limit", + ge=1, + le=255, + default=128 + ) + tenant_routed_multicast: bool = Field(alias="tenantRoutedMulticast", description="Enable tenant routed multicast", default=False) + 
tenant_routed_multicast_ipv6: bool = Field( + alias="tenantRoutedMulticastIpv6", + description="Enable tenant routed multicast IPv6", + default=False + ) + first_hop_redundancy_protocol: FirstHopRedundancyProtocolEnum = Field( + alias="firstHopRedundancyProtocol", + description="First-hop redundancy protocol for tenant networks", + default=FirstHopRedundancyProtocolEnum.HSRP + ) + + # Multicast / Rendezvous Point + rendezvous_point_count: int = Field( + alias="rendezvousPointCount", + description="Number of spines acting as Rendezvous-Points", + default=2 + ) + rendezvous_point_loopback_id: int = Field(alias="rendezvousPointLoopbackId", description="RP loopback ID", default=254) + rendezvous_point_mode: str = Field(alias="rendezvousPointMode", description="Multicast RP mode", default="asm") + phantom_rendezvous_point_loopback_id1: int = Field(alias="phantomRendezvousPointLoopbackId1", description="Phantom RP loopback ID 1", default=2) + phantom_rendezvous_point_loopback_id2: int = Field(alias="phantomRendezvousPointLoopbackId2", description="Phantom RP loopback ID 2", default=3) + phantom_rendezvous_point_loopback_id3: int = Field(alias="phantomRendezvousPointLoopbackId3", description="Phantom RP loopback ID 3", default=4) + phantom_rendezvous_point_loopback_id4: int = Field(alias="phantomRendezvousPointLoopbackId4", description="Phantom RP loopback ID 4", default=5) + l3vni_multicast_group: str = Field(alias="l3vniMulticastGroup", description="Default L3 VNI multicast group IPv4 address", default="239.1.1.0") + l3_vni_ipv6_multicast_group: str = Field(alias="l3VniIpv6MulticastGroup", description="Default L3 VNI multicast group IPv6 address", default="ff1e::") + ipv6_multicast_group_subnet: str = Field(alias="ipv6MulticastGroupSubnet", description="IPv6 multicast group subnet", default="ff1e::/121") + mvpn_vrf_route_import_id: bool = Field(alias="mvpnVrfRouteImportId", description="Enable MVPN VRF route import ID", default=True) + mvpn_vrf_route_import_id_range: 
Optional[str] = Field( + alias="mvpnVrfRouteImportIdRange", + description="MVPN VRF route import ID range", + default=None + ) + vrf_route_import_id_reallocation: bool = Field( + alias="vrfRouteImportIdReallocation", + description="Enable VRF route import ID reallocation", + default=False + ) + + # Advanced Features + anycast_gateway_mac: str = Field( + alias="anycastGatewayMac", + description="Anycast gateway MAC address", + default="2020.0000.00aa" + ) + target_subnet_mask: int = Field(alias="targetSubnetMask", description="Target subnet mask", ge=24, le=31, default=30) + fabric_mtu: int = Field(alias="fabricMtu", description="Fabric MTU size", ge=1500, le=9216, default=9216) + l2_host_interface_mtu: int = Field(alias="l2HostInterfaceMtu", description="L2 host interface MTU", ge=1500, le=9216, default=9216) + l3_vni_no_vlan_default_option: bool = Field( + alias="l3VniNoVlanDefaultOption", + description="L3 VNI configuration without VLAN", + default=False + ) + underlay_ipv6: bool = Field(alias="underlayIpv6", description="Enable IPv6 underlay", default=False) + static_underlay_ip_allocation: bool = Field( + alias="staticUnderlayIpAllocation", + description="Disable dynamic underlay IP address allocation", + default=False + ) + anycast_border_gateway_advertise_physical_ip: bool = Field( + alias="anycastBorderGatewayAdvertisePhysicalIp", + description="Advertise Anycast Border Gateway PIP as VTEP", + default=False + ) + + # VPC Configuration + vpc_domain_id_range: str = Field(alias="vpcDomainIdRange", description="vPC domain ID range", default="1-1000") + vpc_peer_link_vlan: str = Field(alias="vpcPeerLinkVlan", description="vPC peer link VLAN", default="3600") + vpc_peer_link_enable_native_vlan: bool = Field( + alias="vpcPeerLinkEnableNativeVlan", + description="Enable native VLAN on vPC peer link", + default=False + ) + vpc_peer_keep_alive_option: VpcPeerKeepAliveOptionEnum = Field( + alias="vpcPeerKeepAliveOption", + description="vPC peer keep-alive option", + 
default=VpcPeerKeepAliveOptionEnum.MANAGEMENT + ) + vpc_auto_recovery_timer: int = Field( + alias="vpcAutoRecoveryTimer", + description="vPC auto recovery timer", + ge=240, + le=3600, + default=360 + ) + vpc_delay_restore_timer: int = Field( + alias="vpcDelayRestoreTimer", + description="vPC delay restore timer", + ge=1, + le=3600, + default=150 + ) + vpc_peer_link_port_channel_id: str = Field(alias="vpcPeerLinkPortChannelId", description="vPC peer link port-channel ID", default="500") + vpc_ipv6_neighbor_discovery_sync: bool = Field( + alias="vpcIpv6NeighborDiscoverySync", + description="Enable vPC IPv6 ND sync", + default=True + ) + vpc_layer3_peer_router: bool = Field(alias="vpcLayer3PeerRouter", description="Enable vPC layer-3 peer router", default=True) + vpc_tor_delay_restore_timer: int = Field(alias="vpcTorDelayRestoreTimer", description="vPC TOR delay restore timer", default=30) + fabric_vpc_domain_id: bool = Field(alias="fabricVpcDomainId", description="Enable fabric vPC domain ID", default=False) + shared_vpc_domain_id: int = Field(alias="sharedVpcDomainId", description="Shared vPC domain ID", default=1) + fabric_vpc_qos: bool = Field(alias="fabricVpcQos", description="Enable fabric vPC QoS", default=False) + fabric_vpc_qos_policy_name: str = Field( + alias="fabricVpcQosPolicyName", + description="Fabric vPC QoS policy name", + default="spine_qos_for_fabric_vpc_peering" + ) + enable_peer_switch: bool = Field(alias="enablePeerSwitch", description="Enable vPC peer-switch feature on ToR switches", default=False) + + # Per-VRF Loopback + per_vrf_loopback_auto_provision: bool = Field( + alias="perVrfLoopbackAutoProvision", + description="Auto provision IPv4 loopback on VRF attachment", + default=False + ) + per_vrf_loopback_ip_range: str = Field( + alias="perVrfLoopbackIpRange", + description="Per-VRF loopback IPv4 prefix pool", + default="10.5.0.0/22" + ) + per_vrf_loopback_auto_provision_ipv6: bool = Field( + alias="perVrfLoopbackAutoProvisionIpv6", + 
description="Auto provision IPv6 loopback on VRF attachment", + default=False + ) + per_vrf_loopback_ipv6_range: str = Field( + alias="perVrfLoopbackIpv6Range", + description="Per-VRF loopback IPv6 prefix pool", + default="fd00::a05:0/112" + ) + + # Templates + vrf_template: str = Field(alias="vrfTemplate", description="VRF template", default="Default_VRF_Universal") + network_template: str = Field(alias="networkTemplate", description="Network template", default="Default_Network_Universal") + vrf_extension_template: str = Field( + alias="vrfExtensionTemplate", + description="VRF extension template", + default="Default_VRF_Extension_Universal" + ) + network_extension_template: str = Field( + alias="networkExtensionTemplate", + description="Network extension template", + default="Default_Network_Extension_Universal" + ) + + # Optional Advanced Settings + performance_monitoring: bool = Field(alias="performanceMonitoring", description="Enable performance monitoring", default=False) + tenant_dhcp: bool = Field(alias="tenantDhcp", description="Enable tenant DHCP", default=True) + advertise_physical_ip: bool = Field(alias="advertisePhysicalIp", description="Advertise physical IP as VTEP", default=False) + advertise_physical_ip_on_border: bool = Field( + alias="advertisePhysicalIpOnBorder", + description="Advertise physical IP on border switches only", + default=True + ) + + # Protocol Settings — BGP + bgp_authentication: bool = Field(alias="bgpAuthentication", description="Enable BGP authentication", default=False) + bgp_authentication_key_type: str = Field( + alias="bgpAuthenticationKeyType", + description="BGP authentication key type", + default="3des" + ) + bgp_authentication_key: str = Field(alias="bgpAuthenticationKey", description="BGP authentication key", default="") + + # Protocol Settings — BFD + bfd: bool = Field(description="Enable BFD", default=False) + bfd_ibgp: bool = Field(alias="bfdIbgp", description="Enable BFD for iBGP", default=False) + 
bfd_authentication: bool = Field(alias="bfdAuthentication", description="Enable BFD authentication", default=False) + bfd_authentication_key_id: int = Field(alias="bfdAuthenticationKeyId", description="BFD authentication key ID", default=100) + bfd_authentication_key: str = Field(alias="bfdAuthenticationKey", description="BFD authentication key", default="") + + # Protocol Settings — PIM + pim_hello_authentication: bool = Field(alias="pimHelloAuthentication", description="Enable PIM hello authentication", default=False) + pim_hello_authentication_key: str = Field(alias="pimHelloAuthenticationKey", description="PIM hello authentication key", default="") + + # Management Settings + nxapi: bool = Field(description="Enable NX-API", default=False) + nxapi_http: bool = Field(alias="nxapiHttp", description="Enable NX-API HTTP", default=False) + nxapi_https_port: int = Field(alias="nxapiHttpsPort", description="NX-API HTTPS port", ge=1, le=65535, default=443) + nxapi_http_port: int = Field(alias="nxapiHttpPort", description="NX-API HTTP port", ge=1, le=65535, default=80) + + # Bootstrap / Day-0 / DHCP + day0_bootstrap: bool = Field(alias="day0Bootstrap", description="Enable day-0 bootstrap", default=False) + bootstrap_subnet_collection: List[BootstrapSubnetModel] = Field( + alias="bootstrapSubnetCollection", + description="Bootstrap subnet collection", + default_factory=list + ) + local_dhcp_server: bool = Field(alias="localDhcpServer", description="Enable local DHCP server", default=False) + dhcp_protocol_version: str = Field(alias="dhcpProtocolVersion", description="DHCP protocol version", default="dhcpv4") + dhcp_start_address: str = Field(alias="dhcpStartAddress", description="DHCP start address", default="") + dhcp_end_address: str = Field(alias="dhcpEndAddress", description="DHCP end address", default="") + management_gateway: str = Field(alias="managementGateway", description="Management gateway", default="") + management_ipv4_prefix: int = 
Field(alias="managementIpv4Prefix", description="Management IPv4 prefix length", default=24) + management_ipv6_prefix: int = Field(alias="managementIpv6Prefix", description="Management IPv6 prefix length", default=64) + + # Netflow Settings + netflow_settings: NetflowSettingsModel = Field( + alias="netflowSettings", + description="Netflow configuration", + default_factory=NetflowSettingsModel + ) + + # Backup / Restore + real_time_backup: Optional[bool] = Field(alias="realTimeBackup", description="Enable real-time backup", default=None) + scheduled_backup: Optional[bool] = Field(alias="scheduledBackup", description="Enable scheduled backup", default=None) + scheduled_backup_time: str = Field(alias="scheduledBackupTime", description="Scheduled backup time", default="") + + # VRF Lite / Sub-Interface + sub_interface_dot1q_range: str = Field(alias="subInterfaceDot1qRange", description="Sub-interface 802.1q range", default="2-511") + vrf_lite_auto_config: str = Field(alias="vrfLiteAutoConfig", description="VRF lite auto-config mode", default="manual") + vrf_lite_subnet_range: str = Field(alias="vrfLiteSubnetRange", description="VRF lite subnet range", default="10.33.0.0/16") + vrf_lite_subnet_target_mask: int = Field(alias="vrfLiteSubnetTargetMask", description="VRF lite subnet target mask", default=30) + auto_unique_vrf_lite_ip_prefix: bool = Field( + alias="autoUniqueVrfLiteIpPrefix", + description="Auto unique VRF lite IP prefix", + default=False + ) + + # Leaf / TOR + leaf_tor_id_range: bool = Field(alias="leafTorIdRange", description="Enable leaf/TOR ID range", default=False) + leaf_tor_vpc_port_channel_id_range: str = Field( + alias="leafTorVpcPortChannelIdRange", + description="Leaf/TOR vPC port-channel ID range", + default="1-499" + ) + allow_vlan_on_leaf_tor_pairing: str = Field( + alias="allowVlanOnLeafTorPairing", + description="Set trunk allowed VLAN on leaf-TOR pairing port-channels", + default="none" + ) + + # DNS / NTP / Syslog Collections + 
ntp_server_collection: List[str] = Field(default_factory=lambda: ["string"], alias="ntpServerCollection") + ntp_server_vrf_collection: List[str] = Field(default_factory=lambda: ["string"], alias="ntpServerVrfCollection") + dns_collection: List[str] = Field(default_factory=lambda: ["5.192.28.174"], alias="dnsCollection") + dns_vrf_collection: List[str] = Field(default_factory=lambda: ["string"], alias="dnsVrfCollection") + syslog_server_collection: List[str] = Field(default_factory=lambda: ["string"], alias="syslogServerCollection") + syslog_server_vrf_collection: List[str] = Field(default_factory=lambda: ["string"], alias="syslogServerVrfCollection") + syslog_severity_collection: List[int] = Field(default_factory=lambda: [7], alias="syslogSeverityCollection") + + # Extra Config / Pre-Interface Config / AAA / Banner + banner: str = Field(description="Fabric banner text", default="") + extra_config_leaf: str = Field(alias="extraConfigLeaf", description="Extra leaf config", default="") + extra_config_spine: str = Field(alias="extraConfigSpine", description="Extra spine config", default="") + extra_config_tor: str = Field(alias="extraConfigTor", description="Extra TOR config", default="") + extra_config_intra_fabric_links: str = Field( + alias="extraConfigIntraFabricLinks", + description="Extra intra-fabric links config", + default="" + ) + extra_config_aaa: str = Field(alias="extraConfigAaa", description="Extra AAA config", default="") + extra_config_nxos_bootstrap: str = Field(alias="extraConfigNxosBootstrap", description="Extra NX-OS bootstrap config", default="") + aaa: bool = Field(description="Enable AAA", default=False) + pre_interface_config_leaf: str = Field(alias="preInterfaceConfigLeaf", description="Pre-interface leaf config", default="") + pre_interface_config_spine: str = Field(alias="preInterfaceConfigSpine", description="Pre-interface spine config", default="") + pre_interface_config_tor: str = Field(alias="preInterfaceConfigTor", 
description="Pre-interface TOR config", default="") + + # System / Compliance / OAM / Misc + greenfield_debug_flag: GreenfieldDebugFlagEnum = Field( + alias="greenfieldDebugFlag", + description="Greenfield debug flag", + default=GreenfieldDebugFlagEnum.DISABLE + ) + interface_statistics_load_interval: int = Field( + alias="interfaceStatisticsLoadInterval", + description="Interface statistics load interval in seconds", + default=10 + ) + nve_hold_down_timer: int = Field(alias="nveHoldDownTimer", description="NVE source interface hold-down timer in seconds", default=180) + next_generation_oam: bool = Field(alias="nextGenerationOAM", description="Enable next-generation OAM", default=True) + ngoam_south_bound_loop_detect: bool = Field( + alias="ngoamSouthBoundLoopDetect", + description="Enable NGOAM south bound loop detection", + default=False + ) + ngoam_south_bound_loop_detect_probe_interval: int = Field( + alias="ngoamSouthBoundLoopDetectProbeInterval", + description="NGOAM south bound loop detect probe interval in seconds", + default=300 + ) + ngoam_south_bound_loop_detect_recovery_interval: int = Field( + alias="ngoamSouthBoundLoopDetectRecoveryInterval", + description="NGOAM south bound loop detect recovery interval in seconds", + default=600 + ) + strict_config_compliance_mode: bool = Field( + alias="strictConfigComplianceMode", + description="Enable strict config compliance mode", + default=False + ) + advanced_ssh_option: bool = Field(alias="advancedSshOption", description="Enable advanced SSH option", default=False) + copp_policy: CoppPolicyEnum = Field(alias="coppPolicy", description="CoPP policy", default=CoppPolicyEnum.STRICT) + power_redundancy_mode: str = Field(alias="powerRedundancyMode", description="Power redundancy mode", default="redundant") + heartbeat_interval: int = Field(alias="heartbeatInterval", description="XConnect heartbeat interval", default=190) + snmp_trap: bool = Field(alias="snmpTrap", description="Enable SNMP traps", default=True) + 
cdp: bool = Field(description="Enable CDP", default=False) + real_time_interface_statistics_collection: bool = Field( + alias="realTimeInterfaceStatisticsCollection", + description="Enable real-time interface statistics collection", + default=False + ) + tcam_allocation: bool = Field(alias="tcamAllocation", description="Enable TCAM allocation", default=True) + allow_smart_switch_onboarding: bool = Field( + alias="allowSmartSwitchOnboarding", + description="Allow smart switch onboarding", + default=False + ) + + # Queuing / QoS + default_queuing_policy: bool = Field(alias="defaultQueuingPolicy", description="Enable default queuing policy", default=False) + default_queuing_policy_cloudscale: str = Field( + alias="defaultQueuingPolicyCloudscale", + description="Default queuing policy for cloudscale switches", + default="queuing_policy_default_8q_cloudscale" + ) + default_queuing_policy_r_series: str = Field( + alias="defaultQueuingPolicyRSeries", + description="Default queuing policy for R-Series switches", + default="queuing_policy_default_r_series" + ) + default_queuing_policy_other: str = Field( + alias="defaultQueuingPolicyOther", + description="Default queuing policy for other switches", + default="queuing_policy_default_other" + ) + aiml_qos: bool = Field(alias="aimlQos", description="Enable AI/ML QoS", default=False) + aiml_qos_policy: str = Field(alias="aimlQosPolicy", description="AI/ML QoS policy", default="400G") + roce_v2: str = Field(alias="roceV2", description="RoCEv2 DSCP value", default="26") + cnp: str = Field(description="CNP DSCP value", default="48") + wred_min: int = Field(alias="wredMin", description="WRED minimum threshold in kbytes", default=950) + wred_max: int = Field(alias="wredMax", description="WRED maximum threshold in kbytes", default=3000) + wred_drop_probability: int = Field(alias="wredDropProbability", description="WRED drop probability %", default=7) + wred_weight: int = Field(alias="wredWeight", description="WRED weight", default=0) 
+ bandwidth_remaining: int = Field(alias="bandwidthRemaining", description="Bandwidth remaining % for AI traffic queues", default=50) + dlb: bool = Field(description="Enable dynamic load balancing", default=False) + dlb_mode: str = Field(alias="dlbMode", description="DLB mode", default="flowlet") + dlb_mixed_mode_default: str = Field(alias="dlbMixedModeDefault", description="DLB mixed mode default", default="ecmp") + flowlet_aging: Optional[int] = Field(alias="flowletAging", description="Flowlet aging timer in microseconds", default=None) + flowlet_dscp: str = Field(alias="flowletDscp", description="Flowlet DSCP value", default="") + per_packet_dscp: str = Field(alias="perPacketDscp", description="Per-packet DSCP value", default="") + ai_load_sharing: bool = Field(alias="aiLoadSharing", description="Enable AI load sharing", default=False) + priority_flow_control_watch_interval: Optional[int] = Field( + alias="priorityFlowControlWatchInterval", + description="Priority flow control watch interval in milliseconds", + default=None + ) + + # PTP + ptp: bool = Field(description="Enable PTP", default=False) + ptp_loopback_id: int = Field(alias="ptpLoopbackId", description="PTP loopback ID", default=0) + ptp_domain_id: int = Field(alias="ptpDomainId", description="PTP domain ID", default=0) + + # Private VLAN + private_vlan: bool = Field(alias="privateVlan", description="Enable private VLAN", default=False) + default_private_vlan_secondary_network_template: str = Field( + alias="defaultPrivateVlanSecondaryNetworkTemplate", + description="Default private VLAN secondary network template", + default="Pvlan_Secondary_Network" + ) + + # MACsec + macsec: bool = Field(description="Enable MACsec", default=False) + macsec_cipher_suite: str = Field( + alias="macsecCipherSuite", + description="MACsec cipher suite", + default="GCM-AES-XPN-256" + ) + macsec_key_string: str = Field(alias="macsecKeyString", description="MACsec primary key string", default="") + macsec_algorithm: str = 
Field(alias="macsecAlgorithm", description="MACsec primary cryptographic algorithm", default="AES_128_CMAC") + macsec_fallback_key_string: str = Field(alias="macsecFallbackKeyString", description="MACsec fallback key string", default="") + macsec_fallback_algorithm: str = Field( + alias="macsecFallbackAlgorithm", + description="MACsec fallback cryptographic algorithm", + default="AES_128_CMAC" + ) + macsec_report_timer: int = Field(alias="macsecReportTimer", description="MACsec report timer in minutes", default=5) + + # Hypershield / Connectivity + connectivity_domain_name: Optional[str] = Field( + alias="connectivityDomainName", + description="Domain name to connect to Hypershield", + default=None + ) + hypershield_connectivity_proxy_server: Optional[str] = Field( + alias="hypershieldConnectivityProxyServer", + description="IPv4 address, IPv6 address, or DNS name of the proxy server for Hypershield communication", + default=None + ) + hypershield_connectivity_proxy_server_port: Optional[int] = Field( + alias="hypershieldConnectivityProxyServerPort", + description="Proxy port number for communication with Hypershield", + default=None + ) + hypershield_connectivity_source_intf: Optional[str] = Field( + alias="hypershieldConnectivitySourceIntf", + description="Loopback interface on smart switch for communication with Hypershield", + default=None + ) + + @field_validator("bgp_asn") + @classmethod + def validate_bgp_asn(cls, value: Optional[str]) -> Optional[str]: + """ + # Summary + + Validate BGP ASN format and range when provided. + + ## Raises + + - `ValueError` - If value does not match the expected ASN format + """ + if value is None: + return value + if not _BGP_ASN_RE.match(value): + raise ValueError( + f"Invalid BGP ASN '{value}'. " + "Expected a plain integer (1-4294967295) or dotted notation (1-65535.0-65535)." 
+ ) + return value + + @field_validator("site_id") + @classmethod + def validate_site_id(cls, value: str) -> str: + """ + # Summary + + Validate site ID format. + + ## Raises + + - `ValueError` - If site ID is not numeric or outside valid range + """ + if value == "": + return value + if not value.isdigit(): + raise ValueError(f"Site ID must be numeric, got: {value}") + site_id_int = int(value) + if not (1 <= site_id_int <= 281474976710655): + raise ValueError(f"Site ID must be between 1 and 281474976710655, got: {site_id_int}") + return value + + @field_validator("anycast_gateway_mac") + @classmethod + def validate_mac_address(cls, value: str) -> str: + """ + # Summary + + Validate MAC address format. + + ## Raises + + - `ValueError` - If MAC address format is invalid + """ + mac_pattern = re.compile(r'^([0-9a-fA-F]{4}\.){2}[0-9a-fA-F]{4}$') + if not mac_pattern.match(value): + raise ValueError(f"Invalid MAC address format, expected xxxx.xxxx.xxxx, got: {value}") + return value.lower() + + +class FabricEbgpModel(NDBaseModel): + """ + # Summary + + Complete model for creating a new eBGP VXLAN fabric. 
+ + ## Raises + + - `ValueError` - If required fields are missing or invalid + - `TypeError` - If field types don't match expected types + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + identifiers: ClassVar[Optional[List[str]]] = ["name"] + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" + + # Basic Fabric Properties + category: Literal["fabric"] = Field(description="Resource category", default="fabric") + name: str = Field(description="Fabric name", min_length=1, max_length=64) + location: Optional[LocationModel] = Field(description="Geographic location of the fabric", default=None) + + # License and Operations + license_tier: LicenseTierEnum = Field(alias="licenseTier", description="License tier", default=LicenseTierEnum.PREMIER) + alert_suspend: AlertSuspendEnum = Field(alias="alertSuspend", description="Alert suspension state", default=AlertSuspendEnum.DISABLED) + telemetry_collection: bool = Field(alias="telemetryCollection", description="Enable telemetry collection", default=False) + telemetry_collection_type: str = Field(alias="telemetryCollectionType", description="Telemetry collection type", default="outOfBand") + telemetry_streaming_protocol: str = Field(alias="telemetryStreamingProtocol", description="Telemetry streaming protocol", default="ipv4") + telemetry_source_interface: str = Field(alias="telemetrySourceInterface", description="Telemetry source interface", default="") + telemetry_source_vrf: str = Field(alias="telemetrySourceVrf", description="Telemetry source VRF", default="") + security_domain: str = Field(alias="securityDomain", description="Security domain", default="all") + + # Core Management Configuration + management: Optional[VxlanEbgpManagementModel] = Field(description="eBGP VXLAN management configuration", default=None) + + # Optional Advanced Settings + telemetry_settings: 
Optional[TelemetrySettingsModel] = Field( + alias="telemetrySettings", + description="Telemetry configuration", + default=None + ) + external_streaming_settings: ExternalStreamingSettingsModel = Field( + alias="externalStreamingSettings", + description="External streaming settings", + default_factory=ExternalStreamingSettingsModel + ) + + @field_validator("name") + @classmethod + def validate_fabric_name(cls, value: str) -> str: + """ + # Summary + + Validate fabric name format and characters. + + ## Raises + + - `ValueError` - If name contains invalid characters or format + """ + if not re.match(r'^[a-zA-Z0-9_-]+$', value): + raise ValueError(f"Fabric name can only contain letters, numbers, underscores, and hyphens, got: {value}") + return value + + @model_validator(mode='after') + def validate_fabric_consistency(self) -> 'FabricEbgpModel': + """ + # Summary + + Validate consistency between fabric settings and management configuration. + + ## Raises + + - `ValueError` - If fabric settings are inconsistent + """ + if self.management is not None and self.management.type != FabricTypeEnum.VXLAN_EBGP: + raise ValueError(f"Management type must be {FabricTypeEnum.VXLAN_EBGP}") + + # Propagate fabric name to management model + if self.management is not None: + self.management.name = self.name + + # Propagate BGP ASN to site_id if both are set and site_id is empty + if self.management is not None and self.management.site_id == "" and self.management.bgp_asn is not None: + bgp_asn = self.management.bgp_asn + if "." 
in bgp_asn: + high, low = bgp_asn.split(".") + self.management.site_id = str(int(high) * 65536 + int(low)) + else: + self.management.site_id = bgp_asn + + # Auto-create default telemetry settings if collection is enabled + if self.telemetry_collection and self.telemetry_settings is None: + self.telemetry_settings = TelemetrySettingsModel() + + return self + + @classmethod + def get_argument_spec(cls) -> Dict: + return dict( + state={ + "type": "str", + "default": "merged", + "choices": ["merged", "replaced", "deleted", "overridden", "query"], + }, + config={"required": False, "type": "list", "elements": "dict"}, + ) + + +# Export all models for external use +__all__ = [ + "VxlanEbgpManagementModel", + "FabricEbgpModel", + "FabricEbgpDeleteModel", + "FabricTypeEnum", + "AlertSuspendEnum", + "LicenseTierEnum", + "ReplicationModeEnum", + "OverlayModeEnum", + "BgpAsModeEnum", + "FirstHopRedundancyProtocolEnum", + "VpcPeerKeepAliveOptionEnum", + "CoppPolicyEnum", + "GreenfieldDebugFlagEnum", +] diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_external.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_external.py new file mode 100644 index 00000000..b8e41ddc --- /dev/null +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_external.py @@ -0,0 +1,522 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +# pylint: disable=invalid-name +__metaclass__ = type +# pylint: enable=invalid-name + +import re +from typing import List, Dict, Any, Optional, ClassVar, Literal + +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nested import NDNestedModel +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( + BaseModel, 
+ ConfigDict, + Field, + field_validator, + model_validator, +) +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.enums import ( + FabricTypeEnum, + AlertSuspendEnum, + LicenseTierEnum, + CoppPolicyEnum, + DhcpProtocolVersionEnum, + PowerRedundancyModeEnum, +) +# Re-use shared nested models from the iBGP module +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import ( + LocationModel, + NetflowSettingsModel, + BootstrapSubnetModel, + TelemetrySettingsModel, + ExternalStreamingSettingsModel, +) + + +""" +# Comprehensive Pydantic models for External VXLAN fabric management via Nexus Dashboard + +This module provides Pydantic models for creating, updating, and deleting +External VXLAN fabrics (border/edge router fabrics) through the Nexus Dashboard +Fabric Controller (NDFC) API. + +## Models Overview + +- `VxlanExternalManagementModel` - External VXLAN fabric-specific management settings +- `FabricExternalModel` - Complete fabric creation model for External fabrics +- `FabricExternalDeleteModel` - Fabric deletion model + +## Usage + +```python +# Create a new External VXLAN fabric +fabric_data = { + "name": "MyExternalFabric", + "management": { + "type": "vxlanExternal", + "bgpAsn": "65001", + } +} +fabric = FabricExternalModel(**fabric_data) +``` +""" + +# Regex from OpenAPI schema: bgpAsn accepts plain integers (1-4294967295) and +# dotted four-byte ASN notation (1-65535).(0-65535) +_BGP_ASN_RE = re.compile( + r"^(([1-9]{1}[0-9]{0,8}|[1-3]{1}[0-9]{1,9}|[4]{1}([0-1]{1}[0-9]{8}|[2]{1}([0-8]{1}[0-9]{7}|[9]{1}([0-3]{1}[0-9]{6}|[4]{1}([0-8]{1}[0-9]{5}|[9]{1}([0-5]{1}[0-9]{4}|[6]{1}([0-6]{1}[0-9]{3}|[7]{1}([0-1]{1}[0-9]{2}|[2]{1}([0-8]{1}[0-9]{1}|[9]{1}[0-5]{1})))))))))|([1-5]\d{4}|[1-9]\d{0,3}|6[0-4]\d{3}|65[0-4]\d{2}|655[0-2]\d|6553[0-5])(\.([1-5]\d{4}|[1-9]\d{0,3}|6[0-4]\d{3}|65[0-4]\d{2}|655[0-2]\d|6553[0-5]|0))?)$" +) + + +class VxlanExternalManagementModel(NDNestedModel): + """ + # 
Summary + + Comprehensive External VXLAN fabric management configuration. + + This model contains all settings for External VXLAN fabric types, used for + border/edge router fabrics that connect to external networks. + + ## Raises + + - `ValueError` - If BGP ASN or other field validations fail + - `TypeError` - If required string fields are not provided + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + # Fabric Type (required for discriminated union) + type: Literal[FabricTypeEnum.VXLAN_EXTERNAL] = Field( + description="Fabric management type", + default=FabricTypeEnum.VXLAN_EXTERNAL + ) + + # Propagated from FabricExternalModel + name: Optional[str] = Field(description="Fabric name", min_length=1, max_length=64, default="") + + # Core BGP Configuration + bgp_asn: str = Field( + alias="bgpAsn", + description="Autonomous system number 1-4294967295 | 1-65535[.0-65535]", + ) + create_bgp_config: bool = Field( + alias="createBgpConfig", + description="Generate BGP configuration for core and edge routers", + default=True + ) + + # Fabric Behavior + monitored_mode: bool = Field( + alias="monitoredMode", + description="If enabled, fabric is only monitored. 
No configuration will be deployed", + default=False + ) + mpls_handoff: bool = Field( + alias="mplsHandoff", + description="Enable MPLS Handoff", + default=False + ) + mpls_loopback_identifier: Optional[int] = Field( + alias="mplsLoopbackIdentifier", + description="Underlay MPLS Loopback Identifier", + default=None + ) + mpls_loopback_ip_range: str = Field( + alias="mplsLoopbackIpRange", + description="MPLS Loopback IP Address Range", + default="10.102.0.0/25" + ) + sub_interface_dot1q_range: str = Field( + alias="subInterfaceDot1qRange", + description="Per aggregation dot1q range for VRF-Lite connectivity (minimum: 2, maximum: 4093)", + default="2-511" + ) + inband_management: bool = Field( + alias="inbandManagement", + description="Import switches with reachability over the switch front-panel ports", + default=False + ) + allow_same_loopback_ip_on_switches: bool = Field( + alias="allowSameLoopbackIpOnSwitches", + description="Allow the same loopback IP address to be configured on multiple switches", + default=False + ) + allow_smart_switch_onboarding: bool = Field( + alias="allowSmartSwitchOnboarding", + description="Enable onboarding of smart switches to Hypershield for firewall service", + default=False + ) + + # Bootstrap / Day-0 / DHCP + day0_bootstrap: bool = Field( + alias="day0Bootstrap", + description="Support day 0 touchless switch bringup", + default=False + ) + day0_plug_and_play: bool = Field( + alias="day0PlugAndPlay", + description="Enable Plug n Play for Catalyst 9000 switches", + default=False + ) + inband_day0_bootstrap: bool = Field( + alias="inbandDay0Bootstrap", + description="Support day 0 touchless switch bringup via inband management", + default=False + ) + bootstrap_subnet_collection: List[BootstrapSubnetModel] = Field( + alias="bootstrapSubnetCollection", + description="List of IPv4 or IPv6 subnets to be used for bootstrap", + default_factory=list + ) + local_dhcp_server: bool = Field( + alias="localDhcpServer", + description="Automatic 
IP Assignment For POAP from Local DHCP Server", + default=False + ) + dhcp_protocol_version: DhcpProtocolVersionEnum = Field( + alias="dhcpProtocolVersion", + description="IP protocol version for Local DHCP Server", + default=DhcpProtocolVersionEnum.DHCPV4 + ) + dhcp_start_address: str = Field( + alias="dhcpStartAddress", + description="DHCP Scope Start Address For Switch POAP", + default="" + ) + dhcp_end_address: str = Field( + alias="dhcpEndAddress", + description="DHCP Scope End Address For Switch POAP", + default="" + ) + domain_name: str = Field( + alias="domainName", + description="Domain name for DHCP server PnP block", + default="" + ) + management_gateway: str = Field( + alias="managementGateway", + description="Default Gateway For Management VRF On The Switch", + default="" + ) + management_ipv4_prefix: int = Field( + alias="managementIpv4Prefix", + description="Switch Mgmt IP Subnet Prefix if ipv4", + default=24 + ) + management_ipv6_prefix: int = Field( + alias="managementIpv6Prefix", + description="Switch Management IP Subnet Prefix if ipv6", + default=64 + ) + + # DNS Collections + dns_collection: List[str] = Field( + alias="dnsCollection", + description="List of IPv4 and IPv6 DNS addresses", + default_factory=list + ) + dns_vrf_collection: List[str] = Field( + alias="dnsVrfCollection", + description="DNS Server VRFs. One VRF for all DNS servers or a list of VRFs, one per DNS server", + default_factory=list + ) + + # Extra Configuration + extra_config_aaa: str = Field( + alias="extraConfigAaa", + description="Additional CLIs for AAA Configuration", + default="" + ) + extra_config_fabric: str = Field( + alias="extraConfigFabric", + description="Additional CLIs for all switches", + default="" + ) + extra_config_nxos_bootstrap: str = Field( + alias="extraConfigNxosBootstrap", + description="Additional CLIs required during device bootup/login e.g. 
AAA/Radius (NX-OS)", + default="" + ) + extra_config_xe_bootstrap: str = Field( + alias="extraConfigXeBootstrap", + description="Additional CLIs required during device bootup/login e.g. AAA/Radius (IOS-XE)", + default="" + ) + + # Management Protocol Settings + nxapi: bool = Field( + description="Enable NX-API over HTTPS", + default=False + ) + nxapi_http: bool = Field( + alias="nxapiHttp", + description="Enable NX-API over HTTP", + default=False + ) + nxapi_http_port: int = Field( + alias="nxapiHttpPort", + description="HTTP port for NX-API", + default=80 + ) + nxapi_https_port: int = Field( + alias="nxapiHttpsPort", + description="HTTPS port for NX-API", + default=443 + ) + cdp: bool = Field( + description="Enable CDP on management interface", + default=False + ) + aaa: bool = Field( + description="Include AAA configs from Advanced tab during device bootup", + default=False + ) + advanced_ssh_option: bool = Field( + alias="advancedSshOption", + description="Enable only, when IP Authorization is enabled in the AAA Server", + default=False + ) + snmp_trap: bool = Field( + alias="snmpTrap", + description="Configure Nexus Dashboard as a receiver for SNMP traps", + default=True + ) + copp_policy: CoppPolicyEnum = Field( + alias="coppPolicy", + description="Fabric wide CoPP policy. 
Customized CoPP policy should be provided when 'manual'", + default=CoppPolicyEnum.MANUAL + ) + power_redundancy_mode: PowerRedundancyModeEnum = Field( + alias="powerRedundancyMode", + description="Default Power Supply Mode for NX-OS Switches", + default=PowerRedundancyModeEnum.REDUNDANT + ) + interface_statistics_load_interval: int = Field( + alias="interfaceStatisticsLoadInterval", + description="Interface Statistics Load Interval Time in seconds", + default=10 + ) + performance_monitoring: bool = Field( + alias="performanceMonitoring", + description="If enabled, switch metrics are collected through periodic SNMP polling", + default=False + ) + real_time_interface_statistics_collection: bool = Field( + alias="realTimeInterfaceStatisticsCollection", + description="Enable Real Time Interface Statistics Collection. Valid for NX-OS only", + default=False + ) + + # PTP Settings + ptp: bool = Field( + description="Enable Precision Time Protocol (PTP)", + default=False + ) + ptp_domain_id: int = Field( + alias="ptpDomainId", + description="Multiple Independent PTP Clocking Subdomains on a Single Network", + default=0 + ) + ptp_loopback_id: int = Field( + alias="ptpLoopbackId", + description="Precision Time Protocol Source Loopback Id", + default=0 + ) + + # Netflow Settings + netflow_settings: NetflowSettingsModel = Field( + alias="netflowSettings", + description="Settings associated with netflow", + default_factory=NetflowSettingsModel + ) + + # Backup / Restore + real_time_backup: Optional[bool] = Field( + alias="realTimeBackup", + description="Hourly Fabric Backup only if there is any config deployment since last backup", + default=None + ) + scheduled_backup: Optional[bool] = Field( + alias="scheduledBackup", + description="Enable backup at the specified time daily", + default=None + ) + scheduled_backup_time: str = Field( + alias="scheduledBackupTime", + description="Time (UTC) in 24 hour format to take a daily backup if enabled (00:00 to 23:59)", + default="" + ) 
+ + # Hypershield / Connectivity + connectivity_domain_name: Optional[str] = Field( + alias="connectivityDomainName", + description="Domain name to connect to Hypershield", + default=None + ) + hypershield_connectivity_proxy_server: Optional[str] = Field( + alias="hypershieldConnectivityProxyServer", + description="IPv4 address, IPv6 address, or DNS name of the proxy server for Hypershield communication", + default=None + ) + hypershield_connectivity_proxy_server_port: Optional[int] = Field( + alias="hypershieldConnectivityProxyServerPort", + description="Proxy port number for communication with Hypershield", + default=None + ) + hypershield_connectivity_source_intf: Optional[str] = Field( + alias="hypershieldConnectivitySourceIntf", + description="Loopback interface on smart switch for communication with Hypershield", + default=None + ) + + @field_validator("bgp_asn") + @classmethod + def validate_bgp_asn(cls, value: str) -> str: + """ + # Summary + + Validate BGP ASN format and range. + + ## Raises + + - `ValueError` - If value does not match the expected ASN format + """ + if not _BGP_ASN_RE.match(value): + raise ValueError( + f"Invalid BGP ASN '{value}'. " + "Expected a plain integer (1-4294967295) or dotted notation (1-65535.0-65535)." + ) + return value + + +class FabricExternalModel(NDBaseModel): + """ + # Summary + + Complete model for creating a new External VXLAN fabric. 
+ + ## Raises + + - `ValueError` - If required fields are missing or invalid + - `TypeError` - If field types don't match expected types + """ + + model_config = ConfigDict( + str_strip_whitespace=True, + validate_assignment=True, + populate_by_name=True, + extra="allow" + ) + + identifiers: ClassVar[Optional[List[str]]] = ["name"] + identifier_strategy: ClassVar[Optional[Literal["single", "composite", "hierarchical", "singleton"]]] = "single" + + # Basic Fabric Properties + category: Literal["fabric"] = Field(description="Resource category", default="fabric") + name: str = Field(description="Fabric name", min_length=1, max_length=64) + location: Optional[LocationModel] = Field(description="Geographic location of the fabric", default=None) + + # License and Operations + license_tier: LicenseTierEnum = Field(alias="licenseTier", description="License tier", default=LicenseTierEnum.PREMIER) + alert_suspend: AlertSuspendEnum = Field(alias="alertSuspend", description="Alert suspension state", default=AlertSuspendEnum.DISABLED) + telemetry_collection: bool = Field(alias="telemetryCollection", description="Enable telemetry collection", default=False) + telemetry_collection_type: str = Field(alias="telemetryCollectionType", description="Telemetry collection type", default="outOfBand") + telemetry_streaming_protocol: str = Field(alias="telemetryStreamingProtocol", description="Telemetry streaming protocol", default="ipv4") + telemetry_source_interface: str = Field(alias="telemetrySourceInterface", description="Telemetry source interface", default="") + telemetry_source_vrf: str = Field(alias="telemetrySourceVrf", description="Telemetry source VRF", default="") + security_domain: str = Field(alias="securityDomain", description="Security domain", default="all") + + # Core Management Configuration + management: Optional[VxlanExternalManagementModel] = Field( + description="External VXLAN management configuration", + default=None + ) + + # Optional Advanced Settings + 
telemetry_settings: Optional[TelemetrySettingsModel] = Field( + alias="telemetrySettings", + description="Telemetry configuration", + default=None + ) + external_streaming_settings: ExternalStreamingSettingsModel = Field( + alias="externalStreamingSettings", + description="External streaming settings", + default_factory=ExternalStreamingSettingsModel + ) + + @field_validator("name") + @classmethod + def validate_fabric_name(cls, value: str) -> str: + """ + # Summary + + Validate fabric name format and characters. + + ## Raises + + - `ValueError` - If name contains invalid characters or format + """ + if not re.match(r'^[a-zA-Z0-9_-]+$', value): + raise ValueError(f"Fabric name can only contain letters, numbers, underscores, and hyphens, got: {value}") + return value + + @model_validator(mode='after') + def validate_fabric_consistency(self) -> 'FabricExternalModel': + """ + # Summary + + Validate consistency between fabric settings and management configuration. + + ## Raises + + - `ValueError` - If fabric settings are inconsistent + """ + if self.management is not None and self.management.type != FabricTypeEnum.VXLAN_EXTERNAL: + raise ValueError(f"Management type must be {FabricTypeEnum.VXLAN_EXTERNAL}") + + # Propagate fabric name to management model + if self.management is not None: + self.management.name = self.name + + # Auto-create default telemetry settings if collection is enabled + if self.telemetry_collection and self.telemetry_settings is None: + self.telemetry_settings = TelemetrySettingsModel() + + return self + + @classmethod + def get_argument_spec(cls) -> Dict: + return dict( + state={ + "type": "str", + "default": "merged", + "choices": ["merged", "replaced", "deleted", "overridden", "query"], + }, + config={"required": False, "type": "list", "elements": "dict"}, + ) + + +# Export all models for external use +__all__ = [ + "VxlanExternalManagementModel", + "FabricExternalModel", + "FabricExternalDeleteModel", + "FabricTypeEnum", + "AlertSuspendEnum", 
+ "LicenseTierEnum", + "CoppPolicyEnum", + "DhcpProtocolVersionEnum", + "PowerRedundancyModeEnum", +] diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py index 7a43ef90..7387e5f9 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py @@ -55,7 +55,6 @@ - `NetflowMonitorModel` - Netflow monitor configuration - `NetflowSettingsModel` - Complete netflow settings - `BootstrapSubnetModel` - Bootstrap subnet configuration -- `FabricDesignSettingsModel` - Fabric designer settings - `TelemetryFlowCollectionModel` - Telemetry flow collection settings - `TelemetrySettingsModel` - Complete telemetry configuration - `ExternalStreamingSettingsModel` - External streaming configuration @@ -249,66 +248,6 @@ class BootstrapSubnetModel(NDNestedModel): subnet_prefix: int = Field(alias="subnetPrefix", description="Subnet prefix length", ge=8, le=30) -class FabricDesignSettingsModel(NDNestedModel): - """ - # Summary - - Fabric designer settings for automated fabric deployment. 
- - ## Raises - - - `ValueError` - If leaf/spine/border counts are invalid - """ - - model_config = ConfigDict( - str_strip_whitespace=True, - validate_assignment=True, - populate_by_name=True, - extra="allow" - ) - - link_capacity: str = Field(alias="linkCapacity", description="Link capacity (e.g., '400Gb')", default="400Gb") - leaf_count: int = Field(alias="leafCount", description="Number of leaf switches", ge=1, le=128) - leaf_model: str = Field(alias="leafModel", description="Leaf switch model") - spine_count: int = Field(alias="spineCount", description="Number of spine switches", ge=1, le=32) - spine_model: str = Field(alias="spineModel", description="Spine switch model") - border_count: int = Field(alias="borderCount", description="Number of border switches", ge=0, le=32, default=0) - border_model: Optional[str] = Field(alias="borderModel", description="Border switch model", default=None) - leaf_vpc_pair_policy: str = Field( - alias="leafVpcPairPolicy", - description="Leaf vPC pairing policy", - default="pairWithPhysicalPeerLink" - ) - border_vpc_pair_policy: str = Field( - alias="borderVpcPairPolicy", - description="Border vPC pairing policy", - default="pairWithPhysicalPeerLink" - ) - designer_management_ip_pool: str = Field( - alias="designerManagementIPPool", - description="Management IP pool for designer" - ) - designer_management_gateway: str = Field( - alias="designerManagementGateway", - description="Management gateway for designer" - ) - spine_to_leaf_distance: int = Field( - alias="spineToLeafDistance", - description="Cable distance from spine to leaf", - ge=1, - le=100, - default=20 - ) - airflow_direction: str = Field(alias="airflowDirection", description="Airflow direction", default="frontToBack") - breakout_spine_interfaces: bool = Field( - alias="breakoutSpineInterfaces", - description="Enable spine interface breakout", - default=False - ) - cabling_type: str = Field(alias="cablingType", description="Cabling type", default="fiber") - 
designer_password: Optional[str] = Field(alias="designerPassword", description="Designer password", default=None) - - class TelemetryFlowCollectionModel(NDNestedModel): """ # Summary @@ -552,15 +491,15 @@ class VxlanIbgpManagementModel(NDNestedModel): bgp_asn: str = Field(alias="bgpAsn", description="BGP Autonomous System Number 1-4294967295 | 1-65535[.0-65535]") site_id: Optional[str] = Field(alias="siteId", description="Site identifier for the fabric", default="") - # Missing Fields + # Name under management section is optional for backward compatibility, but if provided must be non-empty string name: Optional[str] = Field(description="Fabric name", min_length=1, max_length=64, default="") - border_count: Optional[int] = Field(alias="borderCount", description="Number of border switches", ge=0, le=32, default=0) - breakout_spine_interfaces: Optional[bool] = Field(alias="breakoutSpineInterfaces", description="Enable breakout spine interfaces", default=False) - designer_use_robot_password: Optional[bool] = Field(alias="designerUseRobotPassword", description="Use robot password for designer", default=False) - leaf_count: Optional[int] = Field(alias="leafCount", description="Number of leaf switches", ge=1, le=128, default=1) - spine_count: Optional[int] = Field(alias="spineCount", description="Number of spine switches", ge=1, le=32, default=1) - vrf_lite_ipv6_subnet_range: Optional[str] = Field(alias="vrfLiteIpv6SubnetRange", description="VRF Lite IPv6 subnet range", default="fd00::a33:0/112") - vrf_lite_ipv6_subnet_target_mask: Optional[int] = Field(alias="vrfLiteIpv6SubnetTargetMask", description="VRF Lite IPv6 subnet target mask", ge=112, le=128, default=126) + # border_count: Optional[int] = Field(alias="borderCount", description="Number of border switches", ge=0, le=32, default=0) + # breakout_spine_interfaces: Optional[bool] = Field(alias="breakoutSpineInterfaces", description="Enable breakout spine interfaces", default=False) + # designer_use_robot_password: 
Optional[bool] = Field(alias="designerUseRobotPassword", description="Use robot password for designer", default=False) + # leaf_count: Optional[int] = Field(alias="leafCount", description="Number of leaf switches", ge=1, le=128, default=1) + # spine_count: Optional[int] = Field(alias="spineCount", description="Number of spine switches", ge=1, le=32, default=1) + # vrf_lite_ipv6_subnet_range: Optional[str] = Field(alias="vrfLiteIpv6SubnetRange", description="VRF Lite IPv6 subnet range", default="fd00::a33:0/112") + # vrf_lite_ipv6_subnet_target_mask: Optional[int] = Field(alias="vrfLiteIpv6SubnetTargetMask", description="VRF Lite IPv6 subnet target mask", ge=112, le=128, default=126) # Network Addressing @@ -1352,47 +1291,6 @@ def get_argument_spec(cls) -> Dict: ) -class FabricDeleteModel(BaseModel): - """ - # Summary - - Model for deleting an iBGP VXLAN fabric. - - Only requires the fabric name for identification. - - ## Raises - - - `ValueError` - If fabric name is invalid - """ - - model_config = ConfigDict( - str_strip_whitespace=True, - validate_assignment=True, - populate_by_name=True, - extra="allow" - ) - - name: str = Field(description="Name of the fabric to delete", min_length=1, max_length=64) - - @field_validator("name") - @classmethod - def validate_fabric_name(cls, value: str) -> str: - """ - # Summary - - Validate fabric name format for deletion. 
- - ## Raises - - - `ValueError` - If name format is invalid - """ - if not re.match(r'^[a-zA-Z0-9_-]+$', value): - raise ValueError(f"Fabric name can only contain letters, numbers, underscores, and hyphens, got: {value}") - - return value - - - # Export all models for external use __all__ = [ "LocationModel", @@ -1401,7 +1299,6 @@ def validate_fabric_name(cls, value: str) -> str: "NetflowMonitorModel", "NetflowSettingsModel", "BootstrapSubnetModel", - "FabricDesignSettingsModel", "TelemetryFlowCollectionModel", "TelemetryMicroburstModel", "TelemetryAnalysisSettingsModel", From 255507d8cf00552d95e4fdb823e614ac069b2384 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Mon, 16 Mar 2026 12:22:37 -0400 Subject: [PATCH 109/131] New ebgp fabric module --- .../nd_manage_fabric/manage_fabric_ebgp.py | 9 + .../nd_manage_fabric/manage_fabric_ibgp.py | 2 +- .../orchestrators/manage_fabric.py | 35 +- plugins/modules/nd_manage_fabric_ebgp.py | 1176 ++++++++++++++++ plugins/modules/nd_manage_fabric_ibgp.py | 8 +- .../nd_manage_fabric/tasks/fabric_ebgp.yaml | 1209 +++++++++++++++++ .../targets/nd_manage_fabric/tasks/main.yaml | 3 + .../targets/nd_manage_fabric/vars/main.yaml | 91 +- 8 files changed, 2520 insertions(+), 13 deletions(-) create mode 100644 plugins/modules/nd_manage_fabric_ebgp.py create mode 100644 tests/integration/targets/nd_manage_fabric/tasks/fabric_ebgp.yaml diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py index 7098f656..be3abcc1 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py @@ -799,6 +799,15 @@ def validate_fabric_consistency(self) -> 'FabricEbgpModel': return self + def to_diff_dict(self, **kwargs) -> Dict[str, Any]: + """Export for diff comparison, excluding fields that ND overrides for eBGP fabrics.""" + d = super().to_diff_dict(**kwargs) + # ND always 
returns nxapiHttp=True for eBGP fabrics regardless of the configured value, + # so exclude it from diff comparison to prevent a persistent false-positive diff. + if "management" in d: + d["management"].pop("nxapiHttp", None) + return d + @classmethod def get_argument_spec(cls) -> Dict: return dict( diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py index 7387e5f9..703f9fd7 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py +++ b/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py @@ -1169,7 +1169,7 @@ def validate_mac_address(cls, value: str) -> str: return value.lower() -class FabricModel(NDBaseModel): +class FabricIbgpModel(NDBaseModel): """ # Summary diff --git a/plugins/module_utils/orchestrators/manage_fabric.py b/plugins/module_utils/orchestrators/manage_fabric.py index 5a57cb0c..d936353c 100644 --- a/plugins/module_utils/orchestrators/manage_fabric.py +++ b/plugins/module_utils/orchestrators/manage_fabric.py @@ -11,7 +11,8 @@ from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricIbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ebgp import FabricEbgpModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage.manage_fabrics import ( @@ -23,8 +24,8 @@ ) -class 
ManageFabricOrchestrator(NDBaseOrchestrator): - model_class: Type[NDBaseModel] = FabricModel +class ManageIbgpFabricOrchestrator(NDBaseOrchestrator): + model_class: Type[NDBaseModel] = FabricIbgpModel create_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPost update_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPut @@ -34,11 +35,35 @@ class ManageFabricOrchestrator(NDBaseOrchestrator): def query_all(self) -> ResponseType: """ - Custom query_all action to extract 'fabrics' from response. + Custom query_all action to extract 'fabrics' from response, + filtered to only vxlanIbgp fabric types. """ try: api_endpoint = self.query_all_endpoint() result = self.sender.query_obj(api_endpoint.path) - return result.get("fabrics", []) or [] + fabrics = result.get("fabrics", []) or [] + return [f for f in fabrics if f.get("management", {}).get("type") == "vxlanIbgp"] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e + +class ManageEbgpFabricOrchestrator(NDBaseOrchestrator): + model_class: Type[NDBaseModel] = FabricEbgpModel + + create_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPost + update_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPut + delete_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsDelete + query_one_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsGet + query_all_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsListGet + + def query_all(self) -> ResponseType: + """ + Custom query_all action to extract 'fabrics' from response, + filtered to only vxlanEbgp fabric types. 
+ """ + try: + api_endpoint = self.query_all_endpoint() + result = self.sender.query_obj(api_endpoint.path) + fabrics = result.get("fabrics", []) or [] + return [f for f in fabrics if f.get("management", {}).get("type") == "vxlanEbgp"] except Exception as e: raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/modules/nd_manage_fabric_ebgp.py b/plugins/modules/nd_manage_fabric_ebgp.py new file mode 100644 index 00000000..adc3e122 --- /dev/null +++ b/plugins/modules/nd_manage_fabric_ebgp.py @@ -0,0 +1,1176 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"} + +DOCUMENTATION = r""" +--- +module: nd_manage_fabric_ebgp +version_added: "1.4.0" +short_description: Manage eBGP VXLAN fabrics on Cisco Nexus Dashboard +description: +- Manage eBGP VXLAN fabrics on Cisco Nexus Dashboard (ND). +- It supports creating, updating, replacing, and deleting eBGP VXLAN fabrics. +author: +- Mike Wiebe (@mwiebe) +options: + config: + description: + - The list of eBGP VXLAN fabrics to configure. + type: list + elements: dict + suboptions: + name: + description: + - The name of the fabric. + - Only letters, numbers, underscores, and hyphens are allowed. + - The O(config.name) must be defined when creating, updating or deleting a fabric. + type: str + required: true + category: + description: + - The resource category. + type: str + default: fabric + location: + description: + - The geographic location of the fabric. + type: dict + suboptions: + latitude: + description: + - Latitude coordinate of the fabric location (-90 to 90). 
+ type: float + required: true + longitude: + description: + - Longitude coordinate of the fabric location (-180 to 180). + type: float + required: true + license_tier: + description: + - The license tier for the fabric. + type: str + default: premier + choices: [ essentials, premier ] + alert_suspend: + description: + - The alert suspension state for the fabric. + type: str + default: disabled + choices: [ enabled, disabled ] + telemetry_collection: + description: + - Enable telemetry collection for the fabric. + type: bool + default: false + telemetry_collection_type: + description: + - The telemetry collection type. + type: str + default: outOfBand + telemetry_streaming_protocol: + description: + - The telemetry streaming protocol. + type: str + default: ipv4 + telemetry_source_interface: + description: + - The telemetry source interface. + type: str + default: "" + telemetry_source_vrf: + description: + - The telemetry source VRF. + type: str + default: "" + security_domain: + description: + - The security domain associated with the fabric. + type: str + default: all + management: + description: + - The eBGP VXLAN management configuration for the fabric. + type: dict + suboptions: + type: + description: + - The fabric management type. Must be C(vxlanEbgp) for eBGP VXLAN fabrics. + type: str + default: vxlanEbgp + choices: [ vxlanEbgp ] + bgp_asn: + description: + - The BGP Autonomous System Number for the fabric. + - Must be a numeric value between 1 and 4294967295, or dotted notation (1-65535.0-65535). + - Optional when O(config.management.bgp_asn_auto_allocation) is C(true). + type: str + bgp_asn_auto_allocation: + description: + - Enable automatic BGP ASN allocation from the O(config.management.bgp_asn_range) pool. + type: bool + default: true + bgp_asn_range: + description: + - The BGP ASN range to use for automatic ASN allocation (e.g. C(65000-65535)). + - Required when O(config.management.bgp_asn_auto_allocation) is C(true). 
+ type: str + bgp_as_mode: + description: + - The BGP AS mode for the fabric. + - C(multiAS) assigns a unique AS number to each leaf tier. + - C(sameTierAS) assigns the same AS number within a tier. + type: str + default: multiAS + choices: [ multiAS, sameTierAS ] + bgp_allow_as_in_num: + description: + - The number of times BGP allows an AS-path containing the local AS number. + type: int + default: 1 + bgp_max_path: + description: + - The maximum number of BGP equal-cost paths. + type: int + default: 4 + bgp_underlay_failure_protect: + description: + - Enable BGP underlay failure protection. + type: bool + default: false + auto_configure_ebgp_evpn_peering: + description: + - Automatically configure eBGP EVPN peering between spine and leaf switches. + type: bool + default: true + allow_leaf_same_as: + description: + - Allow leaf switches to share the same BGP AS number. + type: bool + default: false + assign_ipv4_to_loopback0: + description: + - Assign an IPv4 address to the loopback0 interface. + type: bool + default: true + evpn: + description: + - Enable the EVPN control plane. + type: bool + default: true + route_map_tag: + description: + - The route map tag used for redistribution. + type: int + default: 12345 + disable_route_map_tag: + description: + - Disable route map tag usage. + type: bool + default: false + leaf_bgp_as: + description: + - The BGP AS number for leaf switches (used with C(sameTierAS) mode). + type: str + border_bgp_as: + description: + - The BGP AS number for border switches. + type: str + super_spine_bgp_as: + description: + - The BGP AS number for super-spine switches. + type: str + site_id: + description: + - The site identifier for the fabric. + - Defaults to the value of O(config.management.bgp_asn) if not provided. + type: str + default: "" + target_subnet_mask: + description: + - The target subnet mask for intra-fabric links (24-31). 
+ type: int + default: 30 + anycast_gateway_mac: + description: + - The anycast gateway MAC address in xxxx.xxxx.xxxx format. + type: str + default: 2020.0000.00aa + replication_mode: + description: + - The multicast replication mode. + type: str + default: multicast + choices: [ multicast, ingress ] + multicast_group_subnet: + description: + - The multicast group subnet. + type: str + default: "239.1.1.0/25" + auto_generate_multicast_group_address: + description: + - Automatically generate multicast group addresses. + type: bool + default: false + underlay_multicast_group_address_limit: + description: + - The underlay multicast group address limit (1-255). + type: int + default: 128 + tenant_routed_multicast: + description: + - Enable tenant routed multicast. + type: bool + default: false + tenant_routed_multicast_ipv6: + description: + - Enable tenant routed multicast for IPv6. + type: bool + default: false + first_hop_redundancy_protocol: + description: + - The first-hop redundancy protocol for tenant networks. + type: str + default: hsrp + choices: [ hsrp, vrrp ] + rendezvous_point_count: + description: + - The number of rendezvous points (1-4). + type: int + default: 2 + rendezvous_point_loopback_id: + description: + - The rendezvous point loopback interface ID (0-1023). + type: int + default: 254 + overlay_mode: + description: + - The overlay configuration mode. + type: str + default: cli + choices: [ cli, config-profile ] + bgp_loopback_id: + description: + - The BGP loopback interface ID (0-1023). + type: int + default: 0 + nve_loopback_id: + description: + - The NVE loopback interface ID (0-1023). + type: int + default: 1 + anycast_loopback_id: + description: + - The anycast loopback interface ID. + type: int + default: 10 + bgp_loopback_ip_range: + description: + - The BGP loopback IP address pool. + type: str + default: "10.2.0.0/22" + bgp_loopback_ipv6_range: + description: + - The BGP loopback IPv6 address pool. 
+ type: str + default: "fd00::a02:0/119" + nve_loopback_ip_range: + description: + - The NVE loopback IP address pool. + type: str + default: "10.3.0.0/22" + nve_loopback_ipv6_range: + description: + - The NVE loopback IPv6 address pool. + type: str + default: "fd00::a03:0/118" + anycast_rendezvous_point_ip_range: + description: + - The anycast rendezvous point IP address pool. + type: str + default: "10.254.254.0/24" + ipv6_anycast_rendezvous_point_ip_range: + description: + - The IPv6 anycast rendezvous point IP address pool. + type: str + default: "fd00::254:254:0/118" + intra_fabric_subnet_range: + description: + - The intra-fabric subnet IP address pool. + type: str + default: "10.4.0.0/16" + l2_vni_range: + description: + - The Layer 2 VNI range. + type: str + default: "30000-49000" + l3_vni_range: + description: + - The Layer 3 VNI range. + type: str + default: "50000-59000" + network_vlan_range: + description: + - The network VLAN range. + type: str + default: "2300-2999" + vrf_vlan_range: + description: + - The VRF VLAN range. + type: str + default: "2000-2299" + sub_interface_dot1q_range: + description: + - The sub-interface 802.1q range. + type: str + default: "2-511" + l3_vni_no_vlan_default_option: + description: + - Enable L3 VNI no-VLAN default option. + type: bool + default: false + fabric_mtu: + description: + - The fabric MTU size (1500-9216). + type: int + default: 9216 + l2_host_interface_mtu: + description: + - The L2 host interface MTU size (1500-9216). + type: int + default: 9216 + underlay_ipv6: + description: + - Enable IPv6 underlay. + type: bool + default: false + static_underlay_ip_allocation: + description: + - Disable dynamic underlay IP address allocation. + type: bool + default: false + vpc_domain_id_range: + description: + - The vPC domain ID range. + type: str + default: "1-1000" + vpc_peer_link_vlan: + description: + - The vPC peer link VLAN ID. 
+ type: str + default: "3600" + vpc_peer_link_enable_native_vlan: + description: + - Enable native VLAN on the vPC peer link. + type: bool + default: false + vpc_peer_keep_alive_option: + description: + - The vPC peer keep-alive option. + type: str + default: management + choices: [ loopback, management ] + vpc_auto_recovery_timer: + description: + - The vPC auto recovery timer in seconds (240-3600). + type: int + default: 360 + vpc_delay_restore_timer: + description: + - The vPC delay restore timer in seconds (1-3600). + type: int + default: 150 + vpc_peer_link_port_channel_id: + description: + - The vPC peer link port-channel ID. + type: str + default: "500" + vpc_ipv6_neighbor_discovery_sync: + description: + - Enable vPC IPv6 neighbor discovery synchronization. + type: bool + default: true + vpc_layer3_peer_router: + description: + - Enable vPC layer-3 peer router. + type: bool + default: true + vpc_tor_delay_restore_timer: + description: + - The vPC TOR delay restore timer. + type: int + default: 30 + fabric_vpc_domain_id: + description: + - Enable fabric vPC domain ID. + type: bool + default: false + shared_vpc_domain_id: + description: + - The shared vPC domain ID. + type: int + default: 1 + fabric_vpc_qos: + description: + - Enable fabric vPC QoS. + type: bool + default: false + fabric_vpc_qos_policy_name: + description: + - The fabric vPC QoS policy name. + type: str + default: spine_qos_for_fabric_vpc_peering + enable_peer_switch: + description: + - Enable peer switch. + type: bool + default: false + per_vrf_loopback_auto_provision: + description: + - Enable per-VRF loopback auto-provisioning. + type: bool + default: false + per_vrf_loopback_ip_range: + description: + - The per-VRF loopback IP address pool. + type: str + default: "10.5.0.0/22" + per_vrf_loopback_auto_provision_ipv6: + description: + - Enable per-VRF loopback auto-provisioning for IPv6. 
+ type: bool + default: false + per_vrf_loopback_ipv6_range: + description: + - The per-VRF loopback IPv6 address pool. + type: str + default: "fd00::a05:0/112" + vrf_template: + description: + - The VRF template name. + type: str + default: Default_VRF_Universal + network_template: + description: + - The network template name. + type: str + default: Default_Network_Universal + vrf_extension_template: + description: + - The VRF extension template name. + type: str + default: Default_VRF_Extension_Universal + network_extension_template: + description: + - The network extension template name. + type: str + default: Default_Network_Extension_Universal + performance_monitoring: + description: + - Enable performance monitoring. + type: bool + default: false + tenant_dhcp: + description: + - Enable tenant DHCP. + type: bool + default: true + advertise_physical_ip: + description: + - Advertise physical IP address for NVE loopback. + type: bool + default: false + advertise_physical_ip_on_border: + description: + - Advertise physical IP address on border switches. + type: bool + default: true + anycast_border_gateway_advertise_physical_ip: + description: + - Enable anycast border gateway to advertise physical IP. + type: bool + default: false + snmp_trap: + description: + - Enable SNMP traps. + type: bool + default: true + cdp: + description: + - Enable CDP. + type: bool + default: false + tcam_allocation: + description: + - Enable TCAM allocation. + type: bool + default: true + real_time_interface_statistics_collection: + description: + - Enable real-time interface statistics collection. + type: bool + default: false + interface_statistics_load_interval: + description: + - The interface statistics load interval in seconds. + type: int + default: 10 + greenfield_debug_flag: + description: + - The greenfield debug flag. + type: str + default: disable + choices: [ enable, disable ] + nxapi: + description: + - Enable NX-API (HTTPS). 
+ type: bool + default: false + nxapi_https_port: + description: + - The NX-API HTTPS port (1-65535). + type: int + default: 443 + nxapi_http: + description: + - Enable NX-API HTTP. + type: bool + default: false + nxapi_http_port: + description: + - The NX-API HTTP port (1-65535). + type: int + default: 80 + bgp_authentication: + description: + - Enable BGP authentication. + type: bool + default: false + bgp_authentication_key_type: + description: + - The BGP authentication key type. + type: str + default: 3des + bgp_authentication_key: + description: + - The BGP authentication key. + type: str + default: "" + bfd: + description: + - Enable BFD globally. + type: bool + default: false + bfd_ibgp: + description: + - Enable BFD for iBGP sessions. + type: bool + default: false + bfd_authentication: + description: + - Enable BFD authentication. + type: bool + default: false + bfd_authentication_key_id: + description: + - The BFD authentication key ID. + type: int + default: 100 + bfd_authentication_key: + description: + - The BFD authentication key. + type: str + default: "" + pim_hello_authentication: + description: + - Enable PIM hello authentication. + type: bool + default: false + pim_hello_authentication_key: + description: + - The PIM hello authentication key. + type: str + default: "" + macsec: + description: + - Enable MACsec on intra-fabric links. + type: bool + default: false + macsec_cipher_suite: + description: + - The MACsec cipher suite. + type: str + default: GCM-AES-XPN-256 + macsec_key_string: + description: + - The MACsec primary key string. + type: str + default: "" + macsec_algorithm: + description: + - The MACsec algorithm. + type: str + default: AES_128_CMAC + macsec_fallback_key_string: + description: + - The MACsec fallback key string. + type: str + default: "" + macsec_fallback_algorithm: + description: + - The MACsec fallback algorithm. 
+ type: str + default: AES_128_CMAC + macsec_report_timer: + description: + - The MACsec report timer in minutes. + type: int + default: 5 + vrf_lite_auto_config: + description: + - The VRF lite auto-configuration mode. + type: str + default: manual + vrf_lite_subnet_range: + description: + - The VRF lite subnet IP address pool. + type: str + default: "10.33.0.0/16" + vrf_lite_subnet_target_mask: + description: + - The VRF lite subnet target mask. + type: int + default: 30 + auto_unique_vrf_lite_ip_prefix: + description: + - Enable auto unique VRF lite IP prefix. + type: bool + default: false + default_queuing_policy: + description: + - Enable default queuing policy. + type: bool + default: false + aiml_qos: + description: + - Enable AI/ML QoS. + type: bool + default: false + aiml_qos_policy: + description: + - The AI/ML QoS policy. + type: str + default: 400G + dlb: + description: + - Enable dynamic load balancing. + type: bool + default: false + dlb_mode: + description: + - The DLB mode. + type: str + default: flowlet + ptp: + description: + - Enable Precision Time Protocol (PTP). + type: bool + default: false + ptp_loopback_id: + description: + - The PTP loopback ID. + type: int + default: 0 + ptp_domain_id: + description: + - The PTP domain ID. + type: int + default: 0 + private_vlan: + description: + - Enable private VLAN support. + type: bool + default: false + day0_bootstrap: + description: + - Enable day-0 bootstrap (POAP). + type: bool + default: false + local_dhcp_server: + description: + - Enable local DHCP server for bootstrap. + type: bool + default: false + dhcp_protocol_version: + description: + - The DHCP protocol version for bootstrap. + type: str + default: dhcpv4 + dhcp_start_address: + description: + - The DHCP start address for bootstrap. + type: str + default: "" + dhcp_end_address: + description: + - The DHCP end address for bootstrap. + type: str + default: "" + management_gateway: + description: + - The management gateway for bootstrap. 
+ type: str + default: "" + management_ipv4_prefix: + description: + - The management IPv4 prefix length for bootstrap. + type: int + default: 24 + management_ipv6_prefix: + description: + - The management IPv6 prefix length for bootstrap. + type: int + default: 64 + real_time_backup: + description: + - Enable real-time backup. + type: bool + scheduled_backup: + description: + - Enable scheduled backup. + type: bool + scheduled_backup_time: + description: + - The scheduled backup time. + type: str + default: "" + nve_hold_down_timer: + description: + - The NVE hold-down timer in seconds. + type: int + default: 180 + next_generation_oam: + description: + - Enable next-generation OAM. + type: bool + default: true + strict_config_compliance_mode: + description: + - Enable strict configuration compliance mode. + type: bool + default: false + copp_policy: + description: + - The CoPP policy. + type: str + default: strict + power_redundancy_mode: + description: + - The power redundancy mode. + type: str + default: redundant + heartbeat_interval: + description: + - The heartbeat interval. + type: int + default: 190 + allow_smart_switch_onboarding: + description: + - Allow smart switch onboarding. + type: bool + default: false + aaa: + description: + - Enable AAA. + type: bool + default: false + extra_config_leaf: + description: + - Extra freeform configuration applied to leaf switches. + type: str + default: "" + extra_config_spine: + description: + - Extra freeform configuration applied to spine switches. + type: str + default: "" + extra_config_tor: + description: + - Extra freeform configuration applied to TOR switches. + type: str + default: "" + extra_config_intra_fabric_links: + description: + - Extra freeform configuration applied to intra-fabric links. + type: str + default: "" + extra_config_aaa: + description: + - Extra freeform AAA configuration. + type: str + default: "" + banner: + description: + - The fabric banner text displayed on switch login. 
+ type: str + default: "" + ntp_server_collection: + description: + - The list of NTP server IP addresses. + type: list + elements: str + dns_collection: + description: + - The list of DNS server IP addresses. + type: list + elements: str + syslog_server_collection: + description: + - The list of syslog server IP addresses. + type: list + elements: str + syslog_server_vrf_collection: + description: + - The list of VRFs for syslog servers. + type: list + elements: str + syslog_severity_collection: + description: + - The list of syslog severity levels (0-7). + type: list + elements: int + state: + description: + - The desired state of the fabric resources on the Cisco Nexus Dashboard. + - Use O(state=merged) to create new fabrics and update existing ones as defined in the configuration. + Resources on ND that are not specified in the configuration will be left unchanged. + - Use O(state=replaced) to replace the configuration of the fabrics specified in the task. + Any settings not explicitly provided will revert to their defaults. + - Use O(state=overridden) to enforce the configuration as the single source of truth. + Any fabric existing on ND but not present in the configuration will be deleted. Use with extra caution. + - Use O(state=deleted) to remove the fabrics specified in the configuration from the Cisco Nexus Dashboard. + type: str + default: merged + choices: [ merged, replaced, overridden, deleted ] +extends_documentation_fragment: +- cisco.nd.modules +- cisco.nd.check_mode +notes: +- This module is only supported on Nexus Dashboard having version 4.1.0 or higher. +- Only eBGP VXLAN fabric type (C(vxlanEbgp)) is supported by this module. +- When using O(state=replaced) with only required fields, all optional management settings revert to their defaults. +- The O(config.management.bgp_asn) field is optional when O(config.management.bgp_asn_auto_allocation) is C(true).
+- The O(config.management.bgp_asn) field is required when O(config.management.bgp_asn_auto_allocation) is C(false). +- O(config.management.site_id) defaults to the value of O(config.management.bgp_asn) if not provided. +- The default O(config.management.vpc_peer_keep_alive_option) for eBGP fabrics is C(management), unlike iBGP fabrics. +""" + +EXAMPLES = r""" +- name: Create an eBGP VXLAN fabric using state merged (with auto ASN allocation) + cisco.nd.nd_manage_fabric_ebgp: + state: merged + config: + - name: my_ebgp_fabric + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn_auto_allocation: true + bgp_asn_range: "65000-65535" + bgp_as_mode: multiAS + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00aa" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: false + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: 
disable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.2.0.0/22" + nve_loopback_ip_range: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.254.0/24" + intra_fabric_subnet_range: "10.4.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.33.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.5.0.0/22" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + register: result + +- name: Create an eBGP VXLAN fabric with a static BGP ASN + cisco.nd.nd_manage_fabric_ebgp: + state: merged + config: + - name: my_ebgp_fabric_static + category: fabric + management: + type: vxlanEbgp + bgp_asn: "65001" + bgp_asn_auto_allocation: false + site_id: "65001" + bgp_as_mode: multiAS + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00aa" + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + bgp_loopback_ip_range: "10.2.0.0/22" + nve_loopback_ip_range: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.254.0/24" + intra_fabric_subnet_range: "10.4.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + register: result + +- name: Update specific fields on an existing eBGP fabric using state merged (partial update) + cisco.nd.nd_manage_fabric_ebgp: + state: merged + config: + - name: my_ebgp_fabric + category: fabric + management: + bgp_asn_range: "65100-65199" + anycast_gateway_mac: "2020.0000.00bb" + performance_monitoring: true + register: result + +- 
name: Create or fully replace an eBGP VXLAN fabric using state replaced + cisco.nd.nd_manage_fabric_ebgp: + state: replaced + config: + - name: my_ebgp_fabric + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65004" + bgp_asn_auto_allocation: false + site_id: "65004" + bgp_as_mode: multiAS + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00dd" + performance_monitoring: true + replication_mode: multicast + multicast_group_subnet: "239.1.3.0/25" + rendezvous_point_count: 3 + rendezvous_point_loopback_id: 253 + vpc_peer_link_vlan: "3700" + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 300 + vpc_delay_restore_timer: 120 + vpc_peer_link_port_channel_id: "600" + advertise_physical_ip: true + vpc_domain_id_range: "1-800" + fabric_mtu: 9000 + l2_host_interface_mtu: 9000 + tenant_dhcp: false + snmp_trap: false + anycast_border_gateway_advertise_physical_ip: true + greenfield_debug_flag: disable + tcam_allocation: false + real_time_interface_statistics_collection: true + interface_statistics_load_interval: 30 + bgp_loopback_ip_range: "10.22.0.0/22" + nve_loopback_ip_range: "10.23.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.252.0/24" + intra_fabric_subnet_range: "10.24.0.0/16" + l2_vni_range: "40000-59000" + l3_vni_range: "60000-69000" + network_vlan_range: "2400-3099" + vrf_vlan_range: "2100-2399" + banner: "^ Managed by Ansible ^" + register: result + +- name: Replace fabric with only required fields (all optional settings revert to defaults) + cisco.nd.nd_manage_fabric_ebgp: + state: replaced + config: + - name: my_ebgp_fabric + category: fabric + management: + type: vxlanEbgp + bgp_asn: "65004" + bgp_asn_auto_allocation: false + site_id: "65004" + banner: "^ Managed by Ansible ^" + register: result + +- name: Enforce exact fabric inventory using state overridden 
(deletes unlisted fabrics) + cisco.nd.nd_manage_fabric_ebgp: + state: overridden + config: + - name: fabric_east + category: fabric + location: + latitude: 40.7128 + longitude: -74.0060 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65010" + bgp_asn_auto_allocation: false + site_id: "65010" + bgp_as_mode: multiAS + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0010" + replication_mode: multicast + multicast_group_subnet: "239.1.10.0/25" + bgp_loopback_ip_range: "10.10.0.0/22" + nve_loopback_ip_range: "10.11.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.10.0/24" + intra_fabric_subnet_range: "10.12.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + - name: fabric_west + category: fabric + location: + latitude: 34.0522 + longitude: -118.2437 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65020" + bgp_asn_auto_allocation: false + site_id: "65020" + bgp_as_mode: multiAS + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0020" + replication_mode: multicast + multicast_group_subnet: "239.1.20.0/25" + bgp_loopback_ip_range: "10.20.0.0/22" + nve_loopback_ip_range: "10.21.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.20.0/24" + intra_fabric_subnet_range: "10.22.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + register: result + +- name: Delete a specific eBGP fabric using state deleted + cisco.nd.nd_manage_fabric_ebgp: + state: deleted + config: + - name: my_ebgp_fabric + register: result + +- name: Delete multiple eBGP fabrics in a single task + cisco.nd.nd_manage_fabric_ebgp: + state: deleted + config: + - name: fabric_east + - name: fabric_west + - name: fabric_old + register: 
result +""" + +RETURN = r""" +""" + +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec +from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ebgp import FabricEbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageEbgpFabricOrchestrator + + +def main(): + argument_spec = nd_argument_spec() + argument_spec.update(FabricEbgpModel.get_argument_spec()) + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + ) + + try: + # Initialize StateMachine + nd_state_machine = NDStateMachine( + module=module, + model_orchestrator=ManageEbgpFabricOrchestrator, + ) + + # Manage state + nd_state_machine.manage_state() + + module.exit_json(**nd_state_machine.output.format()) + + except Exception as e: + module.fail_json(msg=f"Module execution failed: {str(e)}") + +if __name__ == "__main__": + main() diff --git a/plugins/modules/nd_manage_fabric_ibgp.py b/plugins/modules/nd_manage_fabric_ibgp.py index 15d8460a..d0786b28 100644 --- a/plugins/modules/nd_manage_fabric_ibgp.py +++ b/plugins/modules/nd_manage_fabric_ibgp.py @@ -1358,13 +1358,13 @@ from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricModel -from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageFabricOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricIbgpModel +from 
ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageIbgpFabricOrchestrator def main(): argument_spec = nd_argument_spec() - argument_spec.update(FabricModel.get_argument_spec()) + argument_spec.update(FabricIbgpModel.get_argument_spec()) module = AnsibleModule( argument_spec=argument_spec, @@ -1375,7 +1375,7 @@ def main(): # Initialize StateMachine nd_state_machine = NDStateMachine( module=module, - model_orchestrator=ManageFabricOrchestrator, + model_orchestrator=ManageIbgpFabricOrchestrator, ) # Manage state diff --git a/tests/integration/targets/nd_manage_fabric/tasks/fabric_ebgp.yaml b/tests/integration/targets/nd_manage_fabric/tasks/fabric_ebgp.yaml new file mode 100644 index 00000000..f8cf517e --- /dev/null +++ b/tests/integration/targets/nd_manage_fabric/tasks/fabric_ebgp.yaml @@ -0,0 +1,1209 @@ +--- +# Test code for the ND modules +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +- name: Test that we have a Nexus Dashboard host, username and password + ansible.builtin.fail: + msg: 'Please define the following variables: ansible_host, ansible_user and ansible_password.' 
+ when: ansible_host is not defined or ansible_user is not defined or ansible_password is not defined + +############################################################################# +# CLEANUP - Ensure clean state before tests +############################################################################# +- name: Clean up any existing test fabrics before starting tests + cisco.nd.nd_manage_fabric_ebgp: + state: deleted + config: + - name: "{{ ebgp_test_fabric_merged }}" + - name: "{{ ebgp_test_fabric_replaced }}" + - name: "{{ ebgp_test_fabric_deleted }}" + tags: always + +############################################################################# +# TEST 1: STATE MERGED - Create fabric using merged state +############################################################################# +- name: "TEST 1a: Create eBGP fabric using state merged (first run)" + cisco.nd.nd_manage_fabric_ebgp: + state: merged + config: + - "{{ {'name': ebgp_test_fabric_merged} | combine(common_ebgp_fabric_config) }}" + register: ebgp_merged_result_1 + tags: [test_merged, test_merged_create] + +- name: "TEST 1a: Verify eBGP fabric was created using merged state" + assert: + that: + - ebgp_merged_result_1 is changed + - ebgp_merged_result_1 is not failed + fail_msg: "eBGP fabric creation with state merged failed" + success_msg: "eBGP fabric successfully created with state merged" + tags: [test_merged, test_merged_create] + +- name: "TEST 1b: Create eBGP fabric using state merged (second run - idempotency test)" + cisco.nd.nd_manage_fabric_ebgp: + state: merged + config: + - "{{ {'name': ebgp_test_fabric_merged} | combine(common_ebgp_fabric_config) }}" + register: ebgp_merged_result_2 + tags: [test_merged, test_merged_idempotent] + +- name: "TEST 1b: Verify merged state is idempotent" + assert: + that: + - ebgp_merged_result_2 is not changed + - ebgp_merged_result_2 is not failed + fail_msg: "Merged state is not idempotent - should not change when run twice with same config" + success_msg: 
"Merged state is idempotent - no changes on second run" + tags: [test_merged, test_merged_idempotent] + +- name: "TEST 1c: Update eBGP fabric using state merged (modify existing)" + cisco.nd.nd_manage_fabric_ebgp: + state: merged + config: + - name: "{{ ebgp_test_fabric_merged }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65002" # Changed from 65001 + bgp_asn_auto_allocation: false + site_id: "65002" # Changed from 65001 + bgp_as_mode: multiAS + bgp_allow_as_in_num: 1 + bgp_max_path: 4 + auto_configure_ebgp_evpn_peering: true + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00bb" # Changed from 00aa + performance_monitoring: true # Changed from false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: false + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: disable + tcam_allocation: true + real_time_interface_statistics_collection: 
false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.2.0.0/22" + nve_loopback_ip_range: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.254.0/24" + intra_fabric_subnet_range: "10.4.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.33.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.5.0.0/22" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + register: ebgp_merged_result_3 + tags: [test_merged, test_merged_update] + +- name: "TEST 1c: Verify eBGP fabric was updated using merged state" + assert: + that: + - ebgp_merged_result_3 is changed + - ebgp_merged_result_3 is not failed + fail_msg: "eBGP fabric update with state merged failed" + success_msg: "eBGP fabric successfully updated with state merged" + tags: [test_merged, test_merged_update] + +############################################################################# +# VALIDATION: Query ebgp_test_fabric_merged and validate expected changes +############################################################################# +- name: "VALIDATION 1: Authenticate with ND to get token" + ansible.builtin.uri: + url: "https://{{ ansible_host }}:{{ ansible_httpapi_port | default(443) }}/login" + method: POST + headers: + Content-Type: "application/json" + body_format: json + body: + domain: "{{ ansible_httpapi_login_domain | default('local') }}" + userName: "{{ ansible_user }}" + userPasswd: "{{ ansible_password }}" + validate_certs: false + return_content: true + status_code: + - 200 + register: nd_auth_response + tags: [test_merged, test_merged_validation] + 
delegate_to: localhost + +- name: "VALIDATION 1: Query ebgp_test_fabric_merged configuration from ND" + ansible.builtin.uri: + url: "https://{{ ansible_host }}:{{ ansible_httpapi_port | default(443) }}/api/v1/manage/fabrics/{{ ebgp_test_fabric_merged }}" + method: GET + headers: + Authorization: "Bearer {{ nd_auth_response.json.jwttoken }}" + Content-Type: "application/json" + validate_certs: false + return_content: true + status_code: + - 200 + - 404 + register: ebgp_merged_fabric_query + tags: [test_merged, test_merged_validation] + delegate_to: localhost + +- name: "VALIDATION 1: Parse eBGP fabric configuration response" + set_fact: + ebgp_merged_fabric_config: "{{ ebgp_merged_fabric_query.json }}" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify BGP ASN was updated to 65002" + assert: + that: + - ebgp_merged_fabric_config.management.bgpAsn == "65002" + fail_msg: "BGP ASN validation failed. Expected: 65002, Actual: {{ ebgp_merged_fabric_config.management.bgpAsn }}" + success_msg: "✓ BGP ASN correctly updated to 65002" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify Site ID was updated to 65002" + assert: + that: + - ebgp_merged_fabric_config.management.siteId == "65002" + fail_msg: "Site ID validation failed. Expected: 65002, Actual: {{ ebgp_merged_fabric_config.management.siteId }}" + success_msg: "✓ Site ID correctly updated to 65002" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify Anycast Gateway MAC was updated to 2020.0000.00bb" + assert: + that: + - ebgp_merged_fabric_config.management.anycastGatewayMac == "2020.0000.00bb" + fail_msg: "Anycast Gateway MAC validation failed. 
Expected: 2020.0000.00bb, Actual: {{ ebgp_merged_fabric_config.management.anycastGatewayMac }}" + success_msg: "✓ Anycast Gateway MAC correctly updated to 2020.0000.00bb" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify Performance Monitoring was enabled" + assert: + that: + - ebgp_merged_fabric_config.management.performanceMonitoring == true + fail_msg: "Performance Monitoring validation failed. Expected: true, Actual: {{ ebgp_merged_fabric_config.management.performanceMonitoring }}" + success_msg: "✓ Performance Monitoring correctly enabled" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Verify BGP AS Mode is multiAS" + assert: + that: + - ebgp_merged_fabric_config.management.bgpAsMode == "multiAS" + fail_msg: "BGP AS Mode validation failed. Expected: multiAS, Actual: {{ ebgp_merged_fabric_config.management.bgpAsMode }}" + success_msg: "✓ BGP AS Mode correctly set to multiAS" + tags: [test_merged, test_merged_validation] + +- name: "VALIDATION 1: Display successful validation summary for ebgp_test_fabric_merged" + debug: + msg: | + ======================================== + VALIDATION SUMMARY for ebgp_test_fabric_merged: + ======================================== + ✓ BGP ASN: {{ ebgp_merged_fabric_config.management.bgpAsn }} + ✓ Site ID: {{ ebgp_merged_fabric_config.management.siteId }} + ✓ Anycast Gateway MAC: {{ ebgp_merged_fabric_config.management.anycastGatewayMac }} + ✓ Performance Monitoring: {{ ebgp_merged_fabric_config.management.performanceMonitoring }} + ✓ BGP AS Mode: {{ ebgp_merged_fabric_config.management.bgpAsMode }} + + All 5 expected changes validated successfully! 
+ ======================================== + tags: [test_merged, test_merged_validation] + +############################################################################# +# TEST 2: STATE REPLACED - Create and manage fabric using replaced state +############################################################################# +- name: "TEST 2a: Create eBGP fabric using state replaced (first run)" + cisco.nd.nd_manage_fabric_ebgp: + state: replaced + config: + - name: "{{ ebgp_test_fabric_replaced }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65004" # Different from default ASN + bgp_asn_auto_allocation: true + bgp_asn_range: "65000-65100" + site_id: "65004" # Different from default site_id + bgp_as_mode: multiAS # Same as the default multiAS + bgp_allow_as_in_num: 2 # Different from default 1 + bgp_max_path: 8 # Different from default 4 + auto_configure_ebgp_evpn_peering: true + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00dd" # Different from default MAC + performance_monitoring: true # Different from default false + replication_mode: multicast + multicast_group_subnet: "239.1.3.0/25" # Different from default subnet + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 3 # Different from default 2 + rendezvous_point_loopback_id: 253 # Different from default 254 + vpc_peer_link_vlan: "3700" # Different from default 3600 + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 300 # Different from default 360 + vpc_delay_restore_timer: 120 # Different from default 150 + vpc_peer_link_port_channel_id: "600" # Different from default 500 + vpc_ipv6_neighbor_discovery_sync: false # Different from default true + advertise_physical_ip:
true # Different from default false + vpc_domain_id_range: "1-800" # Different from default 1-1000 + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9000 # Different from default 9216 + l2_host_interface_mtu: 9000 # Different from default 9216 + tenant_dhcp: false # Different from default true + nxapi: false + nxapi_https_port: 443 + nxapi_http: true # Different from default false + nxapi_http_port: 80 + snmp_trap: false # Different from default true + anycast_border_gateway_advertise_physical_ip: true # Different from default false + greenfield_debug_flag: enable # Different from default disable + tcam_allocation: false # Different from default true + real_time_interface_statistics_collection: true # Different from default false + interface_statistics_load_interval: 30 # Different from default 10 + bgp_loopback_ip_range: "10.22.0.0/22" # Different from default range + nve_loopback_ip_range: "10.23.0.0/22" # Different from default range + anycast_rendezvous_point_ip_range: "10.254.252.0/24" # Different from default range + intra_fabric_subnet_range: "10.24.0.0/16" # Different from default range + l2_vni_range: "40000-59000" # Different from default range + l3_vni_range: "60000-69000" # Different from default range + network_vlan_range: "2400-3099" # Different from default range + vrf_vlan_range: "2100-2399" # Different from default range + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.53.0.0/16" # Different from default range + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.25.0.0/22" # Different from default range + per_vrf_loopback_auto_provision_ipv6: true + 
per_vrf_loopback_ipv6_range: "fd00::a25:0/112" # Different from default range + banner: "^ Updated via replaced state ^" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + management_ipv6_prefix: 64 + register: ebgp_replaced_result_1 + tags: [test_replaced, test_replaced_create] + +- name: "TEST 2a: Verify eBGP fabric was created using replaced state" + assert: + that: + - ebgp_replaced_result_1 is changed + - ebgp_replaced_result_1 is not failed + fail_msg: "eBGP fabric creation with state replaced failed" + success_msg: "eBGP fabric successfully created with state replaced" + tags: [test_replaced, test_replaced_create] + +- name: "TEST 2b: Create eBGP fabric using state replaced (second run - idempotency test)" + cisco.nd.nd_manage_fabric_ebgp: + state: replaced + config: + - name: "{{ ebgp_test_fabric_replaced }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65004" # Different from default ASN + bgp_asn_auto_allocation: true + bgp_asn_range: "65000-65100" + site_id: "65004" + bgp_as_mode: multiAS # Same as the eBGP default multiAS + bgp_allow_as_in_num: 2 + bgp_max_path: 8 + auto_configure_ebgp_evpn_peering: true + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.00dd" + performance_monitoring: true + replication_mode: multicast + multicast_group_subnet: "239.1.3.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 3 + rendezvous_point_loopback_id: 253 + vpc_peer_link_vlan: "3700" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 300 + vpc_delay_restore_timer: 120 + 
vpc_peer_link_port_channel_id: "600" + vpc_ipv6_neighbor_discovery_sync: false + advertise_physical_ip: true + vpc_domain_id_range: "1-800" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9000 + l2_host_interface_mtu: 9000 + tenant_dhcp: false + nxapi: false + nxapi_https_port: 443 + nxapi_http: true + nxapi_http_port: 80 + snmp_trap: false + anycast_border_gateway_advertise_physical_ip: true + greenfield_debug_flag: enable + tcam_allocation: false + real_time_interface_statistics_collection: true + interface_statistics_load_interval: 30 + bgp_loopback_ip_range: "10.22.0.0/22" + nve_loopback_ip_range: "10.23.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.252.0/24" + intra_fabric_subnet_range: "10.24.0.0/16" + l2_vni_range: "40000-59000" + l3_vni_range: "60000-69000" + network_vlan_range: "2400-3099" + vrf_vlan_range: "2100-2399" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.53.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.25.0.0/22" + per_vrf_loopback_auto_provision_ipv6: true + per_vrf_loopback_ipv6_range: "fd00::a25:0/112" + banner: "^ Updated via replaced state ^" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + management_ipv6_prefix: 64 + register: ebgp_replaced_result_2 + tags: [test_replaced, test_replaced_idempotent] + +- name: "TEST 2b: Verify replaced state is idempotent" + assert: + that: + - ebgp_replaced_result_2 is not changed + - ebgp_replaced_result_2 is not failed + fail_msg: "Replaced state is not 
idempotent - should not change when run twice with same config" + success_msg: "Replaced state is idempotent - no changes on second run" + tags: [test_replaced, test_replaced_idempotent] + +- name: "TEST 2c: Update eBGP fabric using state replaced (complete replacement with minimal config)" + cisco.nd.nd_manage_fabric_ebgp: + state: replaced + config: + - name: "{{ ebgp_test_fabric_replaced }}" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65004" # Different from default ASN + bgp_asn_auto_allocation: true + bgp_asn_range: "65000-65100" + site_id: "65004" + banner: "^ Updated via replaced state ^" + register: ebgp_replaced_result_3 + tags: [test_replaced, test_replaced_update] + +- name: "TEST 2c: Verify eBGP fabric was completely replaced (defaults restored)" + assert: + that: + - ebgp_replaced_result_3 is changed + - ebgp_replaced_result_3 is not failed + fail_msg: "eBGP fabric replacement with state replaced failed" + success_msg: "eBGP fabric successfully replaced with state replaced" + tags: [test_replaced, test_replaced_update] + +############################################################################# +# VALIDATION: Query ebgp_test_fabric_replaced and validate defaults are restored +############################################################################# +- name: "VALIDATION 2: Authenticate with ND to get token" + ansible.builtin.uri: + url: "https://{{ ansible_host }}:{{ ansible_httpapi_port | default(443) }}/login" + method: POST + headers: + Content-Type: "application/json" + body_format: json + body: + domain: "{{ ansible_httpapi_login_domain | default('local') }}" + userName: "{{ ansible_user }}" + userPasswd: "{{ ansible_password }}" + validate_certs: false + return_content: true + status_code: + - 200 + register: nd_auth_response_2 + tags: [test_replaced, 
test_replaced_validation] + delegate_to: localhost + +- name: "VALIDATION 2: Query ebgp_test_fabric_replaced configuration from ND" + ansible.builtin.uri: + url: "https://{{ ansible_host }}:{{ ansible_httpapi_port | default(443) }}/api/v1/manage/fabrics/{{ ebgp_test_fabric_replaced }}" + method: GET + headers: + Authorization: "Bearer {{ nd_auth_response_2.json.jwttoken }}" + Content-Type: "application/json" + validate_certs: false + return_content: true + status_code: + - 200 + - 404 + register: ebgp_replaced_fabric_query + tags: [test_replaced, test_replaced_validation] + delegate_to: localhost + +- name: "VALIDATION 2: Parse eBGP fabric configuration response" + set_fact: + ebgp_replaced_fabric_config: "{{ ebgp_replaced_fabric_query.json }}" + tags: [test_replaced, test_replaced_validation] + +# Network Range Validations - verify defaults were restored +- name: "VALIDATION 2: Verify L3 VNI Range was standardized to 50000-59000" + assert: + that: + - ebgp_replaced_fabric_config.management.l3VniRange == "50000-59000" + fail_msg: "L3 VNI Range validation failed. Expected: 50000-59000, Actual: {{ ebgp_replaced_fabric_config.management.l3VniRange }}" + success_msg: "✓ L3 VNI Range correctly standardized to 50000-59000" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify L2 VNI Range was standardized to 30000-49000" + assert: + that: + - ebgp_replaced_fabric_config.management.l2VniRange == "30000-49000" + fail_msg: "L2 VNI Range validation failed. Expected: 30000-49000, Actual: {{ ebgp_replaced_fabric_config.management.l2VniRange }}" + success_msg: "✓ L2 VNI Range correctly standardized to 30000-49000" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify BGP Loopback IP Range was standardized to 10.2.0.0/22" + assert: + that: + - ebgp_replaced_fabric_config.management.bgpLoopbackIpRange == "10.2.0.0/22" + fail_msg: "BGP Loopback IP Range validation failed. 
Expected: 10.2.0.0/22, Actual: {{ ebgp_replaced_fabric_config.management.bgpLoopbackIpRange }}" + success_msg: "✓ BGP Loopback IP Range correctly standardized to 10.2.0.0/22" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify NVE Loopback IP Range was standardized to 10.3.0.0/22" + assert: + that: + - ebgp_replaced_fabric_config.management.nveLoopbackIpRange == "10.3.0.0/22" + fail_msg: "NVE Loopback IP Range validation failed. Expected: 10.3.0.0/22, Actual: {{ ebgp_replaced_fabric_config.management.nveLoopbackIpRange }}" + success_msg: "✓ NVE Loopback IP Range correctly standardized to 10.3.0.0/22" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Intra-Fabric Subnet Range was standardized to 10.4.0.0/16" + assert: + that: + - ebgp_replaced_fabric_config.management.intraFabricSubnetRange == "10.4.0.0/16" + fail_msg: "Intra-Fabric Subnet Range validation failed. Expected: 10.4.0.0/16, Actual: {{ ebgp_replaced_fabric_config.management.intraFabricSubnetRange }}" + success_msg: "✓ Intra-Fabric Subnet Range correctly standardized to 10.4.0.0/16" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VRF Lite Subnet Range was standardized to 10.33.0.0/16" + assert: + that: + - ebgp_replaced_fabric_config.management.vrfLiteSubnetRange == "10.33.0.0/16" + fail_msg: "VRF Lite Subnet Range validation failed. Expected: 10.33.0.0/16, Actual: {{ ebgp_replaced_fabric_config.management.vrfLiteSubnetRange }}" + success_msg: "✓ VRF Lite Subnet Range correctly standardized to 10.33.0.0/16" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Anycast RP IP Range was standardized to 10.254.254.0/24" + assert: + that: + - ebgp_replaced_fabric_config.management.anycastRendezvousPointIpRange == "10.254.254.0/24" + fail_msg: "Anycast RP IP Range validation failed. 
Expected: 10.254.254.0/24, Actual: {{ ebgp_replaced_fabric_config.management.anycastRendezvousPointIpRange }}" + success_msg: "✓ Anycast RP IP Range correctly standardized to 10.254.254.0/24" + tags: [test_replaced, test_replaced_validation] + +# VLAN Range Validations +- name: "VALIDATION 2: Verify Network VLAN Range was standardized to 2300-2999" + assert: + that: + - ebgp_replaced_fabric_config.management.networkVlanRange == "2300-2999" + fail_msg: "Network VLAN Range validation failed. Expected: 2300-2999, Actual: {{ ebgp_replaced_fabric_config.management.networkVlanRange }}" + success_msg: "✓ Network VLAN Range correctly standardized to 2300-2999" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VRF VLAN Range was standardized to 2000-2299" + assert: + that: + - ebgp_replaced_fabric_config.management.vrfVlanRange == "2000-2299" + fail_msg: "VRF VLAN Range validation failed. Expected: 2000-2299, Actual: {{ ebgp_replaced_fabric_config.management.vrfVlanRange }}" + success_msg: "✓ VRF VLAN Range correctly standardized to 2000-2299" + tags: [test_replaced, test_replaced_validation] + +# MTU Validations +- name: "VALIDATION 2: Verify Fabric MTU was restored to 9216" + assert: + that: + - ebgp_replaced_fabric_config.management.fabricMtu == 9216 + fail_msg: "Fabric MTU validation failed. Expected: 9216, Actual: {{ ebgp_replaced_fabric_config.management.fabricMtu }}" + success_msg: "✓ Fabric MTU correctly restored to 9216" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify L2 Host Interface MTU was restored to 9216" + assert: + that: + - ebgp_replaced_fabric_config.management.l2HostInterfaceMtu == 9216 + fail_msg: "L2 Host Interface MTU validation failed. 
Expected: 9216, Actual: {{ ebgp_replaced_fabric_config.management.l2HostInterfaceMtu }}" + success_msg: "✓ L2 Host Interface MTU correctly restored to 9216" + tags: [test_replaced, test_replaced_validation] + +# Gateway and Multicast Validations +- name: "VALIDATION 2: Verify Anycast Gateway MAC was standardized to 2020.0000.00aa" + assert: + that: + - ebgp_replaced_fabric_config.management.anycastGatewayMac == "2020.0000.00aa" + fail_msg: "Anycast Gateway MAC validation failed. Expected: 2020.0000.00aa, Actual: {{ ebgp_replaced_fabric_config.management.anycastGatewayMac }}" + success_msg: "✓ Anycast Gateway MAC correctly standardized to 2020.0000.00aa" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Multicast Group Subnet was standardized to 239.1.1.0/25" + assert: + that: + - ebgp_replaced_fabric_config.management.multicastGroupSubnet == "239.1.1.0/25" + fail_msg: "Multicast Group Subnet validation failed. Expected: 239.1.1.0/25, Actual: {{ ebgp_replaced_fabric_config.management.multicastGroupSubnet }}" + success_msg: "✓ Multicast Group Subnet correctly standardized to 239.1.1.0/25" + tags: [test_replaced, test_replaced_validation] + +# VPC Configuration Validations +- name: "VALIDATION 2: Verify VPC Auto Recovery Timer was standardized to 360" + assert: + that: + - ebgp_replaced_fabric_config.management.vpcAutoRecoveryTimer == 360 + fail_msg: "VPC Auto Recovery Timer validation failed. Expected: 360, Actual: {{ ebgp_replaced_fabric_config.management.vpcAutoRecoveryTimer }}" + success_msg: "✓ VPC Auto Recovery Timer correctly standardized to 360" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Delay Restore Timer was standardized to 150" + assert: + that: + - ebgp_replaced_fabric_config.management.vpcDelayRestoreTimer == 150 + fail_msg: "VPC Delay Restore Timer validation failed. 
Expected: 150, Actual: {{ ebgp_replaced_fabric_config.management.vpcDelayRestoreTimer }}" + success_msg: "✓ VPC Delay Restore Timer correctly standardized to 150" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Peer Link Port Channel ID was standardized to 500" + assert: + that: + - ebgp_replaced_fabric_config.management.vpcPeerLinkPortChannelId == "500" + fail_msg: "VPC Peer Link Port Channel ID validation failed. Expected: 500, Actual: {{ ebgp_replaced_fabric_config.management.vpcPeerLinkPortChannelId }}" + success_msg: "✓ VPC Peer Link Port Channel ID correctly standardized to 500" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Peer Link VLAN was standardized to 3600" + assert: + that: + - ebgp_replaced_fabric_config.management.vpcPeerLinkVlan == "3600" + fail_msg: "VPC Peer Link VLAN validation failed. Expected: 3600, Actual: {{ ebgp_replaced_fabric_config.management.vpcPeerLinkVlan }}" + success_msg: "✓ VPC Peer Link VLAN correctly standardized to 3600" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC Domain ID Range was standardized to 1-1000" + assert: + that: + - ebgp_replaced_fabric_config.management.vpcDomainIdRange == "1-1000" + fail_msg: "VPC Domain ID Range validation failed. Expected: 1-1000, Actual: {{ ebgp_replaced_fabric_config.management.vpcDomainIdRange }}" + success_msg: "✓ VPC Domain ID Range correctly standardized to 1-1000" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify VPC IPv6 Neighbor Discovery Sync was enabled" + assert: + that: + - ebgp_replaced_fabric_config.management.vpcIpv6NeighborDiscoverySync == true + fail_msg: "VPC IPv6 Neighbor Discovery Sync validation failed. 
Expected: true, Actual: {{ ebgp_replaced_fabric_config.management.vpcIpv6NeighborDiscoverySync }}" + success_msg: "✓ VPC IPv6 Neighbor Discovery Sync correctly enabled" + tags: [test_replaced, test_replaced_validation] + +# Multicast Settings Validations +- name: "VALIDATION 2: Verify Rendezvous Point Count was standardized to 2" + assert: + that: + - ebgp_replaced_fabric_config.management.rendezvousPointCount == 2 + fail_msg: "Rendezvous Point Count validation failed. Expected: 2, Actual: {{ ebgp_replaced_fabric_config.management.rendezvousPointCount }}" + success_msg: "✓ Rendezvous Point Count correctly standardized to 2" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Rendezvous Point Loopback ID was standardized to 254" + assert: + that: + - ebgp_replaced_fabric_config.management.rendezvousPointLoopbackId == 254 + fail_msg: "Rendezvous Point Loopback ID validation failed. Expected: 254, Actual: {{ ebgp_replaced_fabric_config.management.rendezvousPointLoopbackId }}" + success_msg: "✓ Rendezvous Point Loopback ID correctly standardized to 254" + tags: [test_replaced, test_replaced_validation] + +# eBGP-specific Validations +- name: "VALIDATION 2: Verify BGP AS Mode was standardized to multiAS" + assert: + that: + - ebgp_replaced_fabric_config.management.bgpAsMode == "multiAS" + fail_msg: "BGP AS Mode validation failed. Expected: multiAS, Actual: {{ ebgp_replaced_fabric_config.management.bgpAsMode }}" + success_msg: "✓ BGP AS Mode correctly standardized to multiAS" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify BGP Allow AS In Num was standardized to 1" + assert: + that: + - ebgp_replaced_fabric_config.management.bgpAllowAsInNum == 1 + fail_msg: "BGP Allow AS In Num validation failed. 
Expected: 1, Actual: {{ ebgp_replaced_fabric_config.management.bgpAllowAsInNum }}" + success_msg: "✓ BGP Allow AS In Num correctly standardized to 1" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify BGP Max Path was standardized to 4" + assert: + that: + - ebgp_replaced_fabric_config.management.bgpMaxPath == 4 + fail_msg: "BGP Max Path validation failed. Expected: 4, Actual: {{ ebgp_replaced_fabric_config.management.bgpMaxPath }}" + success_msg: "✓ BGP Max Path correctly standardized to 4" + tags: [test_replaced, test_replaced_validation] + +# Feature Flag Validations +- name: "VALIDATION 2: Verify TCAM Allocation was re-enabled" + assert: + that: + - ebgp_replaced_fabric_config.management.tcamAllocation == true + fail_msg: "TCAM Allocation validation failed. Expected: true, Actual: {{ ebgp_replaced_fabric_config.management.tcamAllocation }}" + success_msg: "✓ TCAM Allocation correctly re-enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Real Time Interface Statistics Collection was disabled" + assert: + that: + - ebgp_replaced_fabric_config.management.realTimeInterfaceStatisticsCollection == false + fail_msg: "Real Time Interface Statistics Collection validation failed. Expected: false, Actual: {{ ebgp_replaced_fabric_config.management.realTimeInterfaceStatisticsCollection }}" + success_msg: "✓ Real Time Interface Statistics Collection correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Performance Monitoring was disabled" + assert: + that: + - ebgp_replaced_fabric_config.management.performanceMonitoring == false + fail_msg: "Performance Monitoring validation failed. 
Expected: false, Actual: {{ ebgp_replaced_fabric_config.management.performanceMonitoring }}" + success_msg: "✓ Performance Monitoring correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Tenant DHCP was re-enabled" + assert: + that: + - ebgp_replaced_fabric_config.management.tenantDhcp == true + fail_msg: "Tenant DHCP validation failed. Expected: true, Actual: {{ ebgp_replaced_fabric_config.management.tenantDhcp }}" + success_msg: "✓ Tenant DHCP correctly re-enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify SNMP Trap was re-enabled" + assert: + that: + - ebgp_replaced_fabric_config.management.snmpTrap == true + fail_msg: "SNMP Trap validation failed. Expected: true, Actual: {{ ebgp_replaced_fabric_config.management.snmpTrap }}" + success_msg: "✓ SNMP Trap correctly re-enabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Greenfield Debug Flag was set to disable (eBGP default)" + assert: + that: + - ebgp_replaced_fabric_config.management.greenfieldDebugFlag == "disable" + fail_msg: "Greenfield Debug Flag validation failed. Expected: disable, Actual: {{ ebgp_replaced_fabric_config.management.greenfieldDebugFlag }}" + success_msg: "✓ Greenfield Debug Flag correctly set to disable (eBGP default)" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify NXAPI HTTP is always true for eBGP (ND enforced behavior)" + assert: + that: + - ebgp_replaced_fabric_config.management.nxapiHttp == true + fail_msg: "NXAPI HTTP validation failed. 
ND enforces nxapiHttp=true for eBGP fabrics, Actual: {{ ebgp_replaced_fabric_config.management.nxapiHttp }}" + success_msg: "✓ NXAPI HTTP is true (ND enforces this for eBGP fabrics regardless of configured value)" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify NXAPI was disabled" + assert: + that: + - ebgp_replaced_fabric_config.management.nxapi == false + fail_msg: "NXAPI validation failed. Expected: false, Actual: {{ ebgp_replaced_fabric_config.management.nxapi }}" + success_msg: "✓ NXAPI correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Per VRF Loopback Auto Provision was disabled" + assert: + that: + - ebgp_replaced_fabric_config.management.perVrfLoopbackAutoProvision == false + fail_msg: "Per VRF Loopback Auto Provision validation failed. Expected: false, Actual: {{ ebgp_replaced_fabric_config.management.perVrfLoopbackAutoProvision }}" + success_msg: "✓ Per VRF Loopback Auto Provision correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Per VRF Loopback Auto Provision IPv6 was disabled" + assert: + that: + - ebgp_replaced_fabric_config.management.perVrfLoopbackAutoProvisionIpv6 == false + fail_msg: "Per VRF Loopback Auto Provision IPv6 validation failed. Expected: false, Actual: {{ ebgp_replaced_fabric_config.management.perVrfLoopbackAutoProvisionIpv6 }}" + success_msg: "✓ Per VRF Loopback Auto Provision IPv6 correctly disabled" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Verify Banner was preserved" + assert: + that: + - ebgp_replaced_fabric_config.management.banner == "^ Updated via replaced state ^" + fail_msg: "Banner validation failed. 
Expected: '^ Updated via replaced state ^', Actual: {{ ebgp_replaced_fabric_config.management.banner }}" + success_msg: "✓ Banner correctly preserved: '{{ ebgp_replaced_fabric_config.management.banner }}'" + tags: [test_replaced, test_replaced_validation] + +- name: "VALIDATION 2: Display successful validation summary for ebgp_test_fabric_replaced" + debug: + msg: | + ======================================== + VALIDATION SUMMARY for ebgp_test_fabric_replaced: + ======================================== + Network Ranges (restored to defaults): + ✓ L3 VNI Range: {{ ebgp_replaced_fabric_config.management.l3VniRange }} + ✓ L2 VNI Range: {{ ebgp_replaced_fabric_config.management.l2VniRange }} + ✓ BGP Loopback IP Range: {{ ebgp_replaced_fabric_config.management.bgpLoopbackIpRange }} + ✓ NVE Loopback IP Range: {{ ebgp_replaced_fabric_config.management.nveLoopbackIpRange }} + ✓ Intra-Fabric Subnet Range: {{ ebgp_replaced_fabric_config.management.intraFabricSubnetRange }} + ✓ VRF Lite Subnet Range: {{ ebgp_replaced_fabric_config.management.vrfLiteSubnetRange }} + ✓ Anycast RP IP Range: {{ ebgp_replaced_fabric_config.management.anycastRendezvousPointIpRange }} + + VLAN Ranges: + ✓ Network VLAN Range: {{ ebgp_replaced_fabric_config.management.networkVlanRange }} + ✓ VRF VLAN Range: {{ ebgp_replaced_fabric_config.management.vrfVlanRange }} + + MTU Settings: + ✓ Fabric MTU: {{ ebgp_replaced_fabric_config.management.fabricMtu }} + ✓ L2 Host Interface MTU: {{ ebgp_replaced_fabric_config.management.l2HostInterfaceMtu }} + + VPC Configuration: + ✓ VPC Auto Recovery Timer: {{ ebgp_replaced_fabric_config.management.vpcAutoRecoveryTimer }} + ✓ VPC Delay Restore Timer: {{ ebgp_replaced_fabric_config.management.vpcDelayRestoreTimer }} + ✓ VPC Peer Link Port Channel ID: {{ ebgp_replaced_fabric_config.management.vpcPeerLinkPortChannelId }} + ✓ VPC Peer Link VLAN: {{ ebgp_replaced_fabric_config.management.vpcPeerLinkVlan }} + ✓ VPC Domain ID Range: {{ 
ebgp_replaced_fabric_config.management.vpcDomainIdRange }} + ✓ VPC IPv6 Neighbor Discovery Sync: {{ ebgp_replaced_fabric_config.management.vpcIpv6NeighborDiscoverySync }} + + Gateway & Multicast: + ✓ Anycast Gateway MAC: {{ ebgp_replaced_fabric_config.management.anycastGatewayMac }} + ✓ Multicast Group Subnet: {{ ebgp_replaced_fabric_config.management.multicastGroupSubnet }} + ✓ Rendezvous Point Count: {{ ebgp_replaced_fabric_config.management.rendezvousPointCount }} + ✓ Rendezvous Point Loopback ID: {{ ebgp_replaced_fabric_config.management.rendezvousPointLoopbackId }} + + eBGP-specific: + ✓ BGP AS Mode: {{ ebgp_replaced_fabric_config.management.bgpAsMode }} + ✓ BGP Allow AS In Num: {{ ebgp_replaced_fabric_config.management.bgpAllowAsInNum }} + ✓ BGP Max Path: {{ ebgp_replaced_fabric_config.management.bgpMaxPath }} + + Feature Flags: + ✓ TCAM Allocation: {{ ebgp_replaced_fabric_config.management.tcamAllocation }} + ✓ Real Time Interface Statistics Collection: {{ ebgp_replaced_fabric_config.management.realTimeInterfaceStatisticsCollection }} + ✓ Performance Monitoring: {{ ebgp_replaced_fabric_config.management.performanceMonitoring }} + ✓ Tenant DHCP: {{ ebgp_replaced_fabric_config.management.tenantDhcp }} + ✓ SNMP Trap: {{ ebgp_replaced_fabric_config.management.snmpTrap }} + ✓ Greenfield Debug Flag (eBGP default): {{ ebgp_replaced_fabric_config.management.greenfieldDebugFlag }} + ✓ NXAPI HTTP (ND enforces true for eBGP): {{ ebgp_replaced_fabric_config.management.nxapiHttp }} + ✓ NXAPI: {{ ebgp_replaced_fabric_config.management.nxapi }} + + Auto-Provisioning: + ✓ Per VRF Loopback Auto Provision: {{ ebgp_replaced_fabric_config.management.perVrfLoopbackAutoProvision }} + ✓ Per VRF Loopback Auto Provision IPv6: {{ ebgp_replaced_fabric_config.management.perVrfLoopbackAutoProvisionIpv6 }} + + Preserved Settings: + ✓ Banner: "{{ ebgp_replaced_fabric_config.management.banner }}" + + All 35+ expected changes validated successfully! 
+ ======================================== + tags: [test_replaced, test_replaced_validation] + +############################################################################# +# TEST 3: Demonstrate difference between merged and replaced states +############################################################################# +- name: "TEST 3: Create eBGP fabric for merged vs replaced comparison" + cisco.nd.nd_manage_fabric_ebgp: + state: replaced + config: + - "{{ {'name': ebgp_test_fabric_deleted} | combine(common_ebgp_fabric_config) }}" + register: ebgp_comparison_fabric_creation + tags: [test_comparison] + +- name: "TEST 3a: Partial update using merged state (should merge changes)" + cisco.nd.nd_manage_fabric_ebgp: + state: merged + config: + - name: "{{ ebgp_test_fabric_deleted }}" + category: fabric + management: + bgp_asn: "65004" # Different from default ASN + # bgp_asn_auto_allocation: true + bgp_asn_range: "65000-65100" + fabric_mtu: 8000 # Only updating MTU + register: ebgp_merged_partial_result + tags: [test_comparison, test_merged_partial] + +- name: "TEST 3a: Verify merged state preserves existing configuration" + assert: + that: + - ebgp_merged_partial_result is changed + - ebgp_merged_partial_result is not failed + fail_msg: "Partial update with merged state failed" + success_msg: "Merged state successfully performed partial update" + tags: [test_comparison, test_merged_partial] + +- name: "TEST 3b: Partial update using replaced state (should replace entire config)" + cisco.nd.nd_manage_fabric_ebgp: + state: replaced + config: + - name: "{{ ebgp_test_fabric_deleted }}" + category: fabric + management: + type: vxlanEbgp + bgp_asn: "65100" + bgp_asn_auto_allocation: true + bgp_asn_range: "65000-65100" + target_subnet_mask: 30 + register: ebgp_replaced_partial_result + tags: [test_comparison, test_replaced_partial] + +- name: "TEST 3b: Verify replaced state performs complete replacement" + assert: + that: + - ebgp_replaced_partial_result is changed + - 
ebgp_replaced_partial_result is not failed + fail_msg: "Partial replacement with replaced state failed" + success_msg: "Replaced state successfully performed complete replacement" + tags: [test_comparison, test_replaced_partial] + +############################################################################# +# TEST 4: STATE DELETED - Delete fabrics +############################################################################# +- name: "TEST 4a: Delete eBGP fabric using state deleted" + cisco.nd.nd_manage_fabric_ebgp: + state: deleted + config: + - name: "{{ ebgp_test_fabric_deleted }}" + register: ebgp_deleted_result_1 + tags: [test_deleted, test_deleted_delete] + +- name: "TEST 4a: Verify eBGP fabric was deleted" + assert: + that: + - ebgp_deleted_result_1 is changed + - ebgp_deleted_result_1 is not failed + fail_msg: "eBGP fabric deletion with state deleted failed" + success_msg: "eBGP fabric successfully deleted with state deleted" + tags: [test_deleted, test_deleted_delete] + +- name: "TEST 4b: Delete eBGP fabric using state deleted (second run - idempotency test)" + cisco.nd.nd_manage_fabric_ebgp: + state: deleted + config: + - name: "{{ ebgp_test_fabric_deleted }}" + register: ebgp_deleted_result_2 + tags: [test_deleted, test_deleted_idempotent] + +- name: "TEST 4b: Verify deleted state is idempotent" + assert: + that: + - ebgp_deleted_result_2 is not changed + - ebgp_deleted_result_2 is not failed + fail_msg: "Deleted state is not idempotent - should not change when deleting non-existent fabric" + success_msg: "Deleted state is idempotent - no changes when deleting non-existent fabric" + tags: [test_deleted, test_deleted_idempotent] + +############################################################################# +# TEST 5: Multiple fabric operations in single task +############################################################################# +- name: "TEST 5: Multiple eBGP fabric operations in single task" + cisco.nd.nd_manage_fabric_ebgp: + state: merged + 
config: + - name: "multi_ebgp_fabric_1" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65101" + bgp_asn_auto_allocation: false + site_id: "65101" + bgp_as_mode: sameTierAS + bgp_allow_as_in_num: 1 + bgp_max_path: 4 + auto_configure_ebgp_evpn_peering: true + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0001" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: false + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: disable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.101.0.0/22" + nve_loopback_ip_range: "10.103.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.101.0/24" + intra_fabric_subnet_range: "10.104.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" 
+ vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.133.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.105.0.0/22" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + - name: "multi_ebgp_fabric_2" + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65102" + bgp_asn_auto_allocation: false + site_id: "65102" + bgp_as_mode: sameTierAS + bgp_allow_as_in_num: 1 + bgp_max_path: 4 + auto_configure_ebgp_evpn_peering: true + target_subnet_mask: 30 + anycast_gateway_mac: "2020.0000.0002" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: false + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + 
snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: disable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.102.0.0/22" + nve_loopback_ip_range: "10.103.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.102.0/24" + intra_fabric_subnet_range: "10.104.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.134.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.106.0.0/22" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: "" + management_ipv4_prefix: 24 + register: ebgp_multi_fabric_result + tags: [test_multi, test_multi_create] + +- name: "TEST 5: Verify multiple eBGP fabrics were created" + assert: + that: + - ebgp_multi_fabric_result is changed + - ebgp_multi_fabric_result is not failed + fail_msg: "Multiple eBGP fabric creation failed" + success_msg: "Multiple eBGP fabrics successfully created" + tags: [test_multi, test_multi_create] + +############################################################################# +# FINAL CLEANUP - Clean up all test fabrics +############################################################################# +- name: "CLEANUP: Delete all test eBGP fabrics" + cisco.nd.nd_manage_fabric_ebgp: + state: deleted + config: + - name: "{{ ebgp_test_fabric_merged }}" + - name: "{{ ebgp_test_fabric_replaced }}" + - name: "{{ ebgp_test_fabric_deleted }}" + - name: "multi_ebgp_fabric_1" + - name: "multi_ebgp_fabric_2" + ignore_errors: true + tags: [cleanup, always] + 
+############################################################################# +# TEST SUMMARY +############################################################################# +- name: "TEST SUMMARY: Display eBGP test results" + debug: + msg: | + ======================================================== + TEST SUMMARY for cisco.nd.nd_manage_fabric_ebgp module: + ======================================================== + ✓ TEST 1: STATE MERGED + - Create fabric: {{ 'PASSED' if ebgp_merged_result_1 is changed else 'FAILED' }} + - Idempotency: {{ 'PASSED' if ebgp_merged_result_2 is not changed else 'FAILED' }} + - Update fabric: {{ 'PASSED' if ebgp_merged_result_3 is changed else 'FAILED' }} + + ✓ TEST 2: STATE REPLACED + - Create fabric: {{ 'PASSED' if ebgp_replaced_result_1 is changed else 'FAILED' }} + - Idempotency: {{ 'PASSED' if ebgp_replaced_result_2 is not changed else 'FAILED' }} + - Replace fabric: {{ 'PASSED' if ebgp_replaced_result_3 is changed else 'FAILED' }} + + ✓ TEST 3: MERGED vs REPLACED Comparison + - Merged partial: {{ 'PASSED' if ebgp_merged_partial_result is changed else 'FAILED' }} + - Replaced partial: {{ 'PASSED' if ebgp_replaced_partial_result is changed else 'FAILED' }} + + ✓ TEST 4: STATE DELETED + - Delete fabric: {{ 'PASSED' if ebgp_deleted_result_1 is changed else 'FAILED' }} + - Idempotency: {{ 'PASSED' if ebgp_deleted_result_2 is not changed else 'FAILED' }} + + ✓ TEST 5: MULTIPLE FABRICS + - Multi-create: {{ 'PASSED' if ebgp_multi_fabric_result is changed else 'FAILED' }} + + All tests validate: + - State merged: Creates and updates eBGP fabrics by merging changes + - State replaced: Creates and completely replaces eBGP fabric configuration + - State deleted: Removes eBGP fabrics + - Idempotency: All operations are idempotent when run multiple times + - Difference: Merged preserves existing config, replaced overwrites completely + - eBGP-specific: bgpAsMode, bgpAllowAsInNum, bgpMaxPath defaults validated + 
======================================== + tags: [summary, always] diff --git a/tests/integration/targets/nd_manage_fabric/tasks/main.yaml b/tests/integration/targets/nd_manage_fabric/tasks/main.yaml index 579ac793..77d2f524 100644 --- a/tests/integration/targets/nd_manage_fabric/tasks/main.yaml +++ b/tests/integration/targets/nd_manage_fabric/tasks/main.yaml @@ -1,3 +1,6 @@ --- - name: Run nd_manage_fabric iBGP tests ansible.builtin.include_tasks: fabric_ibgp.yaml + +- name: Run nd_manage_fabric eBGP tests + ansible.builtin.include_tasks: fabric_ebgp.yaml diff --git a/tests/integration/targets/nd_manage_fabric/vars/main.yaml b/tests/integration/targets/nd_manage_fabric/vars/main.yaml index d673cb7d..61304ace 100644 --- a/tests/integration/targets/nd_manage_fabric/vars/main.yaml +++ b/tests/integration/targets/nd_manage_fabric/vars/main.yaml @@ -1,8 +1,12 @@ --- -test_fabric_merged: "test_fabric_merged" -test_fabric_replaced: "test_fabric_replaced" -test_fabric_deleted: "test_fabric_deleted" +test_fabric_merged: "ibgp_test_fabric_merged" +test_fabric_replaced: "ibgp_test_fabric_replaced" +test_fabric_deleted: "ibgp_test_fabric_deleted" + +ebgp_test_fabric_merged: "ebgp_test_fabric_merged" +ebgp_test_fabric_replaced: "ebgp_test_fabric_replaced" +ebgp_test_fabric_deleted: "ebgp_test_fabric_deleted" # Common fabric configuration for all tests common_fabric_config: @@ -79,3 +83,84 @@ common_fabric_config: dhcp_end_address: "" management_gateway: "" management_ipv4_prefix: 24 + +# Common eBGP fabric configuration for all eBGP tests +common_ebgp_fabric_config: + category: fabric + location: + latitude: 37.7749 + longitude: -122.4194 + license_tier: premier + alert_suspend: disabled + security_domain: all + telemetry_collection: false + management: + type: vxlanEbgp + bgp_asn: "65001" + bgp_asn_auto_allocation: false + site_id: "65001" + bgp_as_mode: multiAS + bgp_allow_as_in_num: 1 + bgp_max_path: 4 + auto_configure_ebgp_evpn_peering: true + target_subnet_mask: 30 + 
anycast_gateway_mac: "2020.0000.00aa" + performance_monitoring: false + replication_mode: multicast + multicast_group_subnet: "239.1.1.0/25" + auto_generate_multicast_group_address: false + underlay_multicast_group_address_limit: 128 + tenant_routed_multicast: false + rendezvous_point_count: 2 + rendezvous_point_loopback_id: 254 + vpc_peer_link_vlan: "3600" + vpc_peer_link_enable_native_vlan: false + vpc_peer_keep_alive_option: management + vpc_auto_recovery_timer: 360 + vpc_delay_restore_timer: 150 + vpc_peer_link_port_channel_id: "500" + advertise_physical_ip: false + vpc_domain_id_range: "1-1000" + bgp_loopback_id: 0 + nve_loopback_id: 1 + vrf_template: Default_VRF_Universal + network_template: Default_Network_Universal + vrf_extension_template: Default_VRF_Extension_Universal + network_extension_template: Default_Network_Extension_Universal + l3_vni_no_vlan_default_option: false + fabric_mtu: 9216 + l2_host_interface_mtu: 9216 + tenant_dhcp: true + nxapi: false + nxapi_https_port: 443 + nxapi_http: false + nxapi_http_port: 80 + snmp_trap: true + anycast_border_gateway_advertise_physical_ip: false + greenfield_debug_flag: disable + tcam_allocation: true + real_time_interface_statistics_collection: false + interface_statistics_load_interval: 10 + bgp_loopback_ip_range: "10.2.0.0/22" + nve_loopback_ip_range: "10.3.0.0/22" + anycast_rendezvous_point_ip_range: "10.254.254.0/24" + intra_fabric_subnet_range: "10.4.0.0/16" + l2_vni_range: "30000-49000" + l3_vni_range: "50000-59000" + network_vlan_range: "2300-2999" + vrf_vlan_range: "2000-2299" + sub_interface_dot1q_range: "2-511" + vrf_lite_auto_config: manual + vrf_lite_subnet_range: "10.33.0.0/16" + vrf_lite_subnet_target_mask: 30 + auto_unique_vrf_lite_ip_prefix: false + per_vrf_loopback_auto_provision: true + per_vrf_loopback_ip_range: "10.5.0.0/22" + banner: "" + day0_bootstrap: false + local_dhcp_server: false + dhcp_protocol_version: dhcpv4 + dhcp_start_address: "" + dhcp_end_address: "" + management_gateway: 
"" + management_ipv4_prefix: 24 From 71ebe3abbf69343bd31b502fedd999940984231c Mon Sep 17 00:00:00 2001 From: mwiebe Date: Mon, 16 Mar 2026 13:54:25 -0400 Subject: [PATCH 110/131] Update exception granularity in main --- plugins/modules/nd_manage_fabric_ebgp.py | 3 +++ plugins/modules/nd_manage_fabric_ibgp.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/plugins/modules/nd_manage_fabric_ebgp.py b/plugins/modules/nd_manage_fabric_ebgp.py index adc3e122..31caac93 100644 --- a/plugins/modules/nd_manage_fabric_ebgp.py +++ b/plugins/modules/nd_manage_fabric_ebgp.py @@ -1146,6 +1146,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ebgp import FabricEbgpModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageEbgpFabricOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError def main(): @@ -1169,6 +1170,8 @@ def main(): module.exit_json(**nd_state_machine.output.format()) + except NDStateMachineError as e: + module.fail_json(msg=str(e)) except Exception as e: module.fail_json(msg=f"Module execution failed: {str(e)}") diff --git a/plugins/modules/nd_manage_fabric_ibgp.py b/plugins/modules/nd_manage_fabric_ibgp.py index d0786b28..dcd638cc 100644 --- a/plugins/modules/nd_manage_fabric_ibgp.py +++ b/plugins/modules/nd_manage_fabric_ibgp.py @@ -1360,6 +1360,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricIbgpModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageIbgpFabricOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError def main(): @@ -1383,6 +1384,8 @@ def 
main(): module.exit_json(**nd_state_machine.output.format()) + except NDStateMachineError as e: + module.fail_json(msg=str(e)) except Exception as e: module.fail_json(msg=f"Module execution failed: {str(e)}") From 7658313af573c6fea53953883514c7d65bbf26e5 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 17 Mar 2026 12:01:56 -0400 Subject: [PATCH 111/131] [ignore] Remove Python 2.7 compatibilities. --- plugins/module_utils/common/exceptions.py | 4 ---- plugins/module_utils/common/log.py | 6 ------ plugins/module_utils/constants.py | 4 ---- plugins/module_utils/endpoints/enums.py | 4 ---- plugins/module_utils/endpoints/mixins.py | 1 - plugins/module_utils/endpoints/query_params.py | 2 -- plugins/module_utils/endpoints/v1/infra/base_path.py | 2 -- plugins/module_utils/endpoints/v1/infra/login.py | 2 -- plugins/module_utils/endpoints/v1/manage/base_path.py | 2 -- plugins/module_utils/enums.py | 5 ----- plugins/module_utils/models/base.py | 4 ---- plugins/module_utils/models/local_user.py | 4 ---- plugins/module_utils/models/nested.py | 4 ---- plugins/module_utils/nd.py | 4 ---- plugins/module_utils/nd_argument_specs.py | 4 ---- plugins/module_utils/nd_config_collection.py | 4 ---- plugins/module_utils/nd_output.py | 4 ---- plugins/module_utils/nd_state_machine.py | 4 ---- plugins/module_utils/nd_v2.py | 6 ------ plugins/module_utils/ndi.py | 3 --- plugins/module_utils/ndi_argument_specs.py | 4 ---- plugins/module_utils/orchestrators/base.py | 4 ---- plugins/module_utils/orchestrators/local_user.py | 4 ---- plugins/module_utils/orchestrators/types.py | 4 ---- plugins/module_utils/rest/protocols/response_handler.py | 3 +-- plugins/module_utils/rest/protocols/response_validation.py | 4 +--- plugins/module_utils/rest/protocols/sender.py | 4 ++-- plugins/module_utils/rest/response_handler_nd.py | 4 +--- .../rest/response_strategies/nd_v1_strategy.py | 4 +--- plugins/module_utils/rest/rest_send.py | 1 - plugins/module_utils/rest/results.py | 2 -- 
plugins/module_utils/rest/sender_nd.py | 4 +--- plugins/module_utils/types.py | 4 ---- plugins/module_utils/utils.py | 4 ---- plugins/modules/nd_local_user.py | 7 +------ 35 files changed, 8 insertions(+), 122 deletions(-) diff --git a/plugins/module_utils/common/exceptions.py b/plugins/module_utils/common/exceptions.py index 0d7b7bcc..0c53c2c2 100644 --- a/plugins/module_utils/common/exceptions.py +++ b/plugins/module_utils/common/exceptions.py @@ -15,10 +15,6 @@ # fmt: on # isort: on -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - from typing import Any, Optional from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( diff --git a/plugins/module_utils/common/log.py b/plugins/module_utils/common/log.py index 29182539..f43d9018 100644 --- a/plugins/module_utils/common/log.py +++ b/plugins/module_utils/common/log.py @@ -1,15 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - import json import logging from enum import Enum diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 563041a0..adbe345e 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2022, Akini Ross (@akinross) # Copyright: (c) 2024, Gaspard Micol (@gmicol) @@ -7,8 +5,6 @@ from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import Dict from types import MappingProxyType from copy import deepcopy diff --git a/plugins/module_utils/endpoints/enums.py b/plugins/module_utils/endpoints/enums.py index 802b8fe8..92ae5783 100644 --- a/plugins/module_utils/endpoints/enums.py +++ 
b/plugins/module_utils/endpoints/enums.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@allenrobel) # Copyright: (c) 2026, Gaspard Micol (@gmicol) @@ -10,8 +8,6 @@ from __future__ import absolute_import, division, print_function -__metaclass__ = type - from enum import Enum diff --git a/plugins/module_utils/endpoints/mixins.py b/plugins/module_utils/endpoints/mixins.py index 22d9a2dc..e7f0620c 100644 --- a/plugins/module_utils/endpoints/mixins.py +++ b/plugins/module_utils/endpoints/mixins.py @@ -11,7 +11,6 @@ from __future__ import absolute_import, annotations, division, print_function - from typing import Optional from ansible_collections.cisco.nd.plugins.module_utils.enums import BooleanStringEnum diff --git a/plugins/module_utils/endpoints/query_params.py b/plugins/module_utils/endpoints/query_params.py index 5bf8ff08..2cddd97d 100644 --- a/plugins/module_utils/endpoints/query_params.py +++ b/plugins/module_utils/endpoints/query_params.py @@ -11,12 +11,10 @@ from __future__ import absolute_import, annotations, division, print_function - from enum import Enum from typing import Optional, Protocol from urllib.parse import quote - from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( BaseModel, Field, diff --git a/plugins/module_utils/endpoints/v1/infra/base_path.py b/plugins/module_utils/endpoints/v1/infra/base_path.py index f0612025..0db15ae9 100644 --- a/plugins/module_utils/endpoints/v1/infra/base_path.py +++ b/plugins/module_utils/endpoints/v1/infra/base_path.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/endpoints/v1/infra/login.py b/plugins/module_utils/endpoints/v1/infra/login.py index 70968615..6fff9159 100644 --- a/plugins/module_utils/endpoints/v1/infra/login.py +++ b/plugins/module_utils/endpoints/v1/infra/login.py @@ 
-1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/endpoints/v1/manage/base_path.py b/plugins/module_utils/endpoints/v1/manage/base_path.py index 5f043ced..52bb4e56 100644 --- a/plugins/module_utils/endpoints/v1/manage/base_path.py +++ b/plugins/module_utils/endpoints/v1/manage/base_path.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/enums.py b/plugins/module_utils/enums.py index 55d1f1ac..83f1f76d 100644 --- a/plugins/module_utils/enums.py +++ b/plugins/module_utils/enums.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # pylint: disable=wrong-import-position # pylint: disable=missing-module-docstring # Copyright: (c) 2026, Allen Robel (@allenrobel) @@ -21,10 +20,6 @@ # fmt: on # isort: on -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - from enum import Enum diff --git a/plugins/module_utils/models/base.py b/plugins/module_utils/models/base.py index 07b6ee28..a62a12b1 100644 --- a/plugins/module_utils/models/base.py +++ b/plugins/module_utils/models/base.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from abc import ABC from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict from typing import List, Dict, Any, ClassVar, Set, Tuple, Union, Literal, Optional diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user.py index a47a4a0a..6d4960f3 100644 --- 
a/plugins/module_utils/models/local_user.py +++ b/plugins/module_utils/models/local_user.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import List, Dict, Any, Optional, ClassVar, Literal from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ( Field, diff --git a/plugins/module_utils/models/nested.py b/plugins/module_utils/models/nested.py index 0573e5f8..c3af1d71 100644 --- a/plugins/module_utils/models/nested.py +++ b/plugins/module_utils/models/nested.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import List, ClassVar from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index 42b1b118..50a5eeb2 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2021, Lionel Hercot (@lhercot) # Copyright: (c) 2022, Cindy Zhao (@cizhao) # Copyright: (c) 2022, Akini Ross (@akinross) @@ -9,8 +7,6 @@ from __future__ import absolute_import, division, print_function from functools import reduce -__metaclass__ = type - from copy import deepcopy import os import shutil diff --git a/plugins/module_utils/nd_argument_specs.py b/plugins/module_utils/nd_argument_specs.py index 7ef10d04..798ca90f 100644 --- a/plugins/module_utils/nd_argument_specs.py +++ b/plugins/module_utils/nd_argument_specs.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2023, Shreyas Srish (@shrsr) # GNU General Public 
License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - def ntp_server_spec(): return dict( diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index abcfc0f7..0da7247f 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import Optional, List, Dict, Any, Literal from copy import deepcopy from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py index 0e5ed6ef..8088b09b 100644 --- a/plugins/module_utils/nd_output.py +++ b/plugins/module_utils/nd_output.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import Dict, Any, Optional, List, Union from ansible_collections.cisco.nd.plugins.module_utils.nd_config_collection import NDConfigCollection diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index efed3517..e3ea328c 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from 
typing import Type from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule diff --git a/plugins/module_utils/nd_v2.py b/plugins/module_utils/nd_v2.py index 0a3fe61a..a622d77f 100644 --- a/plugins/module_utils/nd_v2.py +++ b/plugins/module_utils/nd_v2.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -47,10 +45,6 @@ def main(): # fmt: on # isort: on -# pylint: disable=invalid-name -__metaclass__ = type -# pylint: enable=invalid-name - import logging from typing import Any, Optional diff --git a/plugins/module_utils/ndi.py b/plugins/module_utils/ndi.py index 37e7ec56..6ff912aa 100644 --- a/plugins/module_utils/ndi.py +++ b/plugins/module_utils/ndi.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2021, Lionel Hercot (@lhercot) # Copyright: (c) 2022, Cindy Zhao (@cizhao) # Copyright: (c) 2022, Akini Ross (@akinross) @@ -16,7 +14,6 @@ HAS_JSONPATH_NG_PARSE = True except ImportError: HAS_JSONPATH_NG_PARSE = False -__metaclass__ = type from ansible_collections.cisco.nd.plugins.module_utils.constants import OBJECT_TYPES, MATCH_TYPES diff --git a/plugins/module_utils/ndi_argument_specs.py b/plugins/module_utils/ndi_argument_specs.py index 641e675c..a367e3c5 100644 --- a/plugins/module_utils/ndi_argument_specs.py +++ b/plugins/module_utils/ndi_argument_specs.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2022, Akini Ross (@akinross) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from ansible_collections.cisco.nd.plugins.module_utils.constants import MATCH_TYPES, OPERATORS, TCP_FLAGS diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py 
index 1f4e3e69..fe16a524 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 689ba9dc..332719bf 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel diff --git a/plugins/module_utils/orchestrators/types.py b/plugins/module_utils/orchestrators/types.py index b721c65b..415526c7 100644 --- a/plugins/module_utils/orchestrators/types.py +++ b/plugins/module_utils/orchestrators/types.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import Any, Union, List, Dict ResponseType = Union[List[Dict[str, Any]], Dict[str, Any], None] diff 
--git a/plugins/module_utils/rest/protocols/response_handler.py b/plugins/module_utils/rest/protocols/response_handler.py index 487e12cf..ab658c99 100644 --- a/plugins/module_utils/rest/protocols/response_handler.py +++ b/plugins/module_utils/rest/protocols/response_handler.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # pylint: disable=missing-module-docstring # pylint: disable=unnecessary-ellipsis # pylint: disable=wrong-import-position @@ -13,7 +12,7 @@ # isort: on # pylint: disable=invalid-name -__metaclass__ = type + # pylint: enable=invalid-name """ diff --git a/plugins/module_utils/rest/protocols/response_validation.py b/plugins/module_utils/rest/protocols/response_validation.py index d1ec5ef0..30a81b97 100644 --- a/plugins/module_utils/rest/protocols/response_validation.py +++ b/plugins/module_utils/rest/protocols/response_validation.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -26,7 +24,7 @@ # isort: on # pylint: disable=invalid-name -__metaclass__ = type + # pylint: enable=invalid-name try: diff --git a/plugins/module_utils/rest/protocols/sender.py b/plugins/module_utils/rest/protocols/sender.py index 5e55047c..df9f4d1b 100644 --- a/plugins/module_utils/rest/protocols/sender.py +++ b/plugins/module_utils/rest/protocols/sender.py @@ -1,7 +1,7 @@ # pylint: disable=wrong-import-position # pylint: disable=missing-module-docstring # pylint: disable=unnecessary-ellipsis -# -*- coding: utf-8 -*- + # Copyright: (c) 2026, Allen Robel (@arobel) @@ -15,7 +15,7 @@ # isort: on # pylint: disable=invalid-name -__metaclass__ = type + # pylint: enable=invalid-name try: diff --git a/plugins/module_utils/rest/response_handler_nd.py b/plugins/module_utils/rest/response_handler_nd.py index e7026d30..f0f30b94 100644 --- a/plugins/module_utils/rest/response_handler_nd.py +++ b/plugins/module_utils/rest/response_handler_nd.py @@ -1,5 +1,3 @@ 
-# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -62,7 +60,7 @@ class (e.g. `NdV2Strategy`) conforming to `ResponseValidationStrategy` and injec # isort: on # pylint: disable=invalid-name -__metaclass__ = type + # pylint: enable=invalid-name import copy diff --git a/plugins/module_utils/rest/response_strategies/nd_v1_strategy.py b/plugins/module_utils/rest/response_strategies/nd_v1_strategy.py index 58c7784f..a5953789 100644 --- a/plugins/module_utils/rest/response_strategies/nd_v1_strategy.py +++ b/plugins/module_utils/rest/response_strategies/nd_v1_strategy.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -25,7 +23,7 @@ # isort: on # pylint: disable=invalid-name -__metaclass__ = type + # pylint: enable=invalid-name from typing import Any, Optional diff --git a/plugins/module_utils/rest/rest_send.py b/plugins/module_utils/rest/rest_send.py index c87009a5..7631b0dd 100644 --- a/plugins/module_utils/rest/rest_send.py +++ b/plugins/module_utils/rest/rest_send.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # pylint: disable=wrong-import-position # pylint: disable=missing-module-docstring # Copyright: (c) 2026, Allen Robel (@arobel) diff --git a/plugins/module_utils/rest/results.py b/plugins/module_utils/rest/results.py index 59281683..faee00dc 100644 --- a/plugins/module_utils/rest/results.py +++ b/plugins/module_utils/rest/results.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) diff --git a/plugins/module_utils/rest/sender_nd.py b/plugins/module_utils/rest/sender_nd.py index ae333dd0..b5ed9b85 100644 --- a/plugins/module_utils/rest/sender_nd.py +++ 
b/plugins/module_utils/rest/sender_nd.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Allen Robel (@arobel) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) @@ -17,7 +15,7 @@ # isort: on # pylint: disable=invalid-name -__metaclass__ = type + # pylint: enable=invalid-name import copy diff --git a/plugins/module_utils/types.py b/plugins/module_utils/types.py index 3111a095..b0056d5a 100644 --- a/plugins/module_utils/types.py +++ b/plugins/module_utils/types.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from typing import Any, Union, Tuple IdentifierKey = Union[str, int, Tuple[Any, ...]] diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py index 2e62c6eb..7d05e4af 100644 --- a/plugins/module_utils/utils.py +++ b/plugins/module_utils/utils.py @@ -1,13 +1,9 @@ -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - from copy import deepcopy from typing import Any, Dict, List, Union diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index f5efea03..25d04fb5 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -1,14 +1,9 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- - # Copyright: (c) 2026, Gaspard Micol (@gmicol) # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function -__metaclass__ = type - ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"} 
DOCUMENTATION = r""" @@ -112,7 +107,7 @@ - cisco.nd.modules - cisco.nd.check_mode notes: -- This module is only supported on Nexus Dashboard having version 4.1.0 or higher. +- This module is only supported on Nexus Dashboard having version 4.2.1 or higher. - This module is not idempotent when creating or updating a local user object when O(config.user_password) is used. """ From fea8adcea0cce49e82ab19cabb77e51a69820611 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 17 Mar 2026 12:50:52 -0400 Subject: [PATCH 112/131] [ignore] Fix comments and docstrings. made and static methods for class. --- .../endpoints/v1/infra/aaa_local_users.py | 12 ++++++------ plugins/module_utils/nd_config_collection.py | 13 ++++++------- plugins/module_utils/nd_output.py | 2 +- plugins/module_utils/nd_state_machine.py | 4 ++-- plugins/modules/nd_local_user.py | 2 +- .../targets/nd_local_user/tasks/main.yml | 4 ++-- 6 files changed, 18 insertions(+), 19 deletions(-) diff --git a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py index 26660622..ea3b1f4b 100644 --- a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py +++ b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py @@ -32,7 +32,7 @@ def path(self) -> str: """ # Summary - Build the endpoint path. + Build the /api/v1/infra/aaa/localUsers endpoint path. 
## Returns @@ -70,12 +70,12 @@ class EpInfraAaaLocalUsersGet(_EpInfraAaaLocalUsersBase): ```python # Get all local users - request = EpApiV1InfraAaaLocalUsersGet() + request = EpInfraAaaLocalUsersGet() path = request.path verb = request.verb # Get specific local user - request = EpApiV1InfraAaaLocalUsersGet() + request = EpInfraAaaLocalUsersGet() request.login_id = "admin" path = request.path verb = request.verb @@ -111,7 +111,7 @@ class EpInfraAaaLocalUsersPost(_EpInfraAaaLocalUsersBase): ## Usage ```python - request = EpApiV1InfraAaaLocalUsersPost() + request = EpInfraAaaLocalUsersPost() path = request.path verb = request.verb ``` @@ -148,7 +148,7 @@ class EpInfraAaaLocalUsersPut(_EpInfraAaaLocalUsersBase): ## Usage ```python - request = EpApiV1InfraAaaLocalUsersPut() + request = EpInfraAaaLocalUsersPut() request.login_id = "admin" path = request.path verb = request.verb @@ -184,7 +184,7 @@ class EpInfraAaaLocalUsersDelete(_EpInfraAaaLocalUsersBase): ## Usage ```python - request = EpApiV1InfraAaaLocalUsersDelete() + request = EpInfraAaaLocalUsersDelete() request.login_id = "admin" path = request.path verb = request.verb diff --git a/plugins/module_utils/nd_config_collection.py b/plugins/module_utils/nd_config_collection.py index 0da7247f..832cc132 100644 --- a/plugins/module_utils/nd_config_collection.py +++ b/plugins/module_utils/nd_config_collection.py @@ -119,7 +119,6 @@ def delete(self, key: IdentifierKey) -> bool: # Diff Operations - # NOTE: Maybe add a similar one in the NDBaseModel (-> but is it necessary?) def get_diff_config(self, new_item: NDBaseModel) -> Literal["new", "no_diff", "changed"]: """ Compare single item against collection. 
@@ -198,18 +197,18 @@ def to_payload_list(self, **kwargs) -> List[Dict[str, Any]]: """ return [item.to_payload(**kwargs) for item in self._items] - @classmethod - def from_ansible_config(cls, data: List[Dict], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": + @staticmethod + def from_ansible_config(data: List[Dict], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": """ Create collection from Ansible config. """ items = [model_class.from_config(item_data, **kwargs) for item_data in data] - return cls(model_class=model_class, items=items) + return NDConfigCollection(model_class=model_class, items=items) - @classmethod - def from_api_response(cls, response_data: List[Dict[str, Any]], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": + @staticmethod + def from_api_response(response_data: List[Dict[str, Any]], model_class: type[NDBaseModel], **kwargs) -> "NDConfigCollection": """ Create collection from API response. """ items = [model_class.from_response(item_data, **kwargs) for item_data in response_data] - return cls(model_class=model_class, items=items) + return NDConfigCollection(model_class=model_class, items=items) diff --git a/plugins/module_utils/nd_output.py b/plugins/module_utils/nd_output.py index 8088b09b..09759b96 100644 --- a/plugins/module_utils/nd_output.py +++ b/plugins/module_utils/nd_output.py @@ -34,7 +34,7 @@ def format(self, **kwargs) -> Dict[str, Any]: if self._output_level in ("debug", "info"): output["proposed"] = self._proposed.to_ansible_config() if isinstance(self._proposed, NDConfigCollection) else self._proposed if self._output_level == "debug": - output["logs"] = "Not yet implemented" + output["logs"] = self._logs if self._extra: output.update(self._extra) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index e3ea328c..d6af1c6f 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -16,12 
+16,12 @@ class NDStateMachine: """ - Generic Network Resource Module for Nexus Dashboard. + Generic State Machine for Nexus Dashboard. """ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchestrator]): """ - Initialize the Network Resource Module. + Initialize the ND State Machine. """ self.module = module self.nd_module = NDModule(self.module) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 25d04fb5..f672cc91 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -9,7 +9,7 @@ DOCUMENTATION = r""" --- module: nd_local_user -version_added: "1.4.0" +version_added: "1.6.0" short_description: Manage local users on Cisco Nexus Dashboard description: - Manage local users on Cisco Nexus Dashboard (ND). diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml index b7f205ae..c4540568 100644 --- a/tests/integration/targets/nd_local_user/tasks/main.yml +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -536,7 +536,7 @@ # DELETE -- name: Delete local user by name (check mode) +- name: Delete local user (check mode) cisco.nd.nd_local_user: &delete_local_user <<: *nd_info config: @@ -545,7 +545,7 @@ check_mode: true register: cm_delete_local_user -- name: Delete local user by name (normal mode) +- name: Delete local user (normal mode) cisco.nd.nd_local_user: <<: *delete_local_user register: nm_delete_local_user From 704b671791b5d30b0fe82f11fce322b1d48e3404 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:18:35 -0500 Subject: [PATCH 113/131] [ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues. 
--- plugins/module_utils/nd_state_machine.py | 1 + plugins/module_utils/orchestrators/base.py | 5 +++++ .../module_utils/orchestrators/local_user.py | 18 ++++++++++++++++++ 3 files changed, 24 insertions(+) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index d6af1c6f..57850521 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -80,6 +80,7 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier + response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index fe16a524..75a4443a 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,8 +8,13 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType +======= +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
class NDBaseOrchestrator(BaseModel): diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 332719bf..0f35a712 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,6 +8,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( @@ -15,17 +16,34 @@ EpInfraAaaLocalUsersPut, EpInfraAaaLocalUsersDelete, EpInfraAaaLocalUsersGet, +======= +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( + V1InfraAaaLocalUsersPost, + V1InfraAaaLocalUsersPut, + V1InfraAaaLocalUsersDelete, + V1InfraAaaLocalUsersGet, +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel +<<<<<<< HEAD create_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPost update_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPut delete_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersDelete query_one_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet query_all_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet +======= + create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost + update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut + delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete + query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet + query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) def query_all(self) -> ResponseType: """ From ac380feeb9f72a56eb20d2f2c5489c9e7fde071a Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 17:19:47 -0500 Subject: [PATCH 114/131] [ignore] Rename NDBaseSmartEndpoint to NDBaseEndpoint. Fix importation issues. 
--- plugins/module_utils/endpoints/base.py | 2 +- plugins/module_utils/orchestrators/base.py | 7 +------ .../module_utils/orchestrators/local_user.py | 20 +------------------ plugins/modules/nd_local_user.py | 3 +++ 4 files changed, 6 insertions(+), 26 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index c3d7f4e1..f2f16dbc 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -134,4 +134,4 @@ def verb(self) -> HttpVerbEnum: # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration def set_identifiers(self, identifier: IdentifierKey = None): - pass + pass \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 75a4443a..439fdca4 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,13 +8,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -======= -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
class NDBaseOrchestrator(BaseModel): @@ -75,4 +70,4 @@ def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> R result = self.sender.query_obj(api_endpoint.path) return result or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 0f35a712..c2fffb45 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -8,7 +8,6 @@ from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( @@ -16,34 +15,17 @@ EpInfraAaaLocalUsersPut, EpInfraAaaLocalUsersDelete, EpInfraAaaLocalUsersGet, -======= -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra_aaa_local_users import ( - V1InfraAaaLocalUsersPost, - V1InfraAaaLocalUsersPut, - V1InfraAaaLocalUsersDelete, - V1InfraAaaLocalUsersGet, ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
) class LocalUserOrchestrator(NDBaseOrchestrator): model_class: Type[NDBaseModel] = LocalUserModel -<<<<<<< HEAD create_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPost update_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPut delete_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersDelete query_one_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet query_all_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersGet -======= - create_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPost - update_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersPut - delete_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersDelete - query_one_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet - query_all_endpoint: Type[NDBaseSmartEndpoint] = V1InfraAaaLocalUsersGet ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) def query_all(self) -> ResponseType: """ @@ -54,4 +36,4 @@ def query_all(self) -> ResponseType: result = self.sender.query_obj(api_endpoint.path) return result.get("localusers", []) or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index f672cc91..0d1844d1 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -194,6 +194,9 @@ def main(): ) # Manage state + # TODO: return module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) From d47089535f6fcfd8f3d29e5bdd21030351c404b3 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 6 Mar 2026 13:35:38 -0500 Subject: [PATCH 115/131] [ignore] Add NDOutput class. 
Modify NDStateMachine and nd_local_user accordingly --- plugins/module_utils/nd_state_machine.py | 1 - tests/integration/inventory.networking | 10 +++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 57850521..d6af1c6f 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -80,7 +80,6 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier - response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/tests/integration/inventory.networking b/tests/integration/inventory.networking index 6b37d8f3..2aa818d7 100644 --- a/tests/integration/inventory.networking +++ b/tests/integration/inventory.networking @@ -1,15 +1,15 @@ [nd] -nd ansible_host= +nd-test ansible_host=10.48.161.120 [nd:vars] ansible_connection=ansible.netcommon.httpapi -ansible_python_interpreter=/usr/bin/python3.9 +ansible_python_interpreter=/usr/bin/python3.12 ansible_network_os=cisco.nd.nd ansible_httpapi_validate_certs=False ansible_httpapi_use_ssl=True ansible_httpapi_use_proxy=True -ansible_user=ansible_github_ci -ansible_password= +ansible_user=admin +ansible_password=C1sco123 insights_group= site_name= site_host= @@ -28,4 +28,4 @@ external_management_service_ip= external_data_service_ip= data_ip= data_gateway= -service_package_host=173.36.219.254 +service_package_host=173.36.219.254 From 6b6732f8d44d746d4704aaebaf0a0bbe08a1a465 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 10 Mar 2026 13:36:50 -0400 Subject: [PATCH 116/131] [ignore] Update NDOutput class. Remove all fail_json dependencies in NDStateMachineand add custom Exception for it in common/exceptions dir. Set json mode for to_diff_dict method in NDBaseModel. 
--- plugins/module_utils/nd_state_machine.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index d6af1c6f..f6293b03 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -169,4 +169,4 @@ def _manage_delete_state(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - raise NDStateMachineError(error_msg) + raise NDStateMachineError(error_msg) \ No newline at end of file From 67c410e65e9d5ea8bf5760f4e7fb493bc6852095 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 18 Mar 2026 13:25:47 -0400 Subject: [PATCH 117/131] [ignore] Slightly modify Exceptions handling in NDStateMachine. Remove self.send from check_mode guards in NDStateMachine. Fix documentation for nd_local_user. --- plugins/module_utils/nd_state_machine.py | 25 ++++++++++++------------ plugins/modules/nd_local_user.py | 2 +- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index d6af1c6f..37324020 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -45,16 +45,18 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest self.sent = NDConfigCollection(model_class=self.model_class) # Collection of configuration objects given by user self.proposed = NDConfigCollection(model_class=self.model_class) + for config in self.module.params.get("config", []): - try: - # Parse config into model - item = self.model_class.from_config(config) - self.proposed.add(item) - except ValidationError as e: - raise NDStateMachineError(f"Invalid configuration. 
for config {config}: {str(e)}") + # Parse config into model + item = self.model_class.from_config(config) + self.proposed.add(item) + self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) + + except ValidationError as e: + raise NDStateMachineError(f"Invalid configuration. for config {config}: {str(e)}") from e except Exception as e: - raise NDStateMachineError(f"Initialization failed: {str(e)}") + raise NDStateMachineError(f"Initialization failed: {str(e)}") from e # State Management (core function) def manage_state(self) -> None: @@ -105,11 +107,10 @@ def _manage_create_update_state(self) -> None: if diff_status == "changed": if not self.module.check_mode: self.model_orchestrator.update(final_item) - self.sent.add(final_item) elif diff_status == "new": if not self.module.check_mode: self.model_orchestrator.create(final_item) - self.sent.add(final_item) + self.sent.add(final_item) # Log operation self.output.assign(after=self.existing) @@ -117,7 +118,7 @@ def _manage_create_update_state(self) -> None: except Exception as e: error_msg = f"Failed to process {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - raise NDStateMachineError(error_msg) + raise NDStateMachineError(error_msg) from e def _manage_override_deletions(self) -> None: """ @@ -144,7 +145,7 @@ def _manage_override_deletions(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - raise NDStateMachineError(error_msg) + raise NDStateMachineError(error_msg) from e def _manage_delete_state(self) -> None: """Handle deleted state.""" @@ -169,4 +170,4 @@ def _manage_delete_state(self) -> None: except Exception as e: error_msg = f"Failed to delete {identifier}: {e}" if not self.module.params.get("ignore_errors", False): - raise NDStateMachineError(error_msg) + raise NDStateMachineError(error_msg) from e diff --git a/plugins/modules/nd_local_user.py 
b/plugins/modules/nd_local_user.py index f672cc91..d6c02d00 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -13,7 +13,7 @@ short_description: Manage local users on Cisco Nexus Dashboard description: - Manage local users on Cisco Nexus Dashboard (ND). -- It supports creating, updating, querying, and deleting local users. +- It supports creating, updating, and deleting local users. author: - Gaspard Micol (@gmicol) options: From 95af9c0483a8291efd5b4777b9a284bb7f370a2d Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Wed, 18 Mar 2026 13:31:33 -0400 Subject: [PATCH 118/131] [ignore] Rename aaa_local_users.py to infra_aaa_local_users.py. Move models/local_user.py to new dir models/local_user. --- .../v1/infra/{aaa_local_users.py => infra_aaa_local_users.py} | 0 plugins/module_utils/models/{ => local_user}/local_user.py | 0 plugins/module_utils/orchestrators/local_user.py | 4 ++-- plugins/modules/nd_local_user.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename plugins/module_utils/endpoints/v1/infra/{aaa_local_users.py => infra_aaa_local_users.py} (100%) rename plugins/module_utils/models/{ => local_user}/local_user.py (100%) diff --git a/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra/infra_aaa_local_users.py similarity index 100% rename from plugins/module_utils/endpoints/v1/infra/aaa_local_users.py rename to plugins/module_utils/endpoints/v1/infra/infra_aaa_local_users.py diff --git a/plugins/module_utils/models/local_user.py b/plugins/module_utils/models/local_user/local_user.py similarity index 100% rename from plugins/module_utils/models/local_user.py rename to plugins/module_utils/models/local_user/local_user.py diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 332719bf..0c2a6bf8 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ 
b/plugins/module_utils/orchestrators/local_user.py @@ -7,10 +7,10 @@ from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user.local_user import LocalUserModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.infra_aaa_local_users import ( EpInfraAaaLocalUsersPost, EpInfraAaaLocalUsersPut, EpInfraAaaLocalUsersDelete, diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index d6c02d00..53680e99 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -173,7 +173,7 @@ from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine -from ansible_collections.cisco.nd.plugins.module_utils.models.local_user import LocalUserModel +from ansible_collections.cisco.nd.plugins.module_utils.models.local_user.local_user import LocalUserModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator From d1a1110485155e7a899696782b37879b030f57bd Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:18:35 -0500 Subject: [PATCH 119/131] [ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues. 
--- plugins/module_utils/nd_state_machine.py | 1 + plugins/module_utils/orchestrators/base.py | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 37324020..9860434a 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -82,6 +82,7 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier + response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index fe16a524..75a4443a 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,8 +8,13 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType +======= +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) class NDBaseOrchestrator(BaseModel): From f9616f94e2e24df7162eea9fbf7ea5763a488ced Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 17:19:47 -0500 Subject: [PATCH 120/131] [ignore] Rename NDBaseSmartEndpoint to NDBaseEndpoint. Fix importation issues. 
--- plugins/module_utils/endpoints/base.py | 2 +- plugins/module_utils/orchestrators/base.py | 7 +------ plugins/module_utils/orchestrators/local_user.py | 2 +- plugins/modules/nd_local_user.py | 3 +++ 4 files changed, 6 insertions(+), 8 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index c3d7f4e1..f2f16dbc 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -134,4 +134,4 @@ def verb(self) -> HttpVerbEnum: # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration def set_identifiers(self, identifier: IdentifierKey = None): - pass + pass \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 75a4443a..439fdca4 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,13 +8,8 @@ from typing import ClassVar, Type, Optional from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -======= -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
class NDBaseOrchestrator(BaseModel): @@ -75,4 +70,4 @@ def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> R result = self.sender.query_obj(api_endpoint.path) return result or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 0c2a6bf8..fe25d438 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -36,4 +36,4 @@ def query_all(self) -> ResponseType: result = self.sender.query_obj(api_endpoint.path) return result.get("localusers", []) or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 53680e99..8feef19a 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -194,6 +194,9 @@ def main(): ) # Manage state + # TODO: return module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) From 69f308e04a8fc5aca59a2c2cdd0d793fcf83490b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 6 Mar 2026 13:35:38 -0500 Subject: [PATCH 121/131] [ignore] Add NDOutput class. 
Modify NDStateMachine and nd_local_user accordingly --- plugins/module_utils/nd_state_machine.py | 1 - tests/integration/inventory.networking | 10 +++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 9860434a..37324020 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -82,7 +82,6 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier - response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/tests/integration/inventory.networking b/tests/integration/inventory.networking index 6b37d8f3..2aa818d7 100644 --- a/tests/integration/inventory.networking +++ b/tests/integration/inventory.networking @@ -1,15 +1,15 @@ [nd] -nd ansible_host= +nd-test ansible_host=10.48.161.120 [nd:vars] ansible_connection=ansible.netcommon.httpapi -ansible_python_interpreter=/usr/bin/python3.9 +ansible_python_interpreter=/usr/bin/python3.12 ansible_network_os=cisco.nd.nd ansible_httpapi_validate_certs=False ansible_httpapi_use_ssl=True ansible_httpapi_use_proxy=True -ansible_user=ansible_github_ci -ansible_password= +ansible_user=admin +ansible_password=C1sco123 insights_group= site_name= site_host= @@ -28,4 +28,4 @@ external_management_service_ip= external_data_service_ip= data_ip= data_gateway= -service_package_host=173.36.219.254 +service_package_host=173.36.219.254 From 4365d955aa40efaa56589dff64e65b6e13060731 Mon Sep 17 00:00:00 2001 From: mwiebe Date: Wed, 18 Mar 2026 21:55:02 -0400 Subject: [PATCH 122/131] Enforce proper naming and directory structure --- .../enums.py | 0 .../manage_fabric_ebgp.py | 4 +- .../manage_fabric_external.py | 4 +- .../manage_fabric_ibgp.py | 2 +- .../orchestrators/manage_fabric_ebgp.py | 46 +++++++++++++++++++ ...manage_fabric.py => manage_fabric_ibgp.py} | 26 +---------- 
plugins/modules/nd_manage_fabric_ebgp.py | 4 +- plugins/modules/nd_manage_fabric_ibgp.py | 4 +- 8 files changed, 57 insertions(+), 33 deletions(-) rename plugins/module_utils/models/{nd_manage_fabric => manage_fabric}/enums.py (100%) rename plugins/module_utils/models/{nd_manage_fabric => manage_fabric}/manage_fabric_ebgp.py (99%) rename plugins/module_utils/models/{nd_manage_fabric => manage_fabric}/manage_fabric_external.py (98%) rename plugins/module_utils/models/{nd_manage_fabric => manage_fabric}/manage_fabric_ibgp.py (99%) create mode 100644 plugins/module_utils/orchestrators/manage_fabric_ebgp.py rename plugins/module_utils/orchestrators/{manage_fabric.py => manage_fabric_ibgp.py} (60%) diff --git a/plugins/module_utils/models/nd_manage_fabric/enums.py b/plugins/module_utils/models/manage_fabric/enums.py similarity index 100% rename from plugins/module_utils/models/nd_manage_fabric/enums.py rename to plugins/module_utils/models/manage_fabric/enums.py diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py b/plugins/module_utils/models/manage_fabric/manage_fabric_ebgp.py similarity index 99% rename from plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py rename to plugins/module_utils/models/manage_fabric/manage_fabric_ebgp.py index be3abcc1..8894941c 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ebgp.py +++ b/plugins/module_utils/models/manage_fabric/manage_fabric_ebgp.py @@ -22,7 +22,7 @@ field_validator, model_validator, ) -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.enums import ( +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.enums import ( FabricTypeEnum, AlertSuspendEnum, LicenseTierEnum, @@ -35,7 +35,7 @@ FirstHopRedundancyProtocolEnum, ) # Re-use shared nested models from the iBGP module -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import ( +from 
ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ibgp import ( LocationModel, NetflowExporterModel, NetflowRecordModel, diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_external.py b/plugins/module_utils/models/manage_fabric/manage_fabric_external.py similarity index 98% rename from plugins/module_utils/models/nd_manage_fabric/manage_fabric_external.py rename to plugins/module_utils/models/manage_fabric/manage_fabric_external.py index b8e41ddc..e6f2d0a6 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_external.py +++ b/plugins/module_utils/models/manage_fabric/manage_fabric_external.py @@ -22,7 +22,7 @@ field_validator, model_validator, ) -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.enums import ( +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.enums import ( FabricTypeEnum, AlertSuspendEnum, LicenseTierEnum, @@ -31,7 +31,7 @@ PowerRedundancyModeEnum, ) # Re-use shared nested models from the iBGP module -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import ( +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ibgp import ( LocationModel, NetflowSettingsModel, BootstrapSubnetModel, diff --git a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py b/plugins/module_utils/models/manage_fabric/manage_fabric_ibgp.py similarity index 99% rename from plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py rename to plugins/module_utils/models/manage_fabric/manage_fabric_ibgp.py index 703f9fd7..5e8169de 100644 --- a/plugins/module_utils/models/nd_manage_fabric/manage_fabric_ibgp.py +++ b/plugins/module_utils/models/manage_fabric/manage_fabric_ibgp.py @@ -24,7 +24,7 @@ field_validator, model_validator, ) -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.enums import ( +from 
ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.enums import ( FabricTypeEnum, AlertSuspendEnum, LicenseTierEnum, diff --git a/plugins/module_utils/orchestrators/manage_fabric_ebgp.py b/plugins/module_utils/orchestrators/manage_fabric_ebgp.py new file mode 100644 index 00000000..45df1acd --- /dev/null +++ b/plugins/module_utils/orchestrators/manage_fabric_ebgp.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2026, Mike Wiebe (@mwiebe) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +from typing import Type +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ibgp import FabricIbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ebgp import FabricEbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage.manage_fabrics import ( + EpManageFabricsGet, + EpManageFabricsListGet, + EpManageFabricsPost, + EpManageFabricsPut, + EpManageFabricsDelete, +) + +class ManageEbgpFabricOrchestrator(NDBaseOrchestrator): + model_class: Type[NDBaseModel] = FabricEbgpModel + + create_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPost + update_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPut + delete_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsDelete + query_one_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsGet + query_all_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsListGet + + def 
query_all(self) -> ResponseType: + """ + Custom query_all action to extract 'fabrics' from response, + filtered to only vxlanEbgp fabric types. + """ + try: + api_endpoint = self.query_all_endpoint() + result = self.sender.query_obj(api_endpoint.path) + fabrics = result.get("fabrics", []) or [] + return [f for f in fabrics if f.get("management", {}).get("type") == "vxlanEbgp"] + except Exception as e: + raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/module_utils/orchestrators/manage_fabric.py b/plugins/module_utils/orchestrators/manage_fabric_ibgp.py similarity index 60% rename from plugins/module_utils/orchestrators/manage_fabric.py rename to plugins/module_utils/orchestrators/manage_fabric_ibgp.py index d936353c..e2082b57 100644 --- a/plugins/module_utils/orchestrators/manage_fabric.py +++ b/plugins/module_utils/orchestrators/manage_fabric_ibgp.py @@ -11,8 +11,8 @@ from typing import Type from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricIbgpModel -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ebgp import FabricEbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ibgp import FabricIbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ebgp import FabricEbgpModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.manage.manage_fabrics import ( @@ -45,25 +45,3 @@ def query_all(self) -> ResponseType: return [f for f in fabrics if f.get("management", 
{}).get("type") == "vxlanIbgp"] except Exception as e: raise Exception(f"Query all failed: {e}") from e - -class ManageEbgpFabricOrchestrator(NDBaseOrchestrator): - model_class: Type[NDBaseModel] = FabricEbgpModel - - create_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPost - update_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsPut - delete_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsDelete - query_one_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsGet - query_all_endpoint: Type[NDEndpointBaseModel] = EpManageFabricsListGet - - def query_all(self) -> ResponseType: - """ - Custom query_all action to extract 'fabrics' from response, - filtered to only vxlanEbgp fabric types. - """ - try: - api_endpoint = self.query_all_endpoint() - result = self.sender.query_obj(api_endpoint.path) - fabrics = result.get("fabrics", []) or [] - return [f for f in fabrics if f.get("management", {}).get("type") == "vxlanEbgp"] - except Exception as e: - raise Exception(f"Query all failed: {e}") from e diff --git a/plugins/modules/nd_manage_fabric_ebgp.py b/plugins/modules/nd_manage_fabric_ebgp.py index 31caac93..04a4ab72 100644 --- a/plugins/modules/nd_manage_fabric_ebgp.py +++ b/plugins/modules/nd_manage_fabric_ebgp.py @@ -1144,8 +1144,8 @@ from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ebgp import FabricEbgpModel -from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageEbgpFabricOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ebgp import FabricEbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric_ebgp import ManageEbgpFabricOrchestrator from 
ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError diff --git a/plugins/modules/nd_manage_fabric_ibgp.py b/plugins/modules/nd_manage_fabric_ibgp.py index dcd638cc..9d857fc6 100644 --- a/plugins/modules/nd_manage_fabric_ibgp.py +++ b/plugins/modules/nd_manage_fabric_ibgp.py @@ -1358,8 +1358,8 @@ from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine -from ansible_collections.cisco.nd.plugins.module_utils.models.nd_manage_fabric.manage_fabric_ibgp import FabricIbgpModel -from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric import ManageIbgpFabricOrchestrator +from ansible_collections.cisco.nd.plugins.module_utils.models.manage_fabric.manage_fabric_ibgp import FabricIbgpModel +from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.manage_fabric_ibgp import ManageIbgpFabricOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.common.exceptions import NDStateMachineError From 50e8dea67ea692c6a4d309e351fbc3c27dac153e Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 19 Mar 2026 12:54:08 -0400 Subject: [PATCH 123/131] [ignore] Update integration tests for nd_local_user module. 
--- .../targets/nd_local_user/tasks/main.yml | 1136 ++++++++++++----- 1 file changed, 800 insertions(+), 336 deletions(-) diff --git a/tests/integration/targets/nd_local_user/tasks/main.yml b/tests/integration/targets/nd_local_user/tasks/main.yml index c4540568..c76e22c3 100644 --- a/tests/integration/targets/nd_local_user/tasks/main.yml +++ b/tests/integration/targets/nd_local_user/tasks/main.yml @@ -14,7 +14,7 @@ output_level: '{{ api_key_output_level | default("debug") }}' - name: Ensure local users do not exist before test starts - cisco.nd.nd_local_user: + cisco.nd.nd_local_user: &clean_all_local_users <<: *nd_info config: - login_id: ansible_local_user @@ -22,9 +22,12 @@ - login_id: ansible_local_user_3 state: deleted -# CREATE -- name: Create local users with full and minimum configuration (check mode) - cisco.nd.nd_local_user: &create_local_user + +# --- MERGED STATE TESTS --- + +# MERGED STATE TESTS: CREATE +- name: Create local users with full and minimum configuration (merged state - check mode) + cisco.nd.nd_local_user: &create_local_user_merged_state <<: *nd_info config: - email: ansibleuser@example.com @@ -47,151 +50,124 @@ - name: all state: merged check_mode: true - register: cm_create_local_users + register: cm_merged_create_local_users -- name: Create local users with full and minimum configuration (normal mode) +- name: Create local users with full and minimum configuration (merged state - normal mode) cisco.nd.nd_local_user: - <<: *create_local_user - register: nm_create_local_users + <<: *create_local_user_merged_state + register: nm_merged_create_local_users -- name: Asserts for local users creation tasks +- name: Asserts for local users merged state creation tasks ansible.builtin.assert: that: - - cm_create_local_users is changed - - cm_create_local_users.after | length == 3 - - cm_create_local_users.after.0.login_id == "admin" - - cm_create_local_users.after.0.first_name == "admin" - - cm_create_local_users.after.0.remote_user_authorization 
== false - - cm_create_local_users.after.0.reuse_limitation == 0 - - cm_create_local_users.after.0.security_domains | length == 1 - - cm_create_local_users.after.0.security_domains.0.name == "all" - - cm_create_local_users.after.0.security_domains.0.roles | length == 1 - - cm_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" - - cm_create_local_users.after.0.time_interval_limitation == 0 - - cm_create_local_users.after.1.email == "ansibleuser@example.com" - - cm_create_local_users.after.1.first_name == "Ansible first name" - - cm_create_local_users.after.1.last_name == "Ansible last name" - - cm_create_local_users.after.1.login_id == "ansible_local_user" - - cm_create_local_users.after.1.remote_id_claim == "ansible_remote_user" - - cm_create_local_users.after.1.remote_user_authorization == true - - cm_create_local_users.after.1.reuse_limitation == 20 - - cm_create_local_users.after.1.security_domains | length == 1 - - cm_create_local_users.after.1.security_domains.0.name == "all" - - cm_create_local_users.after.1.security_domains.0.roles | length == 2 - - cm_create_local_users.after.1.security_domains.0.roles.0 == "observer" - - cm_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" - - cm_create_local_users.after.1.time_interval_limitation == 10 - - cm_create_local_users.after.2.login_id == "ansible_local_user_2" - - cm_create_local_users.after.2.security_domains | length == 1 - - cm_create_local_users.after.2.security_domains.0.name == "all" - - cm_create_local_users.before | length == 1 - - cm_create_local_users.before.0.login_id == "admin" - - cm_create_local_users.before.0.first_name == "admin" - - cm_create_local_users.before.0.remote_user_authorization == false - - cm_create_local_users.before.0.reuse_limitation == 0 - - cm_create_local_users.before.0.security_domains | length == 1 - - cm_create_local_users.before.0.security_domains.0.name == "all" - - cm_create_local_users.before.0.security_domains.0.roles | 
length == 1 - - cm_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" - - cm_create_local_users.before.0.time_interval_limitation == 0 - - cm_create_local_users.diff == [] - - cm_create_local_users.proposed.0.email == "ansibleuser@example.com" - - cm_create_local_users.proposed.0.first_name == "Ansible first name" - - cm_create_local_users.proposed.0.last_name == "Ansible last name" - - cm_create_local_users.proposed.0.login_id == "ansible_local_user" - - cm_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" - - cm_create_local_users.proposed.0.remote_user_authorization == true - - cm_create_local_users.proposed.0.reuse_limitation == 20 - - cm_create_local_users.proposed.0.security_domains | length == 1 - - cm_create_local_users.proposed.0.security_domains.0.name == "all" - - cm_create_local_users.proposed.0.security_domains.0.roles | length == 2 - - cm_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" - - cm_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" - - cm_create_local_users.proposed.0.time_interval_limitation == 10 - - cm_create_local_users.proposed.1.login_id == "ansible_local_user_2" - - cm_create_local_users.proposed.1.security_domains | length == 1 - - cm_create_local_users.proposed.1.security_domains.0.name == "all" - - nm_create_local_users is changed - - nm_create_local_users.after.0.first_name == "admin" - - nm_create_local_users.after.0.remote_user_authorization == false - - nm_create_local_users.after.0.reuse_limitation == 0 - - nm_create_local_users.after.0.security_domains | length == 1 - - nm_create_local_users.after.0.security_domains.0.name == "all" - - nm_create_local_users.after.0.security_domains.0.roles | length == 1 - - nm_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" - - nm_create_local_users.after.0.time_interval_limitation == 0 - - nm_create_local_users.after.1.email == "ansibleuser@example.com" - - 
nm_create_local_users.after.1.first_name == "Ansible first name" - - nm_create_local_users.after.1.last_name == "Ansible last name" - - nm_create_local_users.after.1.login_id == "ansible_local_user" - - nm_create_local_users.after.1.remote_id_claim == "ansible_remote_user" - - nm_create_local_users.after.1.remote_user_authorization == true - - nm_create_local_users.after.1.reuse_limitation == 20 - - nm_create_local_users.after.1.security_domains | length == 1 - - nm_create_local_users.after.1.security_domains.0.name == "all" - - nm_create_local_users.after.1.security_domains.0.roles | length == 2 - - nm_create_local_users.after.1.security_domains.0.roles.0 == "observer" - - nm_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" - - nm_create_local_users.after.1.time_interval_limitation == 10 - - nm_create_local_users.after.2.login_id == "ansible_local_user_2" - - nm_create_local_users.after.2.security_domains | length == 1 - - nm_create_local_users.after.2.security_domains.0.name == "all" - - nm_create_local_users.before | length == 1 - - nm_create_local_users.before.0.login_id == "admin" - - nm_create_local_users.before.0.first_name == "admin" - - nm_create_local_users.before.0.remote_user_authorization == false - - nm_create_local_users.before.0.reuse_limitation == 0 - - nm_create_local_users.before.0.security_domains | length == 1 - - nm_create_local_users.before.0.security_domains.0.name == "all" - - nm_create_local_users.before.0.security_domains.0.roles | length == 1 - - nm_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" - - nm_create_local_users.before.0.time_interval_limitation == 0 - - nm_create_local_users.diff == [] - - nm_create_local_users.proposed.0.email == "ansibleuser@example.com" - - nm_create_local_users.proposed.0.first_name == "Ansible first name" - - nm_create_local_users.proposed.0.last_name == "Ansible last name" - - nm_create_local_users.proposed.0.login_id == "ansible_local_user" - - 
nm_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" - - nm_create_local_users.proposed.0.remote_user_authorization == true - - nm_create_local_users.proposed.0.reuse_limitation == 20 - - nm_create_local_users.proposed.0.security_domains | length == 1 - - nm_create_local_users.proposed.0.security_domains.0.name == "all" - - nm_create_local_users.proposed.0.security_domains.0.roles | length == 2 - - nm_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" - - nm_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" - - nm_create_local_users.proposed.0.time_interval_limitation == 10 - - nm_create_local_users.proposed.1.login_id == "ansible_local_user_2" - - nm_create_local_users.proposed.1.security_domains | length == 1 - - nm_create_local_users.proposed.1.security_domains.0.name == "all" - -# UPDATE -- name: Replace all ansible_local_user's attributes (check mode) - cisco.nd.nd_local_user: &update_first_local_user - <<: *nd_info - config: - - email: updatedansibleuser@example.com - login_id: ansible_local_user - first_name: Updated Ansible first name - last_name: Updated Ansible last name - user_password: updatedAnsibleLocalUserPassword1% - reuse_limitation: 25 - time_interval_limitation: 15 - security_domains: - - name: all - roles: super_admin - remote_id_claim: "" - remote_user_authorization: false - state: replaced - check_mode: true - register: cm_replace_local_user + - cm_merged_create_local_users is changed + - cm_merged_create_local_users.after | length == 3 + - cm_merged_create_local_users.after.0.login_id == "admin" + - cm_merged_create_local_users.after.0.first_name == "admin" + - cm_merged_create_local_users.after.0.remote_user_authorization == false + - cm_merged_create_local_users.after.0.reuse_limitation == 0 + - cm_merged_create_local_users.after.0.security_domains | length == 1 + - cm_merged_create_local_users.after.0.security_domains.0.name == "all" + - 
cm_merged_create_local_users.after.0.security_domains.0.roles | length == 1 + - cm_merged_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - cm_merged_create_local_users.after.0.time_interval_limitation == 0 + - cm_merged_create_local_users.after.1.email == "ansibleuser@example.com" + - cm_merged_create_local_users.after.1.first_name == "Ansible first name" + - cm_merged_create_local_users.after.1.last_name == "Ansible last name" + - cm_merged_create_local_users.after.1.login_id == "ansible_local_user" + - cm_merged_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - cm_merged_create_local_users.after.1.remote_user_authorization == true + - cm_merged_create_local_users.after.1.reuse_limitation == 20 + - cm_merged_create_local_users.after.1.security_domains | length == 1 + - cm_merged_create_local_users.after.1.security_domains.0.name == "all" + - cm_merged_create_local_users.after.1.security_domains.0.roles | length == 2 + - cm_merged_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - cm_merged_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - cm_merged_create_local_users.after.1.time_interval_limitation == 10 + - cm_merged_create_local_users.after.2.login_id == "ansible_local_user_2" + - cm_merged_create_local_users.after.2.security_domains | length == 1 + - cm_merged_create_local_users.after.2.security_domains.0.name == "all" + - cm_merged_create_local_users.before | length == 1 + - cm_merged_create_local_users.before.0.login_id == "admin" + - cm_merged_create_local_users.before.0.first_name == "admin" + - cm_merged_create_local_users.before.0.remote_user_authorization == false + - cm_merged_create_local_users.before.0.reuse_limitation == 0 + - cm_merged_create_local_users.before.0.security_domains | length == 1 + - cm_merged_create_local_users.before.0.security_domains.0.name == "all" + - cm_merged_create_local_users.before.0.security_domains.0.roles | length == 1 + - 
cm_merged_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - cm_merged_create_local_users.before.0.time_interval_limitation == 0 + - cm_merged_create_local_users.proposed.0.email == "ansibleuser@example.com" + - cm_merged_create_local_users.proposed.0.first_name == "Ansible first name" + - cm_merged_create_local_users.proposed.0.last_name == "Ansible last name" + - cm_merged_create_local_users.proposed.0.login_id == "ansible_local_user" + - cm_merged_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - cm_merged_create_local_users.proposed.0.remote_user_authorization == true + - cm_merged_create_local_users.proposed.0.reuse_limitation == 20 + - cm_merged_create_local_users.proposed.0.security_domains | length == 1 + - cm_merged_create_local_users.proposed.0.security_domains.0.name == "all" + - cm_merged_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - cm_merged_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - cm_merged_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - cm_merged_create_local_users.proposed.0.time_interval_limitation == 10 + - cm_merged_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - cm_merged_create_local_users.proposed.1.security_domains | length == 1 + - cm_merged_create_local_users.proposed.1.security_domains.0.name == "all" + - nm_merged_create_local_users is changed + - nm_merged_create_local_users.after.0.first_name == "admin" + - nm_merged_create_local_users.after.0.remote_user_authorization == false + - nm_merged_create_local_users.after.0.reuse_limitation == 0 + - nm_merged_create_local_users.after.0.security_domains | length == 1 + - nm_merged_create_local_users.after.0.security_domains.0.name == "all" + - nm_merged_create_local_users.after.0.security_domains.0.roles | length == 1 + - nm_merged_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - 
nm_merged_create_local_users.after.0.time_interval_limitation == 0 + - nm_merged_create_local_users.after.1.email == "ansibleuser@example.com" + - nm_merged_create_local_users.after.1.first_name == "Ansible first name" + - nm_merged_create_local_users.after.1.last_name == "Ansible last name" + - nm_merged_create_local_users.after.1.login_id == "ansible_local_user" + - nm_merged_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - nm_merged_create_local_users.after.1.remote_user_authorization == true + - nm_merged_create_local_users.after.1.reuse_limitation == 20 + - nm_merged_create_local_users.after.1.security_domains | length == 1 + - nm_merged_create_local_users.after.1.security_domains.0.name == "all" + - nm_merged_create_local_users.after.1.security_domains.0.roles | length == 2 + - nm_merged_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - nm_merged_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - nm_merged_create_local_users.after.1.time_interval_limitation == 10 + - nm_merged_create_local_users.after.2.login_id == "ansible_local_user_2" + - nm_merged_create_local_users.after.2.security_domains | length == 1 + - nm_merged_create_local_users.after.2.security_domains.0.name == "all" + - nm_merged_create_local_users.before | length == 1 + - nm_merged_create_local_users.before.0.login_id == "admin" + - nm_merged_create_local_users.before.0.first_name == "admin" + - nm_merged_create_local_users.before.0.remote_user_authorization == false + - nm_merged_create_local_users.before.0.reuse_limitation == 0 + - nm_merged_create_local_users.before.0.security_domains | length == 1 + - nm_merged_create_local_users.before.0.security_domains.0.name == "all" + - nm_merged_create_local_users.before.0.security_domains.0.roles | length == 1 + - nm_merged_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - nm_merged_create_local_users.before.0.time_interval_limitation == 0 + - 
nm_merged_create_local_users.proposed.0.email == "ansibleuser@example.com" + - nm_merged_create_local_users.proposed.0.first_name == "Ansible first name" + - nm_merged_create_local_users.proposed.0.last_name == "Ansible last name" + - nm_merged_create_local_users.proposed.0.login_id == "ansible_local_user" + - nm_merged_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - nm_merged_create_local_users.proposed.0.remote_user_authorization == true + - nm_merged_create_local_users.proposed.0.reuse_limitation == 20 + - nm_merged_create_local_users.proposed.0.security_domains | length == 1 + - nm_merged_create_local_users.proposed.0.security_domains.0.name == "all" + - nm_merged_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - nm_merged_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - nm_merged_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - nm_merged_create_local_users.proposed.0.time_interval_limitation == 10 + - nm_merged_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - nm_merged_create_local_users.proposed.1.security_domains | length == 1 + - nm_merged_create_local_users.proposed.1.security_domains.0.name == "all" -- name: Replace all ansible_local_user's attributes (normal mode) - cisco.nd.nd_local_user: - <<: *update_first_local_user - register: nm_replace_local_user - -- name: Update all ansible_local_user_2's attributes except password - cisco.nd.nd_local_user: &update_second_local_user +# MERGED STATE TESTS: UPDATE +- name: Update all ansible_local_user_2's attributes except password (merge state - check mode) + cisco.nd.nd_local_user: &update_second_local_user_merged_state <<: *nd_info config: - email: secondansibleuser@example.com @@ -206,48 +182,341 @@ remote_id_claim: ansible_remote_user_2 remote_user_authorization: true state: merged - register: nm_merge_local_user_2 + check_mode: true + register: cm_merged_update_local_user_2 -- 
name: Update all ansible_local_user_2's attributes except password again (idempotency) +- name: Update all ansible_local_user_2's attributes except password (merge state - normal mode) cisco.nd.nd_local_user: - <<: *update_second_local_user - register: nm_merge_local_user_2_again + <<: *update_second_local_user_merged_state + register: nm_merged_update_local_user_2 +- name: Update all ansible_local_user_2's attributes except password again (merge state - idempotency) + cisco.nd.nd_local_user: + <<: *update_second_local_user_merged_state + register: nm_merged_update_local_user_2_again -- name: Override local users with minimum configuration +- name: Asserts for local users update tasks + ansible.builtin.assert: + that: + - cm_merged_update_local_user_2 is changed + - cm_merged_update_local_user_2.after | length == 3 + - cm_merged_update_local_user_2.after.0.email == "secondansibleuser@example.com" + - cm_merged_update_local_user_2.after.0.first_name == "Second Ansible first name" + - cm_merged_update_local_user_2.after.0.last_name == "Second Ansible last name" + - cm_merged_update_local_user_2.after.0.login_id == "ansible_local_user_2" + - cm_merged_update_local_user_2.after.0.remote_id_claim == "ansible_remote_user_2" + - cm_merged_update_local_user_2.after.0.remote_user_authorization == true + - cm_merged_update_local_user_2.after.0.reuse_limitation == 20 + - cm_merged_update_local_user_2.after.0.security_domains | length == 1 + - cm_merged_update_local_user_2.after.0.security_domains.0.name == "all" + - cm_merged_update_local_user_2.after.0.security_domains.0.roles | length == 1 + - cm_merged_update_local_user_2.after.0.security_domains.0.roles.0 == "fabric_admin" + - cm_merged_update_local_user_2.after.0.time_interval_limitation == 10 + - cm_merged_update_local_user_2.after.1.email == "updatedansibleuser@example.com" + - cm_merged_update_local_user_2.after.1.first_name == "Updated Ansible first name" + - cm_merged_update_local_user_2.after.1.last_name == 
"Updated Ansible last name" + - cm_merged_update_local_user_2.after.1.login_id == "ansible_local_user" + - cm_merged_update_local_user_2.after.1.remote_user_authorization == false + - cm_merged_update_local_user_2.after.1.reuse_limitation == 25 + - cm_merged_update_local_user_2.after.1.security_domains | length == 1 + - cm_merged_update_local_user_2.after.1.security_domains.0.name == "all" + - cm_merged_update_local_user_2.after.1.security_domains.0.roles | length == 1 + - cm_merged_update_local_user_2.after.1.security_domains.0.roles.0 == "super_admin" + - cm_merged_update_local_user_2.after.1.time_interval_limitation == 15 + - cm_merged_update_local_user_2.after.2.login_id == "admin" + - cm_merged_update_local_user_2.after.2.first_name == "admin" + - cm_merged_update_local_user_2.after.2.remote_user_authorization == false + - cm_merged_update_local_user_2.after.2.reuse_limitation == 0 + - cm_merged_update_local_user_2.after.2.security_domains | length == 1 + - cm_merged_update_local_user_2.after.2.security_domains.0.name == "all" + - cm_merged_update_local_user_2.after.2.security_domains.0.roles | length == 1 + - cm_merged_update_local_user_2.after.2.security_domains.0.roles.0 == "super_admin" + - cm_merged_update_local_user_2.after.2.time_interval_limitation == 0 + - cm_merged_update_local_user_2.before | length == 3 + - cm_merged_update_local_user_2.before.2.first_name == "admin" + - cm_merged_update_local_user_2.before.2.remote_user_authorization == false + - cm_merged_update_local_user_2.before.2.reuse_limitation == 0 + - cm_merged_update_local_user_2.before.2.security_domains | length == 1 + - cm_merged_update_local_user_2.before.2.security_domains.0.name == "all" + - cm_merged_update_local_user_2.before.2.security_domains.0.roles | length == 1 + - cm_merged_update_local_user_2.before.2.security_domains.0.roles.0 == "super_admin" + - cm_merged_update_local_user_2.before.2.time_interval_limitation == 0 + - cm_merged_update_local_user_2.before.1.email == 
"ansibleuser@example.com" + - cm_merged_update_local_user_2.before.1.first_name == "Ansible first name" + - cm_merged_update_local_user_2.before.1.last_name == "Ansible last name" + - cm_merged_update_local_user_2.before.1.login_id == "ansible_local_user" + - cm_merged_update_local_user_2.before.1.remote_id_claim == "ansible_remote_user" + - cm_merged_update_local_user_2.before.1.remote_user_authorization == true + - cm_merged_update_local_user_2.before.1.reuse_limitation == 20 + - cm_merged_update_local_user_2.before.1.security_domains | length == 1 + - cm_merged_update_local_user_2.before.1.security_domains.0.name == "all" + - cm_merged_update_local_user_2.before.1.security_domains.0.roles | length == 2 + - cm_merged_update_local_user_2.before.1.security_domains.0.roles.0 == "observer" + - cm_merged_update_local_user_2.before.1.security_domains.0.roles.1 == "support_engineer" + - cm_merged_update_local_user_2.before.1.time_interval_limitation == 10 + - cm_merged_update_local_user_2.before.0.login_id == "ansible_local_user_2" + - cm_merged_update_local_user_2.before.0.security_domains | length == 1 + - cm_merged_update_local_user_2.before.0.security_domains.0.name == "all" + - cm_merged_update_local_user_2.proposed.0.email == "secondansibleuser@example.com" + - cm_merged_update_local_user_2.proposed.0.first_name == "Second Ansible first name" + - cm_merged_update_local_user_2.proposed.0.last_name == "Second Ansible last name" + - cm_merged_update_local_user_2.proposed.0.login_id == "ansible_local_user_2" + - cm_merged_update_local_user_2.proposed.0.remote_id_claim == "ansible_remote_user_2" + - cm_merged_update_local_user_2.proposed.0.remote_user_authorization == true + - cm_merged_update_local_user_2.proposed.0.reuse_limitation == 20 + - cm_merged_update_local_user_2.proposed.0.security_domains | length == 1 + - cm_merged_update_local_user_2.proposed.0.security_domains.0.name == "all" + - cm_merged_update_local_user_2.proposed.0.security_domains.0.roles | length 
== 1 + - cm_merged_update_local_user_2.proposed.0.security_domains.0.roles.0 == "fabric_admin" + - cm_merged_update_local_user_2.proposed.0.time_interval_limitation == 10 + - nm_merged_update_local_user_2 is changed + - nm_merged_update_local_user_2.after | length == 3 + - nm_merged_update_local_user_2.after.0.email == "secondansibleuser@example.com" + - nm_merged_update_local_user_2.after.0.first_name == "Second Ansible first name" + - nm_merged_update_local_user_2.after.0.last_name == "Second Ansible last name" + - nm_merged_update_local_user_2.after.0.login_id == "ansible_local_user_2" + - nm_merged_update_local_user_2.after.0.remote_id_claim == "ansible_remote_user_2" + - nm_merged_update_local_user_2.after.0.remote_user_authorization == true + - nm_merged_update_local_user_2.after.0.reuse_limitation == 20 + - nm_merged_update_local_user_2.after.0.security_domains | length == 1 + - nm_merged_update_local_user_2.after.0.security_domains.0.name == "all" + - nm_merged_update_local_user_2.after.0.security_domains.0.roles | length == 1 + - nm_merged_update_local_user_2.after.0.security_domains.0.roles.0 == "fabric_admin" + - nm_merged_update_local_user_2.after.0.time_interval_limitation == 10 + - nm_merged_update_local_user_2.after.1.email == "updatedansibleuser@example.com" + - nm_merged_update_local_user_2.after.1.first_name == "Updated Ansible first name" + - nm_merged_update_local_user_2.after.1.last_name == "Updated Ansible last name" + - nm_merged_update_local_user_2.after.1.login_id == "ansible_local_user" + - nm_merged_update_local_user_2.after.1.remote_user_authorization == false + - nm_merged_update_local_user_2.after.1.reuse_limitation == 25 + - nm_merged_update_local_user_2.after.1.security_domains | length == 1 + - nm_merged_update_local_user_2.after.1.security_domains.0.name == "all" + - nm_merged_update_local_user_2.after.1.security_domains.0.roles | length == 1 + - nm_merged_update_local_user_2.after.1.security_domains.0.roles.0 == "super_admin" + - 
nm_merged_update_local_user_2.after.1.time_interval_limitation == 15 + - nm_merged_update_local_user_2.after.2.login_id == "admin" + - nm_merged_update_local_user_2.after.2.first_name == "admin" + - nm_merged_update_local_user_2.after.2.remote_user_authorization == false + - nm_merged_update_local_user_2.after.2.reuse_limitation == 0 + - nm_merged_update_local_user_2.after.2.security_domains | length == 1 + - nm_merged_update_local_user_2.after.2.security_domains.0.name == "all" + - nm_merged_update_local_user_2.after.2.security_domains.0.roles | length == 1 + - nm_merged_update_local_user_2.after.2.security_domains.0.roles.0 == "super_admin" + - nm_merged_update_local_user_2.after.2.time_interval_limitation == 0 + - nm_merged_update_local_user_2.before | length == 3 + - nm_merged_update_local_user_2.before.2.first_name == "admin" + - nm_merged_update_local_user_2.before.2.remote_user_authorization == false + - nm_merged_update_local_user_2.before.2.reuse_limitation == 0 + - nm_merged_update_local_user_2.before.2.security_domains | length == 1 + - nm_merged_update_local_user_2.before.2.security_domains.0.name == "all" + - nm_merged_update_local_user_2.before.2.security_domains.0.roles | length == 1 + - nm_merged_update_local_user_2.before.2.security_domains.0.roles.0 == "super_admin" + - nm_merged_update_local_user_2.before.2.time_interval_limitation == 0 + - nm_merged_update_local_user_2.before.1.email == "ansibleuser@example.com" + - nm_merged_update_local_user_2.before.1.first_name == "Ansible first name" + - nm_merged_update_local_user_2.before.1.last_name == "Ansible last name" + - nm_merged_update_local_user_2.before.1.login_id == "ansible_local_user" + - nm_merged_update_local_user_2.before.1.remote_id_claim == "ansible_remote_user" + - nm_merged_update_local_user_2.before.1.remote_user_authorization == true + - nm_merged_update_local_user_2.before.1.reuse_limitation == 20 + - nm_merged_update_local_user_2.before.1.security_domains | length == 1 + - 
nm_merged_update_local_user_2.before.1.security_domains.0.name == "all" + - nm_merged_update_local_user_2.before.1.security_domains.0.roles | length == 2 + - nm_merged_update_local_user_2.before.1.security_domains.0.roles.0 == "observer" + - nm_merged_update_local_user_2.before.1.security_domains.0.roles.1 == "support_engineer" + - nm_merged_update_local_user_2.before.1.time_interval_limitation == 10 + - nm_merged_update_local_user_2.before.0.login_id == "ansible_local_user_2" + - nm_merged_update_local_user_2.before.0.security_domains | length == 1 + - nm_merged_update_local_user_2.before.0.security_domains.0.name == "all" + - nm_merged_update_local_user_2.proposed.0.email == "secondansibleuser@example.com" + - nm_merged_update_local_user_2.proposed.0.first_name == "Second Ansible first name" + - nm_merged_update_local_user_2.proposed.0.last_name == "Second Ansible last name" + - nm_merged_update_local_user_2.proposed.0.login_id == "ansible_local_user_2" + - nm_merged_update_local_user_2.proposed.0.remote_id_claim == "ansible_remote_user_2" + - nm_merged_update_local_user_2.proposed.0.remote_user_authorization == true + - nm_merged_update_local_user_2.proposed.0.reuse_limitation == 20 + - nm_merged_update_local_user_2.proposed.0.security_domains | length == 1 + - nm_merged_update_local_user_2.proposed.0.security_domains.0.name == "all" + - nm_merged_update_local_user_2.proposed.0.security_domains.0.roles | length == 1 + - nm_merged_update_local_user_2.proposed.0.security_domains.0.roles.0 == "fabric_admin" + - nm_merged_update_local_user_2.proposed.0.time_interval_limitation == 10 + - nm_merged_update_local_user_2_again is not changed + - nm_merged_update_local_user_2_again.after == nm_merged_update_local_user_2.after + - nm_merged_update_local_user_2_again.proposed == nm_merged_update_local_user_2.proposed + +- name: Ensure local users do not exist for next tests cisco.nd.nd_local_user: + <<: *clean_all_local_users + +# --- REPLACED STATE TESTS --- + +# REPLACED 
STATE TESTS: CREATE +- name: Create local users with full and minimum configuration (replaced state - check mode) + cisco.nd.nd_local_user: &create_local_user_replaced_state <<: *nd_info config: - - email: overrideansibleuser@example.com + - email: ansibleuser@example.com login_id: ansible_local_user - first_name: Overridden Ansible first name - last_name: Overridden Ansible last name - user_password: overideansibleLocalUserPassword1% - reuse_limitation: 15 - time_interval_limitation: 5 + first_name: Ansible first name + last_name: Ansible last name + user_password: ansibleLocalUserPassword1%Test + reuse_limitation: 20 + time_interval_limitation: 10 security_domains: - name: all roles: - observer + - support_engineer remote_id_claim: ansible_remote_user remote_user_authorization: true - - login_id: admin - first_name: admin - remote_user_authorization: false - reuse_limitation: 0 - time_interval_limitation: 0 + - login_id: ansible_local_user_2 + user_password: ansibleLocalUser2Password1%Test security_domains: - name: all - roles: - - super_admin - - login_id: ansible_local_user_3 - user_password: ansibleLocalUser3Password1%Test + state: replaced + check_mode: true + register: cm_replaced_create_local_users + +- name: Create local users with full and minimum configuration (replaced state - normal mode) + cisco.nd.nd_local_user: + <<: *create_local_user_replaced_state + register: nm_replaced_create_local_users + +- name: Asserts for local users replaced state creation tasks + ansible.builtin.assert: + that: + - cm_replaced_create_local_users is changed + - cm_replaced_create_local_users.after | length == 3 + - cm_replaced_create_local_users.after.0.login_id == "admin" + - cm_replaced_create_local_users.after.0.first_name == "admin" + - cm_replaced_create_local_users.after.0.remote_user_authorization == false + - cm_replaced_create_local_users.after.0.reuse_limitation == 0 + - cm_replaced_create_local_users.after.0.security_domains | length == 1 + - 
cm_replaced_create_local_users.after.0.security_domains.0.name == "all" + - cm_replaced_create_local_users.after.0.security_domains.0.roles | length == 1 + - cm_replaced_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - cm_replaced_create_local_users.after.0.time_interval_limitation == 0 + - cm_replaced_create_local_users.after.1.email == "ansibleuser@example.com" + - cm_replaced_create_local_users.after.1.first_name == "Ansible first name" + - cm_replaced_create_local_users.after.1.last_name == "Ansible last name" + - cm_replaced_create_local_users.after.1.login_id == "ansible_local_user" + - cm_replaced_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - cm_replaced_create_local_users.after.1.remote_user_authorization == true + - cm_replaced_create_local_users.after.1.reuse_limitation == 20 + - cm_replaced_create_local_users.after.1.security_domains | length == 1 + - cm_replaced_create_local_users.after.1.security_domains.0.name == "all" + - cm_replaced_create_local_users.after.1.security_domains.0.roles | length == 2 + - cm_replaced_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - cm_replaced_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - cm_replaced_create_local_users.after.1.time_interval_limitation == 10 + - cm_replaced_create_local_users.after.2.login_id == "ansible_local_user_2" + - cm_replaced_create_local_users.after.2.security_domains | length == 1 + - cm_replaced_create_local_users.after.2.security_domains.0.name == "all" + - cm_replaced_create_local_users.before | length == 1 + - cm_replaced_create_local_users.before.0.login_id == "admin" + - cm_replaced_create_local_users.before.0.first_name == "admin" + - cm_replaced_create_local_users.before.0.remote_user_authorization == false + - cm_replaced_create_local_users.before.0.reuse_limitation == 0 + - cm_replaced_create_local_users.before.0.security_domains | length == 1 + - 
cm_replaced_create_local_users.before.0.security_domains.0.name == "all" + - cm_replaced_create_local_users.before.0.security_domains.0.roles | length == 1 + - cm_replaced_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - cm_replaced_create_local_users.before.0.time_interval_limitation == 0 + - cm_replaced_create_local_users.proposed.0.email == "ansibleuser@example.com" + - cm_replaced_create_local_users.proposed.0.first_name == "Ansible first name" + - cm_replaced_create_local_users.proposed.0.last_name == "Ansible last name" + - cm_replaced_create_local_users.proposed.0.login_id == "ansible_local_user" + - cm_replaced_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - cm_replaced_create_local_users.proposed.0.remote_user_authorization == true + - cm_replaced_create_local_users.proposed.0.reuse_limitation == 20 + - cm_replaced_create_local_users.proposed.0.security_domains | length == 1 + - cm_replaced_create_local_users.proposed.0.security_domains.0.name == "all" + - cm_replaced_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - cm_replaced_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - cm_replaced_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - cm_replaced_create_local_users.proposed.0.time_interval_limitation == 10 + - cm_replaced_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - cm_replaced_create_local_users.proposed.1.security_domains | length == 1 + - cm_replaced_create_local_users.proposed.1.security_domains.0.name == "all" + - nm_replaced_create_local_users is changed + - nm_replaced_create_local_users.after.0.first_name == "admin" + - nm_replaced_create_local_users.after.0.remote_user_authorization == false + - nm_replaced_create_local_users.after.0.reuse_limitation == 0 + - nm_replaced_create_local_users.after.0.security_domains | length == 1 + - 
nm_replaced_create_local_users.after.0.security_domains.0.name == "all" + - nm_replaced_create_local_users.after.0.security_domains.0.roles | length == 1 + - nm_replaced_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - nm_replaced_create_local_users.after.0.time_interval_limitation == 0 + - nm_replaced_create_local_users.after.1.email == "ansibleuser@example.com" + - nm_replaced_create_local_users.after.1.first_name == "Ansible first name" + - nm_replaced_create_local_users.after.1.last_name == "Ansible last name" + - nm_replaced_create_local_users.after.1.login_id == "ansible_local_user" + - nm_replaced_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - nm_replaced_create_local_users.after.1.remote_user_authorization == true + - nm_replaced_create_local_users.after.1.reuse_limitation == 20 + - nm_replaced_create_local_users.after.1.security_domains | length == 1 + - nm_replaced_create_local_users.after.1.security_domains.0.name == "all" + - nm_replaced_create_local_users.after.1.security_domains.0.roles | length == 2 + - nm_replaced_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - nm_replaced_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - nm_replaced_create_local_users.after.1.time_interval_limitation == 10 + - nm_replaced_create_local_users.after.2.login_id == "ansible_local_user_2" + - nm_replaced_create_local_users.after.2.security_domains | length == 1 + - nm_replaced_create_local_users.after.2.security_domains.0.name == "all" + - nm_replaced_create_local_users.before | length == 1 + - nm_replaced_create_local_users.before.0.login_id == "admin" + - nm_replaced_create_local_users.before.0.first_name == "admin" + - nm_replaced_create_local_users.before.0.remote_user_authorization == false + - nm_replaced_create_local_users.before.0.reuse_limitation == 0 + - nm_replaced_create_local_users.before.0.security_domains | length == 1 + - 
nm_replaced_create_local_users.before.0.security_domains.0.name == "all" + - nm_replaced_create_local_users.before.0.security_domains.0.roles | length == 1 + - nm_replaced_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - nm_replaced_create_local_users.before.0.time_interval_limitation == 0 + - nm_replaced_create_local_users.proposed.0.email == "ansibleuser@example.com" + - nm_replaced_create_local_users.proposed.0.first_name == "Ansible first name" + - nm_replaced_create_local_users.proposed.0.last_name == "Ansible last name" + - nm_replaced_create_local_users.proposed.0.login_id == "ansible_local_user" + - nm_replaced_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - nm_replaced_create_local_users.proposed.0.remote_user_authorization == true + - nm_replaced_create_local_users.proposed.0.reuse_limitation == 20 + - nm_replaced_create_local_users.proposed.0.security_domains | length == 1 + - nm_replaced_create_local_users.proposed.0.security_domains.0.name == "all" + - nm_replaced_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - nm_replaced_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - nm_replaced_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - nm_replaced_create_local_users.proposed.0.time_interval_limitation == 10 + - nm_replaced_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - nm_replaced_create_local_users.proposed.1.security_domains | length == 1 + - nm_replaced_create_local_users.proposed.1.security_domains.0.name == "all" + +# REPLACED STATE TESTS: UPDATE +- name: Replace all ansible_local_user's attributes (replaced state - check mode) + cisco.nd.nd_local_user: &update_first_local_user_replaced_state + <<: *nd_info + config: + - email: updatedansibleuser@example.com + login_id: ansible_local_user + first_name: Updated Ansible first name + last_name: Updated Ansible last name + user_password: 
updatedAnsibleLocalUserPassword1% + reuse_limitation: 25 + time_interval_limitation: 15 security_domains: - name: all - state: overridden - register: nm_override_local_users + roles: super_admin + remote_id_claim: "" + remote_user_authorization: false + state: replaced + check_mode: true + register: cm_replace_local_user -- name: Asserts for local users update tasks +- name: Replace all ansible_local_user's attributes (replaced - normal mode) + cisco.nd.nd_local_user: + <<: *update_first_local_user_replaced_state + register: nm_replace_local_user + +- name: Asserts for local users replaced state update tasks ansible.builtin.assert: that: - cm_replace_local_user is changed @@ -301,7 +570,6 @@ - cm_replace_local_user.before.0.login_id == "ansible_local_user_2" - cm_replace_local_user.before.0.security_domains | length == 1 - cm_replace_local_user.before.0.security_domains.0.name == "all" - - cm_replace_local_user.diff == [] - cm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com" - cm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" - cm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" @@ -365,7 +633,6 @@ - nm_replace_local_user.before.0.login_id == "ansible_local_user_2" - nm_replace_local_user.before.0.security_domains | length == 1 - nm_replace_local_user.before.0.security_domains.0.name == "all" - - nm_replace_local_user.diff == [] - nm_replace_local_user.proposed.0.email == "updatedansibleuser@example.com" - nm_replace_local_user.proposed.0.first_name == "Updated Ansible first name" - nm_replace_local_user.proposed.0.last_name == "Updated Ansible last name" @@ -378,164 +645,368 @@ - nm_replace_local_user.proposed.0.security_domains.0.roles | length == 1 - nm_replace_local_user.proposed.0.security_domains.0.roles.0 == "super_admin" - nm_replace_local_user.proposed.0.time_interval_limitation == 15 - - nm_merge_local_user_2 is changed - - nm_merge_local_user_2.after | length == 3 - - 
nm_merge_local_user_2.after.0.email == "secondansibleuser@example.com" - - nm_merge_local_user_2.after.0.first_name == "Second Ansible first name" - - nm_merge_local_user_2.after.0.last_name == "Second Ansible last name" - - nm_merge_local_user_2.after.0.login_id == "ansible_local_user_2" - - nm_merge_local_user_2.after.0.remote_id_claim == "ansible_remote_user_2" - - nm_merge_local_user_2.after.0.remote_user_authorization == true - - nm_merge_local_user_2.after.0.reuse_limitation == 20 - - nm_merge_local_user_2.after.0.security_domains | length == 1 - - nm_merge_local_user_2.after.0.security_domains.0.name == "all" - - nm_merge_local_user_2.after.0.security_domains.0.roles | length == 1 - - nm_merge_local_user_2.after.0.security_domains.0.roles.0 == "fabric_admin" - - nm_merge_local_user_2.after.0.time_interval_limitation == 10 - - nm_merge_local_user_2.after.1.email == "updatedansibleuser@example.com" - - nm_merge_local_user_2.after.1.first_name == "Updated Ansible first name" - - nm_merge_local_user_2.after.1.last_name == "Updated Ansible last name" - - nm_merge_local_user_2.after.1.login_id == "ansible_local_user" - - nm_merge_local_user_2.after.1.remote_user_authorization == false - - nm_merge_local_user_2.after.1.reuse_limitation == 25 - - nm_merge_local_user_2.after.1.security_domains | length == 1 - - nm_merge_local_user_2.after.1.security_domains.0.name == "all" - - nm_merge_local_user_2.after.1.security_domains.0.roles | length == 1 - - nm_merge_local_user_2.after.1.security_domains.0.roles.0 == "super_admin" - - nm_merge_local_user_2.after.1.time_interval_limitation == 15 - - nm_merge_local_user_2.after.2.login_id == "admin" - - nm_merge_local_user_2.after.2.first_name == "admin" - - nm_merge_local_user_2.after.2.remote_user_authorization == false - - nm_merge_local_user_2.after.2.reuse_limitation == 0 - - nm_merge_local_user_2.after.2.security_domains | length == 1 - - nm_merge_local_user_2.after.2.security_domains.0.name == "all" - - 
nm_merge_local_user_2.after.2.security_domains.0.roles | length == 1 - - nm_merge_local_user_2.after.2.security_domains.0.roles.0 == "super_admin" - - nm_merge_local_user_2.after.2.time_interval_limitation == 0 - - nm_merge_local_user_2.before | length == 3 - - nm_merge_local_user_2.before.2.first_name == "admin" - - nm_merge_local_user_2.before.2.remote_user_authorization == false - - nm_merge_local_user_2.before.2.reuse_limitation == 0 - - nm_merge_local_user_2.before.2.security_domains | length == 1 - - nm_merge_local_user_2.before.2.security_domains.0.name == "all" - - nm_merge_local_user_2.before.2.security_domains.0.roles | length == 1 - - nm_merge_local_user_2.before.2.security_domains.0.roles.0 == "super_admin" - - nm_merge_local_user_2.before.2.time_interval_limitation == 0 - - nm_merge_local_user_2.before.1.email == "updatedansibleuser@example.com" - - nm_merge_local_user_2.before.1.first_name == "Updated Ansible first name" - - nm_merge_local_user_2.before.1.last_name == "Updated Ansible last name" - - nm_merge_local_user_2.before.1.login_id == "ansible_local_user" - - nm_merge_local_user_2.before.1.remote_user_authorization == false - - nm_merge_local_user_2.before.1.reuse_limitation == 25 - - nm_merge_local_user_2.before.1.security_domains | length == 1 - - nm_merge_local_user_2.before.1.security_domains.0.name == "all" - - nm_merge_local_user_2.before.1.security_domains.0.roles | length == 1 - - nm_merge_local_user_2.before.1.security_domains.0.roles.0 == "super_admin" - - nm_merge_local_user_2.before.1.time_interval_limitation == 15 - - nm_merge_local_user_2.before.0.login_id == "ansible_local_user_2" - - nm_merge_local_user_2.before.0.security_domains | length == 1 - - nm_merge_local_user_2.before.0.security_domains.0.name == "all" - - nm_merge_local_user_2.diff == [] - - nm_merge_local_user_2.proposed.0.email == "secondansibleuser@example.com" - - nm_merge_local_user_2.proposed.0.first_name == "Second Ansible first name" - - 
nm_merge_local_user_2.proposed.0.last_name == "Second Ansible last name" - - nm_merge_local_user_2.proposed.0.login_id == "ansible_local_user_2" - - nm_merge_local_user_2.proposed.0.remote_id_claim == "ansible_remote_user_2" - - nm_merge_local_user_2.proposed.0.remote_user_authorization == true - - nm_merge_local_user_2.proposed.0.reuse_limitation == 20 - - nm_merge_local_user_2.proposed.0.security_domains | length == 1 - - nm_merge_local_user_2.proposed.0.security_domains.0.name == "all" - - nm_merge_local_user_2.proposed.0.security_domains.0.roles | length == 1 - - nm_merge_local_user_2.proposed.0.security_domains.0.roles.0 == "fabric_admin" - - nm_merge_local_user_2.proposed.0.time_interval_limitation == 10 - - nm_merge_local_user_2_again is not changed - - nm_merge_local_user_2_again.after == nm_merge_local_user_2.after - - nm_merge_local_user_2_again.diff == [] - - nm_merge_local_user_2_again.proposed == nm_merge_local_user_2.proposed - - nm_override_local_users is changed - - nm_override_local_users.after | length == 3 - - nm_override_local_users.after.0.email == "overrideansibleuser@example.com" - - nm_override_local_users.after.0.first_name == "Overridden Ansible first name" - - nm_override_local_users.after.0.last_name == "Overridden Ansible last name" - - nm_override_local_users.after.0.login_id == "ansible_local_user" - - nm_override_local_users.after.0.remote_id_claim == "ansible_remote_user" - - nm_override_local_users.after.0.remote_user_authorization == true - - nm_override_local_users.after.0.reuse_limitation == 15 - - nm_override_local_users.after.0.security_domains | length == 1 - - nm_override_local_users.after.0.security_domains.0.name == "all" - - nm_override_local_users.after.0.security_domains.0.roles | length == 1 - - nm_override_local_users.after.0.security_domains.0.roles.0 == "observer" - - nm_override_local_users.after.0.time_interval_limitation == 5 - - nm_override_local_users.after.1.login_id == "admin" - - 
nm_override_local_users.after.1.first_name == "admin" - - nm_override_local_users.after.1.remote_user_authorization == false - - nm_override_local_users.after.1.reuse_limitation == 0 - - nm_override_local_users.after.1.security_domains | length == 1 - - nm_override_local_users.after.1.security_domains.0.name == "all" - - nm_override_local_users.after.1.security_domains.0.roles | length == 1 - - nm_override_local_users.after.1.security_domains.0.roles.0 == "super_admin" - - nm_override_local_users.after.1.time_interval_limitation == 0 - - nm_override_local_users.after.2.login_id == "ansible_local_user_3" - - nm_override_local_users.after.2.security_domains.0.name == "all" - - nm_override_local_users.before | length == 3 - - nm_override_local_users.before.2.first_name == "admin" - - nm_override_local_users.before.2.remote_user_authorization == false - - nm_override_local_users.before.2.reuse_limitation == 0 - - nm_override_local_users.before.2.security_domains | length == 1 - - nm_override_local_users.before.2.security_domains.0.name == "all" - - nm_override_local_users.before.2.security_domains.0.roles | length == 1 - - nm_override_local_users.before.2.security_domains.0.roles.0 == "super_admin" - - nm_override_local_users.before.2.time_interval_limitation == 0 - - nm_override_local_users.before.1.email == "updatedansibleuser@example.com" - - nm_override_local_users.before.1.first_name == "Updated Ansible first name" - - nm_override_local_users.before.1.last_name == "Updated Ansible last name" - - nm_override_local_users.before.1.login_id == "ansible_local_user" - - nm_override_local_users.before.1.remote_user_authorization == false - - nm_override_local_users.before.1.reuse_limitation == 25 - - nm_override_local_users.before.1.security_domains | length == 1 - - nm_override_local_users.before.1.security_domains.0.name == "all" - - nm_override_local_users.before.1.security_domains.0.roles | length == 1 - - nm_override_local_users.before.1.security_domains.0.roles.0 
== "super_admin" - - nm_override_local_users.before.1.time_interval_limitation == 15 - - nm_override_local_users.before.0.email == "secondansibleuser@example.com" - - nm_override_local_users.before.0.first_name == "Second Ansible first name" - - nm_override_local_users.before.0.last_name == "Second Ansible last name" - - nm_override_local_users.before.0.login_id == "ansible_local_user_2" - - nm_override_local_users.before.0.remote_id_claim == "ansible_remote_user_2" - - nm_override_local_users.before.0.remote_user_authorization == true - - nm_override_local_users.before.0.reuse_limitation == 20 - - nm_override_local_users.before.0.security_domains | length == 1 - - nm_override_local_users.before.0.security_domains.0.name == "all" - - nm_override_local_users.before.0.security_domains.0.roles | length == 1 - - nm_override_local_users.before.0.security_domains.0.roles.0 == "fabric_admin" - - nm_override_local_users.before.0.time_interval_limitation == 10 - - nm_override_local_users.diff == [] - - nm_override_local_users.proposed.0.email == "overrideansibleuser@example.com" - - nm_override_local_users.proposed.0.first_name == "Overridden Ansible first name" - - nm_override_local_users.proposed.0.last_name == "Overridden Ansible last name" - - nm_override_local_users.proposed.0.login_id == "ansible_local_user" - - nm_override_local_users.proposed.0.remote_id_claim == "ansible_remote_user" - - nm_override_local_users.proposed.0.remote_user_authorization == true - - nm_override_local_users.proposed.0.reuse_limitation == 15 - - nm_override_local_users.proposed.0.security_domains | length == 1 - - nm_override_local_users.proposed.0.security_domains.0.name == "all" - - nm_override_local_users.proposed.0.security_domains.0.roles | length == 1 - - nm_override_local_users.proposed.0.security_domains.0.roles.0 == "observer" - - nm_override_local_users.proposed.0.time_interval_limitation == 5 - - nm_override_local_users.proposed.1.login_id == "admin" - - 
nm_override_local_users.proposed.1.first_name == "admin" - - nm_override_local_users.proposed.1.remote_user_authorization == false - - nm_override_local_users.proposed.1.reuse_limitation == 0 - - nm_override_local_users.proposed.1.security_domains | length == 1 - - nm_override_local_users.proposed.1.security_domains.0.name == "all" - - nm_override_local_users.proposed.1.security_domains.0.roles | length == 1 - - nm_override_local_users.proposed.1.security_domains.0.roles.0 == "super_admin" - - nm_override_local_users.proposed.1.time_interval_limitation == 0 - - nm_override_local_users.proposed.2.login_id == "ansible_local_user_3" - - nm_override_local_users.proposed.2.security_domains.0.name == "all" - - -# DELETE + +- name: Ensure local users do not exist for next tests + cisco.nd.nd_local_user: + <<: *clean_all_local_users + +# --- OVERRIDDEN STATE TESTS --- + +# OVERRIDDEN STATE TESTS: CREATE +- name: Create local users with full and minimum configuration (overridden state - check mode) + cisco.nd.nd_local_user: &create_local_user_overridden_state + <<: *nd_info + config: + - login_id: admin + first_name: admin + remote_user_authorization: false + reuse_limitation: 0 + time_interval_limitation: 0 + security_domains: + - name: all + roles: + - super_admin + - email: ansibleuser@example.com + login_id: ansible_local_user + first_name: Ansible first name + last_name: Ansible last name + user_password: ansibleLocalUserPassword1%Test + reuse_limitation: 20 + time_interval_limitation: 10 + security_domains: + - name: all + roles: + - observer + - support_engineer + remote_id_claim: ansible_remote_user + remote_user_authorization: true + - login_id: ansible_local_user_2 + user_password: ansibleLocalUser2Password1%Test + security_domains: + - name: all + state: merged + check_mode: true + register: cm_overridden_create_local_users + +- name: Create local users with full and minimum configuration (overridden state - normal mode) + cisco.nd.nd_local_user: + <<: 
*create_local_user_overridden_state + register: nm_overridden_create_local_users + +- name: Asserts for local users overridden state creation tasks + ansible.builtin.assert: + that: + - cm_overridden_create_local_users is changed + - cm_overridden_create_local_users.after | length == 3 + - cm_overridden_create_local_users.after.0.login_id == "admin" + - cm_overridden_create_local_users.after.0.first_name == "admin" + - cm_overridden_create_local_users.after.0.remote_user_authorization == false + - cm_overridden_create_local_users.after.0.reuse_limitation == 0 + - cm_overridden_create_local_users.after.0.security_domains | length == 1 + - cm_overridden_create_local_users.after.0.security_domains.0.name == "all" + - cm_overridden_create_local_users.after.0.security_domains.0.roles | length == 1 + - cm_overridden_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - cm_overridden_create_local_users.after.0.time_interval_limitation == 0 + - cm_overridden_create_local_users.after.1.email == "ansibleuser@example.com" + - cm_overridden_create_local_users.after.1.first_name == "Ansible first name" + - cm_overridden_create_local_users.after.1.last_name == "Ansible last name" + - cm_overridden_create_local_users.after.1.login_id == "ansible_local_user" + - cm_overridden_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - cm_overridden_create_local_users.after.1.remote_user_authorization == true + - cm_overridden_create_local_users.after.1.reuse_limitation == 20 + - cm_overridden_create_local_users.after.1.security_domains | length == 1 + - cm_overridden_create_local_users.after.1.security_domains.0.name == "all" + - cm_overridden_create_local_users.after.1.security_domains.0.roles | length == 2 + - cm_overridden_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - cm_overridden_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - 
cm_overridden_create_local_users.after.1.time_interval_limitation == 10 + - cm_overridden_create_local_users.after.2.login_id == "ansible_local_user_2" + - cm_overridden_create_local_users.after.2.security_domains | length == 1 + - cm_overridden_create_local_users.after.2.security_domains.0.name == "all" + - cm_overridden_create_local_users.before | length == 1 + - cm_overridden_create_local_users.before.0.login_id == "admin" + - cm_overridden_create_local_users.before.0.first_name == "admin" + - cm_overridden_create_local_users.before.0.remote_user_authorization == false + - cm_overridden_create_local_users.before.0.reuse_limitation == 0 + - cm_overridden_create_local_users.before.0.security_domains | length == 1 + - cm_overridden_create_local_users.before.0.security_domains.0.name == "all" + - cm_overridden_create_local_users.before.0.security_domains.0.roles | length == 1 + - cm_overridden_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - cm_overridden_create_local_users.before.0.time_interval_limitation == 0 + - cm_overridden_create_local_users.proposed.0.email == "ansibleuser@example.com" + - cm_overridden_create_local_users.proposed.0.first_name == "Ansible first name" + - cm_overridden_create_local_users.proposed.0.last_name == "Ansible last name" + - cm_overridden_create_local_users.proposed.0.login_id == "ansible_local_user" + - cm_overridden_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - cm_overridden_create_local_users.proposed.0.remote_user_authorization == true + - cm_overridden_create_local_users.proposed.0.reuse_limitation == 20 + - cm_overridden_create_local_users.proposed.0.security_domains | length == 1 + - cm_overridden_create_local_users.proposed.0.security_domains.0.name == "all" + - cm_overridden_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - cm_overridden_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - 
cm_overridden_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - cm_overridden_create_local_users.proposed.0.time_interval_limitation == 10 + - cm_overridden_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - cm_overridden_create_local_users.proposed.1.security_domains | length == 1 + - cm_overridden_create_local_users.proposed.1.security_domains.0.name == "all" + - nm_overridden_create_local_users is changed + - nm_overridden_create_local_users.after.0.first_name == "admin" + - nm_overridden_create_local_users.after.0.remote_user_authorization == false + - nm_overridden_create_local_users.after.0.reuse_limitation == 0 + - nm_overridden_create_local_users.after.0.security_domains | length == 1 + - nm_overridden_create_local_users.after.0.security_domains.0.name == "all" + - nm_overridden_create_local_users.after.0.security_domains.0.roles | length == 1 + - nm_overridden_create_local_users.after.0.security_domains.0.roles.0 == "super_admin" + - nm_overridden_create_local_users.after.0.time_interval_limitation == 0 + - nm_overridden_create_local_users.after.1.email == "ansibleuser@example.com" + - nm_overridden_create_local_users.after.1.first_name == "Ansible first name" + - nm_overridden_create_local_users.after.1.last_name == "Ansible last name" + - nm_overridden_create_local_users.after.1.login_id == "ansible_local_user" + - nm_overridden_create_local_users.after.1.remote_id_claim == "ansible_remote_user" + - nm_overridden_create_local_users.after.1.remote_user_authorization == true + - nm_overridden_create_local_users.after.1.reuse_limitation == 20 + - nm_overridden_create_local_users.after.1.security_domains | length == 1 + - nm_overridden_create_local_users.after.1.security_domains.0.name == "all" + - nm_overridden_create_local_users.after.1.security_domains.0.roles | length == 2 + - nm_overridden_create_local_users.after.1.security_domains.0.roles.0 == "observer" + - 
nm_overridden_create_local_users.after.1.security_domains.0.roles.1 == "support_engineer" + - nm_overridden_create_local_users.after.1.time_interval_limitation == 10 + - nm_overridden_create_local_users.after.2.login_id == "ansible_local_user_2" + - nm_overridden_create_local_users.after.2.security_domains | length == 1 + - nm_overridden_create_local_users.after.2.security_domains.0.name == "all" + - nm_overridden_create_local_users.before | length == 1 + - nm_overridden_create_local_users.before.0.login_id == "admin" + - nm_overridden_create_local_users.before.0.first_name == "admin" + - nm_overridden_create_local_users.before.0.remote_user_authorization == false + - nm_overridden_create_local_users.before.0.reuse_limitation == 0 + - nm_overridden_create_local_users.before.0.security_domains | length == 1 + - nm_overridden_create_local_users.before.0.security_domains.0.name == "all" + - nm_overridden_create_local_users.before.0.security_domains.0.roles | length == 1 + - nm_overridden_create_local_users.before.0.security_domains.0.roles.0 == "super_admin" + - nm_overridden_create_local_users.before.0.time_interval_limitation == 0 + - nm_overridden_create_local_users.proposed.0.email == "ansibleuser@example.com" + - nm_overridden_create_local_users.proposed.0.first_name == "Ansible first name" + - nm_overridden_create_local_users.proposed.0.last_name == "Ansible last name" + - nm_overridden_create_local_users.proposed.0.login_id == "ansible_local_user" + - nm_overridden_create_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - nm_overridden_create_local_users.proposed.0.remote_user_authorization == true + - nm_overridden_create_local_users.proposed.0.reuse_limitation == 20 + - nm_overridden_create_local_users.proposed.0.security_domains | length == 1 + - nm_overridden_create_local_users.proposed.0.security_domains.0.name == "all" + - nm_overridden_create_local_users.proposed.0.security_domains.0.roles | length == 2 + - 
nm_overridden_create_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - nm_overridden_create_local_users.proposed.0.security_domains.0.roles.1 == "support_engineer" + - nm_overridden_create_local_users.proposed.0.time_interval_limitation == 10 + - nm_overridden_create_local_users.proposed.1.login_id == "ansible_local_user_2" + - nm_overridden_create_local_users.proposed.1.security_domains | length == 1 + - nm_overridden_create_local_users.proposed.1.security_domains.0.name == "all" + +# OVERRIDDEN STATE TESTS: UPDATE +- name: Override local users with minimum configuration (overridden state - check mode) + cisco.nd.nd_local_user: &update_all_local_users_overridden_state + <<: *nd_info + config: + - email: overrideansibleuser@example.com + login_id: ansible_local_user + first_name: Overridden Ansible first name + last_name: Overridden Ansible last name + user_password: overideansibleLocalUserPassword1% + reuse_limitation: 15 + time_interval_limitation: 5 + security_domains: + - name: all + roles: + - observer + remote_id_claim: ansible_remote_user + remote_user_authorization: true + - login_id: admin + first_name: admin + remote_user_authorization: false + reuse_limitation: 0 + time_interval_limitation: 0 + security_domains: + - name: all + roles: + - super_admin + - login_id: ansible_local_user_3 + user_password: ansibleLocalUser3Password1%Test + security_domains: + - name: all + state: overridden + check_mode: true + register: cm_override_local_users + +- name: Override local users with minimum configuration (overridden state - normal mode) + cisco.nd.nd_local_user: + <<: *update_all_local_users_overridden_state + register: nm_override_local_users + +- name: Asserts for local users overridden state update tasks + ansible.builtin.assert: + that: + - cm_override_local_users is changed + - cm_override_local_users.after | length == 3 + - cm_override_local_users.after.0.email == "overrideansibleuser@example.com" + - 
cm_override_local_users.after.0.first_name == "Overridden Ansible first name" + - cm_override_local_users.after.0.last_name == "Overridden Ansible last name" + - cm_override_local_users.after.0.login_id == "ansible_local_user" + - cm_override_local_users.after.0.remote_id_claim == "ansible_remote_user" + - cm_override_local_users.after.0.remote_user_authorization == true + - cm_override_local_users.after.0.reuse_limitation == 15 + - cm_override_local_users.after.0.security_domains | length == 1 + - cm_override_local_users.after.0.security_domains.0.name == "all" + - cm_override_local_users.after.0.security_domains.0.roles | length == 1 + - cm_override_local_users.after.0.security_domains.0.roles.0 == "observer" + - cm_override_local_users.after.0.time_interval_limitation == 5 + - cm_override_local_users.after.1.login_id == "admin" + - cm_override_local_users.after.1.first_name == "admin" + - cm_override_local_users.after.1.remote_user_authorization == false + - cm_override_local_users.after.1.reuse_limitation == 0 + - cm_override_local_users.after.1.security_domains | length == 1 + - cm_override_local_users.after.1.security_domains.0.name == "all" + - cm_override_local_users.after.1.security_domains.0.roles | length == 1 + - cm_override_local_users.after.1.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.after.1.time_interval_limitation == 0 + - cm_override_local_users.after.2.login_id == "ansible_local_user_3" + - cm_override_local_users.after.2.security_domains.0.name == "all" + - cm_override_local_users.before | length == 3 + - cm_override_local_users.before.2.first_name == "admin" + - cm_override_local_users.before.2.remote_user_authorization == false + - cm_override_local_users.before.2.reuse_limitation == 0 + - cm_override_local_users.before.2.security_domains | length == 1 + - cm_override_local_users.before.2.security_domains.0.name == "all" + - cm_override_local_users.before.2.security_domains.0.roles | length == 1 + - 
cm_override_local_users.before.2.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.before.2.time_interval_limitation == 0 + - cm_override_local_users.before.1.email == "updatedansibleuser@example.com" + - cm_override_local_users.before.1.first_name == "Updated Ansible first name" + - cm_override_local_users.before.1.last_name == "Updated Ansible last name" + - cm_override_local_users.before.1.login_id == "ansible_local_user" + - cm_override_local_users.before.1.remote_user_authorization == false + - cm_override_local_users.before.1.reuse_limitation == 25 + - cm_override_local_users.before.1.security_domains | length == 1 + - cm_override_local_users.before.1.security_domains.0.name == "all" + - cm_override_local_users.before.1.security_domains.0.roles | length == 1 + - cm_override_local_users.before.1.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.before.1.time_interval_limitation == 15 + - cm_override_local_users.before.0.email == "secondansibleuser@example.com" + - cm_override_local_users.before.0.first_name == "Second Ansible first name" + - cm_override_local_users.before.0.last_name == "Second Ansible last name" + - cm_override_local_users.before.0.login_id == "ansible_local_user_2" + - cm_override_local_users.before.0.remote_id_claim == "ansible_remote_user_2" + - cm_override_local_users.before.0.remote_user_authorization == true + - cm_override_local_users.before.0.reuse_limitation == 20 + - cm_override_local_users.before.0.security_domains | length == 1 + - cm_override_local_users.before.0.security_domains.0.name == "all" + - cm_override_local_users.before.0.security_domains.0.roles | length == 1 + - cm_override_local_users.before.0.security_domains.0.roles.0 == "fabric_admin" + - cm_override_local_users.before.0.time_interval_limitation == 10 + - cm_override_local_users.proposed.0.email == "overrideansibleuser@example.com" + - cm_override_local_users.proposed.0.first_name == "Overridden Ansible first name" + - 
cm_override_local_users.proposed.0.last_name == "Overridden Ansible last name" + - cm_override_local_users.proposed.0.login_id == "ansible_local_user" + - cm_override_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - cm_override_local_users.proposed.0.remote_user_authorization == true + - cm_override_local_users.proposed.0.reuse_limitation == 15 + - cm_override_local_users.proposed.0.security_domains | length == 1 + - cm_override_local_users.proposed.0.security_domains.0.name == "all" + - cm_override_local_users.proposed.0.security_domains.0.roles | length == 1 + - cm_override_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - cm_override_local_users.proposed.0.time_interval_limitation == 5 + - cm_override_local_users.proposed.1.login_id == "admin" + - cm_override_local_users.proposed.1.first_name == "admin" + - cm_override_local_users.proposed.1.remote_user_authorization == false + - cm_override_local_users.proposed.1.reuse_limitation == 0 + - cm_override_local_users.proposed.1.security_domains | length == 1 + - cm_override_local_users.proposed.1.security_domains.0.name == "all" + - cm_override_local_users.proposed.1.security_domains.0.roles | length == 1 + - cm_override_local_users.proposed.1.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.proposed.1.time_interval_limitation == 0 + - cm_override_local_users.proposed.2.login_id == "ansible_local_user_3" + - cm_override_local_users.proposed.2.security_domains.0.name == "all" + - cm_override_local_users is changed + - cm_override_local_users.after | length == 3 + - cm_override_local_users.after.0.email == "overrideansibleuser@example.com" + - cm_override_local_users.after.0.first_name == "Overridden Ansible first name" + - cm_override_local_users.after.0.last_name == "Overridden Ansible last name" + - cm_override_local_users.after.0.login_id == "ansible_local_user" + - cm_override_local_users.after.0.remote_id_claim == "ansible_remote_user" + - 
cm_override_local_users.after.0.remote_user_authorization == true + - cm_override_local_users.after.0.reuse_limitation == 15 + - cm_override_local_users.after.0.security_domains | length == 1 + - cm_override_local_users.after.0.security_domains.0.name == "all" + - cm_override_local_users.after.0.security_domains.0.roles | length == 1 + - cm_override_local_users.after.0.security_domains.0.roles.0 == "observer" + - cm_override_local_users.after.0.time_interval_limitation == 5 + - cm_override_local_users.after.1.login_id == "admin" + - cm_override_local_users.after.1.first_name == "admin" + - cm_override_local_users.after.1.remote_user_authorization == false + - cm_override_local_users.after.1.reuse_limitation == 0 + - cm_override_local_users.after.1.security_domains | length == 1 + - cm_override_local_users.after.1.security_domains.0.name == "all" + - cm_override_local_users.after.1.security_domains.0.roles | length == 1 + - cm_override_local_users.after.1.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.after.1.time_interval_limitation == 0 + - cm_override_local_users.after.2.login_id == "ansible_local_user_3" + - cm_override_local_users.after.2.security_domains.0.name == "all" + - cm_override_local_users.before | length == 3 + - cm_override_local_users.before.2.first_name == "admin" + - cm_override_local_users.before.2.remote_user_authorization == false + - cm_override_local_users.before.2.reuse_limitation == 0 + - cm_override_local_users.before.2.security_domains | length == 1 + - cm_override_local_users.before.2.security_domains.0.name == "all" + - cm_override_local_users.before.2.security_domains.0.roles | length == 1 + - cm_override_local_users.before.2.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.before.2.time_interval_limitation == 0 + - cm_override_local_users.before.1.email == "updatedansibleuser@example.com" + - cm_override_local_users.before.1.first_name == "Updated Ansible first name" + - 
cm_override_local_users.before.1.last_name == "Updated Ansible last name" + - cm_override_local_users.before.1.login_id == "ansible_local_user" + - cm_override_local_users.before.1.remote_user_authorization == false + - cm_override_local_users.before.1.reuse_limitation == 25 + - cm_override_local_users.before.1.security_domains | length == 1 + - cm_override_local_users.before.1.security_domains.0.name == "all" + - cm_override_local_users.before.1.security_domains.0.roles | length == 1 + - cm_override_local_users.before.1.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.before.1.time_interval_limitation == 15 + - cm_override_local_users.before.0.email == "secondansibleuser@example.com" + - cm_override_local_users.before.0.first_name == "Second Ansible first name" + - cm_override_local_users.before.0.last_name == "Second Ansible last name" + - cm_override_local_users.before.0.login_id == "ansible_local_user_2" + - cm_override_local_users.before.0.remote_id_claim == "ansible_remote_user_2" + - cm_override_local_users.before.0.remote_user_authorization == true + - cm_override_local_users.before.0.reuse_limitation == 20 + - cm_override_local_users.before.0.security_domains | length == 1 + - cm_override_local_users.before.0.security_domains.0.name == "all" + - cm_override_local_users.before.0.security_domains.0.roles | length == 1 + - cm_override_local_users.before.0.security_domains.0.roles.0 == "fabric_admin" + - cm_override_local_users.before.0.time_interval_limitation == 10 + - cm_override_local_users.proposed.0.email == "overrideansibleuser@example.com" + - cm_override_local_users.proposed.0.first_name == "Overridden Ansible first name" + - cm_override_local_users.proposed.0.last_name == "Overridden Ansible last name" + - cm_override_local_users.proposed.0.login_id == "ansible_local_user" + - cm_override_local_users.proposed.0.remote_id_claim == "ansible_remote_user" + - cm_override_local_users.proposed.0.remote_user_authorization == true + - 
cm_override_local_users.proposed.0.reuse_limitation == 15 + - cm_override_local_users.proposed.0.security_domains | length == 1 + - cm_override_local_users.proposed.0.security_domains.0.name == "all" + - cm_override_local_users.proposed.0.security_domains.0.roles | length == 1 + - cm_override_local_users.proposed.0.security_domains.0.roles.0 == "observer" + - cm_override_local_users.proposed.0.time_interval_limitation == 5 + - cm_override_local_users.proposed.1.login_id == "admin" + - cm_override_local_users.proposed.1.first_name == "admin" + - cm_override_local_users.proposed.1.remote_user_authorization == false + - cm_override_local_users.proposed.1.reuse_limitation == 0 + - cm_override_local_users.proposed.1.security_domains | length == 1 + - cm_override_local_users.proposed.1.security_domains.0.name == "all" + - cm_override_local_users.proposed.1.security_domains.0.roles | length == 1 + - cm_override_local_users.proposed.1.security_domains.0.roles.0 == "super_admin" + - cm_override_local_users.proposed.1.time_interval_limitation == 0 + - cm_override_local_users.proposed.2.login_id == "ansible_local_user_3" + - cm_override_local_users.proposed.2.security_domains.0.name == "all" + +# --- DELETED STATE TESTS --- + - name: Delete local user (check mode) cisco.nd.nd_local_user: &delete_local_user <<: *nd_info @@ -594,7 +1065,6 @@ - cm_delete_local_user.before.2.security_domains.0.roles | length == 1 - cm_delete_local_user.before.2.security_domains.0.roles.0 == "super_admin" - cm_delete_local_user.before.2.time_interval_limitation == 0 - - cm_delete_local_user.diff == [] - cm_delete_local_user.proposed.0.login_id == "ansible_local_user" - nm_delete_local_user is changed - nm_delete_local_user.after | length == 2 @@ -632,20 +1102,14 @@ - nm_delete_local_user.before.2.security_domains.0.roles | length == 1 - nm_delete_local_user.before.2.security_domains.0.roles.0 == "super_admin" - nm_delete_local_user.before.2.time_interval_limitation == 0 - - 
nm_delete_local_user.diff == [] - nm_delete_local_user.proposed.0.login_id == "ansible_local_user" - nm_delete_local_user_again is not changed - nm_delete_local_user_again.after == nm_delete_local_user.after - nm_delete_local_user_again.before == nm_delete_local_user.after - - nm_delete_local_user_again.diff == [] - nm_delete_local_user_again.proposed == nm_delete_local_user.proposed -# CLEAN UP +# --- CLEAN UP --- + - name: Ensure local users do not exist cisco.nd.nd_local_user: - <<: *nd_info - config: - - login_id: ansible_local_user - - login_id: ansible_local_user_2 - - login_id: ansible_local_user_3 - state: deleted + <<: *clean_all_local_users From a92a30151459d3878ebd0f9917519f405313c298 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 19 Mar 2026 13:03:29 -0400 Subject: [PATCH 124/131] [ignore] Revert local users endpoints filename to aaa_local_users.py. --- .../v1/infra/{infra_aaa_local_users.py => aaa_local_users.py} | 0 plugins/module_utils/orchestrators/local_user.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename plugins/module_utils/endpoints/v1/infra/{infra_aaa_local_users.py => aaa_local_users.py} (100%) diff --git a/plugins/module_utils/endpoints/v1/infra/infra_aaa_local_users.py b/plugins/module_utils/endpoints/v1/infra/aaa_local_users.py similarity index 100% rename from plugins/module_utils/endpoints/v1/infra/infra_aaa_local_users.py rename to plugins/module_utils/endpoints/v1/infra/aaa_local_users.py diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index 0c2a6bf8..b567efa5 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -10,7 +10,7 @@ from ansible_collections.cisco.nd.plugins.module_utils.models.local_user.local_user import LocalUserModel from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from 
ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.infra_aaa_local_users import ( +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.v1.infra.aaa_local_users import ( EpInfraAaaLocalUsersPost, EpInfraAaaLocalUsersPut, EpInfraAaaLocalUsersDelete, From 64ae6ececacdfb6d8dfb2891cac94876784fca40 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 19 Mar 2026 13:30:59 -0400 Subject: [PATCH 125/131] [ignore] Change in NDStateMachine initialization to take advantage of from_ansible_config static method from NDConfigCollection. --- plugins/module_utils/nd_state_machine.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 37324020..56adc9a9 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -44,17 +44,10 @@ def __init__(self, module: AnsibleModule, model_orchestrator: Type[NDBaseOrchest # Ongoing collection of configuration objects that were changed self.sent = NDConfigCollection(model_class=self.model_class) # Collection of configuration objects given by user - self.proposed = NDConfigCollection(model_class=self.model_class) - - for config in self.module.params.get("config", []): - # Parse config into model - item = self.model_class.from_config(config) - self.proposed.add(item) + self.proposed = NDConfigCollection.from_ansible_config(data=self.module.params.get("config", []), model_class=self.model_class) self.output.assign(after=self.existing, before=self.before, proposed=self.proposed) - except ValidationError as e: - raise NDStateMachineError(f"Invalid configuration. 
for config {config}: {str(e)}") from e except Exception as e: raise NDStateMachineError(f"Initialization failed: {str(e)}") from e From a2037ac1f54e76ef35009d7621be314eb8c5f1d0 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Thu, 19 Mar 2026 13:32:38 -0400 Subject: [PATCH 126/131] [ignore] Remove ValidationError import from nd_state_machine.py. --- plugins/module_utils/nd_state_machine.py | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 56adc9a9..fb812c33 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -5,7 +5,6 @@ from __future__ import absolute_import, division, print_function from typing import Type -from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import ValidationError from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.nd_output import NDOutput From fff4f7dd67f53cef6a9a0c3aa9189406214df9a1 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 24 Mar 2026 12:21:33 -0400 Subject: [PATCH 127/131] [ignore] Add function to nd_local_user module. Slighty fix Documentation and Example sections in nd_local_user module. Remove Dict class inheritance from NDConstantMapping. 
--- plugins/module_utils/constants.py | 2 +- plugins/modules/nd_local_user.py | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index adbe345e..f5bfd977 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -10,7 +10,7 @@ from copy import deepcopy -class NDConstantMapping(Dict): +class NDConstantMapping: def __init__(self, data: Dict): self.data = data self.new_dict = deepcopy(data) diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 53680e99..4f1ff197 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -109,6 +109,7 @@ notes: - This module is only supported on Nexus Dashboard having version 4.2.1 or higher. - This module is not idempotent when creating or updating a local user object when O(config.user_password) is used. +- When using O(state=overridden), admin user configuration must be specified as it cannot be deleted. 
""" EXAMPLES = r""" @@ -137,13 +138,14 @@ config: - login_id: local_user_min user_password: localUserMinuser_password - security_domain: all + security_domains: + - name: all state: merged - name: Update local user cisco.nd.nd_local_user: config: - - email: udpateduser@example.com + - email: updateduser@example.com login_id: local_user first_name: Updated user first name last_name: Updated user last name @@ -155,7 +157,6 @@ roles: super_admin - name: ansible_domain roles: observer - roles: super_admin remote_id_claim: "" remote_user_authorization: false state: replaced @@ -173,6 +174,7 @@ from ansible.module_utils.basic import AnsibleModule from ansible_collections.cisco.nd.plugins.module_utils.nd import nd_argument_spec from ansible_collections.cisco.nd.plugins.module_utils.nd_state_machine import NDStateMachine +from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import require_pydantic from ansible_collections.cisco.nd.plugins.module_utils.models.local_user.local_user import LocalUserModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.local_user import LocalUserOrchestrator @@ -185,6 +187,7 @@ def main(): argument_spec=argument_spec, supports_check_mode=True, ) + require_pydantic(module) try: # Initialize StateMachine From f4d58f5cbc9ff71a8bcdd29582fbecd6dec0e3de Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 24 Mar 2026 12:33:17 -0400 Subject: [PATCH 128/131] [ignore] Make NDBaseOrchestrator a Generic class. 
--- plugins/module_utils/orchestrators/base.py | 18 ++++++++++-------- .../module_utils/orchestrators/local_user.py | 6 +++--- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index fe16a524..be790125 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -5,14 +5,16 @@ from __future__ import absolute_import, division, print_function from ansible_collections.cisco.nd.plugins.module_utils.common.pydantic_compat import BaseModel, ConfigDict -from typing import ClassVar, Type, Optional +from typing import ClassVar, Type, Optional, Generic, TypeVar from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType +ModelType = TypeVar("ModelType", bound=NDBaseModel) -class NDBaseOrchestrator(BaseModel): + +class NDBaseOrchestrator(BaseModel, Generic[ModelType]): model_config = ConfigDict( use_enum_values=True, validate_assignment=True, @@ -20,7 +22,7 @@ class NDBaseOrchestrator(BaseModel): arbitrary_types_allowed=True, ) - model_class: ClassVar[Type[NDBaseModel]] = Type[NDBaseModel] + model_class: ClassVar[Type[NDBaseModel]] = NDBaseModel # NOTE: if not defined by subclasses, return an error as they are required create_endpoint: Type[NDEndpointBaseModel] @@ -33,14 +35,14 @@ class NDBaseOrchestrator(BaseModel): sender: NDModule # NOTE: Generic CRUD API operations for simple endpoints with single identifier (e.g. 
"api/v1/infra/aaa/LocalUsers/{loginID}") - def create(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: + def create(self, model_instance: ModelType, **kwargs) -> ResponseType: try: api_endpoint = self.create_endpoint() return self.sender.request(path=api_endpoint.path, method=api_endpoint.verb, data=model_instance.to_payload()) except Exception as e: raise Exception(f"Create failed for {model_instance.get_identifier_value()}: {e}") from e - def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: + def update(self, model_instance: ModelType, **kwargs) -> ResponseType: try: api_endpoint = self.update_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) @@ -48,7 +50,7 @@ def update(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: except Exception as e: raise Exception(f"Update failed for {model_instance.get_identifier_value()}: {e}") from e - def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: + def delete(self, model_instance: ModelType, **kwargs) -> ResponseType: try: api_endpoint = self.delete_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) @@ -56,7 +58,7 @@ def delete(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: except Exception as e: raise Exception(f"Delete failed for {model_instance.get_identifier_value()}: {e}") from e - def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: + def query_one(self, model_instance: ModelType, **kwargs) -> ResponseType: try: api_endpoint = self.query_one_endpoint() api_endpoint.set_identifiers(model_instance.get_identifier_value()) @@ -64,7 +66,7 @@ def query_one(self, model_instance: NDBaseModel, **kwargs) -> ResponseType: except Exception as e: raise Exception(f"Query failed for {model_instance.get_identifier_value()}: {e}") from e - def query_all(self, model_instance: Optional[NDBaseModel] = None, **kwargs) -> ResponseType: + def query_all(self, model_instance: 
Optional[ModelType] = None, **kwargs) -> ResponseType: try: api_endpoint = self.query_all_endpoint() result = self.sender.query_obj(api_endpoint.path) diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index b567efa5..e95a3003 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -4,7 +4,7 @@ from __future__ import absolute_import, division, print_function -from typing import Type +from typing import Type, ClassVar from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.base import NDBaseOrchestrator from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.models.local_user.local_user import LocalUserModel @@ -18,8 +18,8 @@ ) -class LocalUserOrchestrator(NDBaseOrchestrator): - model_class: Type[NDBaseModel] = LocalUserModel +class LocalUserOrchestrator(NDBaseOrchestrator[LocalUserModel]): + model_class: ClassVar[Type[NDBaseModel]] = LocalUserModel create_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPost update_endpoint: Type[NDEndpointBaseModel] = EpInfraAaaLocalUsersPut From 8a44e8ded1a4ea5dfe921ce34312fd5b3082a73a Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 12:18:35 -0500 Subject: [PATCH 129/131] [ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues. 
--- plugins/module_utils/nd_state_machine.py | 1 + plugins/module_utils/orchestrators/base.py | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index fb812c33..9d1d138a 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -74,6 +74,7 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier + response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index be790125..58cd0aad 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,8 +8,13 @@ from typing import ClassVar, Type, Optional, Generic, TypeVar from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule +<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType +======= +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint +from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType +>>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) ModelType = TypeVar("ModelType", bound=NDBaseModel) From 76d6b4e88f033b48a123951efaf71a66c3ceb534 Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Tue, 3 Mar 2026 17:19:47 -0500 Subject: [PATCH 130/131] [ignore] Rename NDBaseSmartEndpoint to NDBaseEndpoint. Fix importation issues. 
--- plugins/module_utils/endpoints/base.py | 2 +- plugins/module_utils/orchestrators/base.py | 7 +------ plugins/module_utils/orchestrators/local_user.py | 2 +- plugins/modules/nd_local_user.py | 3 +++ 4 files changed, 6 insertions(+), 8 deletions(-) diff --git a/plugins/module_utils/endpoints/base.py b/plugins/module_utils/endpoints/base.py index c3d7f4e1..f2f16dbc 100644 --- a/plugins/module_utils/endpoints/base.py +++ b/plugins/module_utils/endpoints/base.py @@ -134,4 +134,4 @@ def verb(self) -> HttpVerbEnum: # NOTE: function to set endpoints attribute fields from identifiers -> acts as the bridge between Models and Endpoints for API Request Orchestration def set_identifiers(self, identifier: IdentifierKey = None): - pass + pass \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/base.py b/plugins/module_utils/orchestrators/base.py index 58cd0aad..42944e26 100644 --- a/plugins/module_utils/orchestrators/base.py +++ b/plugins/module_utils/orchestrators/base.py @@ -8,13 +8,8 @@ from typing import ClassVar, Type, Optional, Generic, TypeVar from ansible_collections.cisco.nd.plugins.module_utils.models.base import NDBaseModel from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule -<<<<<<< HEAD from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDEndpointBaseModel from ansible_collections.cisco.nd.plugins.module_utils.orchestrators.types import ResponseType -======= -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.base import NDBaseSmartEndpoint -from ansible_collections.cisco.nd.plugins.module_utils.endpoints.types import ResponseType ->>>>>>> 8d72e06 ([ignore] Restructure api_endpoints folder into endpoints -> v1. Fix some sanity issues.) 
ModelType = TypeVar("ModelType", bound=NDBaseModel) @@ -77,4 +72,4 @@ def query_all(self, model_instance: Optional[ModelType] = None, **kwargs) -> Res result = self.sender.query_obj(api_endpoint.path) return result or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/module_utils/orchestrators/local_user.py b/plugins/module_utils/orchestrators/local_user.py index e95a3003..425a8b9b 100644 --- a/plugins/module_utils/orchestrators/local_user.py +++ b/plugins/module_utils/orchestrators/local_user.py @@ -36,4 +36,4 @@ def query_all(self) -> ResponseType: result = self.sender.query_obj(api_endpoint.path) return result.get("localusers", []) or [] except Exception as e: - raise Exception(f"Query all failed: {e}") from e + raise Exception(f"Query all failed: {e}") from e \ No newline at end of file diff --git a/plugins/modules/nd_local_user.py b/plugins/modules/nd_local_user.py index 4f1ff197..24eb49c4 100644 --- a/plugins/modules/nd_local_user.py +++ b/plugins/modules/nd_local_user.py @@ -197,6 +197,9 @@ def main(): ) # Manage state + # TODO: return module output class object: + # output = nd_state_machine.manage_state() + # module.exit_json(**output) nd_state_machine.manage_state() module.exit_json(**nd_state_machine.output.format()) From c03bea88b44745eea1e0b02994b8489b3ec7c50b Mon Sep 17 00:00:00 2001 From: Gaspard Micol Date: Fri, 6 Mar 2026 13:35:38 -0500 Subject: [PATCH 131/131] [ignore] Add NDOutput class. 
Modify NDStateMachine and nd_local_user accordingly --- plugins/module_utils/nd_state_machine.py | 1 - tests/integration/inventory.networking | 10 +++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/plugins/module_utils/nd_state_machine.py b/plugins/module_utils/nd_state_machine.py index 9d1d138a..fb812c33 100644 --- a/plugins/module_utils/nd_state_machine.py +++ b/plugins/module_utils/nd_state_machine.py @@ -74,7 +74,6 @@ def _manage_create_update_state(self) -> None: """ for proposed_item in self.proposed: # Extract identifier - response = {} identifier = proposed_item.get_identifier_value() try: # Determine diff status diff --git a/tests/integration/inventory.networking b/tests/integration/inventory.networking index 6b37d8f3..2aa818d7 100644 --- a/tests/integration/inventory.networking +++ b/tests/integration/inventory.networking @@ -1,15 +1,15 @@ [nd] -nd ansible_host= +nd-test ansible_host=10.48.161.120 [nd:vars] ansible_connection=ansible.netcommon.httpapi -ansible_python_interpreter=/usr/bin/python3.9 +ansible_python_interpreter=/usr/bin/python3.12 ansible_network_os=cisco.nd.nd ansible_httpapi_validate_certs=False ansible_httpapi_use_ssl=True ansible_httpapi_use_proxy=True -ansible_user=ansible_github_ci -ansible_password= +ansible_user=admin +ansible_password=C1sco123 insights_group= site_name= site_host= @@ -28,4 +28,4 @@ external_management_service_ip= external_data_service_ip= data_ip= data_gateway= -service_package_host=173.36.219.254 +service_package_host=173.36.219.254