From 6dd30fa4196f93ed84c2689c25701838b372c69a Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Thu, 4 Jan 2024 10:29:16 +0100 Subject: [PATCH 01/10] Fix for remote mount Signed-off-by: Rajan Mishra --- roles/remotemount_configure/tasks/remotecluster.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/roles/remotemount_configure/tasks/remotecluster.yml b/roles/remotemount_configure/tasks/remotecluster.yml index 4da9dba6..5770f24f 100644 --- a/roles/remotemount_configure/tasks/remotecluster.yml +++ b/roles/remotemount_configure/tasks/remotecluster.yml @@ -285,9 +285,12 @@ run_once: True - set_fact: - owning_nodes_name: "{{ owning_nodes_name }} + [ '{{ item.adminNodeName }}' ]" + owning_nodes_name: "{{ owning_nodes_name + [item.adminNodeName] }}" with_items: "{{ owning_cluster_nodes.json.nodes }}" run_once: True + + - debug: + msg: "{{owning_nodes_name}}" # # This Section is when using daemonNodeName @@ -312,7 +315,7 @@ run_once: True - set_fact: - owning_daemon_nodes_name: "{{ owning_daemon_nodes_name }} + [ '{{ item.json.nodes.0.network.daemonNodeName }}' ]" + owning_daemon_nodes_name: "{{ owning_daemon_nodes_name + [item.json.nodes.0.network.daemonNodeName] }}" with_items: "{{ owning_cluster_daemonnodes.results }}" run_once: True From b93247f61dfccfac167ffdf0f398aa28bbe91183 Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Sun, 28 Apr 2024 15:25:14 +0200 Subject: [PATCH 02/10] Fixed sync issue Signed-off-by: Rajan Mishra --- roles/remotemount_configure/tasks/remotecluster.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/roles/remotemount_configure/tasks/remotecluster.yml b/roles/remotemount_configure/tasks/remotecluster.yml index 5770f24f..99e3e03e 100644 --- a/roles/remotemount_configure/tasks/remotecluster.yml +++ b/roles/remotemount_configure/tasks/remotecluster.yml @@ -288,9 +288,6 @@ owning_nodes_name: "{{ owning_nodes_name + [item.adminNodeName] }}" with_items: "{{ owning_cluster_nodes.json.nodes }}" run_once: True - - - debug: - msg: "{{owning_nodes_name}}" # # This Section is when using daemonNodeName From 0dbe217bf170659484a85fb838eb6f10ad305f1f Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Wed, 28 Aug 2024 19:45:27 +0200 Subject: [PATCH 03/10] CES S3 upgrade support role Signed-off-by: Rajan Mishra --- roles/s3_upgrade/README.md | 1 + roles/s3_upgrade/defaults/main.yml | 20 +++ roles/s3_upgrade/handlers/main.yml | 4 + roles/s3_upgrade/meta/main.yml | 20 +++ roles/s3_upgrade/tasks/apt/install.yml | 15 ++ roles/s3_upgrade/tasks/install.yml | 69 +++++++++ roles/s3_upgrade/tasks/install_dir_pkg.yml | 77 ++++++++++ roles/s3_upgrade/tasks/install_local_pkg.yml | 137 ++++++++++++++++++ roles/s3_upgrade/tasks/install_remote_pkg.yml | 109 ++++++++++++++ roles/s3_upgrade/tasks/install_repository.yml | 31 ++++ roles/s3_upgrade/tasks/main.yml | 4 + roles/s3_upgrade/tasks/yum/install.yml | 6 + roles/s3_upgrade/tasks/zypper/install.yml | 6 + roles/s3_upgrade/vars/main.yml | 10 ++ 14 files changed, 509 insertions(+) create mode 120000 roles/s3_upgrade/README.md create mode 100644 roles/s3_upgrade/defaults/main.yml create mode 100644 roles/s3_upgrade/handlers/main.yml create mode 100644 roles/s3_upgrade/meta/main.yml create mode 100644 roles/s3_upgrade/tasks/apt/install.yml create mode 100644 roles/s3_upgrade/tasks/install.yml create mode 100644 roles/s3_upgrade/tasks/install_dir_pkg.yml create mode 100644 roles/s3_upgrade/tasks/install_local_pkg.yml create mode 100644 roles/s3_upgrade/tasks/install_remote_pkg.yml create mode 100644 
roles/s3_upgrade/tasks/install_repository.yml create mode 100644 roles/s3_upgrade/tasks/main.yml create mode 100644 roles/s3_upgrade/tasks/yum/install.yml create mode 100644 roles/s3_upgrade/tasks/zypper/install.yml create mode 100644 roles/s3_upgrade/vars/main.yml diff --git a/roles/s3_upgrade/README.md b/roles/s3_upgrade/README.md new file mode 120000 index 00000000..6a3df305 --- /dev/null +++ b/roles/s3_upgrade/README.md @@ -0,0 +1 @@ +../../docs/README.NFS.md \ No newline at end of file diff --git a/roles/s3_upgrade/defaults/main.yml b/roles/s3_upgrade/defaults/main.yml new file mode 100644 index 00000000..a694d569 --- /dev/null +++ b/roles/s3_upgrade/defaults/main.yml @@ -0,0 +1,20 @@ +--- +# Default variables for the IBM Spectrum Scale (S3) role - +# either edit this file or define your own variables to override the defaults + +## Specify the URL of the (existing) Spectrum Scale YUM/apt/zypper repository +#scale_install_s3_repository_rpms: http:///s3_rpms/ +#scale_install_s3_repository_debs: http:///s3_debs/ +#scale_install_s3_repository_rpms_sles: http:///s3_rpms/sles12/ + +## List of S3 packages to install +scale_s3_packages: +- noobaa-core +- gpfs.mms3 + +## Temporary directory to copy installation package to +## (local package installation method) +scale_install_localpkg_tmpdir_path: /tmp + +## Flag to install s3 debug package +scale_s3_install_debuginfo: true diff --git a/roles/s3_upgrade/handlers/main.yml b/roles/s3_upgrade/handlers/main.yml new file mode 100644 index 00000000..2e896124 --- /dev/null +++ b/roles/s3_upgrade/handlers/main.yml @@ -0,0 +1,4 @@ +--- +# handlers file for node +- name: yum-clean-metadata + command: yum clean metadata diff --git a/roles/s3_upgrade/meta/main.yml b/roles/s3_upgrade/meta/main.yml new file mode 100644 index 00000000..d32d632b --- /dev/null +++ b/roles/s3_upgrade/meta/main.yml @@ -0,0 +1,20 @@ +--- +galaxy_info: + author: IBM Corporation + description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) + company: IBM + + license: Apache-2.0 + + min_ansible_version: 2.9 + + platforms: + - name: EL + versions: + - 7 + - 8 + + galaxy_tags: [] + +dependencies: + - ibm.spectrum_scale.core_common diff --git a/roles/s3_upgrade/tasks/apt/install.yml b/roles/s3_upgrade/tasks/apt/install.yml new file mode 100644 index 00000000..75fd2f00 --- /dev/null +++ b/roles/s3_upgrade/tasks/apt/install.yml @@ -0,0 +1,15 @@ +--- +- name: upgrade | Upgrade s3 packages + package: + name: "{{ scale_install_all_packages }}" + state: latest + when: scale_install_repository_url is defined + + +- name: upgrade | Upgrade GPFS S3 deb + apt: + deb: "{{ item }}" + state: latest + when: scale_install_repository_url is not defined + with_items: + - "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install.yml b/roles/s3_upgrade/tasks/install.yml new file mode 100644 index 00000000..88264e77 --- /dev/null +++ b/roles/s3_upgrade/tasks/install.yml @@ -0,0 +1,69 @@ +--- +# Install or update RPMs +# Ensure that installation method was chosen during previous role +- block: + - name: upgrade | Check for repository installation method + set_fact: + scale_installmethod: repository + when: + - scale_install_repository_url is defined + + - name: upgrade | Check for localpkg installation method + set_fact: + scale_installmethod: local_pkg + when: + - scale_install_repository_url is undefined + - scale_install_remotepkg_path is undefined + - scale_install_localpkg_path is defined + + - name: upgrade | Check for remotepkg 
installation method + set_fact: + scale_installmethod: remote_pkg + when: + - scale_install_repository_url is undefined + - scale_install_remotepkg_path is defined + + - name: upgrade | Check for directory package installation method + set_fact: + scale_installmethod: dir_pkg + when: + - scale_install_repository_url is undefined + - scale_install_remotepkg_path is undefined + - scale_install_localpkg_path is undefined + - scale_install_directory_pkg_path is defined + + - name: upgrade | Check installation method + assert: + that: scale_installmethod is defined + msg: >- + Please set the appropriate variable 'scale_install_*' for your desired + installation method! + run_once: true + delegate_to: localhost + +# Run chosen installation method to get list of RPMs + +- name: upgrade | Initialize list of packages + set_fact: + scale_install_all_packages: [] + +- name: upgrade | Set the extracted package directory path + set_fact: + s3_extracted_path: "{{ scale_extracted_path }}" + +- name: upgrade | Stat extracted packages directory + stat: + path: "{{ s3_extracted_path }}" + register: scale_extracted_gpfs_dir + +- include_tasks: install_{{ scale_installmethod }}.yml + +- import_tasks: apt/install.yml + when: ansible_distribution in scale_ubuntu_distribution + +- import_tasks: yum/install.yml + when: ansible_distribution in scale_rhel_distribution + +- import_tasks: zypper/install.yml + when: ansible_distribution in scale_sles_distribution + diff --git a/roles/s3_upgrade/tasks/install_dir_pkg.yml b/roles/s3_upgrade/tasks/install_dir_pkg.yml new file mode 100644 index 00000000..0dc1730a --- /dev/null +++ b/roles/s3_upgrade/tasks/install_dir_pkg.yml @@ -0,0 +1,77 @@ +--- +# Dir package installation method + +- block: ## run_once: true + - name: install | Stat directory installation package + stat: + path: "{{ scale_install_directory_pkg_path }}" + register: scale_install_dirpkg + + - name: install | Check directory installation package + assert: + that: scale_install_dirpkg.stat.exists + msg: >- + Please set the variable 'scale_install_directory_pkg_path' to point to the + local installation package (accessible on Ansible control machine)! 
+ run_once: true + delegate_to: localhost + +- name: install| Creates default directory + file: + path: "{{ scale_extracted_path }}" + state: directory + mode: a+x + recurse: yes + +- name: install | Stat extracted packages + stat: + path: "{{ scale_extracted_path + '/' + scale_install_directory_pkg_path | basename }}" + register: scale_install_gpfs_packagedir + +# +# Copy installation directory package to default +# +- block: + - name: install | Copy installation package to node + copy: + src: "{{ scale_install_directory_pkg_path }}" + dest: "{{ scale_extracted_path }}" + mode: a+x + +- name: install | Set installation package path + set_fact: + dir_path: "{{ scale_extracted_path + '/' + scale_install_directory_pkg_path | basename }}" + +- name: install | gpfs base path + set_fact: + gpfs_path_url: "{{ dir_path }}" + when: scale_install_directory_pkg_path is defined + +# +# Find noobaa-core +# +# + +- block: ## when: host is defined as a protocol node + + - name: install | Find noobaa-core (noobaa-core) package + find: + paths: "{{ gpfs_path_url }}" + patterns: noobaa-core* + register: scale_install_gpfs_s3 + + - name: install | Check valid GPFS (s3) package + assert: + that: scale_install_gpfs_s3.matched > 0 + msg: "No S3 (noobaa-core) package found {{ gpfs_path_url }}noobaa-core*" + + - name: install | Add GPFS s3 package to list + vars: + current_package: "{{ item.path }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_s3.files }}" + +- debug: + msg: "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install_local_pkg.yml b/roles/s3_upgrade/tasks/install_local_pkg.yml new file mode 100644 index 00000000..27606923 --- /dev/null +++ b/roles/s3_upgrade/tasks/install_local_pkg.yml @@ -0,0 +1,137 @@ +--- +# Local package installation method +- block: ## run_once: true + - name: install | Stat local installation package + stat: + path: "{{ scale_install_localpkg_path }}" + checksum_algorithm: md5 + register: scale_install_localpkg + + - name: install | Check local installation package + assert: + that: scale_install_localpkg.stat.exists + msg: >- + Please set the variable 'scale_install_localpkg_path' to point to the + local installation package (accessible on Ansible control machine)! + +# +# Optionally, verify package checksum +# + - name: install | Stat checksum file + stat: + path: "{{ scale_install_localpkg_path }}.md5" + register: scale_install_md5_file + + - block: ## when: scale_install_md5_file.stat.exists + - name: install | Read checksum from file + set_fact: + scale_install_md5_sum: "{{ lookup('file', scale_install_localpkg_path + '.md5') }}" + + - name: install | Compare checksums + assert: + that: scale_install_md5_sum.strip().split().0 == scale_install_localpkg.stat.checksum + msg: >- + Checksums don't match. Please check integritiy of your local + installation package! 
+ + when: scale_install_md5_file.stat.exists + run_once: true + delegate_to: localhost + +# +# Copy installation package +# +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- block: ## when: not scale_install_gpfs_rpmdir.stat.exists + - name: install | Stat temporary directory + stat: + path: "{{ scale_install_localpkg_tmpdir_path }}" + register: scale_install_localpkg_tmpdir + + - name: install | Check temporary directory + assert: + that: + - scale_install_localpkg_tmpdir.stat.exists + - scale_install_localpkg_tmpdir.stat.isdir + msg: >- + Please set the variable 'scale_install_localpkg_tmpdir_path' to point + to a temporary directory on the remote system! + + - name: install | Copy installation package to node + copy: + src: "{{ scale_install_localpkg_path }}" + dest: "{{ scale_install_localpkg_tmpdir_path }}" + mode: a+x + when: not scale_install_gpfs_rpmdir.stat.exists + +# +# Extract installation package +# +- name: install | Extract installation package + vars: + localpkg: "{{ scale_install_localpkg_tmpdir_path + '/' + scale_install_localpkg_path | basename }}" + command: "{{ localpkg + ' --silent' }}" + args: + creates: "{{ s3_extracted_path }}" + +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- name: install | Check extracted packages + assert: + that: + - scale_install_gpfs_rpmdir.stat.exists + - scale_install_gpfs_rpmdir.stat.isdir + msg: >- + The variable 'scale_version' doesn't seem to match the contents of the + local installation package! + +# Delete installation package +- name: install | Delete installation package from node + file: + path: "{{ scale_install_localpkg_tmpdir_path + '/' + scale_install_localpkg_path | basename }}" + state: absent + +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel8/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' + +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel9/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' + +# Find s3 rpms +- block: ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + + - name: install | Find noobaa-core (noobaa-core) package + find: + paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}" + patterns: noobaa-core* + register: scale_install_gpfs_s3 + + - name: install | Check valid (noobaa-core) package + assert: + that: scale_install_gpfs_s3.matched > 0 + msg: "No noobaa-core (noobaa-core) package found {{ s3_extracted_path }}/{{ scale_s3_url }}noobaa-core*" + + - name: install | Add noobaa-core package to list + vars: + current_package: "{{ item.path }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_s3.files }}" + + when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + + +- debug: + msg: "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install_remote_pkg.yml b/roles/s3_upgrade/tasks/install_remote_pkg.yml new file mode 100644 index 00000000..56227dc2 --- /dev/null +++ b/roles/s3_upgrade/tasks/install_remote_pkg.yml @@ -0,0 +1,109 @@ +--- +# Remote package installation method + +- name: install | Stat remote installation package + stat: + path: "{{ scale_install_remotepkg_path }}" + 
checksum_algorithm: md5 + register: scale_install_remotepkg + +- name: install | Check remote installation package + assert: + that: scale_install_remotepkg.stat.exists + msg: >- + Please set the variable 'scale_install_remotepkg_path' to point to the + remote installation package (accessible on Ansible managed node)! + +# +# Optionally, verify package checksum +# +- name: install | Stat checksum file + stat: + path: "{{ scale_install_remotepkg_path }}.md5" + register: scale_install_md5_file + +- block: ## when: scale_install_md5_file.stat.exists + - name: install | Read checksum from file + slurp: + src: "{{ scale_install_remotepkg_path }}.md5" + register: scale_install_md5_sum + + - name: install | Compare checksums + vars: + md5sum: "{{ scale_install_md5_sum.content | b64decode }}" + assert: + that: md5sum.strip().split().0 == scale_install_remotepkg.stat.checksum + msg: >- + Checksums don't match. Please check integritiy of your remote + installation package! + when: scale_install_md5_file.stat.exists + +# +# Extract installation package +# +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- name: install | Make installation package executable + file: + path: "{{ scale_install_remotepkg_path }}" + mode: a+x + when: not scale_install_gpfs_rpmdir.stat.exists + +- name: install | Extract installation package + command: "{{ scale_install_remotepkg_path + ' --silent' }}" + args: + creates: "{{ s3_extracted_path }}" + +- name: install | Stat extracted packages + stat: + path: "{{ s3_extracted_path }}" + register: scale_install_gpfs_rpmdir + +- name: install | Check extracted packages + assert: + that: + - scale_install_gpfs_rpmdir.stat.exists + - scale_install_gpfs_rpmdir.stat.isdir + msg: >- + The variable 'scale_version' doesn't seem to match the contents of the + remote installation package! 
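+
+# The self-extracting installer lays down release-specific RPM directories
+# (s3_rpms/rhel8/, s3_rpms/rhel9/); the set_fact tasks below select the path
+# matching the detected RHEL major version before searching for noobaa-core.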
+ +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel8/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' + +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel9/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' + +# Find s3 rpms +- block: ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + + - name: install | Find noobaa-core (noobaa-core) package + find: + paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}" + patterns: noobaa-core* + register: scale_install_gpfs_s3 + + - name: install | Check valid noobaa-core (noobaa-core) package + assert: + that: scale_install_gpfs_s3.matched > 0 + msg: "No S3 (noobaa-core) package found {{ s3_extracted_path }}/{{ scale_s3_url }}gpfs.s3*" + + - name: install | Add GPFS s3 package to list + vars: + current_package: "{{ item.path }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_s3.files }}" + + when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution + +- debug: + msg: "{{ scale_install_all_packages }}" diff --git a/roles/s3_upgrade/tasks/install_repository.yml b/roles/s3_upgrade/tasks/install_repository.yml new file mode 100644 index 00000000..201d7e69 --- /dev/null +++ b/roles/s3_upgrade/tasks/install_repository.yml @@ -0,0 +1,31 @@ +--- +- name: upgrade | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel8/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' + +- name: upgrade | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel9/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' + +- name: upgrade | Configure s3 YUM repository + yum_repository: + name: spectrum-scale-s3 + description: IBM Spectrum Scale (s3) + baseurl: "{{ scale_install_repository_url }}{{ scale_s3_url }}" + gpgcheck: "{{ scale_install_gpgcheck }}" + repo_gpgcheck: no + sslverify: no + state: present + notify: yum-clean-metadata + when: + - ansible_pkg_mgr == 'yum' or ansible_pkg_mgr == 'dnf' + - scale_install_repository_url is defined + - scale_install_repository_url != 'existing' + +- name: upgrade | Add GPFS s3 packages to list + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ item ] }}" + with_items: + - "{{ scale_s3_packages }}" diff --git a/roles/s3_upgrade/tasks/main.yml b/roles/s3_upgrade/tasks/main.yml new file mode 100644 index 00000000..fc33687b --- /dev/null +++ b/roles/s3_upgrade/tasks/main.yml @@ -0,0 +1,4 @@ +--- +# Install IBM Spectrum Scale (S3) +- import_tasks: install.yml + tags: upgrade diff --git a/roles/s3_upgrade/tasks/yum/install.yml b/roles/s3_upgrade/tasks/yum/install.yml new file mode 100644 index 00000000..9ddbc12e --- /dev/null +++ b/roles/s3_upgrade/tasks/yum/install.yml @@ -0,0 +1,6 @@ +--- +- name: upgrade | Upgrade GPFS S3 packages + yum: + name: "{{ scale_install_all_packages }}" + state: latest + disable_gpg_check: "{{ scale_disable_gpgcheck }}" diff --git a/roles/s3_upgrade/tasks/zypper/install.yml b/roles/s3_upgrade/tasks/zypper/install.yml new file mode 100644 index 00000000..2ea66d79 --- /dev/null +++ b/roles/s3_upgrade/tasks/zypper/install.yml @@ -0,0 +1,6 @@ +--- +- name: upgrade | Upgrade GPFS S3 packages + zypper: + name: "{{ scale_install_all_packages }}" + state: latest + 
disable_gpg_check: no diff --git a/roles/s3_upgrade/vars/main.yml b/roles/s3_upgrade/vars/main.yml new file mode 100644 index 00000000..5a6e9c01 --- /dev/null +++ b/roles/s3_upgrade/vars/main.yml @@ -0,0 +1,10 @@ +--- +# Variables for the IBM Spectrum Scale (GPFS) role - +# these variables are *not* meant to be overridden + +## Compute RPM version from Spectrum Scale version +scale_rpmversion: "{{ scale_version | regex_replace('^([0-9.]+)\\.([0-9])$', '\\1-\\2') }}" + +## Default scale extraction path +scale_extracted_default_path: "/usr/lpp/mmfs" +scale_extracted_path: "{{ scale_extracted_default_path }}/{{ scale_version }}" From abbe501fcdc36e10a507fd07a6fe20c0212adfab Mon Sep 17 00:00:00 2001 From: sujeet Date: Wed, 8 Jan 2025 18:28:01 +0100 Subject: [PATCH 04/10] Fixed callhome defect fixes Signed-off-by: sujeet --- roles/callhome_configure/tasks/configure.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/callhome_configure/tasks/configure.yml b/roles/callhome_configure/tasks/configure.yml index 14274003..c87df6f1 100755 --- a/roles/callhome_configure/tasks/configure.yml +++ b/roles/callhome_configure/tasks/configure.yml @@ -54,7 +54,7 @@ - name: configure| Setup the call home customer configuration shell: - cmd: "{{ scale_command_path }}mmcallhome info change --customer-name {{ scale_callhome_params.customer_name }} --customer-id {{ scale_callhome_params.customer_id }} --email {{ scale_callhome_params.customer_email}} --country-code {{ scale_callhome_params.customer_country }}" + cmd: "{{ scale_command_path }}mmcallhome info change --customer-name \"{{ scale_callhome_params.customer_name }}\" --customer-id {{ scale_callhome_params.customer_id }} --email {{ scale_callhome_params.customer_email}} --country-code {{ scale_callhome_params.customer_country }}" register: scale_callhome_customer_config - debug: From 7b8484d48948e88707fe61ef9f389b484a8427db Mon Sep 17 00:00:00 2001 From: sujeet Date: Mon, 20 Jan 2025 06:56:12 +0100 Subject: [PATCH 05/10] README file update for CES S3 and supported OS Signed-off-by: sujeet --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index b37d74d8..7f157e72 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,9 @@ Features - [x] Support for RHEL 7 on x86_64, PPC64 and PPC64LE - [x] Support for RHEL 8 on x86_64 and PPC64LE +- [x] Support for RHEL 9 on x86_64 and PPC64LE - [x] Support for UBUNTU 20 on x86_64 and PPC64LE +- [x] Support for UBUNTU 22 on x86_64 and PPC64LE - [x] Support for SLES 15 on x86_64 and PPC64LE #### Common prerequisites @@ -91,6 +93,7 @@ Features - [x] Install IBM Storage Scale SMB or NFS on selected cluster nodes (5.0.5.2 and above) - [x] Install IBM Storage Scale Object on selected cluster nodes (5.1.1.0 and above) +- [x] Install IBM Storage Scale S3 on selected cluster nodes (5.2.0.0 and above) - [x] CES IPV4 or IPV6 support - [x] CES interface mode support @@ -107,12 +110,14 @@ The following IBM Storage Scale versions are tested: - 5.0.4.0 and above - 5.0.5.2 and above for CES (SMB and NFS) - 5.1.1.0 and above for CES (Object) +- 5.2.0.0 and above for CES (S3) - **Refer to the [Release Notes](https://github.com/IBM/ibm-spectrum-scale-install-infra/releases) for details** Specific OS requirements: - For CES (SMB/NFS) on SLES15: Python 3 is required. - For CES (Object): RhedHat 8.x is required. +- For CES (S3): RhedHat 8.x or RhedHat 9.x is required. 
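+
+As a sketch, an upgrade play for the new CES S3 role could look like the
+following (the host group, Scale version, and repository URL are
+placeholders, not values shipped with this collection):
+
+```yaml
+# Minimal CES S3 upgrade play (illustrative only)
+- hosts: cluster01                      # placeholder inventory group
+  collections:
+    - ibm.spectrum_scale
+  vars:
+    scale_version: 5.2.0.0
+    # Base URL only; the role appends s3_rpms/rhel8/ or s3_rpms/rhel9/
+    scale_install_repository_url: http://infraserv/scale/   # placeholder
+  roles:
+    - s3_upgrade
+```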
Prerequisites ------------- @@ -308,6 +313,7 @@ The following [roles](https://docs.ansible.com/ansible/latest/user_guide/playboo - HDFS (`roles/hdfs_*`) - Call Home (`roles/callhome_*`) - File Audit Logging (`roles/fal_*`) +- S3 (`roles/s3_*`) - ... Note that [Core GPFS](roles/core) is the only mandatory role, all other roles are optional. Each of the optional roles requires additional configuration variables. Browse the examples in the [samples/](samples/) directory to learn how to: From bffd367d25b895bd9c1a21e010ae802e9a91256a Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Thu, 31 Jul 2025 15:08:07 +0200 Subject: [PATCH 06/10] Fixed SLES15 issue fix Signed-off-by: Rajan Mishra --- roles/fal_install/tasks/install_repository.yml | 1 + roles/gui_install/tasks/install_repository.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/roles/fal_install/tasks/install_repository.yml b/roles/fal_install/tasks/install_repository.yml index f98af3fd..5d691864 100644 --- a/roles/fal_install/tasks/install_repository.yml +++ b/roles/fal_install/tasks/install_repository.yml @@ -86,6 +86,7 @@ repo: "{{ scale_install_repository_url }}{{ scale_fal_url }}" disable_gpg_check: no state: present + overwrite_multiple: yes when: - ansible_pkg_mgr == 'zypper' - scale_install_repository_url is defined diff --git a/roles/gui_install/tasks/install_repository.yml b/roles/gui_install/tasks/install_repository.yml index 70396d60..446e1cd5 100644 --- a/roles/gui_install/tasks/install_repository.yml +++ b/roles/gui_install/tasks/install_repository.yml @@ -37,7 +37,7 @@ zypper_repository: name: spectrum-scale-gui description: IBM Spectrum Scale (GUI) - repo: "{{ scale_install_repository_url }}/gpfs_rpms/" + repo: "{{ scale_install_repository_url }}gpfs_rpms/" disable_gpg_check: no state: present overwrite_multiple: yes From 064ce8fbdfacfd7b83c6ee0b597aff9b3284d822 Mon Sep 17 00:00:00 2001 From: Rajan Mishra Date: Fri, 1 Aug 2025 08:42:31 +0200 Subject: [PATCH 07/10] Fixed SLES15 issue Signed-off-by: Rajan Mishra --- roles/gui_upgrade/tasks/install_repository.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/roles/gui_upgrade/tasks/install_repository.yml b/roles/gui_upgrade/tasks/install_repository.yml index d548e2d3..3ae49a57 100644 --- a/roles/gui_upgrade/tasks/install_repository.yml +++ b/roles/gui_upgrade/tasks/install_repository.yml @@ -44,7 +44,7 @@ zypper_repository: name: spectrum-scale-gui description: IBM Spectrum Scale (GUI) - repo: "{{ scale_install_repository_url }}/gpfs_rpms/" + repo: "{{ scale_install_repository_url }}gpfs_rpms/" disable_gpg_check: no state: present overwrite_multiple: yes From e70e2a73ed3a579885f3a2f0a6ff4445d3e1b238 Mon Sep 17 00:00:00 2001 From: Dhananjay Ganesh Sonawane Date: Mon, 1 Sep 2025 11:01:59 +0530 Subject: [PATCH 08/10] Added First role stubs for sed tpm support Signed-off-by: Dhananjay Sonawane --- roles/.DS_Store | Bin 0 -> 10244 bytes roles/tpm_drives_enrol/defaults/main.yml | 7 ++++++ roles/tpm_drives_enrol/meta/main.yml | 19 ++++++++++++++++ roles/tpm_drives_enrol/tasks/main.yml | 8 +++++++ roles/tpm_drives_enrol/vars/main.yml | 0 roles/tpm_key_generate/defaults/main.yml | 8 +++++++ roles/tpm_key_generate/meta/main.yml | 0 roles/tpm_key_generate/tasks/main.yml | 8 +++++++ roles/tpm_key_generate/vars/main.yml | 0 roles/tpm_key_manage/defaults/main.yml | 7 ++++++ roles/tpm_key_manage/meta/main.yml | 19 ++++++++++++++++ roles/tpm_key_manage/tasks/main.yml | 12 ++++++++++ roles/tpm_key_manage/vars/main.yml | 0 
roles/tpm_nv_slots_create/defaults/main.yml | 3 +++ roles/tpm_nv_slots_create/meta/main.yml | 19 ++++++++++++++++ roles/tpm_nv_slots_create/tasks/main.yml | 3 +++ roles/tpm_nv_slots_create/vars/main.yml | 0 roles/tpm_ownership/defaults/main.yml | 2 ++ roles/tpm_ownership/meta/main.yml | 19 ++++++++++++++++ roles/tpm_ownership/tasks/check.yml | 24 ++++++++++++++++++++ roles/tpm_ownership/tasks/main.yml | 17 ++++++++++++++ 21 files changed, 175 insertions(+) create mode 100644 roles/.DS_Store create mode 100644 roles/tpm_drives_enrol/defaults/main.yml create mode 100644 roles/tpm_drives_enrol/meta/main.yml create mode 100644 roles/tpm_drives_enrol/tasks/main.yml create mode 100644 roles/tpm_drives_enrol/vars/main.yml create mode 100644 roles/tpm_key_generate/defaults/main.yml create mode 100644 roles/tpm_key_generate/meta/main.yml create mode 100644 roles/tpm_key_generate/tasks/main.yml create mode 100644 roles/tpm_key_generate/vars/main.yml create mode 100644 roles/tpm_key_manage/defaults/main.yml create mode 100644 roles/tpm_key_manage/meta/main.yml create mode 100644 roles/tpm_key_manage/tasks/main.yml create mode 100644 roles/tpm_key_manage/vars/main.yml create mode 100644 roles/tpm_nv_slots_create/defaults/main.yml create mode 100644 roles/tpm_nv_slots_create/meta/main.yml create mode 100644 roles/tpm_nv_slots_create/tasks/main.yml create mode 100644 roles/tpm_nv_slots_create/vars/main.yml create mode 100644 roles/tpm_ownership/defaults/main.yml create mode 100644 roles/tpm_ownership/meta/main.yml create mode 100644 roles/tpm_ownership/tasks/check.yml create mode 100644 roles/tpm_ownership/tasks/main.yml diff --git a/roles/.DS_Store b/roles/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..123ac1370f48d78ab9957981deefbbad1f20b9ea GIT binary patch literal 10244 zcmeI1PiQ1X6o+3;=CICWCW*TWg325{iQ-X^aajc~dRXviOm>nOcQP|186}rx+=CZk zkD>=fMA(Dif}o&>1qDG53VRUrB0VcGTfUcKt+>OWPHWnmB1&|RIL{`ysY zUH!VMLqz8EbnrM)kBClS<#Krun~H^h=e4vn^vJU)4S1q+G^RaTm9lhtI*<$`1Ia)# zkPIXPcYy)Cvw1nuwq@ET1Ia)#kQw0o(8J1QU(1;-C3RpUQvj4r9F_(9m}U(1;-El$SbWHhqT8H$nU;Cga8nZ7O4HW^3;$_9Au zzCvrXNjo}kck}lVj&0KxO=*`lo*RzOtq;fRIGZ1^r?EbIOO|(UV1KD}g>4$)OTa#k z4{2DSugvN929@-LW6}h4DYkiNPJi%IB|QbX1#*oB#hRP9=JZD|V!vCe`D=*0izl1)LCagQI>u>ro*i$O(@6<85i% zraWQMwuwuv<)6PxJVyrF#+ksy`>XF!fwqZ9jrGrEj|#MnbFH~k}p+|b`VvY?tfyr7fWCboAcXSf9&BqIPVVjQAdoXWftr@oID02v| zja-4Jcux=!^@RT+A0#9k$85!`w&$px0qKg;ufgV{yg?yktdQ>Z`VXY7DiypHCW`cLk z2zhIO>@`s{weN68pE^SOR)pO}50i1iF|B@LtsY*)6_!d@h}jL(G0RHR8w2wt2~# ze3M()&d5MK&I^vZXgn%rQ)7MTQ!$@PJ`q^lzSd4&ZT*&SQ_W z1iyg_vqo{5XO}bfJeUKJTf*|}ZH;?;`JL6gnd_VL*pA-;H^PeR_$-eSJ@#Z4dxTNg zDr!EIr_7Egcgy;-nV=(SNkgDGXr z|Nn10PP#xckPO_t22AtJ*)yk6KTiHtO9KacSYO7<3!gW$r6AbwDSTW$$KwzF9RG+O zdyGq7@Oxj&nJop!alujAulaZUp8?sI%ryTmziDb_@_})OSiGKlx6Z#BgvK5G6{VUA Tj5~N7;8AJ*FW=<<@BIHi{K!`J literal 0 HcmV?d00001 diff --git a/roles/tpm_drives_enrol/defaults/main.yml b/roles/tpm_drives_enrol/defaults/main.yml new file mode 100644 index 00000000..b410b4a4 --- /dev/null +++ b/roles/tpm_drives_enrol/defaults/main.yml @@ -0,0 +1,7 @@ +nv_slot_id: "0x01500021" +tpm_password_file: "/path/to/password.txt" +recovery_group: RecoveryGroupName +rkmid: RKMId +key_uuid: KeyId +rekey_drives: false + diff --git a/roles/tpm_drives_enrol/meta/main.yml b/roles/tpm_drives_enrol/meta/main.yml new file mode 100644 index 00000000..55f890b6 --- /dev/null +++ b/roles/tpm_drives_enrol/meta/main.yml @@ -0,0 +1,19 @@ +--- +galaxy_info: + author: IBM Corporation + description: Highly-customizable Ansible role for installing and configuring IBM 
Spectrum Scale (GPFS) + company: IBM + + license: Apache-2.0 + + min_ansible_version: 2.9 + + platforms: + - name: EL + versions: + - 7 + - 8 + + galaxy_tags: [] + +dependencies: \ No newline at end of file diff --git a/roles/tpm_drives_enrol/tasks/main.yml b/roles/tpm_drives_enrol/tasks/main.yml new file mode 100644 index 00000000..2cf56a04 --- /dev/null +++ b/roles/tpm_drives_enrol/tasks/main.yml @@ -0,0 +1,8 @@ +--- +- name: Enroll drives with TPM key + command: mmvdisk sed enroll --recovery-group {{ recovery_group }} --rkmid {{ RKMId }} --key-uuid {{ KeyId }} --tpm-slot-id {{ nv_slot_id }} + when: !rekey_drives + +- name: Rekey drives with new TPM key + command: mmvdisk sed rekey --recovery-group {{ recovery_group }} --rkmid {{ RKMId }} --key-uuid {{ KeyId }} --tpm-slot-id {{ nv_slot_id }} + when: rekey_drives \ No newline at end of file diff --git a/roles/tpm_drives_enrol/vars/main.yml b/roles/tpm_drives_enrol/vars/main.yml new file mode 100644 index 00000000..e69de29b diff --git a/roles/tpm_key_generate/defaults/main.yml b/roles/tpm_key_generate/defaults/main.yml new file mode 100644 index 00000000..3921ad9c --- /dev/null +++ b/roles/tpm_key_generate/defaults/main.yml @@ -0,0 +1,8 @@ +nv_slot_id: "0x01500021" +tpm_password_file: "/path/to/password.txt" +source_node: node1 +target_nodes: + - node2 + - node3 +generate: true +migrate: false diff --git a/roles/tpm_key_generate/meta/main.yml b/roles/tpm_key_generate/meta/main.yml new file mode 100644 index 00000000..e69de29b diff --git a/roles/tpm_key_generate/tasks/main.yml b/roles/tpm_key_generate/tasks/main.yml new file mode 100644 index 00000000..5115d47d --- /dev/null +++ b/roles/tpm_key_generate/tasks/main.yml @@ -0,0 +1,8 @@ +--- +- name: Generate TPM key + command: mmvdisk tpm genkey --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }} + when: generate + +- name: Migrate TPM key to other nodes + command: mmvdisk tpm migratekey --nv-slot-id {{ nv_slot_id }} -s {{ source_node }} -N {{ target_nodes }} + when: migrate diff --git a/roles/tpm_key_generate/vars/main.yml b/roles/tpm_key_generate/vars/main.yml new file mode 100644 index 00000000..e69de29b diff --git a/roles/tpm_key_manage/defaults/main.yml b/roles/tpm_key_manage/defaults/main.yml new file mode 100644 index 00000000..0e9ffa70 --- /dev/null +++ b/roles/tpm_key_manage/defaults/main.yml @@ -0,0 +1,7 @@ +tpm_slot_id: "0x01500021" +tpm_password_file: "/path/to/password.txt" +ess_io_node : node1 +utility_node: node2 +backup: false +restore: false +rotate: false diff --git a/roles/tpm_key_manage/meta/main.yml b/roles/tpm_key_manage/meta/main.yml new file mode 100644 index 00000000..55f890b6 --- /dev/null +++ b/roles/tpm_key_manage/meta/main.yml @@ -0,0 +1,19 @@ +--- +galaxy_info: + author: IBM Corporation + description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) + company: IBM + + license: Apache-2.0 + + min_ansible_version: 2.9 + + platforms: + - name: EL + versions: + - 7 + - 8 + + galaxy_tags: [] + +dependencies: \ No newline at end of file diff --git a/roles/tpm_key_manage/tasks/main.yml b/roles/tpm_key_manage/tasks/main.yml new file mode 100644 index 00000000..71fb117e --- /dev/null +++ b/roles/tpm_key_manage/tasks/main.yml @@ -0,0 +1,12 @@ +--- +- name: Backup TPM key + command: esstpmkey backup --source-node {{ ess_io_node }} --destination-node {{ utility_node }} --tpm-slot-id {{ tpm_slot_id }} + when: backup + +- name: Rotate TPM key + command: mmvdisk tpm genkey --nv-slot-id {{ nv_slot_id }} --password-file 
{{ tpm_password_file }} + when: rotate + +- name: Restore TPM key from backup + command: esstpmkey restore --source-node {{ utility_node }} --destination-node {{ ess_io_node }} --tpm-slot-id {{ tpm_slot_id }} + when: restore diff --git a/roles/tpm_key_manage/vars/main.yml b/roles/tpm_key_manage/vars/main.yml new file mode 100644 index 00000000..e69de29b diff --git a/roles/tpm_nv_slots_create/defaults/main.yml b/roles/tpm_nv_slots_create/defaults/main.yml new file mode 100644 index 00000000..0349b08d --- /dev/null +++ b/roles/tpm_nv_slots_create/defaults/main.yml @@ -0,0 +1,3 @@ +nv_slot_count: 2 +nv_slot_id: "0x01500021" +tpm_password_file: "/path/to/password.txt" diff --git a/roles/tpm_nv_slots_create/meta/main.yml b/roles/tpm_nv_slots_create/meta/main.yml new file mode 100644 index 00000000..55f890b6 --- /dev/null +++ b/roles/tpm_nv_slots_create/meta/main.yml @@ -0,0 +1,19 @@ +--- +galaxy_info: + author: IBM Corporation + description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) + company: IBM + + license: Apache-2.0 + + min_ansible_version: 2.9 + + platforms: + - name: EL + versions: + - 7 + - 8 + + galaxy_tags: [] + +dependencies: \ No newline at end of file diff --git a/roles/tpm_nv_slots_create/tasks/main.yml b/roles/tpm_nv_slots_create/tasks/main.yml new file mode 100644 index 00000000..39b17fbf --- /dev/null +++ b/roles/tpm_nv_slots_create/tasks/main.yml @@ -0,0 +1,3 @@ +--- +- name: Create NV slots + command: mmvdisk tpm createSlots --number-of-slots {{ nv_slot_count }} --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }} diff --git a/roles/tpm_nv_slots_create/vars/main.yml b/roles/tpm_nv_slots_create/vars/main.yml new file mode 100644 index 00000000..e69de29b diff --git a/roles/tpm_ownership/defaults/main.yml b/roles/tpm_ownership/defaults/main.yml new file mode 100644 index 00000000..f00ad472 --- /dev/null +++ b/roles/tpm_ownership/defaults/main.yml @@ -0,0 +1,2 @@ +tpm_password_file: "/etc/tpm_password.txt" +disable_clear: true diff --git a/roles/tpm_ownership/meta/main.yml b/roles/tpm_ownership/meta/main.yml new file mode 100644 index 00000000..55f890b6 --- /dev/null +++ b/roles/tpm_ownership/meta/main.yml @@ -0,0 +1,19 @@ +--- +galaxy_info: + author: IBM Corporation + description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) + company: IBM + + license: Apache-2.0 + + min_ansible_version: 2.9 + + platforms: + - name: EL + versions: + - 7 + - 8 + + galaxy_tags: [] + +dependencies: \ No newline at end of file diff --git a/roles/tpm_ownership/tasks/check.yml b/roles/tpm_ownership/tasks/check.yml new file mode 100644 index 00000000..219cb1b2 --- /dev/null +++ b/roles/tpm_ownership/tasks/check.yml @@ -0,0 +1,24 @@ +--- +# requirements: +# RHEL 9+ +# OpenSSL 3+ + +- name: Check OpenSSL version + command: openssl version + register: openssl_version_output + changed_when: false + +# Stop the execution of playbook if required openSSL version not found. +- fail: + msg: "OpenSSL version 3 or higher is required for SED support with TPM." + when: openssl_version_output.stdout is search('OpenSSL\\s([0-2]\\.[0-9]+)') + +- name: Check OS version + command: cat /etc/redhat-release + register: os_version_output + changed_when: false + +# Stop the execution of playbook if required openSSL version not found. +- fail: + msg: "RHEL 9 or higher is required for SED support with TPM." 
+ when: os_version_output.stdout is search('release\\s([0-8])') \ No newline at end of file diff --git a/roles/tpm_ownership/tasks/main.yml b/roles/tpm_ownership/tasks/main.yml new file mode 100644 index 00000000..bca9ede2 --- /dev/null +++ b/roles/tpm_ownership/tasks/main.yml @@ -0,0 +1,17 @@ +--- +- import_tasks: check.yml + tags: check prerequisites + +# Do we need other ways to check tpm presence? +- name: Check TPM presence + command: ls /dev/tpm* + +- name: Disable TPM clear if required + command: tpm2_clearcontrol disable + when: disable_clear + + +# enabling tpm from bios is a manual process to be performed, before taking the ownership + +- name: Take TPM ownership + command: mmvdisk tpm setup --password-file {{ tpm_password_file }} From 81639e5934a235ecf3fe5eef0322c7d55907e207 Mon Sep 17 00:00:00 2001 From: Dhananjay Sonawane Date: Mon, 22 Sep 2025 11:42:09 +0530 Subject: [PATCH 09/10] Added sed tpm support Signed-off-by: Dhananjay Sonawane --- roles/sed_configure/README.md | 35 +++++++ roles/sed_configure/defaults/main.yml | 15 +++ roles/sed_configure/meta/main.yml | 19 ++++ roles/sed_configure/tasks/check_prereq.yml | 53 +++++++++++ roles/sed_configure/tasks/create_nv_slot.yml | 22 +++++ .../sed_configure/tasks/enroll_sed_drive.yml | 24 +++++ .../sed_configure/tasks/generate_tpm_key.yml | 29 ++++++ roles/sed_configure/tasks/main.yml | 21 ++++ roles/sed_configure/tasks/manage_key.yml | 26 +++++ roles/sed_configure/tasks/tpm_ownership.yml | 95 +++++++++++++++++++ samples/playbook_sed_tpm.yml | 71 ++++++++++++++ 11 files changed, 410 insertions(+) create mode 100644 roles/sed_configure/README.md create mode 100644 roles/sed_configure/defaults/main.yml create mode 100644 roles/sed_configure/meta/main.yml create mode 100644 roles/sed_configure/tasks/check_prereq.yml create mode 100644 roles/sed_configure/tasks/create_nv_slot.yml create mode 100644 roles/sed_configure/tasks/enroll_sed_drive.yml create mode 100644 roles/sed_configure/tasks/generate_tpm_key.yml create mode 100644 roles/sed_configure/tasks/main.yml create mode 100644 roles/sed_configure/tasks/manage_key.yml create mode 100644 roles/sed_configure/tasks/tpm_ownership.yml create mode 100644 samples/playbook_sed_tpm.yml diff --git a/roles/sed_configure/README.md b/roles/sed_configure/README.md new file mode 100644 index 00000000..0d77564a --- /dev/null +++ b/roles/sed_configure/README.md @@ -0,0 +1,35 @@ +Role Definition +------------------------------- +- Role name: sed +- Definition: + - The self-encrypting drives (SED) support protects data at rest on IBM Storage Scale System drives. + - TPM is a specialized hardware security chip that provides secure cryptographic functions. + - mmvdisk tpm , esstpm and esstpm key provides options to setup the tpm ,generate keys, enroll drives with the generated keys in the IBM Storage Scale cluster. + - These operations are performed on the I/O nodes and the keys generated are also backed up on the utility node. + + +Prerequisite +---------------------------- +- Red Hat Enterprise Linux 9.x is supported. +- OpenSSL version 3+ is supported. +- TPM version 2.0 is required to use this support +- A password file with appropriate permissions (600) must exist for taking TPM ownership. + +Design +--------------------------- +- Directory Structure: + - Path: /ibm-spectrum-scale-install-infra/roles/sed_configure + - Inside the sed role, there are sub-tasks to setup the TPM stepwise + - `check_prereq`: This task checks that all the prerequisites are satisfied before proceeding with the TPM setup. 
It checks the following things: + - RHEL 9.x is present. + - OpenSSL 3+ version present. + - Check whether TPM is enabled from BIOS. + - Check tpm2-tools rpms. If not installed already, install it. + - `tpm_ownership`: This task sets up the TPM to be used. + - check if tpm ownership already taken, if yes skip the entire process after validating the ownership + - if not taken, we proceed to take the ownership + - if 'change_pasword' flag is set, we skip the setup and jump to the password change + - `create_nv_slots`: This task create NV slots which will be used for key generation. + - `generate_tpm_key`: This task generated a tpm key in the mentioned nv slot. + - `enroll_sed`: This task enrolls an sed using the tpm key + - `manage_key`: This task handles the backup and restore of the tpm key. diff --git a/roles/sed_configure/defaults/main.yml b/roles/sed_configure/defaults/main.yml new file mode 100644 index 00000000..277d298a --- /dev/null +++ b/roles/sed_configure/defaults/main.yml @@ -0,0 +1,15 @@ +tpm_password_file: "/path/to/password.txt" +new_tpm_password_file: "/path/to/newpassword.txt" +disable_clear: true +change_password: false +nv_slot_id: "0x01500000" +nv_slot_count: 2 +recovery_group: "RecoveryGroupName" +enroll_drive: true +rekey_drive: false +generate: true +migrate: true +backup_key: true +restore_key: true + + diff --git a/roles/sed_configure/meta/main.yml b/roles/sed_configure/meta/main.yml new file mode 100644 index 00000000..55f890b6 --- /dev/null +++ b/roles/sed_configure/meta/main.yml @@ -0,0 +1,19 @@ +--- +galaxy_info: + author: IBM Corporation + description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) + company: IBM + + license: Apache-2.0 + + min_ansible_version: 2.9 + + platforms: + - name: EL + versions: + - 7 + - 8 + + galaxy_tags: [] + +dependencies: \ No newline at end of file diff --git a/roles/sed_configure/tasks/check_prereq.yml b/roles/sed_configure/tasks/check_prereq.yml new file mode 100644 index 00000000..31a7e723 --- /dev/null +++ b/roles/sed_configure/tasks/check_prereq.yml @@ -0,0 +1,53 @@ +--- + - block: + # Check the OpenSSL version and fail if the version is < 3 + - name: Check OpenSSL version + command: openssl version + register: openssl_version_output + changed_when: false + failed_when: openssl_version_output.stdout | regex_search('OpenSSL\s([0-2]\\.[0-9]+)') + + - debug: + msg: "{{(openssl_version_output.rc == 0) | ternary(openssl_version_output.stdout.split('\n'), openssl_version_output.stderr.split('\n')) }}" + + # Check the OS version and fail if the version is < RHEL 9 + - name: Check OS version + command: cat /etc/redhat-release + register: os_version_output + changed_when: false + failed_when: os_version_output.stdout | regex_search('release\s([0-8])') + + - debug: + msg: "{{(os_version_output.rc == 0) | ternary(os_version_output.stdout.split('\n'), os_version_output.stderr.split('\n')) }}" + when: inventory_hostname == 'localhost' or inventory_hostname in scale_io_nodes_list + + - block: + - name: Check TPM presence + stat: + path: /dev/tpm0 + register: tpm_device + + - debug: + msg: "TPM device present" + when: tpm_device.stat.exists + + - fail: + msg: "TPM is not enabled in BIOS. Please enable it manually before proceeding." 
+ when: not tpm_device.stat.exists + + - name: Check if tpm2-tools is installed + command: rpm -q tpm2-tools + register: tpm2_tools_check + ignore_errors: true + changed_when: false + + - name: Install tpm2-tools if not present + yum: + name: tpm2-tools + state: present + when: tpm2_tools_check.rc != 0 + when: inventory_hostname in scale_io_nodes_list + + + + diff --git a/roles/sed_configure/tasks/create_nv_slot.yml b/roles/sed_configure/tasks/create_nv_slot.yml new file mode 100644 index 00000000..fc65f864 --- /dev/null +++ b/roles/sed_configure/tasks/create_nv_slot.yml @@ -0,0 +1,22 @@ +--- +- block: + # Creation of NV slots on IO nodes + - name: Create NV slots + command: mmvdisk tpm createSlots --number-of-slots {{ nv_slot_count }} --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }} + register: nv_slot_creation_io + failed_when: nv_slot_creation_io.rc != 0 + + - debug: + msg: "{{(nv_slot_creation_io.rc == 0) | ternary(nv_slot_creation_io.stdout.split('\n'), nv_slot_creation_io.stderr.split('\n')) }}" + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] + +- block: + # Creation of NV slots on utility nodes + - name: Create NV slots on utility node + command: /opt/ibm/ess/tools/bin/.TPM/./esstpm createslot --nv-slot-id {{nv_slot_id}} --password-file {{ tpm_password_file }} + register: nv_slot_creation_utility + failed_when: nv_slot_creation_utility.rc != 0 + + - debug: + msg: "{{(nv_slot_creation_utility.rc == 0) | ternary(nv_slot_creation_utility.stdout.split('\n'), nv_slot_creation_utility.stderr.split('\n')) }}" + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] diff --git a/roles/sed_configure/tasks/enroll_sed_drive.yml b/roles/sed_configure/tasks/enroll_sed_drive.yml new file mode 100644 index 00000000..c108988e --- /dev/null +++ b/roles/sed_configure/tasks/enroll_sed_drive.yml @@ -0,0 +1,24 @@ +--- +- block: + # Enrolling the SED with the generated TPM key + - name: Enroll drives with TPM key + command: mmvdisk sed enroll --recovery-group {{ recovery_group }} --tpm-slot-id {{ nv_slot_id }} + register: drive_enrollment + + - debug: + msg: "{{(drive_enrollment.rc == 0) | ternary(drive_enrollment.stdout.split('\n'), drive_enrollment.stderr.split('\n')) }}" + failed_when: drive_enrollment.rc != 0 + when: enroll_drive and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] + run_once: true + +- block: + # Rekeying the SED with the a new TPM key + - name: Rekey drives with new TPM key + command: mmvdisk sed rekey --recovery-group {{ recovery_group }} --tpm-slot-id {{ nv_slot_id }} + register: drive_rekey + + - debug: + msg: "{{(drive_rekey.rc == 0) | ternary(drive_rekey.stdout.split('\n'), drive_rekey.stderr.split('\n')) }}" + failed_when: drive_rekey.rc != 0 + when: rekey_drives and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] + run_once: true \ No newline at end of file diff --git a/roles/sed_configure/tasks/generate_tpm_key.yml b/roles/sed_configure/tasks/generate_tpm_key.yml new file mode 100644 index 00000000..6ebc852e --- /dev/null +++ b/roles/sed_configure/tasks/generate_tpm_key.yml @@ -0,0 +1,29 @@ +--- +# Generate only on one node. migrate to others. +# Run both commands only on one single io node. 
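+#
+# Sketch of the intended flow: genkey runs once on the first IO node
+# (run_once), and migratekey then copies the key in the same NV slot to the
+# remaining IO nodes (target_nodes is the IO node list minus that node).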
+ +- block: + # Generate a TPM key + - name: Generate TPM key + command: mmvdisk tpm genkey --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }} + register: tpm_key_generate + + - debug: + msg: "{{(tpm_key_generate.rc == 0) | ternary(tpm_key_generate.stdout.split('\n'), tpm_key_generate.stderr.split('\n')) }}" + failed_when: tpm_key_generate.rc != 0 + when: generate and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] + run_once: true + +- block: + # Migrate the generated TPM key to other io nodes + - name: Migrate TPM key to other nodes + command: mmvdisk tpm migratekey --nv-slot-id {{ nv_slot_id }} -s {{ inventory_hostname }} -N {{ target_nodes | join(',') }} + vars: + target_nodes: "{{ (hostvars[groups['emsvm'][0]]['scale_io_nodes_list'])[1:]}}" + register: tpm_key_migrate + + - debug: + msg: "{{ (tpm_key_migrate.rc == 0) | ternary(tpm_key_migrate.stdout.split('\n'),tpm_key_migrate.stderr.split('\n')) }}" + failed_when: tpm_key_migrate.rc != 0 + when: migrate and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] + run_once: true \ No newline at end of file diff --git a/roles/sed_configure/tasks/main.yml b/roles/sed_configure/tasks/main.yml new file mode 100644 index 00000000..ab76a93d --- /dev/null +++ b/roles/sed_configure/tasks/main.yml @@ -0,0 +1,21 @@ +--- +- import_tasks: check_prereq.yml + tags: check prerequisites + +- import_tasks: tpm_ownership.yml + tags: tpm ownership + +- import_tasks: create_nv_slot.yml + tags: create nv slot + +- import_tasks: generate_tpm_key.yml + tags: generate tpm key + +- import_tasks: enroll_sed_drive.yml + tags: enroll sed drive + +- import_tasks: manage_key.yml + tags: restore and backup key + + + diff --git a/roles/sed_configure/tasks/manage_key.yml b/roles/sed_configure/tasks/manage_key.yml new file mode 100644 index 00000000..853d5509 --- /dev/null +++ b/roles/sed_configure/tasks/manage_key.yml @@ -0,0 +1,26 @@ +--- +- block: + - name: Backup TPM key + command: /opt/ibm/ess/tools/bin/.TPM/./esstpmkey backup --source-node {{ source_node }} --destination-node {{ dest_node }} --tpm-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file}} + vars: + source_node: "{{ hostvars[groups['emsvm'][0]]['scale_io_nodes_list'][0] }}" + dest_node: "{{ hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'][0] }}" + register: backup_key + when: backup + + - debug: + msg: "{{(backup_key.rc == 0) | ternary(backup_key.stdout.split('\n'), backup_key.stderr.split('\n')) }}" + + - name: Restore TPM key from backup + command: /opt/ibm/ess/tools/bin/.TPM/./esstpmkey restore --source-node {{ source_node }} --destination-node {{ dest_node }} --tpm-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file}} + vars: + source_node: "{{hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'][0] }}" + dest_node: "{{ hostvars[groups['emsvm'][0]]['scale_io_nodes_list'][0] }}" + register: restore_key + when: restore + + - debug: + msg: "{{(restore_key.rc == 0) | ternary(restore_key.stdout.split('\n'), restore_key.stderr.split('\n')) }}" + when: inventory_hostname in scale_ems_vm_nodes_list + + diff --git a/roles/sed_configure/tasks/tpm_ownership.yml b/roles/sed_configure/tasks/tpm_ownership.yml new file mode 100644 index 00000000..9736adfc --- /dev/null +++ b/roles/sed_configure/tasks/tpm_ownership.yml @@ -0,0 +1,95 @@ +--- +- name: Check if ownership already taken on IO nodes + shell: | + tpm2_getcap properties-variable | awk -F': *' '/ownerAuthSet:/ { v=tolower($2); print (v ~ /(^1$|set|true)/) ? 
1 : 0; exit }' + register: ownership_taken_io + changed_when: false + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] + +- name: Check if ownership already taken on utility node + shell: | + tpm2_getcap properties-variable | awk -F': *' '/ownerAuthSet:/ { v=tolower($2); print (v ~ /(^1$|set|true)/) ? 1 : 0; exit }' + register: ownership_taken_utility + changed_when: false + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] + +# skip the setup -> if ownership taken. We mention that ownership is taken, validate it. And continue to nv slot creation. +- block: + - name: Validate ownership password + debug: + msg: Ownership already taken. Skipping the tpm ownership setup. + + - name: Check the password + command: mmvdisk tpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ tpm_password_file }} + register: pwd_validation_io + + - debug: + msg: "Validated the Ownership successfully" + when: pwd_validation_io.rc == 0 + + - fail: + msg: "Ownership password did not match. Please verify" + when: pw_validation_io.rc != 0 + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] and ownership_taken_io.stdout == '1' and not change_password + +- block: + - name: Validate ownership password + debug: + msg: Ownership already taken. Skipping the tpm ownership setup. + + - name: Check the password + command: /opt/ibm/ess/tools/bin/.TPM/./esstpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ tpm_password_file }} + register: pwd_validation_utility + + - debug: + msg: "Validated the Ownership successfully" + when: pwd_validation_utility.rc == 0 + + - fail: + msg: "Ownership password did not match. Please verify" + when: pw_validation_utility.rc != 0 + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] and ownership_taken_utility.stdout == '1' and not change_password + +# if tpm ownership not taken already, proceed to take it on io nodes +- block: + - name: Disable TPM clear + command: tpm2_clearcontrol -C l s + register: disable_operation + when: disable_clear + + - name: Take TPM ownership # ensure file permission is 600 + command: mmvdisk tpm setup --password-file {{ tpm_password_file }} + register: take_ownership_io + + - debug: + msg: "{{(take_ownership.rc == 0) | ternary(take_ownership_io.stdout.split('\n'),take_ownership_io.stderr.split('\n')) }}" + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] and ownership_taken_io.stdout == '0' and not change_password + +# if tpm ownership not taken already, proceed to take it on utility node +- block: + - name: Disable TPM clear + command: tpm2_clearcontrol -C l s + register: disable_operation + when: disable_clear + + - name: Take TPM ownership on utility node + command: /opt/ibm/ess/tools/bin/.TPM/./esstpm setup --password-file {{ tpm_password_file }} + register: take_ownership_utility + + - debug: + msg: "{{(take_ownership_utility.rc == 0) | ternary(take_ownership_utility.stdout.split('\n'), take_ownership_utility.stderr.split('\n')) }}" + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] and ownership_taken_utility.stdout == '0' and not change_password + +# execute only when change password is set to true. 
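+# Note: both the old and the new password files must already exist on the
+# nodes with 0600 permissions (see the role README); the change is applied
+# on the IO nodes via mmvdisk and on the utility node via esstpm.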
+- block: + - name: Change TPM Ownership password + command: mmvdisk tpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ new_tpm_password_file }} + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] + + - name: Change TPM Ownership password on utility node + command: /opt/ibm/ess/tools/bin/.TPM/./esstpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ new_tpm_password_file }} + when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] + when: change_password + + + diff --git a/samples/playbook_sed_tpm.yml b/samples/playbook_sed_tpm.yml new file mode 100644 index 00000000..c6b614a1 --- /dev/null +++ b/samples/playbook_sed_tpm.yml @@ -0,0 +1,71 @@ +--- + +- name: Collect the hosts + hosts: all + become: no + tasks: + - name: Get the node classes + command: mmlsnodeclass + register: node_class_output + changed_when: false + run_once: true + + - name: Extract and create list for x86 hosts + set_fact: + scale_io_nodes_list: "{{ ( (node_class_output.stdout | regex_search(node_class_name, '\\1') | first ) | default('')).split() }}" + vars: + node_class_name: '(?m)^ess3500_x86_64\s+([\s\S]*?)(?=\n\S|\Z)' + register: io_node_collection + run_once: true + + - debug: + var: io_node_collection + run_once: true + + - name: Get utility node + command: ssh -G utilitybaremetal | grep hostname + register: utility_collection_output + changed_when: false + run_once: true + + - name: Extract and create list for utility node + set_fact: + scale_utility_nodes_list: "{{ ( (utility_collection_output.stdout | regex_search(key, '\\1') | first ) | default('')).split() }}" + vars: + key: '(?m)hostname\s+([\s\S]*?)(?=\n\S|\Z)' + register: utility_node_collection + run_once: true + + - debug: + var: utility_node_collection + run_once: true + + - name: Add io nodes to hosts + add_host: + name: "{{ item }}" + groups: ionodes + loop: "{{scale_io_nodes_list}}" + + - name: Add utility nodes to hosts + add_host: + name: "{{ item }}" + groups: utilitynode + loop: "{{scale_utility_nodes_list}}" + + +- hosts: all + become: no + + vars: + tpm_password_file: "/root/tpm_pwd" + disable_clear: true + nv_slot_count: 1 + nv_slot_id: "0x1500001" + generate: true + migrate: true + enroll_drive: true + rekey_drive: false + recovery_group: "rg1" + + roles: + - sed_configure \ No newline at end of file From 3bb87edd12d96c787811b98135ac75b656259eb9 Mon Sep 17 00:00:00 2001 From: Dhananjay Sonawane Date: Fri, 26 Sep 2025 10:22:30 +0530 Subject: [PATCH 10/10] Updated sed tpm vars and play logic Signed-off-by: Dhananjay Sonawane --- .github/ISSUE_TEMPLATE/bug_report.md | 35 ------ .github/ISSUE_TEMPLATE/feature_request.md | 20 ---- roles/sed_configure/README.md | 2 +- roles/sed_configure/defaults/main.yml | 13 ++- roles/sed_configure/meta/main.yml | 5 +- roles/sed_configure/tasks/check_prereq.yml | 103 +++++++++--------- roles/sed_configure/tasks/create_nv_slot.yml | 12 +- .../sed_configure/tasks/enroll_sed_drive.yml | 12 +- .../sed_configure/tasks/generate_tpm_key.yml | 25 ++--- roles/sed_configure/tasks/main.yml | 21 ++-- roles/sed_configure/tasks/manage_key.yml | 17 ++- roles/sed_configure/tasks/tpm_ownership.yml | 60 +++++----- roles/tpm_drives_enrol/defaults/main.yml | 7 -- roles/tpm_drives_enrol/meta/main.yml | 19 ---- roles/tpm_drives_enrol/tasks/main.yml | 8 -- roles/tpm_drives_enrol/vars/main.yml | 0 roles/tpm_key_generate/defaults/main.yml | 8 -- roles/tpm_key_generate/meta/main.yml | 0 roles/tpm_key_generate/tasks/main.yml 
| 8 -- roles/tpm_key_generate/vars/main.yml | 0 roles/tpm_key_manage/defaults/main.yml | 7 -- roles/tpm_key_manage/meta/main.yml | 19 ---- roles/tpm_key_manage/tasks/main.yml | 12 -- roles/tpm_key_manage/vars/main.yml | 0 roles/tpm_nv_slots_create/defaults/main.yml | 3 - roles/tpm_nv_slots_create/meta/main.yml | 19 ---- roles/tpm_nv_slots_create/tasks/main.yml | 3 - roles/tpm_nv_slots_create/vars/main.yml | 0 roles/tpm_ownership/defaults/main.yml | 2 - roles/tpm_ownership/meta/main.yml | 19 ---- roles/tpm_ownership/tasks/check.yml | 24 ---- roles/tpm_ownership/tasks/main.yml | 17 --- samples/playbook_sed_tpm.yml | 68 +----------- samples/vars/sed_tpm_vars.yml | 19 ++++ 34 files changed, 160 insertions(+), 427 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 roles/tpm_drives_enrol/defaults/main.yml delete mode 100644 roles/tpm_drives_enrol/meta/main.yml delete mode 100644 roles/tpm_drives_enrol/tasks/main.yml delete mode 100644 roles/tpm_drives_enrol/vars/main.yml delete mode 100644 roles/tpm_key_generate/defaults/main.yml delete mode 100644 roles/tpm_key_generate/meta/main.yml delete mode 100644 roles/tpm_key_generate/tasks/main.yml delete mode 100644 roles/tpm_key_generate/vars/main.yml delete mode 100644 roles/tpm_key_manage/defaults/main.yml delete mode 100644 roles/tpm_key_manage/meta/main.yml delete mode 100644 roles/tpm_key_manage/tasks/main.yml delete mode 100644 roles/tpm_key_manage/vars/main.yml delete mode 100644 roles/tpm_nv_slots_create/defaults/main.yml delete mode 100644 roles/tpm_nv_slots_create/meta/main.yml delete mode 100644 roles/tpm_nv_slots_create/tasks/main.yml delete mode 100644 roles/tpm_nv_slots_create/vars/main.yml delete mode 100644 roles/tpm_ownership/defaults/main.yml delete mode 100644 roles/tpm_ownership/meta/main.yml delete mode 100644 roles/tpm_ownership/tasks/check.yml delete mode 100644 roles/tpm_ownership/tasks/main.yml create mode 100644 samples/vars/sed_tpm_vars.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 58586726..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: 'Type: Bug' -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**To Reproduce** -Steps to reproduce the behavior: -1. ... - - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Environment** -Please run the following an paste your output here: -``` bash -# Developement -ansible --version -python --version - -# Deployment -rpm -qa | grep gpfs -``` -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Additional context** -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index a6caefe0..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: 'Type: Enhancement' -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. 
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/roles/sed_configure/README.md b/roles/sed_configure/README.md
index 0d77564a..5260478c 100644
--- a/roles/sed_configure/README.md
+++ b/roles/sed_configure/README.md
@@ -4,7 +4,7 @@ Role Definition
 - Definition:
   - The self-encrypting drives (SED) support protects data at rest on IBM Storage Scale System drives.
   - TPM is a specialized hardware security chip that provides secure cryptographic functions.
-  - mmvdisk tpm , esstpm and esstpm key provides options to setup the tpm ,generate keys, enroll drives with the generated keys in the IBM Storage Scale cluster.
+  - mmvdisk tpm, esstpm, and esstpmkey provide options to set up the TPM, generate keys, and enroll drives with the generated keys in the IBM Storage Scale cluster.
   - These operations are performed on the I/O nodes and the keys generated are also backed up on the utility node.
diff --git a/roles/sed_configure/defaults/main.yml b/roles/sed_configure/defaults/main.yml
index 277d298a..84144605 100644
--- a/roles/sed_configure/defaults/main.yml
+++ b/roles/sed_configure/defaults/main.yml
@@ -3,13 +3,18 @@ new_tpm_password_file: "/path/to/newpassword.txt"
 disable_clear: true
 change_password: false
 nv_slot_id: "0x01500000"
-nv_slot_count: 2
+nv_slot_count: 4
 recovery_group: "RecoveryGroupName"
 enroll_drive: true
 rekey_drive: false
 generate: true
 migrate: true
 backup_key: true
-restore_key: true
-
-
+restore_key: false
+io_nodes:
+  - ionode1
+  - ionode2
+utility_nodes:
+  - utilitynode
+emsvm:
+  - emsvmnode
\ No newline at end of file
diff --git a/roles/sed_configure/meta/main.yml b/roles/sed_configure/meta/main.yml
index 55f890b6..e48de9b9 100644
--- a/roles/sed_configure/meta/main.yml
+++ b/roles/sed_configure/meta/main.yml
@@ -11,9 +11,8 @@ galaxy_info:
   platforms:
     - name: EL
       versions:
-        - 7
-        - 8
+        - 9
 
   galaxy_tags: []
 
-dependencies:
\ No newline at end of file
+dependencies: []
\ No newline at end of file
diff --git a/roles/sed_configure/tasks/check_prereq.yml b/roles/sed_configure/tasks/check_prereq.yml
index 31a7e723..89ed00d8 100644
--- a/roles/sed_configure/tasks/check_prereq.yml
+++ b/roles/sed_configure/tasks/check_prereq.yml
@@ -1,53 +1,50 @@
----
- - block:
-   # Check the OpenSSL version and fail if the version is < 3
-   - name: Check OpenSSL version
-     command: openssl version
-     register: openssl_version_output
-     changed_when: false
-     failed_when: openssl_version_output.stdout | regex_search('OpenSSL\s([0-2]\\.[0-9]+)')
-
-   - debug:
-       msg: "{{(openssl_version_output.rc == 0) | ternary(openssl_version_output.stdout.split('\n'), openssl_version_output.stderr.split('\n')) }}"
-
-   # Check the OS version and fail if the version is < RHEL 9
-   - name: Check OS version
-     command: cat /etc/redhat-release
-     register: os_version_output
-     changed_when: false
-     failed_when: os_version_output.stdout | regex_search('release\s([0-8])')
-
-   - debug:
-       msg: "{{(os_version_output.rc == 0) | ternary(os_version_output.stdout.split('\n'), os_version_output.stderr.split('\n')) }}"
-   when: inventory_hostname == 'localhost' or inventory_hostname in scale_io_nodes_list
-
- - block:
-   - name: Check TPM presence
-     stat:
-       path: /dev/tpm0
-     register: tpm_device
-
-   - debug:
-       msg: "TPM device present"
-     when: tpm_device.stat.exists
-
-   - fail:
-       msg: "TPM is not enabled in BIOS. Please enable it manually before proceeding."
-     when: not tpm_device.stat.exists
-
-   - name: Check if tpm2-tools is installed
-     command: rpm -q tpm2-tools
-     register: tpm2_tools_check
-     ignore_errors: true
-     changed_when: false
-
-   - name: Install tpm2-tools if not present
-     yum:
-       name: tpm2-tools
-       state: present
-     when: tpm2_tools_check.rc != 0
-   when: inventory_hostname in scale_io_nodes_list
-
-
-
+---
+- block:
+  # Check the OpenSSL version and fail if the version is < 3
+  - name: Check OpenSSL version
+    command: openssl version
+    register: openssl_version_output
+    changed_when: false
+    failed_when: openssl_version_output.stdout | regex_search('OpenSSL\s+[0-2]\.')
+
+  - debug:
+      msg: "{{ (openssl_version_output.rc == 0) | ternary(openssl_version_output.stdout.split('\n'), openssl_version_output.stderr.split('\n')) }}"
+
+  # Check the OS version and fail if the version is < RHEL 9
+  - name: Check OS version
+    command: cat /etc/redhat-release
+    register: os_version_output
+    changed_when: false
+    failed_when: os_version_output.stdout | regex_search('release\s+[0-8]\.')
+
+  - debug:
+      msg: "{{ (os_version_output.rc == 0) | ternary(os_version_output.stdout.split('\n'), os_version_output.stderr.split('\n')) }}"
+  delegate_to: "{{ item }}"
+
+- block:
+  - name: Check TPM presence
+    stat:
+      path: /dev/tpm0
+    register: tpm_device
+
+  - debug:
+      msg: "TPM device present"
+    when: tpm_device.stat.exists
+
+  - fail:
+      msg: "TPM is not enabled in BIOS. Please enable it manually before proceeding."
+    when: not tpm_device.stat.exists
+
+  - name: Check if tpm2-tools is installed
+    command: rpm -q tpm2-tools
+    register: tpm2_tools_check
+    ignore_errors: true
+    changed_when: false
+
+  - name: Install tpm2-tools if not present
+    yum:
+      name: tpm2-tools
+      state: present
+    when: tpm2_tools_check.rc != 0
+  delegate_to: "{{ item }}"
+  when: item in io_nodes
\ No newline at end of file
diff --git a/roles/sed_configure/tasks/create_nv_slot.yml b/roles/sed_configure/tasks/create_nv_slot.yml
index fc65f864..2c3b4f51 100644
--- a/roles/sed_configure/tasks/create_nv_slot.yml
+++ b/roles/sed_configure/tasks/create_nv_slot.yml
@@ -1,5 +1,5 @@
 ---
-- block:
+- block:
   # Creation of NV slots on IO nodes
   - name: Create NV slots
     command: mmvdisk tpm createSlots --number-of-slots {{ nv_slot_count }} --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }}
     register: nv_slot_creation_io

   - debug:
       msg: "{{ (nv_slot_creation_io.rc == 0) | ternary(nv_slot_creation_io.stdout.split('\n'), nv_slot_creation_io.stderr.split('\n')) }}"
-  when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list']
-
-- block:
+  delegate_to: "{{ item }}"
+  when: item in io_nodes
+
+- block:
   # Creation of NV slots on utility nodes
   - name: Create NV slots on utility node
     command: /opt/ibm/ess/tools/bin/.TPM/./esstpm createslot --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }}
     register: nv_slot_creation_utility

   - debug:
       msg: "{{ (nv_slot_creation_utility.rc == 0) | ternary(nv_slot_creation_utility.stdout.split('\n'), nv_slot_creation_utility.stderr.split('\n')) }}"
-  when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list']
+  delegate_to: "{{ item }}"
+  when: item in utility_nodes
\ No newline at end of file
diff --git a/roles/sed_configure/tasks/enroll_sed_drive.yml b/roles/sed_configure/tasks/enroll_sed_drive.yml
index c108988e..3da3208e 100644
--- a/roles/sed_configure/tasks/enroll_sed_drive.yml
+++ b/roles/sed_configure/tasks/enroll_sed_drive.yml
@@ -2,23 +2,25 @@
 - block:
   # Enrolling the SED with the generated TPM key
   - name: Enroll drives with TPM key
-    command: mmvdisk sed enroll --recovery-group {{ recovery_group }} --tpm-slot-id {{ nv_slot_id }}
+    command: mmvdisk sed enroll --recovery-group {{ recovery_group }} --tpm-slot-id {{ nv_slot_id }} --confirm
     register: drive_enrollment

   - debug:
       msg: "{{ (drive_enrollment.rc == 0) | ternary(drive_enrollment.stdout.split('\n'), drive_enrollment.stderr.split('\n')) }}"
     failed_when: drive_enrollment.rc != 0
-  when: enroll_drive and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list']
+  delegate_to: "{{ io_nodes.0 }}"
   run_once: true
+  when: enroll_drive

-- block:
+- block:
   # Rekeying the SED with a new TPM key
   - name: Rekey drives with new TPM key
-    command: mmvdisk sed rekey --recovery-group {{ recovery_group }} --tpm-slot-id {{ nv_slot_id }}
+    command: mmvdisk sed rekey --recovery-group {{ recovery_group }} --tpm-slot-id {{ nv_slot_id }} --confirm
     register: drive_rekey

   - debug:
       msg: "{{ (drive_rekey.rc == 0) | ternary(drive_rekey.stdout.split('\n'), drive_rekey.stderr.split('\n')) }}"
     failed_when: drive_rekey.rc != 0
-  when: rekey_drives and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list']
-  run_once: true
\ No newline at end of file
+  delegate_to: "{{ io_nodes.0 }}"
+  run_once: true
+  when: rekey_drive
\ No newline at end of file
diff --git a/roles/sed_configure/tasks/generate_tpm_key.yml b/roles/sed_configure/tasks/generate_tpm_key.yml
index 6ebc852e..cfb30c8f 100644
--- a/roles/sed_configure/tasks/generate_tpm_key.yml
+++ b/roles/sed_configure/tasks/generate_tpm_key.yml
@@ -1,29 +1,28 @@
 ---
-# Generate only on one node. migrate to others.
-# Run both commands only on one single io node.
-
 - block:
-  # Generate a TPM key
+  # Generate a TPM key
   - name: Generate TPM key
     command: mmvdisk tpm genkey --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }}
     register: tpm_key_generate
-
+
   - debug:
       msg: "{{ (tpm_key_generate.rc == 0) | ternary(tpm_key_generate.stdout.split('\n'), tpm_key_generate.stderr.split('\n')) }}"
-    failed_when: tpm_key_generate.rc != 0
-  when: generate and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list']
+    failed_when: tpm_key_generate.rc != 0
+  delegate_to: "{{ io_nodes.0 }}"
+  when: generate
   run_once: true
-
-- block:
+
+- block:
   # Migrate the generated TPM key to other io nodes
   - name: Migrate TPM key to other nodes
-    command: mmvdisk tpm migratekey --nv-slot-id {{ nv_slot_id }} -s {{ inventory_hostname }} -N {{ target_nodes | join(',') }}
-    vars:
-      target_nodes: "{{ (hostvars[groups['emsvm'][0]]['scale_io_nodes_list'])[1:]}}"
+    command: mmvdisk tpm migratekey --nv-slot-id {{ nv_slot_id }} -s {{ io_nodes.0 }} -N {{ target_nodes | join(',') }}
+    vars:
+      target_nodes: "{{ io_nodes[1:] }}"
     register: tpm_key_migrate

   - debug:
       msg: "{{ (tpm_key_migrate.rc == 0) | ternary(tpm_key_migrate.stdout.split('\n'), tpm_key_migrate.stderr.split('\n')) }}"
     failed_when: tpm_key_migrate.rc != 0
-  when: migrate and inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list']
+  delegate_to: "{{ io_nodes.0 }}"
+  when: migrate
   run_once: true
\ No newline at end of file
diff --git a/roles/sed_configure/tasks/main.yml b/roles/sed_configure/tasks/main.yml
index ab76a93d..682cb55e 100644
--- a/roles/sed_configure/tasks/main.yml
+++ b/roles/sed_configure/tasks/main.yml
@@ -1,21 +1,22 @@
 ---
-- import_tasks: check_prereq.yml
+- include_tasks: check_prereq.yml
   tags: check prerequisites
+  loop: "{{ io_nodes + utility_nodes }}"

-- import_tasks: tpm_ownership.yml
+- include_tasks: tpm_ownership.yml
   tags: tpm ownership
+  loop: "{{ io_nodes + utility_nodes }}"

-- import_tasks: create_nv_slot.yml
+- include_tasks: create_nv_slot.yml
   tags: create nv slot
+  loop: "{{ io_nodes + utility_nodes }}"

-- import_tasks: generate_tpm_key.yml
-  tags: generate tpm key
+- include_tasks: generate_tpm_key.yml
+  tags: generate tpm key

-- import_tasks: enroll_sed_drive.yml
+- include_tasks: enroll_sed_drive.yml
   tags: enroll sed drive

-- import_tasks: manage_key.yml
+- include_tasks: manage_key.yml
   tags: restore and backup key
-
-
-
+  loop: "{{ emsvm }}"
\ No newline at end of file
diff --git a/roles/sed_configure/tasks/manage_key.yml b/roles/sed_configure/tasks/manage_key.yml
index 853d5509..af2e1478 100644
--- a/roles/sed_configure/tasks/manage_key.yml
+++ b/roles/sed_configure/tasks/manage_key.yml
@@ -1,10 +1,10 @@
 ---
 - block:
   - name: Backup TPM key
-    command: /opt/ibm/ess/tools/bin/.TPM/./esstpmkey backup --source-node {{ source_node }} --destination-node {{ dest_node }} --tpm-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }}
+    command: /opt/ibm/ess/tools/bin/.TPM/./esstpmkey backup --source-node {{ source_node }} --destination-node {{ dest_node }} --tpm-slot-id {{ nv_slot_id }} --destination-node-password-file {{ tpm_password_file }}
     vars:
-      source_node: "{{ hostvars[groups['emsvm'][0]]['scale_io_nodes_list'][0] }}"
-      dest_node: "{{ hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'][0] }}"
-    register: backup_key
-    when: backup
+      source_node: "{{ io_nodes[0] }}"
+      dest_node: "{{ utility_nodes[0] }}"
+    register: backup_key_result
+    when: backup_key

@@ -12,15 +12,14 @@
-  - debug:
-      msg: "{{(backup_key.rc == 0) | ternary(backup_key.stdout.split('\n'), backup_key.stderr.split('\n')) }}"
+  - debug:
+      msg: "{{ (backup_key_result.rc == 0) | ternary(backup_key_result.stdout.split('\n'), backup_key_result.stderr.split('\n')) }}"
+    when: backup_key

   - name: Restore TPM key from backup
-    command: /opt/ibm/ess/tools/bin/.TPM/./esstpmkey restore --source-node {{ source_node }} --destination-node {{ dest_node }} --tpm-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }}
+    command: /opt/ibm/ess/tools/bin/.TPM/./esstpmkey restore --source-node {{ source_node }} --destination-node {{ dest_node }} --tpm-slot-id {{ nv_slot_id }} --source-node-password-file {{ tpm_password_file }}
     vars:
-      source_node: "{{ hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'][0] }}"
-      dest_node: "{{ hostvars[groups['emsvm'][0]]['scale_io_nodes_list'][0] }}"
-    register: restore_key
-    when: restore
+      source_node: "{{ utility_nodes[0] }}"
+      dest_node: "{{ io_nodes[0] }}"
+    register: restore_key_result
+    when: restore_key

-  - debug:
-      msg: "{{(restore_key.rc == 0) | ternary(restore_key.stdout.split('\n'), restore_key.stderr.split('\n')) }}"
-  when: inventory_hostname in scale_ems_vm_nodes_list
-
-
+  - debug:
+      msg: "{{ (restore_key_result.rc == 0) | ternary(restore_key_result.stdout.split('\n'), restore_key_result.stderr.split('\n')) }}"
+    when: restore_key
+  delegate_to: "{{ item }}"
+  run_once: true
\ No newline at end of file
diff --git a/roles/sed_configure/tasks/tpm_ownership.yml b/roles/sed_configure/tasks/tpm_ownership.yml
index 9736adfc..8a6edc01 100644
--- a/roles/sed_configure/tasks/tpm_ownership.yml
+++ b/roles/sed_configure/tasks/tpm_ownership.yml
@@ -1,25 +1,27 @@
 ---
 - name: Check if ownership already taken on IO nodes
-  shell: |
+  shell: |
     tpm2_getcap properties-variable | awk -F': *' '/ownerAuthSet:/ { v=tolower($2); print (v ~ /(^1$|set|true)/) ? 1 : 0; exit }'
   register: ownership_taken_io
   changed_when: false
-  when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list']
+  delegate_to: "{{ item }}"
+  when: item in io_nodes

 - name: Check if ownership already taken on utility node
-  shell: |
+  shell: |
    tpm2_getcap properties-variable | awk -F': *' '/ownerAuthSet:/ { v=tolower($2); print (v ~ /(^1$|set|true)/) ? 
1 : 0; exit }' register: ownership_taken_utility changed_when: false - when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] + delegate_to: "{{ item }}" + when: item in utility_nodes -# skip the setup -> if ownership taken. We mention that ownership is taken, validate it. And continue to nv slot creation. +# skip the setup -> if ownership taken. We mention that ownership is taken, validate it. And continue to nv slot creation. - block: - name: Validate ownership password debug: - msg: Ownership already taken. Skipping the tpm ownership setup. + msg: Ownership already taken. Skipping the tpm ownership setup. - - name: Check the password + - name: Check the password command: mmvdisk tpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ tpm_password_file }} register: pwd_validation_io @@ -29,15 +31,16 @@ - fail: msg: "Ownership password did not match. Please verify" - when: pw_validation_io.rc != 0 - when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] and ownership_taken_io.stdout == '1' and not change_password + when: pwd_validation_io.rc != 0 + delegate_to: "{{ item }}" + when: item in io_nodes and ownership_taken_io.stdout == '1' and not change_password - block: - name: Validate ownership password debug: - msg: Ownership already taken. Skipping the tpm ownership setup. + msg: Ownership already taken. Skipping the tpm ownership setup. - - name: Check the password + - name: Check the password command: /opt/ibm/ess/tools/bin/.TPM/./esstpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ tpm_password_file }} register: pwd_validation_utility @@ -47,26 +50,28 @@ - fail: msg: "Ownership password did not match. Please verify" - when: pw_validation_utility.rc != 0 - when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] and ownership_taken_utility.stdout == '1' and not change_password + when: pwd_validation_utility.rc != 0 + delegate_to: "{{ item }}" + when: item in utility_nodes and ownership_taken_utility.stdout == '1' and not change_password # if tpm ownership not taken already, proceed to take it on io nodes -- block: +- block: - name: Disable TPM clear command: tpm2_clearcontrol -C l s register: disable_operation when: disable_clear - - - name: Take TPM ownership # ensure file permission is 600 + + - name: Take TPM ownership # ensure file permission is 600 command: mmvdisk tpm setup --password-file {{ tpm_password_file }} register: take_ownership_io - debug: - msg: "{{(take_ownership.rc == 0) | ternary(take_ownership_io.stdout.split('\n'),take_ownership_io.stderr.split('\n')) }}" - when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list'] and ownership_taken_io.stdout == '0' and not change_password + msg: "{{(take_ownership_io.rc == 0) | ternary(take_ownership_io.stdout.split('\n'),take_ownership_io.stderr.split('\n')) }}" + delegate_to: "{{ item }}" + when: item in io_nodes and ownership_taken_io.stdout == '0' and not change_password # if tpm ownership not taken already, proceed to take it on utility node -- block: +- block: - name: Disable TPM clear command: tpm2_clearcontrol -C l s register: disable_operation @@ -78,18 +83,17 @@ - debug: msg: "{{(take_ownership_utility.rc == 0) | ternary(take_ownership_utility.stdout.split('\n'), take_ownership_utility.stderr.split('\n')) }}" - when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list'] and ownership_taken_utility.stdout == '0' and not change_password + 
delegate_to: "{{ item }}"
+  when: item in utility_nodes and ownership_taken_utility.stdout == '0' and not change_password

-# execute only when change password is set to true.
+# execute only when change password is set to true.
 - block:
   - name: Change TPM Ownership password
     command: mmvdisk tpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ new_tpm_password_file }}
-    when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_io_nodes_list']
+    when: item in io_nodes

-  - name: Change TPM Ownership password on utility node
+  - name: Change TPM Ownership password on utility node
     command: /opt/ibm/ess/tools/bin/.TPM/./esstpm chpasswd --old-password-file {{ tpm_password_file }} --new-password-file {{ new_tpm_password_file }}
-    when: inventory_hostname in hostvars[groups['emsvm'][0]]['scale_utility_nodes_list']
-  when: change_password
-
-
-
+    when: item in utility_nodes
+  delegate_to: "{{ item }}"
+  when: change_password
\ No newline at end of file
diff --git a/roles/tpm_drives_enrol/defaults/main.yml b/roles/tpm_drives_enrol/defaults/main.yml
deleted file mode 100644
index b410b4a4..00000000
--- a/roles/tpm_drives_enrol/defaults/main.yml
+++ /dev/null
@@ -1,7 +0,0 @@
-nv_slot_id: "0x01500021"
-tpm_password_file: "/path/to/password.txt"
-recovery_group: RecoveryGroupName
-rkmid: RKMId
-key_uuid: KeyId
-rekey_drives: false
-
diff --git a/roles/tpm_drives_enrol/meta/main.yml b/roles/tpm_drives_enrol/meta/main.yml
deleted file mode 100644
index 55f890b6..00000000
--- a/roles/tpm_drives_enrol/meta/main.yml
+++ /dev/null
@@ -1,19 +0,0 @@
----
-galaxy_info:
-  author: IBM Corporation
-  description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS)
-  company: IBM
-
-  license: Apache-2.0
-
-  min_ansible_version: 2.9
-
-  platforms:
-    - name: EL
-      versions:
-        - 7
-        - 8
-
-  galaxy_tags: []
-
-dependencies:
\ No newline at end of file
diff --git a/roles/tpm_drives_enrol/tasks/main.yml b/roles/tpm_drives_enrol/tasks/main.yml
deleted file mode 100644
index 2cf56a04..00000000
--- a/roles/tpm_drives_enrol/tasks/main.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- name: Enroll drives with TPM key
-  command: mmvdisk sed enroll --recovery-group {{ recovery_group }} --rkmid {{ RKMId }} --key-uuid {{ KeyId }} --tpm-slot-id {{ nv_slot_id }}
-  when: !rekey_drives
-
-- name: Rekey drives with new TPM key
-  command: mmvdisk sed rekey --recovery-group {{ recovery_group }} --rkmid {{ RKMId }} --key-uuid {{ KeyId }} --tpm-slot-id {{ nv_slot_id }}
-  when: rekey_drives
\ No newline at end of file
diff --git a/roles/tpm_drives_enrol/vars/main.yml b/roles/tpm_drives_enrol/vars/main.yml
deleted file mode 100644
index e69de29b..00000000
diff --git a/roles/tpm_key_generate/defaults/main.yml b/roles/tpm_key_generate/defaults/main.yml
deleted file mode 100644
index 3921ad9c..00000000
--- a/roles/tpm_key_generate/defaults/main.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-nv_slot_id: "0x01500021"
-tpm_password_file: "/path/to/password.txt"
-source_node: node1
-target_nodes:
-  - node2
-  - node3
-generate: true
-migrate: false
diff --git a/roles/tpm_key_generate/meta/main.yml b/roles/tpm_key_generate/meta/main.yml
deleted file mode 100644
index e69de29b..00000000
diff --git a/roles/tpm_key_generate/tasks/main.yml b/roles/tpm_key_generate/tasks/main.yml
deleted file mode 100644
index 5115d47d..00000000
--- a/roles/tpm_key_generate/tasks/main.yml
+++ /dev/null
@@ -1,8 +0,0 @@
----
-- name: Generate TPM key
-  command: mmvdisk tpm genkey --nv-slot-id {{ 
nv_slot_id }} --password-file {{ tpm_password_file }} - when: generate - -- name: Migrate TPM key to other nodes - command: mmvdisk tpm migratekey --nv-slot-id {{ nv_slot_id }} -s {{ source_node }} -N {{ target_nodes }} - when: migrate diff --git a/roles/tpm_key_generate/vars/main.yml b/roles/tpm_key_generate/vars/main.yml deleted file mode 100644 index e69de29b..00000000 diff --git a/roles/tpm_key_manage/defaults/main.yml b/roles/tpm_key_manage/defaults/main.yml deleted file mode 100644 index 0e9ffa70..00000000 --- a/roles/tpm_key_manage/defaults/main.yml +++ /dev/null @@ -1,7 +0,0 @@ -tpm_slot_id: "0x01500021" -tpm_password_file: "/path/to/password.txt" -ess_io_node : node1 -utility_node: node2 -backup: false -restore: false -rotate: false diff --git a/roles/tpm_key_manage/meta/main.yml b/roles/tpm_key_manage/meta/main.yml deleted file mode 100644 index 55f890b6..00000000 --- a/roles/tpm_key_manage/meta/main.yml +++ /dev/null @@ -1,19 +0,0 @@ ---- -galaxy_info: - author: IBM Corporation - description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) - company: IBM - - license: Apache-2.0 - - min_ansible_version: 2.9 - - platforms: - - name: EL - versions: - - 7 - - 8 - - galaxy_tags: [] - -dependencies: \ No newline at end of file diff --git a/roles/tpm_key_manage/tasks/main.yml b/roles/tpm_key_manage/tasks/main.yml deleted file mode 100644 index 71fb117e..00000000 --- a/roles/tpm_key_manage/tasks/main.yml +++ /dev/null @@ -1,12 +0,0 @@ ---- -- name: Backup TPM key - command: esstpmkey backup --source-node {{ ess_io_node }} --destination-node {{ utility_node }} --tpm-slot-id {{ tpm_slot_id }} - when: backup - -- name: Rotate TPM key - command: mmvdisk tpm genkey --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }} - when: rotate - -- name: Restore TPM key from backup - command: esstpmkey restore --source-node {{ utility_node }} --destination-node {{ ess_io_node }} --tpm-slot-id {{ tpm_slot_id }} - when: restore diff --git a/roles/tpm_key_manage/vars/main.yml b/roles/tpm_key_manage/vars/main.yml deleted file mode 100644 index e69de29b..00000000 diff --git a/roles/tpm_nv_slots_create/defaults/main.yml b/roles/tpm_nv_slots_create/defaults/main.yml deleted file mode 100644 index 0349b08d..00000000 --- a/roles/tpm_nv_slots_create/defaults/main.yml +++ /dev/null @@ -1,3 +0,0 @@ -nv_slot_count: 2 -nv_slot_id: "0x01500021" -tpm_password_file: "/path/to/password.txt" diff --git a/roles/tpm_nv_slots_create/meta/main.yml b/roles/tpm_nv_slots_create/meta/main.yml deleted file mode 100644 index 55f890b6..00000000 --- a/roles/tpm_nv_slots_create/meta/main.yml +++ /dev/null @@ -1,19 +0,0 @@ ---- -galaxy_info: - author: IBM Corporation - description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) - company: IBM - - license: Apache-2.0 - - min_ansible_version: 2.9 - - platforms: - - name: EL - versions: - - 7 - - 8 - - galaxy_tags: [] - -dependencies: \ No newline at end of file diff --git a/roles/tpm_nv_slots_create/tasks/main.yml b/roles/tpm_nv_slots_create/tasks/main.yml deleted file mode 100644 index 39b17fbf..00000000 --- a/roles/tpm_nv_slots_create/tasks/main.yml +++ /dev/null @@ -1,3 +0,0 @@ ---- -- name: Create NV slots - command: mmvdisk tpm createSlots --number-of-slots {{ nv_slot_count }} --nv-slot-id {{ nv_slot_id }} --password-file {{ tpm_password_file }} diff --git a/roles/tpm_nv_slots_create/vars/main.yml b/roles/tpm_nv_slots_create/vars/main.yml deleted file mode 100644 index 
e69de29b..00000000 diff --git a/roles/tpm_ownership/defaults/main.yml b/roles/tpm_ownership/defaults/main.yml deleted file mode 100644 index f00ad472..00000000 --- a/roles/tpm_ownership/defaults/main.yml +++ /dev/null @@ -1,2 +0,0 @@ -tpm_password_file: "/etc/tpm_password.txt" -disable_clear: true diff --git a/roles/tpm_ownership/meta/main.yml b/roles/tpm_ownership/meta/main.yml deleted file mode 100644 index 55f890b6..00000000 --- a/roles/tpm_ownership/meta/main.yml +++ /dev/null @@ -1,19 +0,0 @@ ---- -galaxy_info: - author: IBM Corporation - description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS) - company: IBM - - license: Apache-2.0 - - min_ansible_version: 2.9 - - platforms: - - name: EL - versions: - - 7 - - 8 - - galaxy_tags: [] - -dependencies: \ No newline at end of file diff --git a/roles/tpm_ownership/tasks/check.yml b/roles/tpm_ownership/tasks/check.yml deleted file mode 100644 index 219cb1b2..00000000 --- a/roles/tpm_ownership/tasks/check.yml +++ /dev/null @@ -1,24 +0,0 @@ ---- -# requirements: -# RHEL 9+ -# OpenSSL 3+ - -- name: Check OpenSSL version - command: openssl version - register: openssl_version_output - changed_when: false - -# Stop the execution of playbook if required openSSL version not found. -- fail: - msg: "OpenSSL version 3 or higher is required for SED support with TPM." - when: openssl_version_output.stdout is search('OpenSSL\\s([0-2]\\.[0-9]+)') - -- name: Check OS version - command: cat /etc/redhat-release - register: os_version_output - changed_when: false - -# Stop the execution of playbook if required openSSL version not found. -- fail: - msg: "RHEL 9 or higher is required for SED support with TPM." - when: os_version_output.stdout is search('release\\s([0-8])') \ No newline at end of file diff --git a/roles/tpm_ownership/tasks/main.yml b/roles/tpm_ownership/tasks/main.yml deleted file mode 100644 index bca9ede2..00000000 --- a/roles/tpm_ownership/tasks/main.yml +++ /dev/null @@ -1,17 +0,0 @@ ---- -- import_tasks: check.yml - tags: check prerequisites - -# Do we need other ways to check tpm presence? 
-- name: Check TPM presence - command: ls /dev/tpm* - -- name: Disable TPM clear if required - command: tpm2_clearcontrol disable - when: disable_clear - - -# enabling tpm from bios is a manual process to be performed, before taking the ownership - -- name: Take TPM ownership - command: mmvdisk tpm setup --password-file {{ tpm_password_file }} diff --git a/samples/playbook_sed_tpm.yml b/samples/playbook_sed_tpm.yml index c6b614a1..0c43013c 100644 --- a/samples/playbook_sed_tpm.yml +++ b/samples/playbook_sed_tpm.yml @@ -1,71 +1,7 @@ --- - -- name: Collect the hosts - hosts: all - become: no - tasks: - - name: Get the node classes - command: mmlsnodeclass - register: node_class_output - changed_when: false - run_once: true - - - name: Extract and create list for x86 hosts - set_fact: - scale_io_nodes_list: "{{ ( (node_class_output.stdout | regex_search(node_class_name, '\\1') | first ) | default('')).split() }}" - vars: - node_class_name: '(?m)^ess3500_x86_64\s+([\s\S]*?)(?=\n\S|\Z)' - register: io_node_collection - run_once: true - - - debug: - var: io_node_collection - run_once: true - - - name: Get utility node - command: ssh -G utilitybaremetal | grep hostname - register: utility_collection_output - changed_when: false - run_once: true - - - name: Extract and create list for utility node - set_fact: - scale_utility_nodes_list: "{{ ( (utility_collection_output.stdout | regex_search(key, '\\1') | first ) | default('')).split() }}" - vars: - key: '(?m)hostname\s+([\s\S]*?)(?=\n\S|\Z)' - register: utility_node_collection - run_once: true - - - debug: - var: utility_node_collection - run_once: true - - - name: Add io nodes to hosts - add_host: - name: "{{ item }}" - groups: ionodes - loop: "{{scale_io_nodes_list}}" - - - name: Add utility nodes to hosts - add_host: - name: "{{ item }}" - groups: utilitynode - loop: "{{scale_utility_nodes_list}}" - - - hosts: all become: no - - vars: - tpm_password_file: "/root/tpm_pwd" - disable_clear: true - nv_slot_count: 1 - nv_slot_id: "0x1500001" - generate: true - migrate: true - enroll_drive: true - rekey_drive: false - recovery_group: "rg1" - + pre_tasks: + - include_vars: sed_tpm_vars.yml roles: - sed_configure \ No newline at end of file diff --git a/samples/vars/sed_tpm_vars.yml b/samples/vars/sed_tpm_vars.yml new file mode 100644 index 00000000..b6733293 --- /dev/null +++ b/samples/vars/sed_tpm_vars.yml @@ -0,0 +1,19 @@ +tpm_password_file: "/root/tpm_pwd" +new_tpm_password_file: "/path/to/newpassword.txt" +disable_clear: true +nv_slot_count: 4 +nv_slot_id: "0x1500000" +generate: true +migrate: true +enroll_drive: true +rekey_drive: false +backup_key: true +restore_key: false +recovery_group: "recoverygroup" +io_nodes: + - ionode1 + - ionode2 +utility_nodes: + - utilitynode +emsvm: + - emsvmnode \ No newline at end of file
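
Usage sketch (illustrative, not part of the patches above; the inventory file name and the extra-vars shown are assumptions): after PATCH 10, the node lists that earlier revisions discovered at runtime via mmlsnodeclass and ssh -G are supplied statically, so samples/vars/sed_tpm_vars.yml must first be edited so that io_nodes, utility_nodes, emsvm, recovery_group, and tpm_password_file match the target ESS environment. A minimal run could then look like:

    # One-time SED/TPM setup: prereq checks, TPM ownership, NV slot
    # creation, key generation/migration, drive enrollment, key backup.
    ansible-playbook -i inventory samples/playbook_sed_tpm.yml

    # Rotating the TPM owner password by overriding the role switches
    # defined in roles/sed_configure/defaults/main.yml:
    ansible-playbook -i inventory samples/playbook_sed_tpm.yml \
        -e change_password=true -e new_tpm_password_file=/root/tpm_pwd_new

Because the role now drives all node-specific work through delegate_to from the vars lists, the play itself only needs to target a single host with SSH access to the I/O, utility, and EMS VM nodes.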