From 6dd30fa4196f93ed84c2689c25701838b372c69a Mon Sep 17 00:00:00 2001
From: Rajan Mishra
Date: Thu, 4 Jan 2024 10:29:16 +0100
Subject: [PATCH 1/4] Fix for remote mount

Signed-off-by: Rajan Mishra
---
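Note (review comment, ignored by git am): the old expression closed the Jinja2 block too early, so "+ [ '...' ]" was appended as literal text and owning_nodes_name became a string such as "['node1'] + [ 'node2' ]" instead of a list. Keeping the concatenation inside a single expression preserves the list type. A minimal sketch of the pattern, with made-up variable and item names:

    - set_fact:
        demo_list: []

    - set_fact:
        demo_list: "{{ demo_list + [item] }}"
      with_items:
        - node1
        - node2
      # demo_list is now the list ['node1', 'node2'], not a concatenated string
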
 roles/remotemount_configure/tasks/remotecluster.yml | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/roles/remotemount_configure/tasks/remotecluster.yml b/roles/remotemount_configure/tasks/remotecluster.yml
index 4da9dba6..5770f24f 100644
--- a/roles/remotemount_configure/tasks/remotecluster.yml
+++ b/roles/remotemount_configure/tasks/remotecluster.yml
@@ -285,9 +285,12 @@
     run_once: True
 
   - set_fact:
-      owning_nodes_name: "{{ owning_nodes_name }} + [ '{{ item.adminNodeName }}' ]"
+      owning_nodes_name: "{{ owning_nodes_name + [item.adminNodeName] }}"
     with_items: "{{ owning_cluster_nodes.json.nodes }}"
     run_once: True
+
+  - debug:
+      msg: "{{owning_nodes_name}}"
 
 #
 # This Section is when using daemonNodeName
@@ -312,7 +315,7 @@
     run_once: True
 
   - set_fact:
-      owning_daemon_nodes_name: "{{ owning_daemon_nodes_name }} + [ '{{ item.json.nodes.0.network.daemonNodeName }}' ]"
+      owning_daemon_nodes_name: "{{ owning_daemon_nodes_name + [item.json.nodes.0.network.daemonNodeName] }}"
     with_items: "{{ owning_cluster_daemonnodes.results }}"
     run_once: True
 

From b93247f61dfccfac167ffdf0f398aa28bbe91183 Mon Sep 17 00:00:00 2001
From: Rajan Mishra
Date: Sun, 28 Apr 2024 15:25:14 +0200
Subject: [PATCH 2/4] Fixed sync issue

Signed-off-by: Rajan Mishra
---
 roles/remotemount_configure/tasks/remotecluster.yml | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/roles/remotemount_configure/tasks/remotecluster.yml b/roles/remotemount_configure/tasks/remotecluster.yml
index 5770f24f..99e3e03e 100644
--- a/roles/remotemount_configure/tasks/remotecluster.yml
+++ b/roles/remotemount_configure/tasks/remotecluster.yml
@@ -288,9 +288,6 @@
       owning_nodes_name: "{{ owning_nodes_name + [item.adminNodeName] }}"
     with_items: "{{ owning_cluster_nodes.json.nodes }}"
     run_once: True
-
-  - debug:
-      msg: "{{owning_nodes_name}}"
 
 #
 # This Section is when using daemonNodeName

From 0dbe217bf170659484a85fb838eb6f10ad305f1f Mon Sep 17 00:00:00 2001
From: Rajan Mishra
Date: Wed, 28 Aug 2024 19:45:27 +0200
Subject: [PATCH 3/4] CES S3 upgrade support role

Signed-off-by: Rajan Mishra
---
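Note (review comment, ignored by git am): the new role follows the same layout as the collection's other upgrade roles: tasks/install.yml derives the installation method from whichever scale_install_* variable is defined (repository, local package, remote package, or package directory), builds the scale_install_all_packages list, and then hands off to the yum, zypper, or apt task file for the detected distribution. A rough usage sketch for the repository method; the playbook name, host group, and repository URL below are placeholders, not values defined by this patch:

    # upgrade_s3.yml (hypothetical playbook)
    - hosts: ces_s3_nodes
      vars:
        scale_install_repository_url: http://repo.example.com/spectrum_scale/
      roles:
        - ibm.spectrum_scale.s3_upgrade
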
 roles/s3_upgrade/README.md                    |   1 +
 roles/s3_upgrade/defaults/main.yml            |  20 +++
 roles/s3_upgrade/handlers/main.yml            |   4 +
 roles/s3_upgrade/meta/main.yml                |  20 +++
 roles/s3_upgrade/tasks/apt/install.yml        |  15 ++
 roles/s3_upgrade/tasks/install.yml            |  69 +++++++++
 roles/s3_upgrade/tasks/install_dir_pkg.yml    |  77 ++++++++++
 roles/s3_upgrade/tasks/install_local_pkg.yml  | 137 ++++++++++++++++++
 roles/s3_upgrade/tasks/install_remote_pkg.yml | 109 ++++++++++++++
 roles/s3_upgrade/tasks/install_repository.yml |  31 ++++
 roles/s3_upgrade/tasks/main.yml               |   4 +
 roles/s3_upgrade/tasks/yum/install.yml        |   6 +
 roles/s3_upgrade/tasks/zypper/install.yml     |   6 +
 roles/s3_upgrade/vars/main.yml                |  10 ++
 14 files changed, 509 insertions(+)
 create mode 120000 roles/s3_upgrade/README.md
 create mode 100644 roles/s3_upgrade/defaults/main.yml
 create mode 100644 roles/s3_upgrade/handlers/main.yml
 create mode 100644 roles/s3_upgrade/meta/main.yml
 create mode 100644 roles/s3_upgrade/tasks/apt/install.yml
 create mode 100644 roles/s3_upgrade/tasks/install.yml
 create mode 100644 roles/s3_upgrade/tasks/install_dir_pkg.yml
 create mode 100644 roles/s3_upgrade/tasks/install_local_pkg.yml
 create mode 100644 roles/s3_upgrade/tasks/install_remote_pkg.yml
 create mode 100644 roles/s3_upgrade/tasks/install_repository.yml
 create mode 100644 roles/s3_upgrade/tasks/main.yml
 create mode 100644 roles/s3_upgrade/tasks/yum/install.yml
 create mode 100644 roles/s3_upgrade/tasks/zypper/install.yml
 create mode 100644 roles/s3_upgrade/vars/main.yml

diff --git a/roles/s3_upgrade/README.md b/roles/s3_upgrade/README.md
new file mode 120000
index 00000000..6a3df305
--- /dev/null
+++ b/roles/s3_upgrade/README.md
@@ -0,0 +1 @@
+../../docs/README.NFS.md
\ No newline at end of file
diff --git a/roles/s3_upgrade/defaults/main.yml b/roles/s3_upgrade/defaults/main.yml
new file mode 100644
index 00000000..a694d569
--- /dev/null
+++ b/roles/s3_upgrade/defaults/main.yml
@@ -0,0 +1,20 @@
+---
+# Default variables for the IBM Spectrum Scale (S3) role -
+# either edit this file or define your own variables to override the defaults
+
+## Specify the URL of the (existing) Spectrum Scale YUM/apt/zypper repository
+#scale_install_s3_repository_rpms: http:///s3_rpms/
+#scale_install_s3_repository_debs: http:///s3_debs/
+#scale_install_s3_repository_rpms_sles: http:///s3_rpms/sles12/
+
+## List of S3 packages to install
+scale_s3_packages:
+- noobaa-core
+- gpfs.mms3
+
+## Temporary directory to copy installation package to
+## (local package installation method)
+scale_install_localpkg_tmpdir_path: /tmp
+
+## Flag to install s3 debug package
+scale_s3_install_debuginfo: true
diff --git a/roles/s3_upgrade/handlers/main.yml b/roles/s3_upgrade/handlers/main.yml
new file mode 100644
index 00000000..2e896124
--- /dev/null
+++ b/roles/s3_upgrade/handlers/main.yml
@@ -0,0 +1,4 @@
+---
+# handlers file for node
+- name: yum-clean-metadata
+  command: yum clean metadata
diff --git a/roles/s3_upgrade/meta/main.yml b/roles/s3_upgrade/meta/main.yml
new file mode 100644
index 00000000..d32d632b
--- /dev/null
+++ b/roles/s3_upgrade/meta/main.yml
@@ -0,0 +1,20 @@
+---
+galaxy_info:
+  author: IBM Corporation
+  description: Highly-customizable Ansible role for installing and configuring IBM Spectrum Scale (GPFS)
+  company: IBM
+
+  license: Apache-2.0
+
+  min_ansible_version: 2.9
+
+  platforms:
+    - name: EL
+      versions:
+        - 7
+        - 8
+
+  galaxy_tags: []
+
+dependencies:
+  - ibm.spectrum_scale.core_common
diff --git a/roles/s3_upgrade/tasks/apt/install.yml b/roles/s3_upgrade/tasks/apt/install.yml
new file mode 100644
index 00000000..75fd2f00
--- /dev/null
+++ b/roles/s3_upgrade/tasks/apt/install.yml
@@ -0,0 +1,15 @@
+---
+- name: upgrade | Upgrade s3 packages
+  package:
+    name: "{{ scale_install_all_packages }}"
+    state: latest
+  when: scale_install_repository_url is defined
+
+
+- name: upgrade | Upgrade GPFS S3 deb
+  apt:
+    deb: "{{ item }}"
+    state: latest
+  when: scale_install_repository_url is not defined
+  with_items:
+    - "{{ scale_install_all_packages }}"
diff --git a/roles/s3_upgrade/tasks/install.yml b/roles/s3_upgrade/tasks/install.yml
new file mode 100644
index 00000000..88264e77
--- /dev/null
+++ b/roles/s3_upgrade/tasks/install.yml
@@ -0,0 +1,69 @@
+---
+# Install or update RPMs
+# Ensure that installation method was chosen during previous role
+- block:
+    - name: upgrade | Check for repository installation method
+      set_fact:
+        scale_installmethod: repository
+      when:
+        - scale_install_repository_url is defined
+
+    - name: upgrade | Check for localpkg installation method
+      set_fact:
+        scale_installmethod: local_pkg
+      when:
+        - scale_install_repository_url is undefined
+        - scale_install_remotepkg_path is undefined
+        - scale_install_localpkg_path is defined
+
+    - name: upgrade | Check for remotepkg installation method
+      set_fact:
+        scale_installmethod: remote_pkg
+      when:
+        - scale_install_repository_url is undefined
+        - scale_install_remotepkg_path is defined
+
+    - name: upgrade | Check for directory package installation method
+      set_fact:
+        scale_installmethod: dir_pkg
+      when:
+        - scale_install_repository_url is undefined
+        - scale_install_remotepkg_path is undefined
+        - scale_install_localpkg_path is undefined
+        - scale_install_directory_pkg_path is defined
+
+    - name: upgrade | Check installation method
+      assert:
+        that: scale_installmethod is defined
+        msg: >-
+          Please set the appropriate variable 'scale_install_*' for your desired
+          installation method!
+  run_once: true
+  delegate_to: localhost
+
+# Run chosen installation method to get list of RPMs
+
+- name: upgrade | Initialize list of packages
+  set_fact:
+    scale_install_all_packages: []
+
+- name: upgrade | Set the extracted package directory path
+  set_fact:
+    s3_extracted_path: "{{ scale_extracted_path }}"
+
+- name: upgrade | Stat extracted packages directory
+  stat:
+    path: "{{ s3_extracted_path }}"
+  register: scale_extracted_gpfs_dir
+
+- include_tasks: install_{{ scale_installmethod }}.yml
+
+- import_tasks: apt/install.yml
+  when: ansible_distribution in scale_ubuntu_distribution
+
+- import_tasks: yum/install.yml
+  when: ansible_distribution in scale_rhel_distribution
+
+- import_tasks: zypper/install.yml
+  when: ansible_distribution in scale_sles_distribution
+
diff --git a/roles/s3_upgrade/tasks/install_dir_pkg.yml b/roles/s3_upgrade/tasks/install_dir_pkg.yml
new file mode 100644
index 00000000..0dc1730a
--- /dev/null
+++ b/roles/s3_upgrade/tasks/install_dir_pkg.yml
@@ -0,0 +1,77 @@
+---
+# Dir package installation method
+
+- block:  ## run_once: true
+    - name: install | Stat directory installation package
+      stat:
+        path: "{{ scale_install_directory_pkg_path }}"
+      register: scale_install_dirpkg
+
+    - name: install | Check directory installation package
+      assert:
+        that: scale_install_dirpkg.stat.exists
+        msg: >-
+          Please set the variable 'scale_install_directory_pkg_path' to point to the
+          local installation package (accessible on Ansible control machine)!
+  run_once: true
+  delegate_to: localhost
+
+- name: install | Create default directory
+  file:
+    path: "{{ scale_extracted_path }}"
+    state: directory
+    mode: a+x
+    recurse: yes
+
+- name: install | Stat extracted packages
+  stat:
+    path: "{{ scale_extracted_path + '/' + scale_install_directory_pkg_path | basename }}"
+  register: scale_install_gpfs_packagedir
+
+#
+# Copy installation directory package to default
+#
+- block:
+    - name: install | Copy installation package to node
+      copy:
+        src: "{{ scale_install_directory_pkg_path }}"
+        dest: "{{ scale_extracted_path }}"
+        mode: a+x
+
+- name: install | Set installation package path
+  set_fact:
+    dir_path: "{{ scale_extracted_path + '/' + scale_install_directory_pkg_path | basename }}"
+
+- name: install | gpfs base path
+  set_fact:
+    gpfs_path_url: "{{ dir_path }}"
+  when: scale_install_directory_pkg_path is defined
+
+#
+# Find noobaa-core
+#
+#
+
+- block:  ## when: host is defined as a protocol node
+
+    - name: install | Find noobaa-core (noobaa-core) package
+      find:
+        paths: "{{ gpfs_path_url }}"
+        patterns: noobaa-core*
+      register: scale_install_gpfs_s3
+
+    - name: install | Check valid GPFS (s3) package
+      assert:
+        that: scale_install_gpfs_s3.matched > 0
+        msg: "No S3 (noobaa-core) package found {{ gpfs_path_url }}/noobaa-core*"
+
+    - name: install | Add GPFS s3 package to list
+      vars:
+        current_package: "{{ item.path }}"
+      set_fact:
+        scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}"
+      with_items:
+        - "{{ scale_install_gpfs_s3.files }}"
+
+- debug:
+    msg: "{{ scale_install_all_packages }}"
diff --git a/roles/s3_upgrade/tasks/install_local_pkg.yml b/roles/s3_upgrade/tasks/install_local_pkg.yml
new file mode 100644
index 00000000..27606923
--- /dev/null
+++ b/roles/s3_upgrade/tasks/install_local_pkg.yml
@@ -0,0 +1,137 @@
+---
+# Local package installation method
+- block:  ## run_once: true
+    - name: install | Stat local installation package
+      stat:
+        path: "{{ scale_install_localpkg_path }}"
+        checksum_algorithm: md5
+      register: scale_install_localpkg
+
+    - name: install | Check local installation package
+      assert:
+        that: scale_install_localpkg.stat.exists
+        msg: >-
+          Please set the variable 'scale_install_localpkg_path' to point to the
+          local installation package (accessible on Ansible control machine)!
+
+#
+# Optionally, verify package checksum
+#
+    - name: install | Stat checksum file
+      stat:
+        path: "{{ scale_install_localpkg_path }}.md5"
+      register: scale_install_md5_file
+
+    - block:  ## when: scale_install_md5_file.stat.exists
+        - name: install | Read checksum from file
+          set_fact:
+            scale_install_md5_sum: "{{ lookup('file', scale_install_localpkg_path + '.md5') }}"
+
+        - name: install | Compare checksums
+          assert:
+            that: scale_install_md5_sum.strip().split().0 == scale_install_localpkg.stat.checksum
+            msg: >-
+              Checksums don't match. Please check integrity of your local
+              installation package!
+      when: scale_install_md5_file.stat.exists
+  run_once: true
+  delegate_to: localhost
+
+#
+# Copy installation package
+#
+- name: install | Stat extracted packages
+  stat:
+    path: "{{ s3_extracted_path }}"
+  register: scale_install_gpfs_rpmdir
+
+- block:  ## when: not scale_install_gpfs_rpmdir.stat.exists
+    - name: install | Stat temporary directory
+      stat:
+        path: "{{ scale_install_localpkg_tmpdir_path }}"
+      register: scale_install_localpkg_tmpdir
+
+    - name: install | Check temporary directory
+      assert:
+        that:
+          - scale_install_localpkg_tmpdir.stat.exists
+          - scale_install_localpkg_tmpdir.stat.isdir
+        msg: >-
+          Please set the variable 'scale_install_localpkg_tmpdir_path' to point
+          to a temporary directory on the remote system!
+
+    - name: install | Copy installation package to node
+      copy:
+        src: "{{ scale_install_localpkg_path }}"
+        dest: "{{ scale_install_localpkg_tmpdir_path }}"
+        mode: a+x
+  when: not scale_install_gpfs_rpmdir.stat.exists
+
+#
+# Extract installation package
+#
+- name: install | Extract installation package
+  vars:
+    localpkg: "{{ scale_install_localpkg_tmpdir_path + '/' + scale_install_localpkg_path | basename }}"
+  command: "{{ localpkg + ' --silent' }}"
+  args:
+    creates: "{{ s3_extracted_path }}"
+
+- name: install | Stat extracted packages
+  stat:
+    path: "{{ s3_extracted_path }}"
+  register: scale_install_gpfs_rpmdir
+
+- name: install | Check extracted packages
+  assert:
+    that:
+      - scale_install_gpfs_rpmdir.stat.exists
+      - scale_install_gpfs_rpmdir.stat.isdir
+    msg: >-
+      The variable 'scale_version' doesn't seem to match the contents of the
+      local installation package!
+
+# Delete installation package
+- name: install | Delete installation package from node
+  file:
+    path: "{{ scale_install_localpkg_tmpdir_path + '/' + scale_install_localpkg_path | basename }}"
+    state: absent
+
+- name: install | s3 path
+  set_fact:
+    scale_s3_url: 's3_rpms/rhel8/'
+  when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8'
+
+- name: install | s3 path
+  set_fact:
+    scale_s3_url: 's3_rpms/rhel9/'
+  when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9'
+
+# Find s3 rpms
+- block:  ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution
+
+    - name: install | Find noobaa-core (noobaa-core) package
+      find:
+        paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}"
+        patterns: noobaa-core*
+      register: scale_install_gpfs_s3
+
+    - name: install | Check valid (noobaa-core) package
+      assert:
+        that: scale_install_gpfs_s3.matched > 0
+        msg: "No noobaa-core (noobaa-core) package found {{ s3_extracted_path }}/{{ scale_s3_url }}noobaa-core*"
+
+    - name: install | Add noobaa-core package to list
+      vars:
+        current_package: "{{ item.path }}"
+      set_fact:
+        scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}"
+      with_items:
+        - "{{ scale_install_gpfs_s3.files }}"
+
+  when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution
+
+
+- debug:
+    msg: "{{ scale_install_all_packages }}"
diff --git a/roles/s3_upgrade/tasks/install_remote_pkg.yml b/roles/s3_upgrade/tasks/install_remote_pkg.yml
new file mode 100644
index 00000000..56227dc2
--- /dev/null
+++ b/roles/s3_upgrade/tasks/install_remote_pkg.yml
@@ -0,0 +1,109 @@
+---
+# Remote package installation method
+
+- name: install | Stat remote installation package
+  stat:
+    path: "{{ scale_install_remotepkg_path }}"
+    checksum_algorithm: md5
+  register: scale_install_remotepkg
+
+- name: install | Check remote installation package
+  assert:
+    that: scale_install_remotepkg.stat.exists
+    msg: >-
+      Please set the variable 'scale_install_remotepkg_path' to point to the
+      remote installation package (accessible on Ansible managed node)!
+
+#
+# Optionally, verify package checksum
+#
+- name: install | Stat checksum file
+  stat:
+    path: "{{ scale_install_remotepkg_path }}.md5"
+  register: scale_install_md5_file
+
+- block:  ## when: scale_install_md5_file.stat.exists
+    - name: install | Read checksum from file
+      slurp:
+        src: "{{ scale_install_remotepkg_path }}.md5"
+      register: scale_install_md5_sum
+
+    - name: install | Compare checksums
+      vars:
+        md5sum: "{{ scale_install_md5_sum.content | b64decode }}"
+      assert:
+        that: md5sum.strip().split().0 == scale_install_remotepkg.stat.checksum
+        msg: >-
+          Checksums don't match. Please check integrity of your remote
+          installation package!
+  when: scale_install_md5_file.stat.exists
+
+#
+# Extract installation package
+#
+- name: install | Stat extracted packages
+  stat:
+    path: "{{ s3_extracted_path }}"
+  register: scale_install_gpfs_rpmdir
+
+- name: install | Make installation package executable
+  file:
+    path: "{{ scale_install_remotepkg_path }}"
+    mode: a+x
+  when: not scale_install_gpfs_rpmdir.stat.exists
+
+- name: install | Extract installation package
+  command: "{{ scale_install_remotepkg_path + ' --silent' }}"
+  args:
+    creates: "{{ s3_extracted_path }}"
+
+- name: install | Stat extracted packages
+  stat:
+    path: "{{ s3_extracted_path }}"
+  register: scale_install_gpfs_rpmdir
+
+- name: install | Check extracted packages
+  assert:
+    that:
+      - scale_install_gpfs_rpmdir.stat.exists
+      - scale_install_gpfs_rpmdir.stat.isdir
+    msg: >-
+      The variable 'scale_version' doesn't seem to match the contents of the
+      remote installation package!
+
+- name: install | s3 path
+  set_fact:
+    scale_s3_url: 's3_rpms/rhel8/'
+  when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8'
+
+- name: install | s3 path
+  set_fact:
+    scale_s3_url: 's3_rpms/rhel9/'
+  when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9'
+
+# Find s3 rpms
+- block:  ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution
+
+    - name: install | Find noobaa-core (noobaa-core) package
+      find:
+        paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}"
+        patterns: noobaa-core*
+      register: scale_install_gpfs_s3
+
+    - name: install | Check valid noobaa-core (noobaa-core) package
+      assert:
+        that: scale_install_gpfs_s3.matched > 0
+        msg: "No S3 (noobaa-core) package found {{ s3_extracted_path }}/{{ scale_s3_url }}noobaa-core*"
+
+    - name: install | Add GPFS s3 package to list
+      vars:
+        current_package: "{{ item.path }}"
+      set_fact:
+        scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}"
+      with_items:
+        - "{{ scale_install_gpfs_s3.files }}"
+
+  when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution
+
+- debug:
+    msg: "{{ scale_install_all_packages }}"
diff --git a/roles/s3_upgrade/tasks/install_repository.yml b/roles/s3_upgrade/tasks/install_repository.yml
new file mode 100644
index 00000000..201d7e69
--- /dev/null
+++ b/roles/s3_upgrade/tasks/install_repository.yml
@@ -0,0 +1,31 @@
+---
+- name: upgrade | s3 path
+  set_fact:
+    scale_s3_url: 's3_rpms/rhel8/'
+  when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8'
+
+- name: upgrade | s3 path
+  set_fact:
+    scale_s3_url: 's3_rpms/rhel9/'
+  when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9'
+
+- name: upgrade | Configure s3 YUM repository
+  yum_repository:
+    name: spectrum-scale-s3
+    description: IBM Spectrum Scale (s3)
+    baseurl: "{{ scale_install_repository_url }}{{ scale_s3_url }}"
+    gpgcheck: "{{ scale_install_gpgcheck }}"
+    repo_gpgcheck: no
+    sslverify: no
+    state: present
+  notify: yum-clean-metadata
+  when:
+    - ansible_pkg_mgr == 'yum' or ansible_pkg_mgr == 'dnf'
+    - scale_install_repository_url is defined
+    - scale_install_repository_url != 'existing'
+
+- name: upgrade | Add GPFS s3 packages to list
+  set_fact:
+    scale_install_all_packages: "{{ scale_install_all_packages + [ item ] }}"
+  with_items:
+    - "{{ scale_s3_packages }}"
diff --git a/roles/s3_upgrade/tasks/main.yml b/roles/s3_upgrade/tasks/main.yml
new file mode 100644
index 00000000..fc33687b
--- /dev/null
+++ b/roles/s3_upgrade/tasks/main.yml
@@ -0,0 +1,4 @@
+---
+# Install IBM Spectrum Scale (S3)
+- import_tasks: install.yml
+  tags: upgrade
diff --git a/roles/s3_upgrade/tasks/yum/install.yml b/roles/s3_upgrade/tasks/yum/install.yml
new file mode 100644
index 00000000..9ddbc12e
--- /dev/null
+++ b/roles/s3_upgrade/tasks/yum/install.yml
@@ -0,0 +1,6 @@
+---
+- name: upgrade | Upgrade GPFS S3 packages
+  yum:
+    name: "{{ scale_install_all_packages }}"
+    state: latest
+    disable_gpg_check: "{{ scale_disable_gpgcheck }}"
diff --git a/roles/s3_upgrade/tasks/zypper/install.yml b/roles/s3_upgrade/tasks/zypper/install.yml
new file mode 100644
index 00000000..2ea66d79
--- /dev/null
+++ b/roles/s3_upgrade/tasks/zypper/install.yml
@@ -0,0 +1,6 @@
+---
+- name: upgrade | Upgrade GPFS S3 packages
+  zypper:
+    name: "{{ scale_install_all_packages }}"
+    state: latest
+    disable_gpg_check: no
diff --git a/roles/s3_upgrade/vars/main.yml b/roles/s3_upgrade/vars/main.yml
new file mode 100644
index 00000000..5a6e9c01
--- /dev/null
+++ b/roles/s3_upgrade/vars/main.yml
@@ -0,0 +1,10 @@
+---
+# Variables for the IBM Spectrum Scale (GPFS) role -
+# these variables are *not* meant to be overridden
+
+## Compute RPM version from Spectrum Scale version
+scale_rpmversion: "{{ scale_version | regex_replace('^([0-9.]+)\\.([0-9])$', '\\1-\\2') }}"
+
+## Default scale extraction path
+scale_extracted_default_path: "/usr/lpp/mmfs"
+scale_extracted_path: "{{ scale_extracted_default_path }}/{{ scale_version }}"

From 547f0688728b99ab8030dd3a018fcfec2876bf9b Mon Sep 17 00:00:00 2001
From: sujeet
Date: Tue, 7 Jan 2025 10:23:46 +0100
Subject: [PATCH 4/4] Defect fixed for callhome config

Signed-off-by: sujeet
---
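Note (review comment, ignored by git am): without the escaped quotes, a customer name that contains spaces is split by the shell into several arguments, so mmcallhome will typically reject or misread the extra words. Quoting keeps the whole name as a single argument. Illustration with an assumed value of scale_callhome_params.customer_name set to "Example Corp AG":

    # before: mmcallhome info change --customer-name Example Corp AG ...   (three arguments)
    # after:  mmcallhome info change --customer-name "Example Corp AG" ... (one argument)

The remaining parameters (customer id, email, country code) stay unquoted on the assumption that they never contain whitespace.
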
 roles/callhome_configure/tasks/configure.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/roles/callhome_configure/tasks/configure.yml b/roles/callhome_configure/tasks/configure.yml
index 14274003..c87df6f1 100755
--- a/roles/callhome_configure/tasks/configure.yml
+++ b/roles/callhome_configure/tasks/configure.yml
@@ -54,7 +54,7 @@
 
 - name: configure| Setup the call home customer configuration
   shell:
-    cmd: "{{ scale_command_path }}mmcallhome info change --customer-name {{ scale_callhome_params.customer_name }} --customer-id {{ scale_callhome_params.customer_id }} --email {{ scale_callhome_params.customer_email}} --country-code {{ scale_callhome_params.customer_country }}"
+    cmd: "{{ scale_command_path }}mmcallhome info change --customer-name \"{{ scale_callhome_params.customer_name }}\" --customer-id {{ scale_callhome_params.customer_id }} --email {{ scale_callhome_params.customer_email}} --country-code {{ scale_callhome_params.customer_country }}"
   register: scale_callhome_customer_config
 
 - debug: