diff --git a/roles/ces_common/tasks/check.yml b/roles/ces_common/tasks/check.yml index 866fddd4..56fc9f97 100644 --- a/roles/ces_common/tasks/check.yml +++ b/roles/ces_common/tasks/check.yml @@ -105,11 +105,13 @@ Please define the CES shared root file system mount point in the inventory." - scale_protocols.export_ip_pool is defined - scale_protocols.export_ip_pool|length > 0 fail_msg: "CES export ip pool is not defined or empty" + when: scale_protocols.ces_existing is not defined run_once: true - name: check | Collect unique export ips set_fact: scale_export_ip: "{{ scale_protocols.export_ip_pool|unique }}" + when: scale_protocols.export_ip_pool is defined run_once: true - name: check | Check if export ips are duplicated @@ -118,6 +120,7 @@ Please define the CES shared root file system mount point in the inventory." - scale_protocols.export_ip_pool|length == scale_export_ip|length fail_msg: "Duplicate ips found in export ip pool" run_once: true + when: scale_protocols.export_ip_pool is defined when: scale_protocols.scale_ces_groups is not defined - name: check | Get list of GPFS nodes which are not reporting active state @@ -211,4 +214,6 @@ Please define the CES shared root file system mount point in the inventory." 
when: scale_ces_ipv4_list|length >0 and scale_ces_ipv6_list|length > 0 failed_when: scale_ces_ipv4_list|length >0 and scale_ces_ipv6_list|length > 0 - when: scale_protocols.scale_ces_groups is not defined + when: + - scale_protocols.scale_ces_groups is not defined + - scale_protocols.export_ip_pool is defined diff --git a/roles/ces_common/tasks/configure.yml b/roles/ces_common/tasks/configure.yml index c03485d7..c590a1ad 100644 --- a/roles/ces_common/tasks/configure.yml +++ b/roles/ces_common/tasks/configure.yml @@ -262,12 +262,16 @@ scale_export_ips: "{{ scale_export_ips + ',' + item|string }}" with_items: - "{{ scale_protocols.export_ip_pool }}" + when: + - scale_protocols.export_ip_pool is defined delegate_to: "{{ scale_protocol_node_list.0 }}" run_once: true - name: configure | Assign export ips as pool command: "{{ scale_command_path }}mmces address add --ces-ip {{ scale_export_ips[1:] }}" - when: scale_protocols.scale_ces_groups is not defined + when: + - scale_protocols.scale_ces_groups is not defined + - scale_protocols.export_ip_pool is defined delegate_to: "{{ scale_protocol_node_list.0 }}" run_once: true diff --git a/roles/core_upgrade/tasks/apt/install.yml b/roles/core_upgrade/tasks/apt/install.yml index a64c0104..f0b6849f 100644 --- a/roles/core_upgrade/tasks/apt/install.yml +++ b/roles/core_upgrade/tasks/apt/install.yml @@ -4,6 +4,26 @@ # - block: ## when: not scale_daemon_running - block: + - name: Create comma-separated list + set_fact: + my_list: "{{ scale_install_all_packages + scale_install_license_packages }}" + + - set_fact: + my_list: "{{my_list | join(' ')}}" + + - debug: + msg: "{{my_list }}" + + - name: Execute shell command + command: apt-get upgrade --allow-downgrades -y {{ my_list }} -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" + register: scale_install_debpackageresult + when: scale_install_directory_pkg_path is defined + + - debug: + msg: "{{scale_install_debpackageresult}}" + when: 
scale_install_directory_pkg_path is defined + + - name: upgrade | Upgrade GPFS packages apt: deb: "{{ item }}" @@ -11,6 +31,8 @@ register: scale_install_debpackageresult with_items: - "{{ scale_install_all_packages }}" + when: scale_install_directory_pkg_path is not defined + - name: upgrade | Upgrade GPFS License packages apt: @@ -18,6 +40,7 @@ only_upgrade: true with_items: - "{{ scale_install_license_packages }}" + when: scale_install_directory_pkg_path is not defined - name: upgrade | Check if GPFS packages were updated set_fact: diff --git a/roles/core_upgrade/tasks/install_dir_pkg.yml b/roles/core_upgrade/tasks/install_dir_pkg.yml index 6291b5d2..ec137ca7 100644 --- a/roles/core_upgrade/tasks/install_dir_pkg.yml +++ b/roles/core_upgrade/tasks/install_dir_pkg.yml @@ -74,13 +74,6 @@ patterns: gpfs.docs* register: scale_install_gpfs_doc -- name: upgrade | Check valid GPFS docs (gpfs.docs) package - assert: - that: scale_install_gpfs_doc.matched > 0 - msg: >- - No GPFS docs (gpfs.docs) package found: - {{ dir_path }}/gpfs.docs* - # # Find GPFS gpfs.msg.en_US # @@ -90,13 +83,6 @@ patterns: gpfs.msg.en* register: scale_install_gpfs_msg -- name: upgrade | Check valid GPFS (gpfs.msg.en_US) package - assert: - that: scale_install_gpfs_msg.matched > 0 - msg: >- - No GPFS BASE (gpfs.base) package found: - {{ dir_path }}/gpfs.msg.en* - # # Find GPFS gpfs.compression # @@ -106,12 +92,6 @@ patterns: gpfs.compression*{{ scale_architecture }}* register: scale_install_gpfs_compression -- name: upgrade | Check valid GPFS Compression(gpfs.compression) package - debug: - msg: >- - No GPFS Compression (gpfs.compression) package found: - {{ dir_path }}/gpfs.compression*{{ scale_architecture }}* - when: scale_install_gpfs_compression.matched < 1 # # Find GSKit @@ -122,13 +102,6 @@ patterns: gpfs.gskit*{{ scale_architecture }}* register: scale_install_gpfs_gskit -- name: upgrade | Check valid Global Security Kit (GSKit) package - assert: - that: scale_install_gpfs_gskit.matched > 0 
- msg: >- - No Global Security Kit (GSKit) package found: - {{ dir_path }}/gpfs.gskit*{{ scale_architecture }}* - # # Add GPFS Packages # @@ -139,9 +112,33 @@ scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" with_items: - "{{ scale_install_gpfs_base.files.0.path | basename }}" + +- name: upgrade | Add GPFS packages to list + vars: + current_package: "{{ dir_path }}/{{ item }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: - "{{ scale_install_gpfs_doc.files.0.path | basename }}" + when: scale_install_gpfs_doc.matched > 0 + +- name: upgrade | Add GPFS packages to list + vars: + current_package: "{{ dir_path }}/{{ item }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: - "{{ scale_install_gpfs_msg.files.0.path | basename }}" + when: scale_install_gpfs_msg.matched > 0 + +- name: upgrade | Add GPFS packages to list + vars: + current_package: "{{ dir_path }}/{{ item }}" + set_fact: + scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" + with_items: - "{{ scale_install_gpfs_gskit.files.0.path | basename }}" + when: scale_install_gpfs_gskit.matched > 0 - name: upgrade | Add GPFS compression packages to list vars: @@ -170,19 +167,14 @@ patterns: gpfs.gpl* register: scale_install_gpfs_gpl -- name: upgrade | Check valid GPFS GPL (gpfs.gpl) package - assert: - that: scale_install_gpfs_gpl.matched > 0 - msg: >- - No GPFS GPL (gpfs.gpl) package found: - {{ dir_path }}/gpfs.gpl* - - name: upgrade | Add GPFS packages for building GPL module from source to list vars: current_package: "{{ dir_path }}/{{ item }}" set_fact: scale_install_all_packages: "{{ scale_install_all_packages + [ current_package ] }}" - when: scale_install_gplbin_package is undefined + when: + - scale_install_gplbin_package is undefined + - scale_install_gpfs_gpl.matched > 0 with_items: "{{ 
scale_install_gpfs_gpl.files.0.path | basename }}" # diff --git a/roles/core_upgrade/tasks/install_license_pkg.yml b/roles/core_upgrade/tasks/install_license_pkg.yml index 29601d3a..a039e9be 100644 --- a/roles/core_upgrade/tasks/install_license_pkg.yml +++ b/roles/core_upgrade/tasks/install_license_pkg.yml @@ -10,13 +10,6 @@ patterns: gpfs.license*{{ scale_architecture }}* register: scale_install_gpfs_license -- name: upgrade | Check valid GPFS License (gpfs.license) package - assert: - that: scale_install_gpfs_license.matched > 0 - msg: >- - No GPFS License (gpfs.license) package found: - "{{ scale_gpfs_path_url }}/gpfs.license*{{ scale_architecture }}*" - - name: upgrade | Check valid only one GPFS License (gpfs.license) package assert: that: @@ -25,12 +18,14 @@ msg: >- More than one GPFS License (gpfs.license) package found: "{{ scale_gpfs_path_url }}/gpfs.license*{{ scale_architecture }}*" + when: scale_install_directory_pkg_path is undefined - name: upgrade | Find GPFS License package vars: gpfs_license_package: "{{ scale_install_gpfs_license.files.0.path | basename }}" set_fact: scale_gpfs_license_package: "{{ gpfs_license_package }}" + when: scale_install_gpfs_license.matched > 0 - block: # @@ -51,10 +46,32 @@ patterns: gpfs.crypto*{{ scale_architecture }}* register: scale_install_gpfs_crypto when: + - scale_install_directory_pkg_path is undefined - '"gpfs.license.std" not in scale_gpfs_license_package' - '"gpfs.license.da" not in scale_gpfs_license_package' +- block: + # + # Find GPFS adv packgae + # + - name: upgrade | Find GPFS Advance (gpfs.adv) package + find: + paths: "{{ scale_gpfs_path_url }}" + patterns: gpfs.adv*{{ scale_architecture }}* + register: scale_install_gpfs_adv + + # + # Find GPFS crypto packgae + # + - name: upgrade | Find GPFS crypto (gpfs.crypto) package + find: + paths: "{{ scale_gpfs_path_url }}" + patterns: gpfs.crypto*{{ scale_architecture }}* + register: scale_install_gpfs_crypto + when: + - scale_install_directory_pkg_path is 
defined + # # Add GPFS packages # @@ -65,6 +82,7 @@ scale_install_license_packages: "{{ scale_install_license_packages + [ current_package ] }}" with_items: - "{{ scale_install_gpfs_license.files.0.path | basename }}" + when: scale_install_gpfs_license.matched > 0 # # Add GPFS packages @@ -79,5 +97,28 @@ - "{{ scale_install_gpfs_adv.files.0.path | basename }}" - "{{ scale_install_gpfs_crypto.files.0.path | basename }}" when: + - scale_install_directory_pkg_path is undefined - '"gpfs.license.std" not in scale_gpfs_license_package' - '"gpfs.license.da" not in scale_gpfs_license_package' + +- name: upgrade | Add GPFS Dependent License packages to list + vars: + current_package: "{{ scale_gpfs_path_url }}/{{ item }}" + set_fact: + scale_install_license_packages: "{{ scale_install_license_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_adv.files.0.path | basename }}" + when: + - scale_install_directory_pkg_path is defined + - scale_install_gpfs_adv.matched > 0 + +- name: upgrade | Add GPFS Dependent License packages to list + vars: + current_package: "{{ scale_gpfs_path_url }}/{{ item }}" + set_fact: + scale_install_license_packages: "{{ scale_install_license_packages + [ current_package ] }}" + with_items: + - "{{ scale_install_gpfs_crypto.files.0.path | basename }}" + when: + - scale_install_directory_pkg_path is defined + - scale_install_gpfs_crypto.matched > 0 diff --git a/roles/fal_install/tasks/install_repository.yml b/roles/fal_install/tasks/install_repository.yml index f98af3fd..56884956 100644 --- a/roles/fal_install/tasks/install_repository.yml +++ b/roles/fal_install/tasks/install_repository.yml @@ -22,7 +22,12 @@ set_fact: scale_fal_url: 'gpfs_rpms/rhel9/' when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' - + +- name: install | file audit logging path + set_fact: + scale_fal_url: 'gpfs_rpms/rhel10/' + when: ansible_distribution in scale_rhel_distribution and 
ansible_distribution_major_version == '10' + - name: install | file audit logging path set_fact: scale_fal_url: 'gpfs_debs/ubuntu/' @@ -86,6 +91,7 @@ repo: "{{ scale_install_repository_url }}{{ scale_fal_url }}" disable_gpg_check: no state: present + overwrite_multiple: yes when: - ansible_pkg_mgr == 'zypper' - scale_install_repository_url is defined diff --git a/roles/gui_install/tasks/install_repository.yml b/roles/gui_install/tasks/install_repository.yml index 70396d60..446e1cd5 100644 --- a/roles/gui_install/tasks/install_repository.yml +++ b/roles/gui_install/tasks/install_repository.yml @@ -37,7 +37,7 @@ zypper_repository: name: spectrum-scale-gui description: IBM Spectrum Scale (GUI) - repo: "{{ scale_install_repository_url }}/gpfs_rpms/" + repo: "{{ scale_install_repository_url }}gpfs_rpms/" disable_gpg_check: no state: present overwrite_multiple: yes diff --git a/roles/hdfs_configure/tasks/configure.yml b/roles/hdfs_configure/tasks/configure.yml index e489bc59..41f13e8c 100644 --- a/roles/hdfs_configure/tasks/configure.yml +++ b/roles/hdfs_configure/tasks/configure.yml @@ -265,7 +265,18 @@ loop_control: loop_var: add_dn_item - - lineinfile: + - name: configure | Set hadoop ident for scaleadmd + lineinfile: + dest: /var/mmfs/hadoop/etc/hadoop/hadoop-env.sh + state: present + regexp: '# export HADOOP_IDENT_STRING=.' 
+ line: 'export HADOOP_IDENT_STRING=root' + when: + - scale_scaleadmd is defined + - scale_scaleadmd == true + + - name: configure | Remove hadoop workers + lineinfile: dest: /var/mmfs/hadoop/etc/hadoop/workers state: absent regexp: '(^\n)' @@ -279,7 +290,7 @@ register: initializedSharedEdits when: ha_enabled|bool and ces_hdfs_enabled|bool and ha_enable_dir.rc != 0 - - name: configure | suspend CES nodes + - name: configure | suspend CES nodes command: "/usr/lpp/mmfs/bin/mmces node suspend --stop -N {{ scale_hdfs_cluster.namenodes|join(',') }}" register: suspend_ces when: @@ -291,11 +302,11 @@ when: - check_hdfs_ces_group.rc == 0 - - name: configure | Enable CES + - name: configure | Enable CES command: "/usr/lpp/mmfs/bin/mmchnode --ces-group {{ scale_hdfs_ces_group_name }} -N {{ scale_hdfs_cluster.namenodes|join(',') }}" register: enable_ces - - name: configure | start CES nodes + - name: configure | start CES nodes command: "/usr/lpp/mmfs/bin/mmces node resume --start -N {{ scale_hdfs_cluster.namenodes|join(',') }}" register: start_ces when: diff --git a/roles/hdfs_install/defaults/main.yml b/roles/hdfs_install/defaults/main.yml index 2d6474e6..b14c3996 100644 --- a/roles/hdfs_install/defaults/main.yml +++ b/roles/hdfs_install/defaults/main.yml @@ -14,17 +14,5 @@ scale_install_localpkg_tmpdir_path: /tmp ## Flag to install hdfs debug package scale_hdfs_install_debuginfo: true -# Directory to install 3.1.1.x hdfs package -scale_hdfs_rhel_version_path: 'hdfs_rpms/rhel/hdfs_3.1.1.x/' - # Directory to install 3.3.6.x hdfs package scale_hdfs_rhel_version_path_336: 'hdfs_rpms/rhel/hdfs_3.3.6.x/' - -# Directory to install 3.2.2.x hdfs package -scale_hdfs_rhel_version_path_322: 'hdfs_rpms/rhel/hdfs_3.2.2.x/' - -# Directory to install 3.1.1.x hdfs package -scale_hdfs_sles_version_path: 'hdfs_rpms/rhel/hdfs_3.1.1.x/' - -# Directory to install 3.1.1.x hdfs package -scale_hdfs_ubuntu_version_path: 'hdfs_debs/ubuntu/hdfs_3.1.1.x/' diff --git 
a/roles/hdfs_install/tasks/install.yml b/roles/hdfs_install/tasks/install.yml index 3429fa19..1de3c997 100644 --- a/roles/hdfs_install/tasks/install.yml +++ b/roles/hdfs_install/tasks/install.yml @@ -1,7 +1,7 @@ --- # Install or update RPMs # Ensure that installation method was chosen during previous role -- name: global_var | Initialize +- name: install | Initialize set_fact: scale_hdfs_nodes_list: [] scale_hdfs_namenodes_list: [] @@ -9,29 +9,29 @@ scale_hdfs_cluster: [] scale_install_all_packages: [] -- name: global_var | initializing scale_hdfs_cluster +- name: install | initializing scale_hdfs_cluster set_fact: scale_hdfs_cluster: "{{ item }}" delegate_to: localhost run_once: true -- name: global_var | Collect all HDFS NameNodes +- name: install | Collect all HDFS NameNodes set_fact: scale_hdfs_namenodes_list: "{{ scale_hdfs_cluster.namenodes | unique }}" delegate_to: localhost run_once: true -- name: global_var | Collect all HDFS DataNodes +- name: install | Collect all HDFS DataNodes set_fact: scale_hdfs_datanodes_list: "{{ scale_hdfs_cluster.datanodes | unique }}" delegate_to: localhost run_once: true -- name: global_var | Get HDFS nodes +- name: install | Get HDFS nodes set_fact: scale_hdfs_nodes_list: "{{ scale_hdfs_namenodes_list + scale_hdfs_datanodes_list }}" -- name: global_var | make unique HDFS nodes +- name: install | make unique HDFS nodes set_fact: scale_hdfs_nodes_list: "{{ scale_hdfs_nodes_list | unique }}" @@ -80,47 +80,20 @@ - name: install | Set the extracted package directory path set_fact: hdfs_extracted_path: "{{ scale_extracted_path }}" - hdfs_version_path_selection_rhel: "{{ scale_hdfs_rhel_version_path }}" - name: install | Stat extracted packages directory stat: path: "{{ hdfs_extracted_path }}" register: scale_extracted_gpfs_dir -- include_tasks: prepare_env.yml - -- block: - - name: install | Fetch hdfs version - set_fact: - hdfs_version_path_selection_rhel: "{{ scale_hdfs_rhel_version_path_336 }}" - when: 
transparency_336_enabled|bool - - - name: install | Fetch hdfs version - set_fact: - hdfs_version_path_selection_rhel: "{{ scale_hdfs_rhel_version_path_322 }}" - when: transparency_322_enabled|bool - - - name: install | Fetch hdfs rpm dir path for rhel - set_fact: - hdfs_rpm_path_rhel: "{{ hdfs_version_path_selection_rhel }}" - - - name: install | Set correct hdfs rpm dir path for scale release lower 5.1.2 - set_fact: - hdfs_rpm_path_rhel: "{{ hdfs_rpm_path_rhel | replace('/rhel/','/rhel7/') }}" - when: scale_version is version_compare('5.1.2', '<') - - - name: install | Fetch hdfs rpm dir path for sles - set_fact: - hdfs_rpm_path_sles: "{{ scale_hdfs_sles_version_path }}" - - - name: install | Fetch hdfs rpm dir path for ubuntu - set_fact: - hdfs_rpm_path_ubuntu: "{{ scale_hdfs_ubuntu_version_path }}" - +- name: install | Fetch hdfs version + set_fact: + scale_hdfs_url: "{{ scale_hdfs_rhel_version_path_336 }}" + when: scale_hdfs_protocol.hdfs_transparency_version == '3.3.6' run_once: true - delegate_to: localhost - include_tasks: install_{{ scale_installmethod }}.yml - import_tasks: yum/install.yml when: ansible_distribution in scale_rhel_distribution + diff --git a/roles/hdfs_install/tasks/install_local_pkg.yml b/roles/hdfs_install/tasks/install_local_pkg.yml index a852ab3e..f9cac6a0 100644 --- a/roles/hdfs_install/tasks/install_local_pkg.yml +++ b/roles/hdfs_install/tasks/install_local_pkg.yml @@ -106,17 +106,6 @@ path: "{{ scale_install_localpkg_tmpdir_path + '/' + scale_install_localpkg_path | basename }}" state: absent - -- name: install | hdfs path on rhel7 - set_fact: - scale_hdfs_url: "{{ hdfs_rpm_path_rhel }}" - when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '7' - -- name: install | hdfs path on rhel8 - set_fact: - scale_hdfs_url: "{{ hdfs_rpm_path_rhel }}" - when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' - - block: - name: install | Find 
gpfs.hdfs-protocol package find: diff --git a/roles/hdfs_install/tasks/install_remote_pkg.yml b/roles/hdfs_install/tasks/install_remote_pkg.yml index 8b037795..74a0cc14 100644 --- a/roles/hdfs_install/tasks/install_remote_pkg.yml +++ b/roles/hdfs_install/tasks/install_remote_pkg.yml @@ -71,16 +71,6 @@ The variable 'scale_version' doesn't seem to match the contents of the remote installation package! -- name: install | hdfs path - set_fact: - scale_hdfs_url: "{{ hdfs_rpm_path_rhel }}" - when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '7' - -- name: install | hdfs path on rhel8 - set_fact: - scale_hdfs_url: "{{ hdfs_rpm_path_rhel }}" - when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' - - block: ## when: host is defined as a protocol node - name: install | Find gpfs.hdfs (gpfs.hdfs-protocol) package diff --git a/roles/hdfs_install/tasks/install_repository.yml b/roles/hdfs_install/tasks/install_repository.yml index a2a9a9ac..2a0d8f7d 100644 --- a/roles/hdfs_install/tasks/install_repository.yml +++ b/roles/hdfs_install/tasks/install_repository.yml @@ -1,18 +1,4 @@ --- -- name: install | hdfs path on rhel7 - set_fact: - scale_hdfs_url: "{{ hdfs_rpm_path_rhel }}" - when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '7' - -- name: install | hdfs path on rhel8 - set_fact: - scale_hdfs_url: "{{ hdfs_rpm_path_rhel }}" - when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '8' - -- name: install | hdfs path on rhel9 - set_fact: - scale_hdfs_url: "{{ hdfs_rpm_path_rhel }}" - when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' - name: install | Configure hdfs YUM repository yum_repository: diff --git a/roles/hdfs_install/tasks/prepare_env.yml b/roles/hdfs_install/tasks/prepare_env.yml deleted file mode 100644 index e69de29b..00000000 diff --git 
a/roles/hdfs_prepare/tasks/check.yml b/roles/hdfs_prepare/tasks/check.yml index 0426d9b8..9b0e1f0a 100644 --- a/roles/hdfs_prepare/tasks/check.yml +++ b/roles/hdfs_prepare/tasks/check.yml @@ -1,13 +1,5 @@ --- -- include_tasks: prepare_env.yml - -- debug: - msg: "transparency_336_enabled: {{ transparency_336_enabled|bool }}" - -- debug: - msg: "transparency_322_enabled: {{ transparency_322_enabled|bool }}" - -- name: global_var | Initialize +- name: check | Initialize set_fact: scale_hdfs_cluster: [] scale_protocol_nodes_list: [] diff --git a/roles/hdfs_prepare/tasks/java_home.yml b/roles/hdfs_prepare/tasks/java_home.yml index c21da290..1ec81ede 100644 --- a/roles/hdfs_prepare/tasks/java_home.yml +++ b/roles/hdfs_prepare/tasks/java_home.yml @@ -1,61 +1,61 @@ --- -- name: global_var | Initialize +- name: java_home | Initialize set_fact: scale_hdfs_nodes_list: [] scale_hdfs_namenodes_list: [] scale_hdfs_datanodes_list: [] -- name: global_var | Collect all HDFS NameNodes +- name: java_home | Collect all HDFS NameNodes set_fact: scale_hdfs_namenodes_list: "{{ item.namenodes | unique }}" delegate_to: localhost run_once: true -- name: global_var | Collect all HDFS DataNodes +- name: java_home | Collect all HDFS DataNodes set_fact: scale_hdfs_datanodes_list: "{{ item.datanodes | unique }}" delegate_to: localhost run_once: true -- name: global_var | Get HDFS nodes +- name: java_home | Get HDFS nodes set_fact: scale_hdfs_nodes_list: "{{ scale_hdfs_namenodes_list + scale_hdfs_datanodes_list }}" -- name: global_var | make unique HDFS nodes +- name: java_home | make unique HDFS nodes set_fact: scale_hdfs_nodes_list: "{{ scale_hdfs_nodes_list | unique }}" -- name: check | Check if atleast one hdfs node is configured +- name: java_home | Check if atleast one hdfs node is configured assert: that: - scale_hdfs_nodes_list|length > 0 fail_msg: "No hdfs nodes configured" -- name: check | Fetch JAVA_HOME path +- name: java_home | Fetch JAVA_HOME path shell: echo $JAVA_HOME register: 
java_path when: ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list -- name: check | Check JAVA_HOME path exist +- name: java_home | Check JAVA_HOME path exist stat: path: "{{ java_path.stdout }}" register: java_path_details when: ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list -- name: check | Assert JAVA_HOME path exist +- name: java_home | Assert JAVA_HOME path exist assert: that: - java_path_details.stat.exists fail_msg: The JAVA_HOME path does not exists ! when: ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list -- name: check | Set path of JAVA_HOME +- name: java_home | Set path of JAVA_HOME set_fact: javahome_path: "{{ java_path.stdout }}" when: - ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list -- name: check | verify JAVA +- name: java_home | verify JAVA command: "ls {{ javahome_path }}/bin/java" register: jvm_list when: @@ -68,51 +68,14 @@ - ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list - jvm_list.rc != 0 -- name: check | Fetch hdfs extracted tar - set_fact: - hdfs_dependency_jars_dir: "hadoop-3.1.4" - -- name: Check and fetch gpfs.hdfs-protocol version +- name: java_home | Check and fetch gpfs.hdfs-protocol version shell: "rpm -q gpfs.hdfs-protocol --qf %{VERSION}-%{RELEASE}" register: gpfs_hdfs_protocol_version when: - ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list - - transparency_322_enabled|bool ignore_errors: true + failed_when: false - debug: msg: "gpfs_hdfs_protocol_version: {{ gpfs_hdfs_protocol_version}}" -- name: Check gpfs.hdfs-protocol version for standalone installation - fail: - msg: > - "Standalone installation of gpfs.hdfs-protocol version is not supported. It can only be upgraded" - " from gpfs.hdfs-protocol version 3.2.2-5. 
For additional information, refer to the documentation available at the following link:" - " https://www.ibm.com/docs/en/storage-scale-bda?topic=hdfs-setup-transparency-cluster." - when: - - ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list - - transparency_322_enabled|bool - - gpfs_hdfs_protocol_version.rc == 0 - - gpfs_hdfs_protocol_version.stdout_lines[0] < '3.2.2-5' - -- debug: - msg: "hdfs_dependency_jars_dir: {{ hdfs_dependency_jars_dir }}" - -- name: check | verify dependency jars - command: "ls /opt/hadoop/jars/{{ hdfs_dependency_jars_dir }}" - register: dep_jars - when: - - ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list - - transparency_322_enabled|bool == False - - transparency_336_enabled|bool == False - -- fail: - msg: > - "Dependency jars not exist in /opt/hadoop/jars directory, which are essential prerequisites, For further details, " - "please consult the documentation via the following link: https://www.ibm.com/docs/en/storage-scale-bda?topic=hdfs-setup" - when: - - ansible_fqdn in scale_hdfs_nodes_list or inventory_hostname in scale_hdfs_nodes_list - - transparency_322_enabled|bool == False - - transparency_336_enabled|bool == False - - dep_jars.rc != 0 - diff --git a/roles/hdfs_prepare/tasks/prepare_env.yml b/roles/hdfs_prepare/tasks/prepare_env.yml deleted file mode 100644 index a42ed6af..00000000 --- a/roles/hdfs_prepare/tasks/prepare_env.yml +++ /dev/null @@ -1,37 +0,0 @@ ---- -- name: - set_fact: - transparency_336_enabled: "False" - transparency_322_enabled: "False" - transparency_version_336: "False" - transparency_version_322: "False" - -- name: - shell: "echo $SCALE_HDFS_TRANSPARENCY_VERSION_336_ENABLE" - register: transparency_version_336 - delegate_to: localhost - run_once: true - -- name: - shell: "echo $SCALE_HDFS_TRANSPARENCY_VERSION_322_ENABLE" - register: transparency_version_322 - delegate_to: localhost - run_once: true - -- name: - set_fact: - 
transparency_336_enabled: "{{ transparency_version_336.stdout|bool }}" - when: - - transparency_version_336.stdout is defined - - transparency_version_336.stdout|bool - delegate_to: localhost - run_once: true - -- name: - set_fact: - transparency_322_enabled: "{{ transparency_version_322.stdout|bool }}" - when: - - transparency_version_322.stdout is defined - - transparency_version_322.stdout|bool - delegate_to: localhost - run_once: true diff --git a/roles/nfs_install/tasks/install_repository.yml b/roles/nfs_install/tasks/install_repository.yml index a0fa71c3..b7e31170 100644 --- a/roles/nfs_install/tasks/install_repository.yml +++ b/roles/nfs_install/tasks/install_repository.yml @@ -13,6 +13,11 @@ set_fact: scale_nfs_url: 'ganesha_rpms/rhel9/' when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' + +- name: install | nfs path + set_fact: + scale_nfs_url: 'ganesha_rpms/rhel10/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '10' - name: install | nfs path set_fact: diff --git a/roles/perfmon_install/tasks/install_local_pkg.yml b/roles/perfmon_install/tasks/install_local_pkg.yml index ba944101..7159b977 100644 --- a/roles/perfmon_install/tasks/install_local_pkg.yml +++ b/roles/perfmon_install/tasks/install_local_pkg.yml @@ -126,6 +126,11 @@ scale_zimon_url: 'zimon_rpms/rhel9/' when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' +- name: install | zimon path + set_fact: + scale_zimon_url: 'zimon_rpms/rhel10/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '10' + - name: install | zimon path set_fact: scale_zimon_url: 'zimon_debs/ubuntu/ubuntu16/' diff --git a/roles/perfmon_install/tasks/install_remote_pkg.yml b/roles/perfmon_install/tasks/install_remote_pkg.yml index b1881d17..15f30245 100644 --- a/roles/perfmon_install/tasks/install_remote_pkg.yml +++ 
b/roles/perfmon_install/tasks/install_remote_pkg.yml @@ -100,6 +100,11 @@ scale_zimon_url: 'zimon_rpms/rhel9/' when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' +- name: install | zimon path + set_fact: + scale_zimon_url: 'zimon_rpms/rhel10/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '10' + - name: install | zimon path set_fact: scale_zimon_url: 'zimon_debs/ubuntu/ubuntu16/' diff --git a/roles/perfmon_install/tasks/install_repository.yml b/roles/perfmon_install/tasks/install_repository.yml index fdcae457..5538d131 100644 --- a/roles/perfmon_install/tasks/install_repository.yml +++ b/roles/perfmon_install/tasks/install_repository.yml @@ -24,6 +24,11 @@ scale_zimon_url: 'zimon_rpms/rhel9/' when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' +- name: install | zimon path + set_fact: + scale_zimon_url: 'zimon_rpms/rhel10/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '10' + - name: install | zimon path set_fact: scale_zimon_url: 'zimon_debs/ubuntu/ubuntu16/' diff --git a/roles/s3_install/tasks/install_local_pkg.yml b/roles/s3_install/tasks/install_local_pkg.yml index 27606923..c5115c79 100644 --- a/roles/s3_install/tasks/install_local_pkg.yml +++ b/roles/s3_install/tasks/install_local_pkg.yml @@ -111,10 +111,10 @@ # Find s3 rpms - block: ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution - - name: install | Find noobaa-core (noobaa-core) package + - name: install | Find s3 (noobaa-core, gpfs.mms3) package find: paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}" - patterns: noobaa-core* + patterns: noobaa-core*,gpfs.mms3* register: scale_install_gpfs_s3 - name: install | Check valid (noobaa-core) package diff --git a/roles/s3_install/tasks/install_remote_pkg.yml b/roles/s3_install/tasks/install_remote_pkg.yml 
index 56227dc2..b140ffe0 100644 --- a/roles/s3_install/tasks/install_remote_pkg.yml +++ b/roles/s3_install/tasks/install_remote_pkg.yml @@ -84,10 +84,10 @@ # Find s3 rpms - block: ## when: ansible_distribution in scale_rhel_distribution or ansible_distribution in scale_sles_distribution - - name: install | Find noobaa-core (noobaa-core) package + - name: install | Find s3 (noobaa-core, gpfs.mms3) package find: paths: "{{ s3_extracted_path }}/{{ scale_s3_url }}" - patterns: noobaa-core* + patterns: noobaa-core*,gpfs.mms3* register: scale_install_gpfs_s3 - name: install | Check valid noobaa-core (noobaa-core) package diff --git a/roles/s3_install/tasks/install_repository.yml b/roles/s3_install/tasks/install_repository.yml index 9174e910..fbca776d 100644 --- a/roles/s3_install/tasks/install_repository.yml +++ b/roles/s3_install/tasks/install_repository.yml @@ -9,6 +9,11 @@ scale_s3_url: 's3_rpms/rhel9/' when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' +- name: install | s3 path + set_fact: + scale_s3_url: 's3_rpms/rhel10/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '10' + - name: install | Configure s3 YUM repository yum_repository: name: spectrum-scale-s3 diff --git a/roles/smb_install/tasks/install_repository.yml b/roles/smb_install/tasks/install_repository.yml index d7db02e2..fcbb5c03 100644 --- a/roles/smb_install/tasks/install_repository.yml +++ b/roles/smb_install/tasks/install_repository.yml @@ -14,6 +14,11 @@ scale_smb_url: 'smb_rpms/rhel9/' when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '9' +- name: install | smb path + set_fact: + scale_smb_url: 'smb_rpms/rhel10/' + when: ansible_distribution in scale_rhel_distribution and ansible_distribution_major_version == '10' + - name: install | smb path set_fact: scale_smb_url: 'smb_rpms/sles12/' diff --git a/samples/playbook_directory_upgrade.yml 
b/samples/playbook_directory_upgrade.yml new file mode 100755 index 00000000..fc2cc30a --- /dev/null +++ b/samples/playbook_directory_upgrade.yml @@ -0,0 +1,65 @@ +--- +# +# samples/playbook_directory_upgrade.yml +# + +# Playbook sample for upgrading an IBM Spectrum Scale (GPFS) cluster using the +# directory installation method. You need to keep all required Spectrum Scale +# packages in a single user-provided directory. + +# Note that specifying the variable 'scale_version' is *not* required for this +# installation method. + +# Note: This playbook can only help to upgrade the core gpfs package; it doesn't support +# any other component package upgrade. If you have any further query, please open +# an issue before running the upgrade + + +- hosts: cluster01 + collections: + - ibm.spectrum_scale + pre_tasks: + - name: cluster | Shutdown gpfs node + command: /usr/lpp/mmfs/bin/mmshutdown -N {{ inventory_hostname }} + register: shutdown_gpfs_cluster + when: scale_shutdownstartup_needed is defined and scale_shutdownstartup_needed | bool + - debug: + msg: "{{shutdown_gpfs_cluster}}" + vars: + - scale_version: 5.2.2.1 + - scale_install_directory_pkg_path: /usr/lpp/mmfs/5.2.2.1/gpfs_debs + roles: + - core_upgrade + post_tasks: + - name: cluster | Startup gpfs node + command: /usr/lpp/mmfs/bin/mmstartup -N {{ inventory_hostname }} + register: startup_gpfs_cluster + when: scale_shutdownstartup_needed is defined and scale_shutdownstartup_needed | bool + + - debug: + msg: "{{startup_gpfs_cluster}}" + +- hosts: cluster01 + collections: + - ibm.spectrum_scale + vars: + - scale_shutdownstartup_needed: true + - scale_version: 5.2.2.1 + - scale_install_directory_pkg_path: /opt/efix1 + pre_tasks: + - name: cluster | Shutdown gpfs node + command: /usr/lpp/mmfs/bin/mmshutdown -N {{ inventory_hostname }} + register: shutdown_gpfs_cluster + when: scale_shutdownstartup_needed is defined and scale_shutdownstartup_needed | bool + - debug: + msg: "{{shutdown_gpfs_cluster}}" + roles: + - 
core_upgrade + post_tasks: + - name: cluster | Startup gpfs node + command: /usr/lpp/mmfs/bin/mmstartup -N {{ inventory_hostname }} + register: startup_gpfs_cluster + when: scale_shutdownstartup_needed is defined and scale_shutdownstartup_needed | bool + + - debug: + msg: "{{startup_gpfs_cluster}}"