|
| 1 | +#!/usr/bin/env python3 |
| 2 | + |
| 3 | +import argparse |
| 4 | +import os |
| 5 | +import subprocess |
| 6 | + |
| 7 | +from jinja2 import Template |
| 8 | + |
| 9 | +from polaris.config import PolarisConfigParser |
| 10 | +from polaris.io import download |
| 11 | +from polaris.job import _clean_up_whitespace, get_slurm_options |
| 12 | + |
| 13 | + |
def make_build_script(machine, compiler, branch, build_only, mesh_filename,
                      debug, clean, cmake_flags):
    """
    Generate a shell script that checks out Omega and its submodules, builds
    Omega and its ctests, links the testing data files and, unless
    ``build_only`` is set, runs the ctests.  Returns the path to the script.
    """
    polaris_source_dir = os.environ['POLARIS_BRANCH']
    metis_root = os.environ['METIS_ROOT']
    parmetis_root = os.environ['PARMETIS_ROOT']

    build_subdir = f'build_{machine}_{compiler}'

    branch = os.path.abspath(branch)
    submodule_path = os.path.join(polaris_source_dir, 'e3sm_submodules/Omega')
    # only update the Omega submodule when that is the branch under test
    update_omega_submodule = (branch == submodule_path)

    script_dir = os.path.realpath(
        os.path.join(os.getcwd(), os.path.dirname(__file__)))

    with open(os.path.join(script_dir, 'build_and_ctest.template'), 'r',
              encoding='utf-8') as template_file:
        template = Template(template_file.read())

    build_type = 'Debug' if debug else 'Release'

    cmake_flags = '' if cmake_flags is None else cmake_flags

    script = template.render(update_omega_submodule=update_omega_submodule,
                             polaris_source_dir=polaris_source_dir,
                             omega_base_dir=branch,
                             build_dir=build_subdir,
                             machine=machine,
                             compiler=compiler,
                             metis_root=metis_root,
                             parmetis_root=parmetis_root,
                             omega_mesh_filename=mesh_filename,
                             run_ctest=(not build_only),
                             build_type=build_type,
                             clean=clean,
                             cmake_flags=cmake_flags)
    script = _clean_up_whitespace(script)

    out_dir = os.path.abspath('build_omega')
    os.makedirs(out_dir, exist_ok=True)

    if build_only:
        basename = f'build_omega_{machine}_{compiler}.sh'
    else:
        basename = f'build_and_ctest_omega_{machine}_{compiler}.sh'
    script_filename = os.path.join(out_dir, basename)

    with open(script_filename, 'w', encoding='utf-8') as out_file:
        out_file.write(script)

    return script_filename
| 77 | + |
| 78 | + |
def download_mesh(config):
    """
    Download (if not already cached) the mesh used for testing into the
    local database and return the path to the downloaded file.
    """
    base_url = config.get('download', 'server_base_url')
    database_root = config.get('paths', 'database_root')

    filepath = ('ocean/polaris_cache/global_convergence/icos/cosine_bell/'
                'Icos480/mesh/mesh.230220.nc')

    return download(f'{base_url}/{filepath}',
                    os.path.join(database_root, filepath),
                    config)
| 93 | + |
| 94 | + |
def write_job_script(config, machine, compiler, submit):
    """
    Write a Slurm job script that runs the build-and-ctest script and,
    if ``submit`` is set, submit it with ``sbatch``.
    """
    account = (config.get('parallel', 'account')
               if config.has_option('parallel', 'account') else '')

    nodes = 1

    partition, qos, constraint, _ = get_slurm_options(
        config, machine, nodes)

    wall_time = '0:15:00'

    # prefer a debug partition if the machine config lists one
    if config.has_option('parallel', 'partitions'):
        partition = next(
            (p for p in config.getlist('parallel', 'partitions')
             if 'debug' in p),
            partition)

    # likewise prefer a debug qos
    if config.has_option('parallel', 'qos'):
        qos = next(
            (q for q in config.getlist('parallel', 'qos')
             if 'debug' in q),
            qos)

    job_name = f'omega_ctest_{machine}_{compiler}'

    script_dir = os.path.realpath(
        os.path.join(os.getcwd(), os.path.dirname(__file__)))
    with open(os.path.join(script_dir, 'job_script.template'), 'r',
              encoding='utf-8') as template_file:
        template = Template(template_file.read())

    build_dir = os.path.abspath(
        os.path.join('build_omega', f'build_{machine}_{compiler}'))

    script = _clean_up_whitespace(
        template.render(job_name=job_name, account=account,
                        nodes=f'{nodes}', wall_time=wall_time, qos=qos,
                        partition=partition, constraint=constraint,
                        build_dir=build_dir))

    script_filename = os.path.join(
        os.path.abspath('build_omega'),
        f'job_build_and_ctest_omega_{machine}_{compiler}.sh')

    with open(script_filename, 'w', encoding='utf-8') as out_file:
        out_file.write(script)

    if submit:
        args = ['sbatch', script_filename]
        print(f'\nRunning:\n {" ".join(args)}\n')
        subprocess.run(args=args, check=True)
| 157 | + |
| 158 | + |
def main():
    """
    Main function for building Omega and performing ctests.

    Parses command-line options, downloads the test mesh, writes and runs
    the build(/ctest) script, then writes (and optionally submits) a job
    script for running the ctests on a compute node.
    """
    parser = argparse.ArgumentParser(
        description='Check out submodules, build Omega and run ctest')
    parser.add_argument('-o', '--omega_branch', dest='omega_branch',
                        default='e3sm_submodules/Omega',
                        help='The local Omega branch to test.')
    parser.add_argument('-c', '--clean', dest='clean', action='store_true',
                        help='Whether to remove the build directory and start '
                             'fresh')
    parser.add_argument('-s', '--submit', dest='submit', action='store_true',
                        help='Whether to submit a job to run ctests')
    parser.add_argument('-d', '--debug', dest='debug', action='store_true',
                        help='Whether to only build Omega in debug mode')
    parser.add_argument('--cmake_flags', dest='cmake_flags',
                        help='Quoted string with additional cmake flags')

    args = parser.parse_args()

    # machine and compiler come from the polaris load-script environment
    machine = os.environ['POLARIS_MACHINE']
    compiler = os.environ['POLARIS_COMPILER']

    config = PolarisConfigParser()
    config.add_from_package('polaris', 'default.cfg')
    config.add_from_package('mache.machines', f'{machine}.cfg')
    config.add_from_package('polaris.machines', f'{machine}.cfg')

    submit = args.submit
    branch = args.omega_branch
    debug = args.debug
    clean = args.clean
    cmake_flags = args.cmake_flags

    if 'SLURM_JOB_ID' in os.environ:
        # already on a compute node so we will just run ctests directly
        # rather than submitting a job
        submit = False
        build_only = False
    else:
        # on a login node: only build here; ctests run later in the job
        build_only = True

    mesh_filename = download_mesh(config=config)

    script_filename = make_build_script(machine=machine, compiler=compiler,
                                        branch=branch, build_only=build_only,
                                        mesh_filename=mesh_filename,
                                        debug=debug, clean=clean,
                                        cmake_flags=cmake_flags)

    # clear environment variables and start fresh with those from login
    # so spack doesn't get confused by conda
    subprocess.check_call(f'env -i HOME="$HOME" bash -l {script_filename}',
                          shell=True)

    write_job_script(config=config, machine=machine, compiler=compiler,
                     submit=submit)
| 215 | + |
| 216 | + |
# entry point when run as a script (not imported)
if __name__ == '__main__':
    main()
0 commit comments