From 1efbc5a4302e2d5f8eb48ffebd1788f00ed40e24 Mon Sep 17 00:00:00 2001 From: Henry LE BERRE Date: Sun, 27 Aug 2023 12:51:07 -0700 Subject: [PATCH 1/3] mfc.sh: sched.py refactor --- toolchain/mfc/sched.py | 28 ++++++++++++++++++++++------ toolchain/mfc/test/case.py | 4 ++-- toolchain/mfc/test/test.py | 24 ++++++++---------------- 3 files changed, 32 insertions(+), 24 deletions(-) diff --git a/toolchain/mfc/sched.py b/toolchain/mfc/sched.py index 3966a43c3..71ad990f7 100644 --- a/toolchain/mfc/sched.py +++ b/toolchain/mfc/sched.py @@ -21,8 +21,10 @@ def run(self): @dataclasses.dataclass class WorkerThreadHolder: - thread: threading.Thread - ppn: int + thread: threading.Thread + ppn: int + load: float + devices: typing.Set[int] @dataclasses.dataclass @@ -30,12 +32,14 @@ class Task: ppn: int func: typing.Callable args: typing.List[typing.Any] + load: float -def sched(tasks: typing.List[Task], nThreads: int): +def sched(tasks: typing.List[Task], nThreads: int, devices: typing.Set[int]) -> None: nAvailable: int = nThreads threads: typing.List[WorkerThreadHolder] = [] + sched.LOAD = { id: 0.0 for id in devices } def join_first_dead_thread(progress, complete_tracker) -> None: nonlocal threads, nAvailable @@ -46,6 +50,9 @@ def join_first_dead_thread(progress, complete_tracker) -> None: raise threadHolder.thread.exc nAvailable += threadHolder.ppn + for device in threadHolder.devices: + sched.LOAD[device] -= threadHolder.load / threadHolder.ppn + progress.advance(complete_tracker) del threads[threadID] @@ -75,11 +82,18 @@ def join_first_dead_thread(progress, complete_tracker) -> None: # Launch Thread progress.advance(queue_tracker) - thread = WorkerThread(target=task.func, args=tuple(task.args)) + # Use the least loaded devices + devices = set() + for _ in range(task.ppn): + device = min(sched.LOAD.items(), key=lambda x: x[1])[0] + sched.LOAD[device] += task.load / task.ppn + + nAvailable -= task.ppn + + thread = WorkerThread(target=task.func, args=tuple(task.args) + (devices,)) thread.start() - threads.append(WorkerThreadHolder(thread, task.ppn)) - nAvailable -= task.ppn + threads.append(WorkerThreadHolder(thread, task.ppn, task.load, devices)) # Wait for the lasts tests to complete @@ -89,3 +103,5 @@ def join_first_dead_thread(progress, complete_tracker) -> None: # Do not overwhelm this core with this loop time.sleep(0.05) + +sched.LOAD = {} diff --git a/toolchain/mfc/test/case.py b/toolchain/mfc/test/case.py index dce20502c..adc868878 100644 --- a/toolchain/mfc/test/case.py +++ b/toolchain/mfc/test/case.py @@ -100,8 +100,8 @@ def __init__(self, trace: str, mods: dict, ppn: int = None) -> None: self.ppn = ppn if ppn is not None else 1 super().__init__({**BASE_CFG.copy(), **mods}) - def run(self, targets: typing.List[str], gpu: int) -> subprocess.CompletedProcess: - gpu_select = f"CUDA_VISIBLE_DEVICES={gpu}" + def run(self, targets: typing.List[str], gpus: typing.Set[int]) -> subprocess.CompletedProcess: + gpu_select = f"CUDA_VISIBLE_DEVICES={','.join([str(_) for _ in gpus])}" filepath = f'"{self.get_dirpath()}/case.py"' tasks = f"-n {self.ppn}" jobs = f"-j {ARG('jobs')}" if ARG("case_optimization") else "" diff --git a/toolchain/mfc/test/test.py b/toolchain/mfc/test/test.py index 527f8f7c1..a88eb8a7a 100644 --- a/toolchain/mfc/test/test.py +++ b/toolchain/mfc/test/test.py @@ -1,4 +1,4 @@ -import os, math, shutil +import os, math, typing, shutil from random import sample from ..printer import cons @@ -105,7 +105,6 @@ def test(): cons.print(f" tests/[bold magenta]UUID[/bold magenta] Summary") 
cons.print() - # Initialize GPU_LOAD to 0 for each GPU _handle_case.GPU_LOAD = { id: 0 for id in ARG("gpus") } # Select the correct number of threads to use to launch test CASES @@ -115,9 +114,9 @@ def test(): # engineer around this issue (for now). nThreads = ARG("jobs") if not ARG("case_optimization") else 1 tasks = [ - sched.Task(ppn=case.ppn, func=handle_case, args=[ case ]) for case in CASES + sched.Task(ppn=case.ppn, func=handle_case, args=[case], load=case.get_cell_count()) for case in CASES ] - sched.sched(tasks, nThreads) + sched.sched(tasks, nThreads, ARG("gpus")) cons.print() if nFAIL == 0: @@ -131,7 +130,7 @@ def test(): cons.unindent() -def _handle_case(test: TestCase): +def _handle_case(test: TestCase, devices: typing.Set[int]): if test.params.get("qbmm", 'F') == 'T': tol = 1e-10 elif test.params.get("bubbles", 'F') == 'T': @@ -144,11 +143,7 @@ def _handle_case(test: TestCase): test.delete_output() test.create_directory() - load = test.get_cell_count() - gpu_id = min(_handle_case.GPU_LOAD.items(), key=lambda x: x[1])[0] - _handle_case.GPU_LOAD[gpu_id] += load - - cmd = test.run(["pre_process", "simulation"], gpu=gpu_id) + cmd = test.run(["pre_process", "simulation"], gpus=devices) out_filepath = os.path.join(test.get_dirpath(), "out_pre_sim.txt") @@ -188,7 +183,7 @@ def _handle_case(test: TestCase): if ARG("test_all"): test.delete_output() - cmd = test.run(["pre_process", "simulation", "post_process"], gpu=gpu_id) + cmd = test.run(["pre_process", "simulation", "post_process"], gpus=devices) out_filepath = os.path.join(test.get_dirpath(), "out_post.txt") common.file_write(out_filepath, cmd.stdout) @@ -220,11 +215,8 @@ def _handle_case(test: TestCase): cons.print(f" [bold magenta]{test.get_uuid()}[/bold magenta] {test.trace}") - _handle_case.GPU_LOAD[gpu_id] -= load - -_handle_case.GPU_LOAD = {} -def handle_case(test: TestCase): +def handle_case(test: TestCase, devices: typing.Set[int]): global nFAIL nAttempts = 0 @@ -233,7 +225,7 @@ def handle_case(test: TestCase): nAttempts += 1 try: - _handle_case(test) + _handle_case(test, devices) except Exception as exc: if nAttempts < ARG("max_attempts"): cons.print(f"[bold yellow] Attempt {nAttempts}: Failed test {test.get_uuid()}. Retrying...[/bold yellow]") From 2b03dd1b2426ced3d2dc20e08dfc6b38a2697e5f Mon Sep 17 00:00:00 2001 From: Henry LE BERRE Date: Sun, 27 Aug 2023 18:06:26 -0700 Subject: [PATCH 2/3] mfc.sh: build refactor --- toolchain/mfc/args.py | 11 +- toolchain/mfc/bench.py | 14 +- toolchain/mfc/build.py | 372 +++++++++++++++++------------------ toolchain/mfc/run/engines.py | 92 ++++----- toolchain/mfc/run/input.py | 28 +-- toolchain/mfc/run/run.py | 32 +-- toolchain/mfc/test/test.py | 4 +- 7 files changed, 275 insertions(+), 278 deletions(-) diff --git a/toolchain/mfc/args.py b/toolchain/mfc/args.py index ef7bfa485..65f3adba0 100644 --- a/toolchain/mfc/args.py +++ b/toolchain/mfc/args.py @@ -1,6 +1,6 @@ import re, os.path, argparse, dataclasses -from .build import get_mfc_target_names, get_target_names, get_dependencies_names +from .build import TARGETS, DEFAULT_TARGETS, DEPENDENCY_TARGETS from .common import format_list_to_string from .test.test import CASES as TEST_CASES from .packer import packer @@ -44,8 +44,9 @@ def add_common_arguments(p, mask = None): mask = "" if "t" not in mask: - p.add_argument("-t", "--targets", metavar="TARGET", nargs="+", type=str.lower, choices=get_target_names(), - default=get_mfc_target_names(), help=f"Space separated list of targets to act upon. 
Allowed values are: {format_list_to_string(get_target_names())}.") + p.add_argument("-t", "--targets", metavar="TARGET", nargs="+", type=str.lower, choices=[ _.name for _ in TARGETS ], + default=[ _.name for _ in DEFAULT_TARGETS ], + help=f"Space separated list of targets to act upon. Allowed values are: {format_list_to_string([ _.name for _ in TARGETS ])}.") if "m" not in mask: for f in dataclasses.fields(config): @@ -61,8 +62,8 @@ def add_common_arguments(p, mask = None): p.add_argument("-v", "--verbose", action="store_true", help="Enables verbose compiler & linker output.") if "n" not in mask: - for name in get_dependencies_names(): - p.add_argument(f"--no-{name}", action="store_true", help=f"Do not build the {name} dependency. Use the system's instead.") + for target in DEPENDENCY_TARGETS: + p.add_argument(f"--no-{target.name}", action="store_true", help=f"Do not build the {target.name} dependency. Use the system's instead.") # === BUILD === add_common_arguments(build) diff --git a/toolchain/mfc/bench.py b/toolchain/mfc/bench.py index 55ba99f45..65bacbe90 100644 --- a/toolchain/mfc/bench.py +++ b/toolchain/mfc/bench.py @@ -4,12 +4,12 @@ from .printer import cons from .state import ARG -from .build import build_targets +from .build import PRE_PROCESS, SIMULATION, build_targets from .common import system, MFC_SUBDIR from . import sched def bench(): - build_targets(["pre_process", "simulation"]) + build_targets([PRE_PROCESS, SIMULATION]) cons.print("[bold]Benchmarking [magenta]simulation[/magenta]:[/bold]") cons.indent() @@ -21,7 +21,7 @@ def bench(): table.add_column("Case") table.add_column("(Simulation) Runtime (s)") - def __worker(case: str): + def __worker(case: str, devices: typing.Set[int]): nonlocal RESULTS system(["./mfc.sh", "run", f"examples/{case}/case.py", "--no-build", "-t", "pre_process"], stdout=subprocess.DEVNULL) @@ -39,11 +39,15 @@ def __worker(case: str): table.add_row(case, str(runtime)) tasks: typing.List[sched.Task] = [ - sched.Task(1, __worker, [ case ]) for case in CASES + sched.Task(1, __worker, [ case ], 1) for case in CASES ] cons.print() - sched.sched(tasks, 1 if ARG('case_optimization') else ARG('jobs')) + nThreads = min(ARG('jobs'), len(ARG('gpus'))) if ARG("gpu") else ARG('jobs') + if ARG('case_optimization'): + nThreads = 1 + + sched.sched(tasks, nThreads, ARG("gpus")) cons.print() cons.unindent() cons.print("[bold]Benchmark Results:[/bold]") diff --git a/toolchain/mfc/build.py b/toolchain/mfc/build.py index 577cde1fa..7505b03a9 100644 --- a/toolchain/mfc/build.py +++ b/toolchain/mfc/build.py @@ -1,8 +1,8 @@ import os, typing, dataclasses +from .common import MFCException, system, delete_directory, create_directory from .state import ARG, CFG from .printer import cons -from . import common from .run.input import MFCInputFile @@ -10,11 +10,11 @@ class MFCTarget: @dataclasses.dataclass class Dependencies: - all: typing.List[str] - cpu: typing.List[str] - gpu: typing.List[str] + all: typing.List + cpu: typing.List + gpu: typing.List - def compute(self) -> typing.List[str]: + def compute(self) -> typing.Set: r = self.all[:] r += self.gpu[:] if ARG("gpu") else self.cpu[:] @@ -26,74 +26,184 @@ def compute(self) -> typing.List[str]: isDefault: bool # Should it be built by default? (unspecified -t | --targets) isRequired: bool # Should it always be built? 
(no matter what -t | --targets is) requires: Dependencies # Build dependencies of the target + + def __hash__(self) -> int: + return hash(self.name) + + # Get path to directory that will store the build files + def get_build_dirpath(self) -> str: + return os.sep.join([ + os.getcwd(), + "build", + [CFG().make_slug(), 'dependencies'][int(self.isDependency)], + self.name + ]) + + # Get the directory that contains the target's CMakeLists.txt + def get_cmake_dirpath(self) -> str: + # The CMakeLists.txt file is located: + # * Regular: /CMakelists.txt + # * Dependency: /toolchain/dependencies/CMakelists.txt + return os.sep.join([ + os.getcwd(), + os.sep.join(["toolchain", "dependencies"]) if self.isDependency else "", + ]) + + def get_install_dirpath(self) -> str: + # The install directory is located: + # Regular: /build/install/ + # Dependency: /build/install/dependencies (shared) + return os.sep.join([ + os.getcwd(), + "build", + "install", + 'dependencies' if self.isDependency else CFG().make_slug() + ]) + + def get_install_binpath(self) -> str: + # /install//bin/ + return os.sep.join([self.get_install_dirpath(), "bin", self.name]) + + def is_configured(self) -> bool: + # We assume that if the CMakeCache.txt file exists, then the target is + # configured. (this isn't perfect, but it's good enough for now) + return os.path.isfile( + os.sep.join([self.get_build_dirpath(), "CMakeCache.txt"]) + ) + + def build(self, history: typing.Set[str] = None): + if history is None: + history = set() + + if self.name in history: + return + + history.add(self.name) + + build_targets(REQUIRED_TARGETS, history) + + cons.print(f"[bold]Building [magenta]{self.name}[/magenta]:[/bold]") + cons.indent() + + if ARG("no_build"): + cons.print("--no-build specified, skipping...") + cons.unindent() + return + + if self.isDependency and ARG(f"no_{self.name}"): + cons.print(f"--no-{self.name} given, skipping...") + cons.unindent() + return + + build_dirpath = self.get_build_dirpath() + cmake_dirpath = self.get_cmake_dirpath() + install_dirpath = self.get_install_dirpath() + + install_prefixes = ';'.join([install_dirpath, get_dependency_install_dirpath()]) + + flags: list = self.flags.copy() + [ + # Disable CMake warnings intended for developers (us). + # See: https://cmake.org/cmake/help/latest/manual/cmake.1.html. + f"-Wno-dev", + # Save a compile_commands.json file with the compile commands used to + # build the configured targets. This is mostly useful for debugging. + # See: https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_COMPILE_COMMANDS.html. + f"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", + # Set build type (e.g Debug, Release, etc.). + # See: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html + f"-DCMAKE_BUILD_TYPE={'Debug' if ARG('debug') else 'Release'}", + # Used by FIND_PACKAGE (/FindXXX) to search for packages, with the + # second heighest level of priority, still letting users manually + # specify _ROOT, which has precedence over CMAKE_PREFIX_PATH. + # See: https://cmake.org/cmake/help/latest/command/find_package.html. + f"-DCMAKE_PREFIX_PATH={install_prefixes}", + # First directory that FIND_LIBRARY searches. + # See: https://cmake.org/cmake/help/latest/command/find_library.html. + f"-DCMAKE_FIND_ROOT_PATH={install_prefixes}", + # Location prefix to install bin/, lib/, include/, etc. + # See: https://cmake.org/cmake/help/latest/command/install.html. 
+ f"-DCMAKE_INSTALL_PREFIX={install_dirpath}", + ] + + if not self.isDependency: + flags.append(f"-DMFC_MPI={ 'ON' if ARG('mpi') else 'OFF'}") + flags.append(f"-DMFC_OpenACC={'ON' if ARG('gpu') else 'OFF'}") + + configure = ["cmake"] + flags + ["-S", cmake_dirpath, "-B", build_dirpath] + build = ["cmake", "--build", build_dirpath, + "--target", self.name, + "-j", ARG("jobs"), + "--config", 'Debug' if ARG('debug') else 'Release'] + if ARG('verbose'): + build.append("--verbose") + + install = ["cmake", "--install", build_dirpath] + + if not self.is_configured(): + build_targets(self.requires.compute(), history) + + delete_directory(build_dirpath) + create_directory(build_dirpath) + + if system(configure, no_exception=True) != 0: + raise MFCException(f"Failed to configure the [bold magenta]{self.name}[/bold magenta] target.") + + if not self.isDependency and ARG("command") == "build": + MFCInputFile("", "", {}).generate(self, bOnlyFPPs = True) + + system(build, exception_text=f"Failed to build the [bold magenta]{self.name}[/bold magenta] target.") + system(install, exception_text=f"Failed to install the [bold magenta]{self.name}[/bold magenta] target.") + + cons.print(no_indent=True) + cons.unindent() + def clean(self): + cons.print(f"[bold]Cleaning [magenta]{self.name}[/magenta]:[/bold]") + cons.indent() -FFTW = MFCTarget('fftw', ['-DMFC_FFTW=ON'], True, False, False, MFCTarget.Dependencies([], [], [])) -HDF5 = MFCTarget('hdf5', ['-DMFC_HDF5=ON'], True, False, False, MFCTarget.Dependencies([], [], [])) -SILO = MFCTarget('silo', ['-DMFC_SILO=ON'], True, False, False, MFCTarget.Dependencies(["hdf5"], [], [])) -PRE_PROCESS = MFCTarget('pre_process', ['-DMFC_PRE_PROCESS=ON'], False, True, False, MFCTarget.Dependencies([], [], [])) -SIMULATION = MFCTarget('simulation', ['-DMFC_SIMULATION=ON'], False, True, False, MFCTarget.Dependencies([], ["fftw"], [])) -POST_PROCESS = MFCTarget('post_process', ['-DMFC_POST_PROCESS=ON'], False, True, False, MFCTarget.Dependencies(['fftw', 'silo'], [], [])) -SYSCHECK = MFCTarget('syscheck', ['-DMFC_SYSCHECK=ON'], False, False, True, MFCTarget.Dependencies([], [], [])) -DOCUMENTATION = MFCTarget('documentation', ['-DMFC_DOCUMENTATION=ON'], False, False, False, MFCTarget.Dependencies([], [], [])) - -TARGETS: typing.List[MFCTarget] = [ FFTW, HDF5, SILO, PRE_PROCESS, SIMULATION, POST_PROCESS, SYSCHECK, DOCUMENTATION ] - -def get_mfc_target_names() -> typing.List[str]: - return [ target.name for target in TARGETS if target.isDefault ] - - -def get_dependencies_names() -> typing.List[str]: - return [ target.name for target in TARGETS if target.isDependency ] - + build_dirpath = self.get_build_dirpath() -def get_required_target_names() -> typing.List[str]: - return [ target.name for target in TARGETS if target.isRequired ] + if not os.path.isdir(build_dirpath): + cons.print("Target not configured. 
Nothing to clean.") + cons.unindent() + return + clean = ["cmake", "--build", build_dirpath, "--target", "clean", + "--config", "Debug" if ARG("debug") else "Release" ] -def get_target_names() -> typing.List[str]: - return [ target.name for target in TARGETS ] + if ARG("verbose"): + clean.append("--verbose") + system(clean, exception_text=f"Failed to clean the [bold magenta]{self.name}[/bold magenta] target.") -def get_target(name: str) -> MFCTarget: - for target in TARGETS: - if target.name == name: - return target + cons.unindent() - raise common.MFCException(f"Target '{name}' does not exist.") +FFTW = MFCTarget('fftw', ['-DMFC_FFTW=ON'], True, False, False, MFCTarget.Dependencies([], [], [])) +HDF5 = MFCTarget('hdf5', ['-DMFC_HDF5=ON'], True, False, False, MFCTarget.Dependencies([], [], [])) +SILO = MFCTarget('silo', ['-DMFC_SILO=ON'], True, False, False, MFCTarget.Dependencies([HDF5], [], [])) +PRE_PROCESS = MFCTarget('pre_process', ['-DMFC_PRE_PROCESS=ON'], False, True, False, MFCTarget.Dependencies([], [], [])) +SIMULATION = MFCTarget('simulation', ['-DMFC_SIMULATION=ON'], False, True, False, MFCTarget.Dependencies([], [FFTW], [])) +POST_PROCESS = MFCTarget('post_process', ['-DMFC_POST_PROCESS=ON'], False, True, False, MFCTarget.Dependencies([FFTW, SILO], [], [])) +SYSCHECK = MFCTarget('syscheck', ['-DMFC_SYSCHECK=ON'], False, False, True, MFCTarget.Dependencies([], [], [])) +DOCUMENTATION = MFCTarget('documentation', ['-DMFC_DOCUMENTATION=ON'], False, False, False, MFCTarget.Dependencies([], [], [])) -# Get path to directory that will store the build files -def get_build_dirpath(target: MFCTarget) -> str: - return os.sep.join([ - os.getcwd(), - "build", - [CFG().make_slug(), 'dependencies'][int(target.isDependency)], - target.name - ]) +TARGETS = { FFTW, HDF5, SILO, PRE_PROCESS, SIMULATION, POST_PROCESS, SYSCHECK, DOCUMENTATION } +DEFAULT_TARGETS = { target for target in TARGETS if target.isDefault } +REQUIRED_TARGETS = { target for target in TARGETS if target.isRequired } +DEPENDENCY_TARGETS = { target for target in TARGETS if target.isDependency } -# Get the directory that contains the target's CMakeLists.txt -def get_cmake_dirpath(target: MFCTarget) -> str: - # The CMakeLists.txt file is located: - # * Regular: /CMakelists.txt - # * Dependency: /toolchain/dependencies/CMakelists.txt - return os.sep.join([ - os.getcwd(), - os.sep.join(["toolchain", "dependencies"]) if target.isDependency else "", - ]) +TARGET_MAP = { target.name: target for target in TARGETS } +def get_target(target: typing.Union[str, MFCTarget]) -> MFCTarget: + if isinstance(target, MFCTarget): + return target + + if target in TARGET_MAP: + return TARGET_MAP[target] -def get_install_dirpath(target: MFCTarget) -> str: - # The install directory is located: - # Regular: /build/install/ - # Dependency: /build/install/dependencies (shared) - return os.sep.join([ - os.getcwd(), - "build", - "install", - 'dependencies' if target.isDependency else CFG().make_slug() - ]) + raise MFCException(f"Target '{target}' does not exist.") def get_dependency_install_dirpath() -> str: @@ -101,146 +211,28 @@ def get_dependency_install_dirpath() -> str: # the install directory of the first dependency we find. for target in TARGETS: if target.isDependency: - return get_install_dirpath(target) - - raise common.MFCException("No dependency target found.") - - -def is_target_configured(target: MFCTarget) -> bool: - # We assume that if the CMakeCache.txt file exists, then the target is - # configured. 
(this isn't perfect, but it's good enough for now) - return os.path.isfile( - os.sep.join([get_build_dirpath(target), "CMakeCache.txt"]) - ) + return target.get_install_dirpath() + raise MFCException("No dependency target found.") -def clean_target(name: str): - cons.print(f"[bold]Cleaning [magenta]{name}[/magenta]:[/bold]") - cons.indent() - target = get_target(name) - - build_dirpath = get_build_dirpath(target) - - if not os.path.isdir(build_dirpath): - cons.print("Target not configured. Nothing to clean.") - cons.unindent() - return - - clean = ["cmake", "--build", build_dirpath, "--target", "clean", - "--config", "Debug" if ARG("debug") else "Release" ] - - if ARG("verbose"): - clean.append("--verbose") +def build_targets(targets: typing.Iterable[typing.Union[MFCTarget, str]], history: typing.Set[str] = None): + if history is None: + history = set() + + for target in targets: + get_target(target).build(history) - common.system(clean, exception_text=f"Failed to clean the [bold magenta]{name}[/bold magenta] target.") - cons.unindent() +def clean_targets(targets: typing.Iterable[typing.Union[MFCTarget, str]]): + for target in targets: + get_target(target).clean() -def clean_targets(targets: typing.List[str]): - for target in targets: - clean_target(target) +def build(): + build_targets(ARG("targets")) def clean(): clean_targets(ARG("targets")) - -def build_target(name: str, history: typing.List[str] = None): - cons.print(f"[bold]Building [magenta]{name}[/magenta]:[/bold]") - cons.indent() - - if history is None: - history = [] - - if ARG("no_build"): - cons.print("--no-build specified, skipping...") - cons.unindent() - return - - if name in history: - cons.print("Already built, skipping...") - cons.unindent() - return - - history.append(name) - - target = get_target(name) - - if target.isDependency and ARG(f"no_{target.name}"): - cons.print(f"--no-{target.name} given, skipping...") - cons.unindent() - return - - build_dirpath = get_build_dirpath(target) - cmake_dirpath = get_cmake_dirpath(target) - install_dirpath = get_install_dirpath(target) - - install_prefixes = ';'.join([install_dirpath, get_dependency_install_dirpath()]) - - flags: list = target.flags.copy() + [ - # Disable CMake warnings intended for developers (us). - # See: https://cmake.org/cmake/help/latest/manual/cmake.1.html. - f"-Wno-dev", - # Save a compile_commands.json file with the compile commands used to - # build the configured targets. This is mostly useful for debugging. - # See: https://cmake.org/cmake/help/latest/variable/CMAKE_EXPORT_COMPILE_COMMANDS.html. - f"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON", - # Set build type (e.g Debug, Release, etc.). - # See: https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html - f"-DCMAKE_BUILD_TYPE={'Debug' if ARG('debug') else 'Release'}", - # Used by FIND_PACKAGE (/FindXXX) to search for packages, with the - # second heighest level of priority, still letting users manually - # specify _ROOT, which has precedence over CMAKE_PREFIX_PATH. - # See: https://cmake.org/cmake/help/latest/command/find_package.html. - f"-DCMAKE_PREFIX_PATH={install_prefixes}", - # First directory that FIND_LIBRARY searches. - # See: https://cmake.org/cmake/help/latest/command/find_library.html. - f"-DCMAKE_FIND_ROOT_PATH={install_prefixes}", - # Location prefix to install bin/, lib/, include/, etc. - # See: https://cmake.org/cmake/help/latest/command/install.html. 
- f"-DCMAKE_INSTALL_PREFIX={install_dirpath}", - ] - - if not target.isDependency: - flags.append(f"-DMFC_MPI={ 'ON' if ARG('mpi') else 'OFF'}") - flags.append(f"-DMFC_OpenACC={'ON' if ARG('gpu') else 'OFF'}") - - configure = ["cmake"] + flags + ["-S", cmake_dirpath, "-B", build_dirpath] - build = ["cmake", "--build", build_dirpath, - "--target", name, - "-j", ARG("jobs"), - "--config", 'Debug' if ARG('debug') else 'Release'] - if ARG('verbose'): - build.append("--verbose") - - install = ["cmake", "--install", build_dirpath] - - if not is_target_configured(target): - for dependency_name in target.requires.compute(): - build_target(dependency_name, history) - - common.delete_directory(build_dirpath) - common.create_directory(build_dirpath) - - if common.system(configure, no_exception=True) != 0: - raise common.MFCException(f"Failed to configure the [bold magenta]{name}[/bold magenta] target.") - - if not target.isDependency and ARG("command") == "build": - MFCInputFile("", "", {}).generate(name, bOnlyFPPs = True) - - common.system(build, exception_text=f"Failed to build the [bold magenta]{name}[/bold magenta] target.") - common.system(install, exception_text=f"Failed to install the [bold magenta]{name}[/bold magenta] target.") - - cons.print(no_indent=True) - cons.unindent() - - -def build_targets(targets): - for target in set(targets).union(set(get_required_target_names())): - build_target(target) - - -def build(): - build_targets(ARG("targets")) diff --git a/toolchain/mfc/run/engines.py b/toolchain/mfc/run/engines.py index 2115c75bb..066c04446 100644 --- a/toolchain/mfc/run/engines.py +++ b/toolchain/mfc/run/engines.py @@ -2,22 +2,25 @@ from ..state import ARG, ARGS from ..printer import cons -from .. import build, common +from ..build import MFCTarget, SYSCHECK +from ..common import MFCException, does_command_exist, isspace, system +from ..common import format_list_to_string, does_system_use_modules +from ..common import get_loaded_modules, file_write from ..run import queues, mpi_bins from ..run.input import MFCInputFile def profiler_prepend(): if ARG("ncu") is not None: - if not common.does_command_exist("ncu"): - raise common.MFCException("Failed to locate [bold green]NVIDIA Nsight Compute[/bold green] (ncu).") + if not does_command_exist("ncu"): + raise MFCException("Failed to locate [bold green]NVIDIA Nsight Compute[/bold green] (ncu).") return ["ncu", "--nvtx", "--mode=launch-and-attach", "--cache-control=none", "--clock-control=none"] + ARG("ncu") if ARG("nsys") is not None: - if not common.does_command_exist("nsys"): - raise common.MFCException("Failed to locate [bold green]NVIDIA Nsight Systems[/bold green] (nsys).") + if not does_command_exist("nsys"): + raise MFCException("Failed to locate [bold green]NVIDIA Nsight Systems[/bold green] (nsys).") return ["nsys", "profile", "--stats=true", "--trace=mpi,nvtx,openacc"] + ARG("nsys") @@ -38,15 +41,10 @@ def _init(self) -> None: pass def get_args(self) -> typing.List[str]: - raise common.MFCException(f"MFCEngine::get_args: not implemented for {self.name}.") + raise MFCException(f"MFCEngine::get_args: not implemented for {self.name}.") - def run(self, names: typing.List[str]) -> None: - raise common.MFCException(f"MFCEngine::run: not implemented for {self.name}.") - - def get_binpath(self, target: str) -> str: - # /install//bin/ - prefix = build.get_install_dirpath(build.get_target(target)) - return os.sep.join([prefix, "bin", target]) + def run(self, targets: typing.List[MFCTarget]) -> None: + raise MFCException(f"MFCEngine::run: 
not implemented for {self.name}.") def _interactive_working_worker(cmd: typing.List[str], q: multiprocessing.Queue): @@ -74,7 +72,7 @@ def get_args(self) -> str: MPI Binary (-b) {self.mpibin.bin}\ """ - def get_exec_cmd(self, target_name: str) -> typing.List[str]: + def get_exec_cmd(self, target: MFCTarget) -> typing.List[str]: cmd = [] if ARG("mpi"): @@ -82,12 +80,12 @@ def get_exec_cmd(self, target_name: str) -> typing.List[str]: cmd += profiler_prepend() - cmd.append(self.get_binpath(target_name)) + cmd.append(target.get_install_binpath()) return cmd - def run(self, names: typing.List[str]) -> None: + def run(self, targets: typing.List[MFCTarget]) -> None: if not self.bKnowWorks: # Fix MFlowCode/MFC#21: Check whether attempting to run a job will hang # forever. This can happen when using the wrong queue system. @@ -102,7 +100,7 @@ def run(self, names: typing.List[str]) -> None: p = multiprocessing.Process( target=_interactive_working_worker, args=( - [self.mpibin.bin] + self.mpibin.gen_params() + [os.sep.join([build.get_install_dirpath(build.SYSCHECK), "bin", "syscheck"])], + [self.mpibin.bin] + self.mpibin.gen_params() + [os.sep.join([SYSCHECK.get_install_dirpath(), "bin", "syscheck"])], q, )) @@ -110,7 +108,7 @@ def run(self, names: typing.List[str]) -> None: p.join(work_timeout) if p.is_alive(): - raise common.MFCException("""\ + raise MFCException("""\ The [bold magenta]Interactive Engine[/bold magenta] appears to hang. This may indicate that the wrong MPI binary is being used to launch parallel jobs. You can specify the correct one for your system using the <-b,--binary> option. For example: @@ -139,18 +137,18 @@ def run(self, names: typing.List[str]) -> None: else: error_txt += f"Evaluation timed out after {work_timeout}s." - raise common.MFCException(error_txt) + raise MFCException(error_txt) cons.print() cons.unindent() - for name in names: - cons.print(f"[bold]Running [magenta]{name}[/magenta][/bold]:") + for target in targets: + cons.print(f"[bold]Running [magenta]{target.name}[/magenta][/bold]:") cons.indent() if not ARG("dry_run"): start_time = time.monotonic() - common.system(self.get_exec_cmd(name), cwd=self.input.case_dirpath) + system(self.get_exec_cmd(target), cwd=self.input.case_dirpath) end_time = time.monotonic() cons.print(no_indent=True) @@ -173,17 +171,19 @@ def get_args(self) -> str: Email (-@) {ARG("email")} """ - def run(self, names: typing.List[str]) -> None: + def run(self, targets: typing.List[MFCTarget]) -> None: system = queues.get_system() cons.print(f"Detected the [bold magenta]{system.name}[/bold magenta] queue system.") - cons.print(f"Running {common.format_list_to_string(names, 'bold magenta')}:") + targets = [SYSCHECK] + targets + + cons.print(f"Running {format_list_to_string([_.name for _ in targets], 'bold magenta')}:") cons.indent() - self.__create_batch_file(system, names) + self.__create_batch_file(system, targets) if not ARG("dry_run"): - self.__execute_batch_file(system, names) + self.__execute_batch_file(system) cons.print("[bold yellow]INFO:[/bold yellow] Batch file submitted! 
Please check your queue system for the job status.") cons.print("[bold yellow]INFO:[/bold yellow] If an error occurs, please check the generated batch file and error logs for more information.") @@ -194,24 +194,24 @@ def run(self, names: typing.List[str]) -> None: def __get_batch_dirpath(self) -> str: return copy.copy(self.input.case_dirpath) - def __get_batch_filename(self, names: typing.List[str]) -> str: + def __get_batch_filename(self) -> str: return f"{ARG('name')}.sh" - def __get_batch_filepath(self, names: typing.List[str]): + def __get_batch_filepath(self): return os.path.abspath(os.sep.join([ self.__get_batch_dirpath(), - self.__get_batch_filename(names) + self.__get_batch_filename() ])) - def __generate_prologue(self, system: queues.QueueSystem, names: typing.List[str]) -> str: + def __generate_prologue(self, system: queues.QueueSystem) -> str: modules = f"" - if common.does_system_use_modules(): + if does_system_use_modules(): modules = f"""\ printf ":) Loading modules...\\n" module purge -module load {' '.join(common.get_loaded_modules())} +module load {' '.join(get_loaded_modules())} """ return f"""\ @@ -260,16 +260,16 @@ def __evaluate_expression(self, expr: str) -> str: try: # We assume eval is safe because we control the expression. r = str(eval(expr, ARGS())) - return r if not common.isspace(r) else None + return r if not isspace(r) else None except Exception as exc: - raise common.MFCException(f"BatchEngine: '{expr}' is not a valid expression in the template file. Please check your spelling.") + raise MFCException(f"BatchEngine: '{expr}' is not a valid expression in the template file. Please check your spelling.") - def __batch_evaluate(self, s: str, system: queues.QueueSystem, names: typing.List[str]): + def __batch_evaluate(self, s: str, system: queues.QueueSystem, targets: typing.List[MFCTarget]): replace_list = [ - ("{MFC::PROLOGUE}", self.__generate_prologue(system, names)), + ("{MFC::PROLOGUE}", self.__generate_prologue(system)), ("{MFC::PROFILER}", ' '.join(profiler_prepend())), ("{MFC::EPILOGUE}", self.__generate_epilogue()), - ("{MFC::BINARIES}", ' '.join([f"'{self.get_binpath(x)}'" for x in ["syscheck"] + names])), + ("{MFC::BINARIES}", ' '.join([f"'{target.get_install_binpath()}'" for target in targets])), ] for (key, value) in replace_list: @@ -293,23 +293,23 @@ def __batch_evaluate(self, s: str, system: queues.QueueSystem, names: typing.Lis return s - def __create_batch_file(self, system: queues.QueueSystem, names: typing.List[str]): + def __create_batch_file(self, system: queues.QueueSystem, targets: typing.List[MFCTarget]): cons.print("> Generating batch file...") - filepath = self.__get_batch_filepath(names) + filepath = self.__get_batch_filepath() cons.print("> Evaluating template file...") - content = self.__batch_evaluate(system.template, system, names) + content = self.__batch_evaluate(system.template, system, targets) cons.print("> Writing batch file...") - common.file_write(filepath, content) + file_write(filepath, content) - def __execute_batch_file(self, system: queues.QueueSystem, names: typing.List[str]): + def __execute_batch_file(self, system: queues.QueueSystem): # We CD to the case directory before executing the batch file so that # any files the queue system generates (like .err and .out) are created # in the correct directory. 
- cmd = system.gen_submit_cmd(self.__get_batch_filename(names)) + cmd = system.gen_submit_cmd(self.__get_batch_filename()) - if common.system(cmd, cwd=self.__get_batch_dirpath()) != 0: - raise common.MFCException(f"Submitting batch file for {system.name} failed. It can be found here: {self.__get_batch_filepath(target_name)}. Please check the file for errors.") + if system(cmd, cwd=self.__get_batch_dirpath()) != 0: + raise MFCException(f"Submitting batch file for {system.name} failed. It can be found here: {self.__get_batch_filepath()}. Please check the file for errors.") ENGINES = [ InteractiveEngine(), BatchEngine() ] @@ -324,6 +324,6 @@ def get_engine(slug: str) -> Engine: break if engine == None: - raise common.MFCException(f"Unsupported engine {slug}.") + raise MFCException(f"Unsupported engine {slug}.") return engine diff --git a/toolchain/mfc/run/input.py b/toolchain/mfc/run/input.py index 34320d8b6..2ed66b544 100644 --- a/toolchain/mfc/run/input.py +++ b/toolchain/mfc/run/input.py @@ -1,7 +1,7 @@ import os, re, json, math, dataclasses from ..printer import cons -from .. import common +from .. import common, build from ..state import ARG, ARGS from . import case_dicts @@ -29,11 +29,11 @@ def __is_ic_analytical(self, key: str, val: str) -> bool: return False - def __generate_inp(self, target_name: str) -> None: - cons.print(f"Generating [magenta]{target_name}.inp[/magenta].") + def __generate_inp(self, target) -> None: + cons.print(f"Generating [magenta]{target.name}.inp[/magenta].") cons.indent() - MASTER_KEYS: list = case_dicts.get_input_dict_keys(target_name) + MASTER_KEYS: list = case_dicts.get_input_dict_keys(target.name) ignored = [] @@ -62,12 +62,12 @@ def __generate_inp(self, target_name: str) -> None: contents = f"&user_inputs\n{dict_str}&end/\n" # Save .inp input file - common.file_write(f"{self.case_dirpath}/{target_name}.inp", contents) + common.file_write(f"{self.case_dirpath}/{target.name}.inp", contents) cons.unindent() - def __save_fpp(self, target_name: str, contents: str) -> None: - filepath = os.path.join(os.getcwd(), "src", target_name, "include", "case.fpp") + def __save_fpp(self, target, contents: str) -> None: + filepath = os.path.join(os.getcwd(), "src", target.name, "include", "case.fpp") # Check if this case already has a case.fpp file. # If so, we don't need to generate a new one, which @@ -164,7 +164,7 @@ def rhs_replace(match): #:enddef """ - self.__save_fpp("pre_process", content) + self.__save_fpp(build.PRE_PROCESS, content) cons.unindent() @@ -202,7 +202,7 @@ def __generate_sim_fpp(self) -> None: else: cons.print("[yellow]INFO:[/yellow] Case optimization is disabled. 
Use --case-optimization to enable it.") - self.__save_fpp("simulation", content) + self.__save_fpp(build.SIMULATION, content) cons.unindent() def __generate_post_fpp(self) -> None: @@ -212,21 +212,21 @@ def __generate_post_fpp(self) -> None: cons.unindent() pass - # Generate case.fpp & [target_name].inp - def generate(self, target_name: str, bOnlyFPPs = False) -> None: + # Generate case.fpp & [target.name].inp + def generate(self, target, bOnlyFPPs = False) -> None: if not bOnlyFPPs: - self.__generate_inp(target_name) + self.__generate_inp(target) cons.print() def _default(): - cons.print(f"No additional input file generation needed for [bold magenta]{target_name}[/bold magenta].") + cons.print(f"No additional input file generation needed for [bold magenta]{target.name}[/bold magenta].") { "pre_process" : self.__generate_pre_fpp, "simulation" : self.__generate_sim_fpp, "post_process" : self.__generate_post_fpp, - }.get(target_name, _default)() + }.get(target.name, _default)() # Load the input file diff --git a/toolchain/mfc/run/run.py b/toolchain/mfc/run/run.py index 337b17298..a1ea94e1f 100644 --- a/toolchain/mfc/run/run.py +++ b/toolchain/mfc/run/run.py @@ -1,33 +1,34 @@ import re, typing +from ..build import MFCTarget, get_target, build_targets from ..printer import cons from ..state import ARG +from ..common import MFCException, isspace from . import engines, input -from .. import common, build def validate_job_options() -> None: if not ARG("mpi") and any({ARG("nodes") > 1, ARG("tasks_per_node") > 1}): - raise common.MFCException("RUN: Cannot run on more than one rank with --no-mpi.") + raise MFCException("RUN: Cannot run on more than one rank with --no-mpi.") if ARG("nodes") <= 0: - raise common.MFCException("RUN: At least one node must be requested.") + raise MFCException("RUN: At least one node must be requested.") if ARG("tasks_per_node") <= 0: - raise common.MFCException("RUN: At least one task per node must be requested.") + raise MFCException("RUN: At least one task per node must be requested.") - if not common.isspace(ARG("email")): + if not isspace(ARG("email")): # https://stackoverflow.com/questions/8022530/how-to-check-for-valid-email-address if not re.match(r"\"?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)\"?", ARG("email")): - raise common.MFCException(f'RUN: {ARG("email")} is not a valid e-mail address.') + raise MFCException(f'RUN: {ARG("email")} is not a valid e-mail address.') -def run_targets(targets: typing.List[str]): +def run_targets(targets: typing.List[MFCTarget]): cons.print("[bold]Run[/bold]") cons.indent() - if len(ARG("targets")) == 0: + if len(targets) == 0: cons.print(f"> No target selected.") return @@ -48,22 +49,21 @@ def run_targets(targets: typing.List[str]): validate_job_options() - for name in ARG("targets"): - cons.print(f"Generating input files for [magenta]{name}[/magenta]...") + for target in targets: + cons.print(f"Generating input files for [magenta]{target.name}[/magenta]...") cons.indent() cons.print() - input_file.generate(name) + input_file.generate(target) cons.print() cons.unindent() - build.build_targets(targets) + build_targets(targets) + engine.run(targets) - engine.run(ARG("targets")) - -def run_target(target: str): +def run_target(target: MFCTarget): run_targets([target]) def run() -> None: - run_targets(ARG("targets")) + run_targets([ get_target(_) for _ in ARG("targets")]) diff --git a/toolchain/mfc/test/test.py b/toolchain/mfc/test/test.py index a88eb8a7a..7f244e6a1 100644 --- a/toolchain/mfc/test/test.py +++ b/toolchain/mfc/test/test.py @@ -8,7 
+8,7 @@ from .cases import generate_cases from .. import sched from ..common import MFCException, does_command_exist, format_list_to_string, get_program_output -from ..build import build_targets, get_install_dirpath, HDF5 +from ..build import build_targets, HDF5 from ..packer import tol as packtol from ..packer import packer @@ -192,7 +192,7 @@ def _handle_case(test: TestCase, devices: typing.Set[int]): if not os.path.exists(silo_filepath): silo_filepath = os.path.join(test.get_dirpath(), 'silo_hdf5', 'p_all', 'p0', f'{t_step}.silo') - h5dump = f"{get_install_dirpath(HDF5)}/bin/h5dump" + h5dump = f"{HDF5.get_install_dirpath()}/bin/h5dump" if ARG("no_hdf5"): if not does_command_exist("h5dump"): From 53546603223f8ea1b8860a3d9b9cb669665ccf12 Mon Sep 17 00:00:00 2001 From: Henry LE BERRE Date: Sun, 27 Aug 2023 20:30:50 -0700 Subject: [PATCH 3/3] mfc.sh: refactor running cases from within mfc.sh --- misc/run-phoenix-release-gpu.sh | 2 +- toolchain/mfc/args.py | 14 ++++++-------- toolchain/mfc/common.py | 7 +++++-- toolchain/mfc/count.py | 12 +++++++----- toolchain/mfc/run/engines.py | 9 +++++++-- toolchain/mfc/run/run.py | 9 +++++++++ toolchain/mfc/test/case.py | 1 - 7 files changed, 35 insertions(+), 19 deletions(-) diff --git a/misc/run-phoenix-release-gpu.sh b/misc/run-phoenix-release-gpu.sh index 8079ba9ad..bb27af6b5 100644 --- a/misc/run-phoenix-release-gpu.sh +++ b/misc/run-phoenix-release-gpu.sh @@ -17,7 +17,7 @@ set -x . ./mfc.sh load -c p -m GPU gpu_count=$(nvidia-smi -L | wc -l) # number of GPUs on node -gpu_ids=$(seq -s ',' 0 $(($gpu_count-1))) # 0,1,2,...,gpu_count-1 +gpu_ids=$(seq -s ' ' 0 $(($gpu_count-1))) # 0,1,2,...,gpu_count-1 ./mfc.sh test -a -b mpirun -j $(nproc) \ --gpu -g $gpu_ids diff --git a/toolchain/mfc/args.py b/toolchain/mfc/args.py index 65f3adba0..a7193a6f0 100644 --- a/toolchain/mfc/args.py +++ b/toolchain/mfc/args.py @@ -65,11 +65,14 @@ def add_common_arguments(p, mask = None): for target in DEPENDENCY_TARGETS: p.add_argument(f"--no-{target.name}", action="store_true", help=f"Do not build the {target.name} dependency. 
Use the system's instead.") + if "g" not in mask: + p.add_argument("-g", "--gpus", nargs="+", type=int, default=[0], help="(GPU) List of GPU #s to use.") + # === BUILD === - add_common_arguments(build) + add_common_arguments(build, "g") # === CLEAN === - add_common_arguments(clean, "j") + add_common_arguments(clean, "jg") binaries = [ b.bin for b in BINARIES ] @@ -82,7 +85,6 @@ def add_common_arguments(p, mask = None): test.add_argument("-b", "--binary", choices=binaries, type=str, default=None, help="(Serial) Override MPI execution binary") test.add_argument("-r", "--relentless", action="store_true", default=False, help="Run all tests, even if multiple fail.") test.add_argument("-a", "--test-all", action="store_true", default=False, help="Run the Post Process Tests too.") - test.add_argument("-g", "--gpus", type=str, default="0", help="(GPU) Comma separated list of GPU #s to use.") test.add_argument("-%", "--percent", type=int, default=100, help="Percentage of tests to run.") test.add_argument("-m", "--max-attempts", type=int, default=3, help="Maximum number of attempts to run a test.") @@ -120,7 +122,7 @@ def add_common_arguments(p, mask = None): add_common_arguments(bench, "t") # === COUNT === - add_common_arguments(count) + add_common_arguments(count, "g") args: dict = vars(parser.parse_args()) @@ -150,8 +152,4 @@ def append_defaults_to_data(name: str, parser): if args[e] is not None: args[e] = os.path.abspath(args[e]) - # Turn GPU ID list into a comma separated string - if "gpus" in args: - args["gpus"] = [int(g) for g in args["gpus"].split(",")] - return args diff --git a/toolchain/mfc/common.py b/toolchain/mfc/common.py index 888733f7a..d0e718dbd 100644 --- a/toolchain/mfc/common.py +++ b/toolchain/mfc/common.py @@ -29,9 +29,12 @@ class MFCException(Exception): pass -def system(command: typing.List[str], no_exception: bool = False, exception_text=None, on_error=lambda: None, cwd=None, stdout=None, stderr=None) -> int: +def system(command: typing.List[str], no_exception: bool = False, exception_text=None, on_error=lambda: None, cwd=None, stdout=None, stderr=None, env: dict = None) -> int: cmd = [ str(x) for x in command if not isspace(str(x)) ] + if env is None: + env = os.environ.copy() + if stdout != subprocess.DEVNULL: cons.print(no_indent=True) @@ -40,7 +43,7 @@ def system(command: typing.List[str], no_exception: bool = False, exception_text if stdout != subprocess.DEVNULL: cons.print(no_indent=True) - r = subprocess.run(cmd, cwd=cwd, stdout=stdout, stderr=stderr) + r = subprocess.run(cmd, cwd=cwd, stdout=stdout, stderr=stderr, env=env) if r.returncode != 0: on_error() diff --git a/toolchain/mfc/count.py b/toolchain/mfc/count.py index 865a911e4..4ffabdf28 100644 --- a/toolchain/mfc/count.py +++ b/toolchain/mfc/count.py @@ -1,6 +1,7 @@ import os, glob, typing, typing -from .common import MFC_ROOTDIR +from .state import ARG +from .common import MFC_ROOTDIR, format_list_to_string from .printer import cons import rich.table @@ -20,12 +21,13 @@ def handle_dir(dirpath: str) -> typing.Tuple[typing.List[typing.Tuple[str, int]] return (files, total) def count(): - cons.print("[bold]Counting lines of code in [magenta]MFC[/magenta][/bold] (excluding whitespace lines)") - cons.print() + target_str_list = format_list_to_string(ARG('targets'), 'magenta') + + cons.print(f"[bold]Counting lines of code in {target_str_list}[/bold] (excluding whitespace lines)") cons.indent() total = 0 - for codedir in ['common', 'pre_process', 'simulation', 'post_process']: + for codedir in ['common'] + 
ARG("targets"): dirfiles, dircount = handle_dir(os.path.join(MFC_ROOTDIR, 'src', codedir)) table = rich.table.Table(show_header=True, box=rich.table.box.SIMPLE) table.add_column(f"File (in [magenta]{codedir}[/magenta])", justify="left") @@ -38,7 +40,7 @@ def count(): cons.raw.print(table) - cons.print(f"[bold]Total MFC lines: [bold cyan]{total}[/bold cyan].[/bold]") + cons.print(f"[bold]Total {target_str_list} lines: [bold cyan]{total}[/bold cyan].[/bold]") cons.print() cons.unindent() diff --git a/toolchain/mfc/run/engines.py b/toolchain/mfc/run/engines.py index 066c04446..b8c45f772 100644 --- a/toolchain/mfc/run/engines.py +++ b/toolchain/mfc/run/engines.py @@ -74,7 +74,6 @@ def get_args(self) -> str: def get_exec_cmd(self, target: MFCTarget) -> typing.List[str]: cmd = [] - if ARG("mpi"): cmd += [self.mpibin.bin] + self.mpibin.gen_params() + ARG("flags")[:] @@ -148,7 +147,13 @@ def run(self, targets: typing.List[MFCTarget]) -> None: if not ARG("dry_run"): start_time = time.monotonic() - system(self.get_exec_cmd(target), cwd=self.input.case_dirpath) + system( + self.get_exec_cmd(target), cwd=self.input.case_dirpath, + env={ + **os.environ.copy(), + 'CUDA_VISIBLE_DEVICES': ','.join([str(_) for _ in ARG('gpus')]) + } + ) end_time = time.monotonic() cons.print(no_indent=True) diff --git a/toolchain/mfc/run/run.py b/toolchain/mfc/run/run.py index a1ea94e1f..47fcd2fe2 100644 --- a/toolchain/mfc/run/run.py +++ b/toolchain/mfc/run/run.py @@ -67,3 +67,12 @@ def run_target(target: MFCTarget): def run() -> None: run_targets([ get_target(_) for _ in ARG("targets")]) + + +def run_targets_with(targets: typing.List[MFCTarget]): + pass + + +def run_target_with(target: MFCTarget): + run_targets_with([target]) + diff --git a/toolchain/mfc/test/case.py b/toolchain/mfc/test/case.py index adc868878..d83927492 100644 --- a/toolchain/mfc/test/case.py +++ b/toolchain/mfc/test/case.py @@ -78,7 +78,6 @@ 'sigV' : 0.1, 'rhoRV' : 0.0, - 'Monopole' : 'F', 'num_mono' : 1, 'Mono(1)%loc(1)' : 0.5,