diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 26b8dd50..f2edbe0d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -4,7 +4,8 @@ on: schedule: - cron: '0 7 * * *' # run at 7 AM UTC every day push: - branches: [ master ] + branches: + - master pull_request: branches: - master @@ -25,8 +26,18 @@ jobs: - name: Checkout repo uses: actions/checkout@v3 - - name: Setup Intel OneAPI Compilers - uses: modflowpy/install-intelfortran-action@v1 + - name: Setup Intel Fortran Classic + uses: awvwgk/setup-fortran@main + with: + compiler: intel-classic + version: 2021.7.0 + + - name: Set SETVARS_COMPLETED (temporary) + run: echo "SETVARS_COMPLETED=1" >> $GITHUB_ENV + + - name: Set CXX (temporary) + if: runner.os == 'Windows' + run: echo "CXX=icl" >> $GITHUB_ENV - name: Setup Graphviz if: runner.os == 'Linux' @@ -63,7 +74,7 @@ jobs: working-directory: ./autotest shell: cmd run: | - pytest -v -m="base" --durations=0 --cov=pymake --cov-report=xml + pytest -v -m="base" --durations=0 --cov=pymake --cov-report=xml --basetemp=pytest_temp - name: Print coverage report before upload working-directory: ./autotest diff --git a/.github/workflows/pymake-gcc.yml b/.github/workflows/pymake-gcc.yml index c04d1a81..547d794e 100644 --- a/.github/workflows/pymake-gcc.yml +++ b/.github/workflows/pymake-gcc.yml @@ -4,7 +4,8 @@ on: schedule: - cron: '0 7 * * *' # run at 7 AM UTC every day push: - branches: [ master ] + branches: + - master pull_request: branches: - master @@ -39,8 +40,11 @@ jobs: python -m pip install --upgrade pip pip install ".[test]" - - name: Install GNU Fortran - uses: modflowpy/install-gfortran-action@v1 + - name: Setup GNU Fortran + uses: awvwgk/setup-fortran@main + with: + compiler: gcc + version: 11 - name: Download examples for pytest runs run: | @@ -49,7 +53,7 @@ jobs: - name: Run pytest working-directory: ./autotest run: | - pytest -v --dist=loadfile -n=auto -m="base or regression" --durations=0 --cov=pymake --cov-report=xml + pytest -v --dist=loadfile -n=auto -m="base or regression" --durations=0 --cov=pymake --cov-report=xml --basetemp=pytest_temp - name: Print coverage report before upload working-directory: ./autotest diff --git a/.github/workflows/pymake-linting-install.yml b/.github/workflows/pymake-linting-install.yml index e768fe87..0c5eaee0 100644 --- a/.github/workflows/pymake-linting-install.yml +++ b/.github/workflows/pymake-linting-install.yml @@ -4,7 +4,8 @@ on: schedule: - cron: '0 3 * * 3' # run at 3 AM UTC every Wednesday push: - branches: [ master ] + branches: + - master pull_request: branches: - master diff --git a/.github/workflows/pymake-requests.yml b/.github/workflows/pymake-requests.yml index 63e20666..254a4293 100644 --- a/.github/workflows/pymake-requests.yml +++ b/.github/workflows/pymake-requests.yml @@ -4,7 +4,8 @@ on: schedule: - cron: '0 7 * * *' # run at 7 AM UTC every day push: - branches: [ master ] + branches: + - master pull_request: branches: - master diff --git a/.github/workflows/pymake-rtd.yml b/.github/workflows/pymake-rtd.yml index 92a3cac3..e5ac25e8 100644 --- a/.github/workflows/pymake-rtd.yml +++ b/.github/workflows/pymake-rtd.yml @@ -4,7 +4,8 @@ on: schedule: - cron: '0 3 * * 3' # run at 3 AM UTC every Wednesday push: - branches: [ master ] + branches: + - master pull_request: branches: - master diff --git a/pytest.ini b/autotest/pytest.ini similarity index 68% rename from pytest.ini rename to autotest/pytest.ini index 77dd21cf..00fcab6d 100644 --- a/pytest.ini +++ b/autotest/pytest.ini @@ -1,6 +1,7 
@@ [pytest] markers = base: base tests + dependency: tests that depend on other tests (via pytest-dependency) regression: regression tests requests: usgsprograms requests tests schedule: tests to run if a scheduled job diff --git a/autotest/test_cli_cmds.py b/autotest/test_cli_cmds.py index 95dc8ca7..6413831a 100644 --- a/autotest/test_cli_cmds.py +++ b/autotest/test_cli_cmds.py @@ -1,6 +1,4 @@ import os -import pathlib as pl -import shutil import subprocess import pytest @@ -13,12 +11,6 @@ "crt", ) -# set up paths -dstpth = pl.Path( - f"temp_{os.path.basename(__file__).replace('.py', '')}" -).resolve() -dstpth.mkdir(parents=True, exist_ok=True) - def run_cli_cmd(cmd: list) -> None: process = subprocess.Popen( @@ -39,25 +31,16 @@ def run_cli_cmd(cmd: list) -> None: return -def clean_up() -> None: - print("Removing temporary build directories") - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - return - - +@flaky(max_runs=RERUNS) @pytest.mark.dependency(name="make_program") @pytest.mark.base -@flaky(max_runs=RERUNS) @pytest.mark.parametrize("target", targets) -def test_make_program(target: str) -> None: +def test_make_program(module_tmpdir, target: str) -> None: cmd = [ "make-program", target, "--appdir", - str(dstpth), + str(module_tmpdir), "--verbose", ] run_cli_cmd(cmd) @@ -65,12 +48,12 @@ def test_make_program(target: str) -> None: @pytest.mark.dependency(name="make_program_all") @pytest.mark.schedule -def test_make_program_all() -> None: +def test_make_program_all(module_tmpdir) -> None: cmd = [ "make-program", ":", "--appdir", - str(dstpth / "all"), + str(module_tmpdir / "all"), "--verbose", "--dryrun", ] @@ -79,14 +62,14 @@ def test_make_program_all() -> None: @pytest.mark.dependency(name="mfpymake") @pytest.mark.base -def test_mfpymake() -> None: +def test_mfpymake(module_tmpdir) -> None: src = ( "program hello\n" + " ! 
This is a comment line; it is ignored by the compiler\n" + " print *, 'Hello, World!'\n" + "end program hello\n" ) - src_file = dstpth / "mfpymake_src/hello.f90" + src_file = module_tmpdir / "mfpymake_src" / "hello.f90" src_file.parent.mkdir(parents=True, exist_ok=True) with open(src_file, "w") as f: f.write(src) @@ -97,7 +80,7 @@ def test_mfpymake() -> None: "-mc", "--verbose", "--appdir", - str(dstpth), + str(module_tmpdir), "-fc", ] if os.environ.get("FC") is None: @@ -105,18 +88,5 @@ def test_mfpymake() -> None: else: cmd.append(os.environ.get("FC")) run_cli_cmd(cmd) - cmd = [dstpth / "hello"] + cmd = [module_tmpdir / "hello"] run_cli_cmd(cmd) - - -@pytest.mark.dependency(name="clean", depends=["make_program"]) -@pytest.mark.base -def test_clean_up() -> None: - clean_up() - return - - -if __name__ == "__main__": - for target in targets: - test_make_program(target) - test_clean_up() diff --git a/autotest/test_gridgen.py b/autotest/test_gridgen.py index ffec537b..ef1365c3 100644 --- a/autotest/test_gridgen.py +++ b/autotest/test_gridgen.py @@ -1,88 +1,46 @@ import os import pathlib as pl -import shutil import subprocess -import sys +from platform import system import pytest import pymake -# use the line below to set fortran compiler using environmental variables -# if sys.platform.lower() == "win32": -# os.environ["CC"] = "icl" -# else: -# os.environ["CC"] = "icc" - -# define program data -target = "gridgen" -if sys.platform.lower() == "win32": - target += ".exe" - -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) - -# set up paths -dstpth = pl.Path(f"temp_{os.path.basename(__file__).replace('.py', '')}") -dstpth.mkdir(parents=True, exist_ok=True) - -ver = prog_dict.version -pth = dstpth / prog_dict.dirname -expth = pth / "examples/biscayne" -exe_name = dstpth / target - -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = str(dstpth) -env_var = os.environ.get("CC") -if env_var is not None: - pm.cc = env_var -else: - pm.cc = "g++" -pm.fc = None -pm.inplace = True -pm.makeclean = True - -biscayne_cmds = [ - "buildqtg action01_buildqtg.dfn", - "grid02qtg-to-usgdata action02_writeusgdata.dfn", - "grid01mfg-to-polyshapefile action03_shapefile.dfn", - "grid02qtg-to-polyshapefile action03_shapefile.dfn", - "grid01mfg-to-pointshapefile action03_shapefile.dfn", - "grid02qtg-to-pointshapefile action03_shapefile.dfn", - "canal_grid02qtg_lay1_intersect action04_intersect.dfn", - "chd_grid02qtg_lay1_intersect action04_intersect.dfn", - "grid01mfg-to-vtkfile action05_vtkfile.dfn", - "grid02qtg-to-vtkfile action05_vtkfile.dfn", - "grid02qtg-to-vtkfilesv action05_vtkfile.dfn", -] - - -def clean_up(): - print("Removing test files and directories") - - # finalize pymake object - pm.finalize() - if os.path.isfile(exe_name): - print(f"Removing {target}") - os.remove(exe_name) +@pytest.fixture(scope="module") +def target(module_tmpdir) -> pl.Path: + name = "gridgen" + ext = ".exe" if system() == "Windows" else "" + return module_tmpdir / f"{name}{ext}" + - print(f"Removing folder {pth}") - if pth.is_dir(): - shutil.rmtree(pth) +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) - dirs_temp = [dstpth] - for d in dirs_temp: - if d.is_dir(): - shutil.rmtree(d) - return +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> pl.Path: + return module_tmpdir / prog_data.dirname -def run_command(cmdlist, cwd): +@pytest.fixture(scope="module") +def pm(module_tmpdir, target) -> 
pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(target) + pm.appdir = str(module_tmpdir) + pm.cc = os.environ.get("CXX", "g++") + pm.fc = os.environ.get("FC", "gfortran") + pm.inplace = True + pm.makeclean = True + yield pm + pm.finalize() + + +def run_command(args, cwd): p = subprocess.Popen( - cmdlist, + args, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, @@ -94,51 +52,44 @@ def run_command(cmdlist, cwd): return retval -def run_gridgen(cmd): - success = False - prog = os.path.abspath(exe_name) - if os.path.exists(prog): - testpth = os.path.abspath(expth) - - cmdlist = [prog] + cmd.split() - print(f"running {' '.join(cmdlist)}") - retcode = run_command(cmdlist, testpth) - if retcode == 0: - success = True - - return success +def run_gridgen(cmd, ws, exe): + args = [str(exe)] + cmd.split() + print(f"running {' '.join(args)}") + return run_command(args, ws) == 0 +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - # Remove the existing target download directory if it exists - if dstpth.is_dir(): - shutil.rmtree(dstpth) - - # download the target - pm.download_target(target, download_path=dstpth) +def test_download(pm, module_tmpdir, target): + pm.download_target(target, download_path=module_tmpdir) assert pm.download, f"could not download {target} distribution" +@pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): +def test_compile(pm, target): assert pm.build() == 0, f"could not compile {target}" +@pytest.mark.dependency(name="test", depends=["build"]) @pytest.mark.regression -@pytest.mark.parametrize("cmd", biscayne_cmds) -def test_gridgen(cmd): - assert run_gridgen(cmd), f"could not run {cmd}" - - -@pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - test_download() - test_compile() - # for cmd in biscayne_cmds: - # run_gridgen(cmd) - test_clean_up() +@pytest.mark.parametrize( + "cmd", + [ + "buildqtg action01_buildqtg.dfn", + "grid02qtg-to-usgdata action02_writeusgdata.dfn", + "grid01mfg-to-polyshapefile action03_shapefile.dfn", + "grid02qtg-to-polyshapefile action03_shapefile.dfn", + "grid01mfg-to-pointshapefile action03_shapefile.dfn", + "grid02qtg-to-pointshapefile action03_shapefile.dfn", + "canal_grid02qtg_lay1_intersect action04_intersect.dfn", + "chd_grid02qtg_lay1_intersect action04_intersect.dfn", + "grid01mfg-to-vtkfile action05_vtkfile.dfn", + "grid02qtg-to-vtkfile action05_vtkfile.dfn", + "grid02qtg-to-vtkfilesv action05_vtkfile.dfn", + ], +) +def test_gridgen(cmd, workspace, target): + assert run_gridgen( + cmd, workspace / "examples" / "biscayne", target + ), f"could not run {cmd}" diff --git a/autotest/test_mf2005.py b/autotest/test_mf2005.py index d9a6ce61..dbf361ea 100644 --- a/autotest/test_mf2005.py +++ b/autotest/test_mf2005.py @@ -1,157 +1,92 @@ -import os -import shutil import sys +from pathlib import Path import flopy import pytest import pymake -# use the line below to set fortran compiler using environmental variables -# os.environ["FC"] = "ifort" -# os.environ["CC"] = "icc" - - -# define program data -target = "mf2005" -if sys.platform.lower() == "win32": - target += ".exe" - -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) - -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) - -mfver = prog_dict.version -mfpth = os.path.join(dstpth, prog_dict.dirname) -expth = 
os.path.join(mfpth, "test-run") -epth = os.path.join(dstpth, target) -name_files = [ - "l1b2k_bath.nam", - "test1tr.nam", - "mnw1.nam", - "testsfr2.nam", - "bcf2ss.nam", - "restest.nam", - "etsdrt.nam", - "str.nam", - "tr2k_s3.nam", - "fhb.nam", - "twri.nam", - "ibs2k.nam", - "swtex4.nam", - "twrihfb.nam", - "l1a2k.nam", - "tc2hufv4.nam", - "twrip.nam", - "l1b2k.nam", - "test1ss.nam", -] -# add path to name_files -for idx, namefile in enumerate(name_files): - name_files[idx] = os.path.join(expth, namefile) - -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = dstpth -pm.fflags = "-O3" -pm.cflags = "-O3" - - -def run_mf2005(namefile): - """ - Run the simulation. - - """ - if namefile is not None: - # Set nam as namefile name without path - nam = os.path.basename(namefile) - - # run test models - exe_name = os.path.abspath(epth) - msg = f"running model...{nam}" + f" using {exe_name}" - print(msg) - if os.path.exists(exe_name): - success, buff = flopy.run_model( - exe_name, nam, model_ws=expth, silent=True - ) - else: - success = False - - assert success, f"base model {nam} " + "did not run." - else: - success = False - errmsg = f"{target} does not exist" - - assert success, errmsg - - return - - -def cleanup(): - print("Removing test files and directories") - - # clean up makefile - print("Removing makefile") - files = ["makefile", "makedefaults"] - for fpth in files: - if os.path.isfile(fpth): - os.remove(fpth) - - # finalize pymake object - pm.finalize() - - if os.path.isfile(epth): - print("Removing " + target) - os.remove(epth) - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) +@pytest.fixture(scope="module") +def target(module_tmpdir) -> Path: + target = "mf2005" + if sys.platform.lower() == "win32": + target += ".exe" + return module_tmpdir / target - return +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) -@pytest.mark.base -def test_download(): - # Remove the existing target download directory if it exists - if os.path.isdir(mfpth): - shutil.rmtree(mfpth) - # download the target - pm.download_target(target, download_path=dstpth) - assert pm.download, f"could not download {target}" +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname -@pytest.mark.base -def test_compile(): - assert pm.build() == 0, f"could not compile {target}" +@pytest.fixture(scope="module") +def pm(module_tmpdir, target) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(target) + pm.appdir = str(module_tmpdir) + pm.fflags = "-O3" + pm.cflags = "-O3" + yield pm + pm.finalize() -@pytest.mark.regression -@pytest.mark.parametrize("fn", name_files) -def test_mf2005(fn): - run_mf2005(fn) - return +def run_mf2005(namefile, ws, exe): + print(f"running model {namefile} using {exe}") + success, _ = flopy.run_model(exe, namefile, model_ws=ws, silent=False) + return success +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_cleanup(): - cleanup() - - return - +def test_download(pm, module_tmpdir, target): + pm.download_target(target, download_path=module_tmpdir) + assert pm.download, f"could not download {target}" -if __name__ == "__main__": - test_download() - test_compile() +@pytest.mark.dependency(name="build", depends=["download"]) +@pytest.mark.base +def test_compile(pm, target): + assert pm.build() == 0, f"could not compile {target}" - for namefile in name_files: - run_mf2005(namefile) 
- test_cleanup() +@pytest.mark.dependency(name="test", depends=["build"]) +@pytest.mark.regression +@pytest.mark.parametrize( + "namefile", + [ + "l1b2k_bath.nam", + "test1tr.nam", + "mnw1.nam", + "testsfr2.nam", + "bcf2ss.nam", + "restest.nam", + "etsdrt.nam", + "str.nam", + "tr2k_s3.nam", + "fhb.nam", + "twri.nam", + "ibs2k.nam", + "swtex4.nam", + "twrihfb.nam", + "l1a2k.nam", + "tc2hufv4.nam", + "twrip.nam", + "l1b2k.nam", + "test1ss.nam", + ], +) +def test_mf2005(namefile, workspace, target): + example_ws = workspace / "test-run" + if not (example_ws / namefile).is_file(): + pytest.skip(f"{namefile} does not exist") + + success, _ = flopy.run_model( + target, namefile, model_ws=example_ws, silent=False + ) + assert success, f"could not run {namefile} with {target}" diff --git a/autotest/test_mf6.py b/autotest/test_mf6.py index 6f59b0d3..8c8a3185 100644 --- a/autotest/test_mf6.py +++ b/autotest/test_mf6.py @@ -1,39 +1,15 @@ -import contextlib import os -import shutil import sys import time +from platform import system +from pathlib import Path import flopy import pytest +from modflow_devtools.misc import set_dir import pymake -# define program data -target = "mf6" -if sys.platform.lower() == "win32": - target += ".exe" - -sharedobject_target = "libmf6" -if sys.platform.lower() == "win32": - sharedobject_target += ".dll" -elif sys.platform.lower() == "darwin": - sharedobject_target += ".dylib" -else: - sharedobject_target += ".so" - -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) - -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) - -mf6ver = prog_dict.version -mf6pth = os.path.join(dstpth, prog_dict.dirname) -epth = os.path.join(dstpth, target) - # set fpth based on current path if os.path.basename(os.path.normpath(os.getcwd())) == "autotest": fpth = os.path.abspath( @@ -50,41 +26,66 @@ else: sim_dirs = [] -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = dstpth -pm.makefile = True -pm.makeclean = True -pm.makefiledir = dstpth -pm.inplace = True -pm.networkx = True +@pytest.fixture(scope="module") +def target(module_tmpdir) -> Path: + name = "mf6" + ext = ".exe" if system() == "Windows" else "" + return module_tmpdir / f"{name}{ext}" + + +@pytest.fixture(scope="module") +def target_so(module_tmpdir) -> Path: + sharedobject_target = "libmf6" + if sys.platform.lower() == "win32": + sharedobject_target += ".dll" + elif sys.platform.lower() == "darwin": + sharedobject_target += ".dylib" + else: + sharedobject_target += ".so" + return module_tmpdir / sharedobject_target -@contextlib.contextmanager -def working_directory(path): - """Changes working directory and returns to previous on exit.""" - prev_cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(prev_cwd) + +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) + + +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname + + +@pytest.fixture(scope="module") +def pm(module_tmpdir, target) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(target) + pm.appdir = module_tmpdir + pm.makefile = True + pm.makeclean = True + pm.makefiledir = module_tmpdir + pm.inplace = True + pm.networkx = True + pm.verbose = True + yield pm + pm.finalize() -def build_with_makefile(makefile_target): +def 
build_with_makefile(pm, workspace, exe): + exe_path = Path(exe) success = False - with working_directory(dstpth): + with set_dir(workspace): if os.path.isfile("makefile"): # wait to delete on windows if sys.platform.lower() == "win32": time.sleep(6) # clean prior to make - print(f"clean {makefile_target} with makefile") + print(f"clean {exe} with makefile") os.system("make clean") # build MODFLOW 6 with makefile - print(f"build {makefile_target} with makefile") + print(f"build {exe} with makefile") return_code = os.system("make") # test if running on Windows with ifort, if True the makefile @@ -96,121 +97,63 @@ def build_with_makefile(makefile_target): success = False # verify that target was made else: - success = os.path.isfile(makefile_target) - - return success - - -def clean_up(): - # clean up makefile - print("Removing makefile") - files = [ - os.path.join(dstpth, file_name) - for file_name in ("makefile", "makedefaults") - ] - for fpth in files: - if os.path.isfile(fpth): - os.remove(fpth) - - # finalize pymake object - pm.finalize() - - if os.path.isfile(epth): - print("Removing " + target) - os.remove(epth) + success = exe_path.is_file() - print("Removing temporary build directories") - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - return - - -def run_mf6(workspace): - success = False - exe_name = os.path.abspath(epth) - if os.path.exists(exe_name): - # run test models - print(f"running model...{os.path.basename(workspace)}") - success, buff = flopy.run_model( - exe_name, None, model_ws=workspace, silent=False - ) return success @pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - # Remove the existing mf6 directory if it exists - if os.path.isdir(mf6pth): - shutil.rmtree(mf6pth) - - # download the modflow 6 release - pm.download_target(target, download_path=dstpth) +def test_download(pm, module_tmpdir, target): + pm.download_target(target, download_path=module_tmpdir) assert pm.download, f"could not download {target} distribution" @pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): +def test_compile(pm, target): assert pm.build() == 0, f"could not compile {target}" @pytest.mark.dependency(name="test", depends=["build"]) @pytest.mark.regression @pytest.mark.parametrize("ws", sim_dirs) -def test_mf6(ws): - assert run_mf6(ws), f"could not run {ws}" +def test_mf6(ws, target): + success, _ = flopy.run_model(target, None, model_ws=ws, silent=False) + assert success, f"could not run {ws}" @pytest.mark.dependency(name="makefile", depends=["build"]) @pytest.mark.base -def test_makefile(): +def test_makefile(pm, module_tmpdir, target): assert build_with_makefile( - target + pm, module_tmpdir, target ), f"could not compile {target} with makefile" @pytest.mark.dependency(name="shared", depends=["makefile"]) @pytest.mark.base -def test_sharedobject(): - pm.target = sharedobject_target - prog_dict = pymake.usgs_program_data.get_target(pm.target) - pm.appdir = dstpth - pm.srcdir = os.path.join(mf6pth, prog_dict.srcdir) - pm.srcdir2 = os.path.join(mf6pth, "src") +def test_sharedobject(pm, module_tmpdir, workspace, target_so, prog_data): + # reconfigure pymake object + pm.target = str(target_so) + pm.appdir = module_tmpdir + pm.srcdir = workspace / prog_data.srcdir + pm.srcdir2 = workspace / "src" pm.excludefiles = [os.path.join(pm.srcdir2, "mf6.f90")] pm.makefile = True pm.makeclean = True pm.sharedobject = True pm.inplace = True pm.dryrun = False + + # build the target assert 
pm.build() == 0, f"could not compile {pm.target}" + assert target_so.is_file() @pytest.mark.dependency(name="shared_makefile", depends=["shared", "makefile"]) @pytest.mark.base -def test_sharedobject_makefile(): +def test_sharedobject_makefile(pm, module_tmpdir, target_so): assert build_with_makefile( - sharedobject_target - ), f"could not compile {sharedobject_target} with makefile" - - -@pytest.mark.dependency( - name="clean", depends=["build", "makefile", "shared_makefile"] -) -@pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - test_download() - test_compile() - for ws in sim_dirs: - run_mf6(ws) - test_makefile() - test_sharedobject() - test_sharedobject_makefile() - test_clean_up() + pm, module_tmpdir, target_so + ), f"could not compile {target_so} with makefile" diff --git a/autotest/test_mf6_existing_meson.py b/autotest/test_mf6_existing_meson.py index 5148bc19..a3bfd895 100644 --- a/autotest/test_mf6_existing_meson.py +++ b/autotest/test_mf6_existing_meson.py @@ -1,48 +1,55 @@ import os -import shutil import sys +from pathlib import Path +from typing import List -import flopy import pytest import pymake -target = "mf6" -ext = "" -shared_ext = ".so" -executables = [target, "zbud6", "mf5to6", "libmf6"] -if sys.platform.lower() == "win32": - ext = ".exe" - shared_ext = ".dll" -elif sys.platform.lower() == "darwin": - shared_ext = ".dylib" -for idx, executable in enumerate(executables[:3]): - executables[idx] += ext -executables[3] += shared_ext -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) +@pytest.fixture(scope="module") +def targets() -> List[Path]: + target = "mf6" + ext = "" + shared_ext = ".so" + executables = [target, "zbud6", "mf5to6", "libmf6"] + if sys.platform.lower() == "win32": + ext = ".exe" + shared_ext = ".dll" + elif sys.platform.lower() == "darwin": + shared_ext = ".dylib" + for idx, _ in enumerate(executables[:3]): + executables[idx] += ext + executables[3] += shared_ext + return executables -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) -mf6pth = os.path.join(dstpth, prog_dict.dirname) -mesondir = mf6pth +@pytest.fixture(scope="module") +def prog_data(targets) -> dict: + return pymake.usgs_program_data.get_target(targets[0]) -def clean_up(): - print("Removing temporary build directories") - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - return +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname + + +@pytest.fixture(scope="module") +def pm(workspace, targets) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(targets[0]) + pm.appdir = str(workspace / "bin") + pm.meson = True + pm.makeclean = True + pm.mesondir = str(workspace) + pm.verbose = True + yield pm + pm.finalize() @pytest.mark.base -def test_build_with_existing_meson(): +def test_build_with_existing_meson(pm, module_tmpdir, workspace, targets): # set default compilers fc, cc = "gfortran", "gcc" @@ -72,21 +79,13 @@ def test_build_with_existing_meson(): # print fortran and c/c++ compilers print(f"fortran compiler={fc}\n" + f"c/c++ compiler={cc}\n") - pm = pymake.Pymake(verbose=True) - pm.target = target - pm.appdir = os.path.join(mesondir, "bin") - pm.meson = True - pm.makeclean = True - pm.mesondir = mesondir - pm.verbose = True - - # download the modflow 6 - pm.download_target(target, 
download_path=dstpth) - assert pm.download, f"could not download {target} distribution" + # download modflow 6 + pm.download_target(targets[0], download_path=module_tmpdir) + assert pm.download, f"could not download {targets[0]} distribution" # make modflow 6 with existing meson.build file returncode = pymake.meson_build( - mesondir, + workspace, fc, cc, appdir=pm.appdir, @@ -96,13 +95,6 @@ def test_build_with_existing_meson(): ), "could not build modflow 6 applications using existing meson.build file" # check that all of the executables exist - for executable in executables: + for executable in targets: exe_pth = os.path.join(pm.appdir, executable) assert os.path.isfile(exe_pth), f"{exe_pth} does not exist" - - # clean up test files - clean_up() - - -if __name__ == "__main__": - test_build_with_existing_meson() diff --git a/autotest/test_mflgr.py b/autotest/test_mflgr.py index 2c1d2101..1de8c968 100644 --- a/autotest/test_mflgr.py +++ b/autotest/test_mflgr.py @@ -1,74 +1,34 @@ -import os -import shutil +from pathlib import Path import pytest import pymake -# define program data -target = "mflgr" -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) +@pytest.fixture(scope="module") +def target(module_tmpdir) -> Path: + target = "mflgr" + return module_tmpdir / target -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) -mflgrpth = os.path.join(dstpth, prog_dict.dirname) +@pytest.fixture(scope="module") +def prog_dict(target) -> dict: + return pymake.usgs_program_data.get_target(target) -def compile_code(): - # Remove the existing mfusg directory if it exists - if os.path.isdir(mflgrpth): - shutil.rmtree(mflgrpth) +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_dict) -> Path: + return module_tmpdir / prog_dict.dirname - # compile MODFLOW-LGR + +def compile_code(ws, exe): return pymake.build_apps( - target, download_dir=dstpth, appdir=dstpth, verbose=True + str(exe), download_dir=ws, appdir=ws, verbose=True ) -def clean_up(): - print("Removing test files and directories") - - # clean up download directory - print("Removing folder " + mflgrpth) - if os.path.isdir(mflgrpth): - shutil.rmtree(mflgrpth) - - # get list of files with target in name - epths = [] - for file in os.listdir(dstpth): - fpth = os.path.join(dstpth, file) - if os.path.isfile(fpth): - if target in file: - epths.append(fpth) - - # clean up the executable - for epth in epths: - print("removing...'" + epth + "'") - os.remove(epth) - - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - - return - - -@pytest.mark.base -def test_compile(): - assert compile_code() == 0, f"could not compile {target}" - - @pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - compile_code() - clean_up() +def test_compile(module_tmpdir, target): + assert ( + compile_code(module_tmpdir, target) == 0 + ), f"could not compile {target}" diff --git a/autotest/test_mfnwt.py b/autotest/test_mfnwt.py index 827f9330..f7cb8013 100644 --- a/autotest/test_mfnwt.py +++ b/autotest/test_mfnwt.py @@ -1,54 +1,47 @@ -import contextlib import os -import shutil import sys import time +from pathlib import Path import pytest +from modflow_devtools.misc import set_dir import pymake -# define program data -target = "mfnwt" -if sys.platform.lower() == "win32": - target += ".exe" -# get program dictionary -prog_dict = 
pymake.usgs_program_data.get_target(target) +@pytest.fixture(scope="module") +def target(module_tmpdir) -> str: + target = "mfnwt" + return module_tmpdir / target -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) -mfnwtpth = os.path.join(dstpth, prog_dict.dirname) +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) -srcpth = os.path.join(mfnwtpth, prog_dict.srcdir) -epth = os.path.join(dstpth, target) -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = dstpth -pm.makefile = True -pm.makefiledir = dstpth -pm.inplace = True -pm.dryrun = False +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname -@contextlib.contextmanager -def working_directory(path): - """Changes working directory and returns to previous on exit.""" - prev_cwd = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(prev_cwd) +@pytest.fixture(scope="module") +def pm(module_tmpdir, target) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(target) + pm.appdir = str(module_tmpdir) + pm.makefile = True + pm.makefiledir = str(module_tmpdir) + pm.inplace = True + pm.dryrun = False + pm.verbose = True + yield pm + pm.finalize() -def build_with_makefile(): +def build_with_makefile(ws): success = True - with working_directory(dstpth): + with set_dir(ws): if os.path.isfile("makefile"): # wait to delete on windows if sys.platform.lower() == "win32": @@ -78,67 +71,21 @@ def build_with_makefile(): assert success, errmsg - return - - -def clean_up(): - print("Removing test files and directories") - - # clean up make file - print("Removing makefile") - files = [ - os.path.join(dstpth, file_name) - for file_name in ("makefile", "makedefaults") - ] - for fpth in files: - if os.path.isfile(fpth): - os.remove(fpth) - - # finalize pymake object - pm.finalize() - - # clean up MODFLOW-NWT - if os.path.isfile(epth): - print("Removing " + target) - os.remove(epth) - - print("Removing temporary build directories") - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - - return - +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - # Remove the existing mf2005 directory if it exists - if os.path.isdir(mfnwtpth): - shutil.rmtree(mfnwtpth) - - # download the modflow 2005 release - pm.download_target(target, download_path=dstpth) +def test_download(pm, module_tmpdir, target): + pm.download_target(target, download_path=module_tmpdir) assert pm.download, f"could not download {target} distribution" +@pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): +def test_compile(pm, target): assert pm.build() == 0, f"could not compile {target}" +@pytest.mark.dependency(name="makefile", depends=["build"]) @pytest.mark.base -def test_makefile(): - build_with_makefile() - - -@pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - test_download() - test_compile() - build_with_makefile() - clean_up() +def test_makefile(workspace): + build_with_makefile(workspace) diff --git a/autotest/test_mfusg.py b/autotest/test_mfusg.py index fb2ebd4a..8a64a8f6 100644 --- a/autotest/test_mfusg.py +++ b/autotest/test_mfusg.py @@ -1,54 +1,45 @@ import os -import shutil -import sys +from platform import system +from pathlib import Path import 
flopy import pytest import pymake -# define program data -target = "mfusg" -target_gsi = "mfusg_gsi" -if sys.platform.lower() == "win32": - target += ".exe" - target_gsi += ".exe" - -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) - -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) - -mfusgpth = os.path.join(dstpth, prog_dict.dirname) -expth = os.path.join(mfusgpth, "test") - -srcpth = os.path.join(mfusgpth, prog_dict.srcdir) -epth = os.path.abspath(os.path.join(dstpth, target)) -epth_gsi = os.path.abspath(os.path.join(dstpth, target_gsi)) - -name_files = [ - "01A_nestedgrid_nognc/flow.nam", - "01B_nestedgrid_gnc/flow.nam", - "03A_conduit_unconfined/ex3A.nam", - "03B_conduit_unconfined/ex3B.nam", - "03C_conduit_unconfined/ex3C.nam", - "03D_conduit_unconfined/ex3D.nam", - "03_conduit_confined/ex3.nam", -] -# add path to name_files -for idx, namefile in enumerate(name_files): - name_files[idx] = os.path.join(expth, namefile) - -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = dstpth - -pm_gsi = pymake.Pymake(verbose=True) -pm_gsi.target = target_gsi -pm_gsi.appdir = dstpth + +@pytest.fixture(scope="module") +def targets(module_tmpdir): + ext = ".exe" if system() == "Windows" else "" + return [module_tmpdir / f"{name}{ext}" for name in ["mfusg", "mfusg_gsi"]] + + +@pytest.fixture(scope="module") +def prog_data(targets) -> dict: + return pymake.usgs_program_data.get_target(targets[0].name) + + +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname + + +@pytest.fixture(scope="module") +def pm(module_tmpdir, targets) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(targets[0]) + pm.appdir = str(module_tmpdir) + yield pm + pm.finalize() + + +@pytest.fixture(scope="module") +def pm_gsi(module_tmpdir, targets) -> pymake.Pymake: + pm_gsi = pymake.Pymake(verbose=True) + pm_gsi.target = str(targets[1]) + pm_gsi.appdir = str(module_tmpdir) + yield pm_gsi + pm_gsi.finalize() def edit_namefile(namefile): @@ -68,85 +59,50 @@ def edit_namefile(namefile): f.close() -def clean_up(): - print("Removing test files and directories") - - # finalize pymake objects - pm.finalize() - pm_gsi.finalize() - - if os.path.isfile(epth): - print("Removing " + target) - os.remove(epth) - - if os.path.isfile(epth_gsi): - print("Removing " + target_gsi) - os.remove(epth_gsi) - - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - - return - - def run_mfusg(fn, exe): - # edit namefile - edit_namefile(fn) - # run test models - print(f"running model...{os.path.basename(fn)}") - success, buff = flopy.run_model( + success, _ = flopy.run_model( exe, os.path.basename(fn), model_ws=os.path.dirname(fn), silent=False ) errmsg = f"could not run {fn} with {exe}" assert success, errmsg - return - +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - # Remove the existing mf2005 directory if it exists - if os.path.isdir(mfusgpth): - shutil.rmtree(mfusgpth) - - # download the modflow-usg release - pm.download_target(target, download_path=dstpth) - assert pm.download, f"could not download {target}" +def test_download(pm, pm_gsi, module_tmpdir, targets): + pm.download_target(targets[0], download_path=module_tmpdir) + assert pm.download, f"could not download {targets[0]}" - # download the gsi version of modflow-usg - 
pm_gsi.download_target(target_gsi, download_path=dstpth) - assert pm_gsi.download, f"could not download {target_gsi}" + pm_gsi.download_target(targets[1], download_path=module_tmpdir) + assert pm_gsi.download, f"could not download {targets[1]}" +@pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): - assert pm.build() == 0, f"could not compile {target}" - assert pm_gsi.build() == 0, f"could not compile {target_gsi}" - return +def test_compile(pm, pm_gsi, targets): + assert pm.build() == 0, f"could not compile {targets[0]}" + assert (targets[0]).is_file() - -@pytest.mark.regression -@pytest.mark.parametrize("fn", name_files) -def test_mfusg(fn): - run_mfusg(fn, epth) - run_mfusg(fn, epth_gsi) + assert pm_gsi.build() == 0, f"could not compile {targets[1]}" + assert targets[1].is_file() -@pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - test_download() - - test_compile() - - # run models - for namefile in name_files: - run_mfusg(namefile) - - # clean up - test_clean_up() +@pytest.mark.dependency(name="test", depends=["build"]) +@pytest.mark.regression +@pytest.mark.parametrize( + "namefile", + [ + "01A_nestedgrid_nognc/flow.nam", + "01B_nestedgrid_gnc/flow.nam", + "03A_conduit_unconfined/ex3A.nam", + "03B_conduit_unconfined/ex3B.nam", + "03C_conduit_unconfined/ex3C.nam", + "03D_conduit_unconfined/ex3D.nam", + "03_conduit_confined/ex3.nam", + ], +) +def test_mfusg(workspace, namefile, targets): + namefile_path = workspace / "test" / namefile + edit_namefile(namefile_path) + run_mfusg(namefile_path, targets[0]) + run_mfusg(namefile_path, targets[1]) diff --git a/autotest/test_misc_programs.py b/autotest/test_misc_programs.py index 1c1e7b15..93d4e30a 100644 --- a/autotest/test_misc_programs.py +++ b/autotest/test_misc_programs.py @@ -1,73 +1,24 @@ -import os -import shutil -import sys - import pytest import pymake -# define program data targets = [ "crt", "vs2dt", "zonbud3", ] -app_extension = "" -if sys.platform.lower() == "win32": - app_extension = ".exe" - -for idx, target in enumerate(targets): - target_dict = pymake.usgs_program_data.get_target(target) - extension = app_extension - targets[idx] = target + extension - -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) - -appdir = os.path.join(dstpth, "bin") -if not os.path.exists(appdir): - os.makedirs(appdir, exist_ok=True) - -exe_names = [os.path.join(appdir, target) for target in targets] - - -def clean_up(epth): - print("Removing test files and directories") - - assert os.path.isfile(epth), f"{os.path.basename(epth)} does not exist" - print("Removing " + os.path.basename(epth)) - os.remove(epth) - @pytest.mark.base @pytest.mark.parametrize("target", targets) -def test_compile(target): +def test_compile(module_tmpdir, target): + bin_dir = module_tmpdir / "bin" assert ( pymake.build_apps( - target, download_dir=dstpth, appdir=appdir, verbose=True + str(bin_dir / target), + download_dir=str(module_tmpdir), + appdir=str(bin_dir), + verbose=True, ) == 0 ), f"could not compile {target}" - - -@pytest.mark.base -@pytest.mark.parametrize("epth", exe_names) -def test_clean_up(epth): - clean_up(epth) - - -@pytest.mark.base -def test_finalize(): - if os.path.isdir(dstpth): - shutil.rmtree(dstpth) - - -if __name__ == "__main__": - for target in targets: - test_compile(target) - for exe_name in exe_names: - test_clean_up(exe_name) - test_finalize() diff --git 
a/autotest/test_mp6.py b/autotest/test_mp6.py index 1d05111b..ea0c4218 100644 --- a/autotest/test_mp6.py +++ b/autotest/test_mp6.py @@ -1,46 +1,47 @@ import os import shutil -import sys +from platform import system +from pathlib import Path import flopy import pytest import pymake -# define program data -target = "mp6" -if sys.platform.lower() == "win32": - target += ".exe" -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) +@pytest.fixture(scope="module") +def target(module_tmpdir) -> Path: + name = "mp6" + ext = ".exe" if system() == "Windows" else "" + return module_tmpdir / f"{name}{ext}" -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) -mp6pth = os.path.join(dstpth, prog_dict.dirname) -expth = os.path.join(mp6pth, "example-run") +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) -sim_files = [f"EXAMPLE-{n}.mpsim" for n in range(1, 10)] -exe_name = target -srcpth = os.path.join(mp6pth, prog_dict.srcdir) -epth = os.path.abspath(os.path.join(dstpth, target)) +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = dstpth + +@pytest.fixture(scope="module") +def pm(module_tmpdir, target) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(target) + pm.appdir = module_tmpdir + yield pm + pm.finalize() -def update_files(fn): +def update_files(fn, workspace): # rename a few files for linux replace_files = ["example-6", "example-7", "example-8"] for rf in replace_files: if rf in fn.lower(): - fname1 = os.path.join(expth, f"{rf}.locations") - fname2 = os.path.join(expth, f"{rf}_mod.locations") + fname1 = workspace / f"{rf}.locations" + fname2 = workspace / f"{rf}_mod.locations" print( "copy {} to {}".format( os.path.basename(fname1), os.path.basename(fname2) @@ -49,7 +50,7 @@ def update_files(fn): shutil.copy(fname1, fname2) print(f"deleting {os.path.basename(fname1)}") os.remove(fname1) - fname1 = os.path.join(expth, f"{rf.upper()}.locations") + fname1 = workspace / f"{rf.upper()}.locations" print( "renmae {} to {}".format( os.path.basename(fname2), os.path.basename(fname1) @@ -58,62 +59,31 @@ def update_files(fn): os.rename(fname2, fname1) -def run_modpath6(fn): - success = False - if os.path.exists(epth): - update_files(fn) - # run the model - print(f"running model...{fn}") - success, buff = flopy.run_model(epth, fn, model_ws=expth, silent=False) - return success - - -def clean_up(): - print("Removing test files and directories") - - # finalize pymake object - pm.finalize() - - if os.path.isfile(epth): - print("Removing " + target) - os.remove(epth) - - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - - return - - +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - if os.path.isdir(mp6pth): - shutil.rmtree(mp6pth) - - pm.download_target(target, download_path=dstpth) +def test_download(pm, module_tmpdir, target): + pm.download_target(target, download_path=module_tmpdir) assert pm.download, f"could not download {target} distribution" +@pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): +def test_compile(pm, target): assert pm.build() == 0, f"could not compile {target}" +@pytest.mark.dependency(name="test", 
depends=["build"]) @pytest.mark.regression -@pytest.mark.parametrize("fn", sim_files) -def test_modpath6(fn): - assert run_modpath6(fn), f"could not run {fn}" - - -@pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - test_download() - test_compile() - for fn in sim_files: - run_modpath6(fn) - test_clean_up() +@pytest.mark.parametrize( + "namefile", [f"EXAMPLE-{n}.mpsim" for n in range(1, 10)] +) +def test_mp6(namefile, workspace, target): + example_ws = workspace / "example-run" + if not (example_ws / namefile).is_file(): + pytest.skip(f"Namefile {namefile} does not exist") + + update_files(namefile, example_ws) + success, _ = flopy.run_model( + target, namefile, model_ws=example_ws, silent=False + ) + assert success, f"could not run {namefile}" diff --git a/autotest/test_mp7.py b/autotest/test_mp7.py index 9c7b73c0..5284fde3 100644 --- a/autotest/test_mp7.py +++ b/autotest/test_mp7.py @@ -1,67 +1,40 @@ import os import shutil -import sys +from platform import system +from pathlib import Path import flopy import pytest import pymake -# define program data -target = "mp7" -if sys.platform.lower() == "win32": - target += ".exe" -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) +ext = ".exe" if system() == "Windows" else "" -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) -mp7pth = os.path.join(dstpth, prog_dict.dirname) -emp7 = os.path.abspath(os.path.join(dstpth, target)) +@pytest.fixture(scope="module") +def target(module_tmpdir): + name = "mp7" + return module_tmpdir / f"{name}{ext}" -mf2005_target = "mf2005" -emf2005 = os.path.abspath(os.path.join(dstpth, mf2005_target)) -mfusg_target = "mfusg" -emfusg = os.path.abspath(os.path.join(dstpth, mfusg_target)) +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) -mf6_target = "mf6" -emf6 = os.path.abspath(os.path.join(dstpth, mf6_target)) -if sys.platform.lower() == "win32": - emf2005 += ".exe" - emfusg += ".exe" - emf6 += ".exe" +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = dstpth -# MODPATH 7 examples -expth = os.path.join(mp7pth, "examples") - -name_files = [ - "ex01/modflow-2005/original/ex01a_mf2005.mpsim", - "ex01/modflow-2005/original/ex01b_mf2005.mpsim", - "ex01/modflow-6/original/ex01a_mf6.mpsim", - "ex01/modflow-6/original/ex01b_mf6.mpsim", - "ex02/modflow-6/original/ex02a_mf6.mpsim", - "ex02/modflow-6/original/ex02b_mf6.mpsim", - "ex02/modflow-usg/original/ex02a_mfusg.mpsim", - "ex02/modflow-usg/original/ex02b_mfusg.mpsim", - "ex03/modflow-6/original/ex03a_mf6.mpsim", - "ex04/modflow-6/original/ex04a_mf6.mpsim", -] -# add path to name_files -for idx, namefile in enumerate(name_files): - name_files[idx] = os.path.join(expth, namefile) - -# set up pths and exes -epths = [emp7, emf2005, emfusg, emf6] +@pytest.fixture(scope="module") +def pm(module_tmpdir, target) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(target) + pm.appdir = str(module_tmpdir) + yield pm + pm.finalize() def replace_data(dpth): @@ -104,7 +77,6 @@ def replace_data(dpth): content[idx] = line.replace(line, srepls[jdx]) with open(fpth, "w") as f: f.writelines(content) - return def set_lowercase(fpth): @@ -114,131 +86,108 @@ def set_lowercase(fpth): 
content[idx] = line.lower() with open(fpth, "w") as f: f.writelines(content) - return -def run_modpath7(fn): - success = False - if os.path.exists(emp7): - model_ws = os.path.dirname(fn) - # run the flow model - run = True - if "modflow-2005" in fn.lower(): - exe = emf2005 - v = flopy.which(exe) - if v is None: - run = False - nam = [ - name for name in os.listdir(model_ws) if ".nam" in name.lower() - ] - if len(nam) > 0: - fpth = nam[0] - # read and rewrite the name file - set_lowercase(os.path.join(model_ws, fpth)) - else: - fpth = None - run = False - elif "modflow-usg" in fn.lower(): - exe = emfusg - v = flopy.which(exe) - if v is None: - run = False - nam = [ - name for name in os.listdir(model_ws) if ".nam" in name.lower() - ] - if len(nam) > 0: - fpth = nam[0] - else: - fpth = None - run = False - elif "modflow-6" in fn.lower(): - exe = emf6 - v = flopy.which(exe) - if v is None: - run = False +def run_modpath7(namefile, mp7_exe, mf2005_exe, mfusg_exe, mf6_exe): + model_ws = (namefile).resolve().parent + # run the flow model + run = True + name = str(namefile).lower() + if "modflow-2005" in name: + v = shutil.which(mf2005_exe) + if v is None: + run = False + nam = [name for name in os.listdir(model_ws) if ".nam" in name.lower()] + if len(nam) > 0: + fpth = nam[0] + # read and rewrite the name file + set_lowercase(os.path.join(model_ws, fpth)) + else: fpth = None + run = False + elif "modflow-usg" in name: + v = shutil.which(mfusg_exe) + if v is None: + run = False + nam = [name for name in os.listdir(model_ws) if ".nam" in name.lower()] + if len(nam) > 0: + fpth = nam[0] else: + fpth = None run = False - if run: - # fix any known problems - replace_data(model_ws) - # run the model - msg = f"{exe}" - if fpth is not None: - msg += f" {os.path.basename(fpth)}" - success, buff = flopy.run_model( - exe, fpth, model_ws=model_ws, silent=False - ) - - if success: - # run the modpath model - print(f"running model...{fn}") - exe = emp7 - - fpth = os.path.basename(fn) - success, buff = flopy.run_model( - exe, fpth, model_ws=model_ws, silent=False - ) - - return success - - -def clean_up(): - print("Removing test files and directories") - - # finalize pymake object - pm.finalize() + elif "modflow-6" in name: + v = shutil.which(mf6_exe) + if v is None: + run = False + fpth = None + else: + run = False - # clean up compiled executables - for epth in epths: - if os.path.isfile(epth): - print("Removing...'" + epth + "'") - os.remove(epth) + success = False - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) + if run: + # fix any known problems + replace_data(model_ws) + # run the model + msg = f"{mp7_exe}" + if fpth is not None: + msg += f" {os.path.basename(fpth)}" + success, _ = flopy.run_model(v, fpth, model_ws=model_ws, silent=False) + + if success: + fpth = os.path.basename(namefile) + success, _ = flopy.run_model( + mp7_exe, fpth, model_ws=model_ws, silent=False + ) - return + return success +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - # Remove the existing target download directory if it exists - if os.path.isdir(mp7pth): - shutil.rmtree(mp7pth) - - # download the target - pm.download_target(target, download_path=dstpth) +def test_download(pm, module_tmpdir, target): + pm.download_target(target, download_path=module_tmpdir) assert pm.download, f"could not download {target} distribution" +@pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): +def test_compile(pm, target): 
assert pm.build() == 0, f"could not compile {target}" +@pytest.mark.dependency(name="download_exes") @pytest.mark.regression -def test_download_exes(): - pymake.getmfexes(dstpth, exes=("mf2005", "mfusg", "mf6"), verbose=True) +def test_download_exes(module_tmpdir): + pymake.getmfexes( + str(module_tmpdir), exes=("mf2005", "mfusg", "mf6"), verbose=True + ) +@pytest.mark.dependency( + name="test", depends=["download", "download_exes", "build"] +) @pytest.mark.regression -@pytest.mark.parametrize("fn", name_files) -def test_modpath7(fn): - assert run_modpath7(fn), f"could not run {fn}" - - -@pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - # test_download() - # test_compile() - # test_download_exes() - for fn in name_files: - run_modpath7(fn) - # test_clean_up() +@pytest.mark.parametrize( + "namefile", + [ + "ex01/modflow-2005/original/ex01a_mf2005.mpsim", + "ex01/modflow-2005/original/ex01b_mf2005.mpsim", + "ex01/modflow-6/original/ex01a_mf6.mpsim", + "ex01/modflow-6/original/ex01b_mf6.mpsim", + "ex02/modflow-6/original/ex02a_mf6.mpsim", + "ex02/modflow-6/original/ex02b_mf6.mpsim", + "ex02/modflow-usg/original/ex02a_mfusg.mpsim", + "ex02/modflow-usg/original/ex02b_mfusg.mpsim", + "ex03/modflow-6/original/ex03a_mf6.mpsim", + "ex04/modflow-6/original/ex04a_mf6.mpsim", + ], +) +def test_modpath7(module_tmpdir, namefile, workspace, target): + assert run_modpath7( + workspace / "examples" / namefile, + target, + module_tmpdir / f"mf2005{ext}", + module_tmpdir / f"mfusg{ext}", + module_tmpdir / f"mf6{ext}", + ), f"could not run {namefile}" diff --git a/autotest/test_mt3d.py b/autotest/test_mt3d.py index e3cdc259..f9c2ea0b 100644 --- a/autotest/test_mt3d.py +++ b/autotest/test_mt3d.py @@ -1,227 +1,171 @@ import os -import shutil import sys +from pathlib import Path import flopy import pytest import pymake -# define program data -target = "mt3dusgs" - -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) - -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) - -mtusgsver = prog_dict.version -mtusgspth = os.path.join(dstpth, prog_dict.dirname) -emtusgs = os.path.abspath(os.path.join(dstpth, target)) - -mfnwt_target = "mfnwt" -temp_dict = pymake.usgs_program_data().get_target(mfnwt_target) -emfnwt = os.path.abspath(os.path.join(dstpth, mfnwt_target)) - -mf6_target = "mf6" -temp_dict = pymake.usgs_program_data().get_target(mf6_target) -emf6 = os.path.abspath(os.path.join(dstpth, mf6_target)) - -if sys.platform.lower() == "win32": - ext = ".exe" - emtusgs += ext - emfnwt += ext - emf6 += ext - -# example path -expth = os.path.join(mtusgspth, "data") - -# set up pths and exes -epths = [emtusgs, emfnwt, emf6] - -pm = pymake.Pymake(verbose=True) -pm.appdir = dstpth -pm.makeclean = True - -sim_dirs = [ - "2ED5EAs", - "CTS1", - "CTS2", - "CTS3", - "CTS4", - "Keating", - "Keating_UZF", - "SFT_CrnkNic", - # "UZT_Disp_Lamb01_TVD", - # "UZT_Disp_Lamb1", - # "UZT_Disp_Lamb10", - # "UZT_NonLin", - "gwt", - "lkt", - "p01SpatialStresses(mf6)", -] - -# remove after MODFLOW 6 v6.1.2 release -for exclude in ( - "Keating", - "Keating_UZF", -): - if exclude in sim_dirs: - sim_dirs.remove(exclude) - -# CI fix -if pymake.usgs_program_data().get_version(mfnwt_target) == "1.2.0": - for exclude in ( - "UZT_NonLin", - "UZT_Disp_Lamb01_TVD", - "UZT_Disp_Lamb1", - "UZT_Disp_Lamb10", - ): - if exclude in sim_dirs: - sim_dirs.remove(exclude) - - -def 
run_mt3dusgs(temp_dir): - success = False - if os.path.exists(emtusgs): - model_ws = os.path.join(expth, temp_dir) - files = [ - f - for f in os.listdir(model_ws) - if os.path.isfile(os.path.join(model_ws, f)) - ] +@pytest.fixture(scope="module") +def target(module_tmpdir) -> Path: + return module_tmpdir / "mt3dusgs" - mf_nam = None - mt_nam = None - flow_model = None - for f in files: - if "_mf.nam" in f.lower(): - mf_nam = f - flow_model = "mfnwt" - if "_mt.nam" in f.lower(): - mt_nam = f - if f == "mfsim.nam": - mf_nam = f - flow_model = "mf6" - - msg = f"A MODFLOW name file not present in {model_ws}" - assert mf_nam is not None, msg - - msg = f"A MT3D-USGS name file not present in {model_ws}" - assert mt_nam is not None, msg - - # run the flow model - msg = f"{emfnwt}" - if mf_nam is not None: - msg += f" {os.path.basename(mf_nam)}" - if flow_model == "mfnwt": - nam = mf_nam - eapp = emfnwt - elif flow_model == "mf6": - nam = None - eapp = emf6 - success, buff = flopy.run_model( - eapp, nam, model_ws=model_ws, silent=False - ) - # run the MT3D-USGS model - if success: - print(f"running model...{mt_nam}") - success, buff = flopy.run_model( - emtusgs, - mt_nam, - model_ws=model_ws, - silent=False, - normal_msg="Program completed.", - ) +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) - return success +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname -def clean_up(): - print("Removing test files and directories") - # finalize pymake object +@pytest.fixture(scope="module") +def pm(module_tmpdir) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.appdir = str(module_tmpdir) + pm.makeclean = True + yield pm pm.finalize() - for epth in epths: - if os.path.isfile(epth): - print("Removing '" + epth + "'") - os.remove(epth) - dirs_temp = (dstpth,) - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) +def run_mt3dusgs(workspace, mt3dms_exe, mfnwt_exe, mf6_exe): + success = False + model_ws = workspace + + files = [ + f + for f in os.listdir(model_ws) + if os.path.isfile(os.path.join(model_ws, f)) + ] + + mf_nam = None + mt_nam = None + flow_model = None + for f in files: + if "_mf.nam" in f.lower(): + mf_nam = f + flow_model = "mfnwt" + if "_mt.nam" in f.lower(): + mt_nam = f + if f == "mfsim.nam": + mf_nam = f + flow_model = "mf6" + + msg = f"A MODFLOW name file not present in {model_ws}" + assert mf_nam is not None, msg + + msg = f"A MT3D-USGS name file not present in {model_ws}" + assert mt_nam is not None, msg + + # run the flow model + msg = f"{mfnwt_exe}" + if mf_nam is not None: + msg += f" {os.path.basename(mf_nam)}" + if flow_model == "mfnwt": + nam = mf_nam + eapp = mfnwt_exe + elif flow_model == "mf6": + nam = None + eapp = mf6_exe + success, _ = flopy.run_model(eapp, nam, model_ws=model_ws, silent=False) + + # run the MT3D-USGS model + if success: + print(f"running model...{mt_nam}") + success, _ = flopy.run_model( + mt3dms_exe, + mt_nam, + model_ws=model_ws, + silent=False, + normal_msg="Program completed.", + ) - return + return success +@pytest.mark.dependency(name="download_mt3dms") @pytest.mark.base -def test_download_mt3dms(): - # Remove the existing target download directory if it exists - if os.path.isdir(mtusgspth): - shutil.rmtree(mtusgspth) - +def test_download_mt3dms(pm, module_tmpdir): pm.target = "mt3dms" - pm.download_target(pm.target, download_path=dstpth) + pm.download_target(pm.target, 
download_path=module_tmpdir) assert pm.download, f"could not download {pm.target} distribution" +@pytest.mark.dependency(name="build_mt3dms", depends=["download_mt3dms"]) @pytest.mark.base -def test_compile_mt3dms(): +def test_compile_mt3dms(pm): assert pm.build() == 0, f"could not compile {pm.target}" +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - # Remove the existing target download directory if it exists - if os.path.isdir(mtusgspth): - shutil.rmtree(mtusgspth) - - # reset the Pymake object for target - pm.reset(target) - - # download the target - pm.download_target(target, download_path=dstpth) +def test_download(pm, module_tmpdir, target): + pm.reset(str(target)) + pm.download_target(target, download_path=module_tmpdir) assert pm.download, f"could not download {target} distribution" +@pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): +def test_compile(pm, target): assert pm.build() == 0, f"could not compile {target}" @pytest.mark.regression @pytest.mark.skipif(sys.platform == "darwin", reason="do not run on OSX") -def test_download_exes(): - pymake.getmfexes(dstpth, exes=("mfnwt", "mf6"), verbose=True) - return +def test_download_exes(module_tmpdir): + pymake.getmfexes(module_tmpdir, exes=("mfnwt", "mf6"), verbose=True) @pytest.mark.regression @pytest.mark.skipif(sys.platform == "darwin", reason="do not run on OSX") @pytest.mark.skipif(sys.platform == "win32", reason="do not run on Windows") -@pytest.mark.parametrize("ws", sim_dirs) -def test_mt3dusgs(ws): - assert run_mt3dusgs(ws), f"could not run {ws}" - - -@pytest.mark.base -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - test_download_mt3dms() - test_compile_mt3dms() - test_download_exes() - test_download() - test_compile() - for dn in sim_dirs: - run_mt3dusgs(dn) - test_clean_up() +@pytest.mark.parametrize( + "ws", + [ + "2ED5EAs", + "CTS1", + "CTS2", + "CTS3", + "CTS4", + "Keating", + "Keating_UZF", + "SFT_CrnkNic", + # "UZT_Disp_Lamb01_TVD", + # "UZT_Disp_Lamb1", + # "UZT_Disp_Lamb10", + # "UZT_NonLin", + "gwt", + "lkt", + "p01SpatialStresses(mf6)", + ], +) +def test_mt3dusgs(module_tmpdir, workspace, ws, target): + mfnwt_exe = module_tmpdir / "mfnwt" + if pymake.usgs_program_data().get_version(mfnwt_exe.name) == "1.2.0": + exclude = [ + "UZT_NonLin", + "UZT_Disp_Lamb01_TVD", + "UZT_Disp_Lamb1", + "UZT_Disp_Lamb10", + ] + if ws in exclude: + pytest.skip(reason=f"excluding {ws}") + + exclude = [ + "Keating", + "Keating_UZF", + ] + if ws in exclude: + pytest.skip(reason=f"excluding {ws}") + + assert run_mt3dusgs( + workspace / "data" / ws, + target, + mfnwt_exe, + module_tmpdir / "mf6", + ), f"could not run {ws}" diff --git a/autotest/test_requests.py b/autotest/test_requests.py index d480771b..7162a5a8 100644 --- a/autotest/test_requests.py +++ b/autotest/test_requests.py @@ -1,7 +1,6 @@ # Test the download_and_unzip functionality of pymake import json import os -import shutil import subprocess import sys @@ -11,7 +10,6 @@ import pymake RERUNS = 3 -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") def which(program): @@ -37,17 +35,9 @@ def is_exe(fpath): return None -def initialize_working_dir(): - # make sure the test directory exists - os.makedirs(dstpth, exist_ok=True) - - -def export_code_json(file_name="code.json"): - # make sure the test directory exists - initialize_working_dir() - +def export_code_json(ws, file_name="code.json"): # make the json file - fpth = os.path.join(dstpth, file_name) + 
fpth = os.path.join(ws, file_name) pymake.usgs_program_data.export_json( fpth=fpth, current=True, @@ -78,7 +68,6 @@ def run_cli_cmd(cmd: list) -> None: assert ( process.returncode == 0 ), f"'{' '.join(cmd)}' failed\n\tstatus code {process.returncode}\n" - return @pytest.mark.dependency("latest_version") @@ -95,7 +84,6 @@ def test_latest_version(): if version is not None: assert float(version) >= float(test_version), msg print(f"returned version...{version}") - return @pytest.mark.dependency("latest_assets") @@ -116,7 +104,6 @@ def test_latest_assets(): print(f"evaluating the availability of...{key}") msg = f"unknown key ({key}) found in github repo assets" assert key in test_keys, msg - return @pytest.mark.dependency("previous_assets") @@ -154,7 +141,7 @@ def test_previous_assets(): @pytest.mark.dependency("mfexes") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_mfexes_download_and_unzip_and_zip(): +def test_mfexes_download_and_unzip_and_zip(function_tmpdir): exclude_files = [ "code.json", "prms_constants.f90", @@ -162,110 +149,74 @@ def test_mfexes_download_and_unzip_and_zip(): "prms_time.f90", "utils_prms.f90", ] - pth = os.path.join( - f"temp_mfexes_{os.path.basename(__file__).replace('.py', '')}" - ) - pymake.getmfexes(pth, verbose=True) - for f in os.listdir(pth): - fpth = os.path.join(pth, f) + pymake.getmfexes(function_tmpdir, verbose=True) + for f in os.listdir(function_tmpdir): + fpth = os.path.join(function_tmpdir, f) if not os.path.isdir(fpth) and f not in exclude_files: errmsg = f"{fpth} not executable" assert which(fpth) is not None, errmsg # zip up exe's using files - zip_pth = os.path.join( - f"temp_mfexes_{os.path.basename(__file__).replace('.py', '')}", - "ziptest01.zip", - ) + zip_pth = function_tmpdir / "ziptest01.zip" print(f"creating '{zip_pth}'") success = pymake.zip_all( - zip_pth, file_pths=[os.path.join(pth, e) for e in os.listdir(pth)] + str(zip_pth), + file_pths=[ + os.path.join(function_tmpdir, e) + for e in os.listdir(function_tmpdir) + ], ) assert success, "could not create zipfile using file names" - os.remove(zip_pth) # zip up exe's using directories - zip_pth = os.path.join( - f"temp_mfexes_{os.path.basename(__file__).replace('.py', '')}", - "ziptest02.zip", - ) + zip_pth = function_tmpdir / "ziptest02.zip" print(f"creating '{zip_pth}'") - success = pymake.zip_all(zip_pth, dir_pths=pth) + success = pymake.zip_all(str(zip_pth), dir_pths=function_tmpdir) assert success, "could not create zipfile using directories" - os.remove(zip_pth) # zip up exe's using directories and a pattern - zip_pth = os.path.join( - f"temp_mfexes_{os.path.basename(__file__).replace('.py', '')}", - "ziptest03.zip", - ) + zip_pth = function_tmpdir / "ziptest03.zip" print(f"creating '{zip_pth}'") - success = pymake.zip_all(zip_pth, dir_pths=pth, patterns="mf") + success = pymake.zip_all( + str(zip_pth), dir_pths=function_tmpdir, patterns="mf" + ) assert success, "could not create zipfile using directories and a pattern" - os.remove(zip_pth) # zip up exe's using files and directories - zip_pth = os.path.join( - f"temp_mfexes_{os.path.basename(__file__).replace('.py', '')}", - "ziptest04.zip", - ) + zip_pth = function_tmpdir / "ziptest04.zip" print(f"creating '{zip_pth}'") success = pymake.zip_all( - zip_pth, - file_pths=[os.path.join(pth, e) for e in os.listdir(pth)], - dir_pths=pth, + str(zip_pth), + file_pths=[ + os.path.join(function_tmpdir, e) + for e in os.listdir(function_tmpdir) + ], + dir_pths=function_tmpdir, ) assert success, "could not create zipfile using files and 
directories" - os.remove(zip_pth) - - # clean up exe's - for f in os.listdir(pth): - fpth = os.path.join(pth, f) - if not os.path.isdir(fpth): - print("Removing " + f) - os.remove(fpth) - - # clean up directory - if os.path.isdir(pth): - print("Removing folder " + pth) - shutil.rmtree(pth) - - return @pytest.mark.dependency("nightly_download") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_nightly_download_and_unzip(): +def test_nightly_download_and_unzip(module_tmpdir): exclude_files = ["code.json"] - pth = os.path.join( - f"temp_nightly_{os.path.basename(__file__).replace('.py', '')}" - ) - pymake.getmfnightly(pth, verbose=True) - for f in os.listdir(pth): - fpth = os.path.join(pth, f) + pymake.getmfnightly(module_tmpdir, verbose=True) + for f in os.listdir(module_tmpdir): + fpth = os.path.join(module_tmpdir, f) print(f"downloaded: {fpth}") if not os.path.isdir(fpth) and f not in exclude_files: errmsg = f"{fpth} not executable" assert which(fpth) is not None, errmsg - # clean up directory - if os.path.isdir(pth): - print("\nRemoving folder " + pth) - shutil.rmtree(pth) - @pytest.mark.dependency("usgsprograms") @flaky(max_runs=RERUNS) @pytest.mark.requests def test_usgsprograms(): - print("test_usgsprograms()") upd = pymake.usgs_program_data().get_program_dict() - all_keys = list(upd.keys()) - get_keys = pymake.usgs_program_data.get_keys() - msg = "the keys from program_dict are not equal to .get_keys()" assert all_keys == get_keys, msg @@ -274,7 +225,6 @@ def test_usgsprograms(): @flaky(max_runs=RERUNS) @pytest.mark.requests def test_target_key_error(): - print("test_target_key_error()") with pytest.raises(KeyError): pymake.usgs_program_data.get_target("error") @@ -283,13 +233,11 @@ def test_target_key_error(): @flaky(max_runs=RERUNS) @pytest.mark.requests def test_target_keys(): - print("test_target_keys()") prog_dict = pymake.usgs_program_data().get_program_dict() targets = pymake.usgs_program_data.get_keys() for target in targets: target_dict = pymake.usgs_program_data.get_target(target) test_dict = prog_dict[target] - msg = ( f"dictionary from {target} " + "does not match dictionary from .get_target()" @@ -300,9 +248,9 @@ def test_target_keys(): @pytest.mark.dependency("export_json") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_usgsprograms_export_json(): +def test_usgsprograms_export_json(module_tmpdir): # export code.json and return json file path - fpth = export_code_json(file_name="code.export.json") + fpth = export_code_json(module_tmpdir, file_name="code.export.json") # test the json export with open(fpth, "r") as f: @@ -331,12 +279,8 @@ def test_usgsprograms_export_json(): @pytest.mark.dependency("load_json_error") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_usgsprograms_load_json_error(): - print("test_usgsprograms_load_json_error()") - - initialize_working_dir() - - fpth = os.path.join(dstpth, "code.test.error.json") +def test_usgsprograms_load_json_error(module_tmpdir): + fpth = os.path.join(module_tmpdir, "code.test.error.json") my_dict = {"mf2005": {"bad": 12, "key": True}} pymake.usgs_program_data.export_json( fpth=fpth, prog_data=my_dict, update=False @@ -349,11 +293,9 @@ def test_usgsprograms_load_json_error(): @pytest.mark.dependency("load_json") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_usgsprograms_load_json(): - print("test_usgsprograms_load_json()") - +def test_usgsprograms_load_json(module_tmpdir): # export code.json and return json file path - fpth = export_code_json(file_name="code.load.json") + fpth = 
export_code_json(module_tmpdir, file_name="code.load.json") json_dict = pymake.usgs_program_data.load_json(fpth) @@ -365,13 +307,8 @@ def test_usgsprograms_load_json(): @pytest.mark.dependency("list_json_error") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_usgsprograms_list_json_error(): - print("test_usgsprograms_list_json_error()") - - # make sure the example directory exists - initialize_working_dir() - - fpth = os.path.join(dstpth, "does.not.exist.json") +def test_usgsprograms_list_json_error(module_tmpdir): + fpth = os.path.join(module_tmpdir, "does.not.exist.json") with pytest.raises(IOError): pymake.usgs_program_data.list_json(fpth=fpth) @@ -379,13 +316,8 @@ def test_usgsprograms_list_json_error(): @pytest.mark.dependency("list_json") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_usgsprograms_list_json(): - print("test_usgsprograms_list_json()") - - # export code.json and return json file path - fpth = export_code_json(file_name="code.list.json") - - # list the contents of the json file +def test_usgsprograms_list_json(module_tmpdir): + fpth = export_code_json(module_tmpdir, file_name="code.list.json") pymake.usgs_program_data.list_json(fpth=fpth) @@ -393,7 +325,6 @@ def test_usgsprograms_list_json(): @flaky(max_runs=RERUNS) @pytest.mark.requests def test_shared(): - print("test_shared()") target_dict = pymake.usgs_program_data.get_target("libmf6") assert target_dict.shared_object, "libmf6 is a shared object" @@ -402,7 +333,6 @@ def test_shared(): @flaky(max_runs=RERUNS) @pytest.mark.requests def test_not_shared(): - print("test_not_shared()") target_dict = pymake.usgs_program_data.get_target("mf6") assert not target_dict.shared_object, "mf6 is not a shared object" @@ -410,25 +340,6 @@ def test_not_shared(): @pytest.mark.dependency(name="code_json") @flaky(max_runs=RERUNS) @pytest.mark.requests -def test_code_json() -> None: - cmd = ["make-code-json", "-f", f"{dstpth}/code.json"] +def test_code_json(module_tmpdir) -> None: + cmd = ["make-code-json", "-f", f"{module_tmpdir}/code.json"] run_cli_cmd(cmd) - shutil.rmtree(dstpth) - - -if __name__ == "__main__": - # test_previous_assets() - test_latest_version() - # test_latest_assets() - # test_nightly_download_and_unzip() - # test_download_and_unzip_and_zip() - test_usgsprograms() - test_target_key_error() - test_target_keys() - test_usgsprograms_export_json() - test_usgsprograms_load_json_error() - test_usgsprograms_load_json() - test_usgsprograms_list_json_error() - test_usgsprograms_list_json() - test_shared() - test_not_shared() diff --git a/autotest/test_seawat.py b/autotest/test_seawat.py index 14f22cc9..93ad96e5 100644 --- a/autotest/test_seawat.py +++ b/autotest/test_seawat.py @@ -1,58 +1,40 @@ import os -import shutil import sys +from platform import system +from pathlib import Path import flopy import pytest +from modflow_devtools.misc import is_in_ci import pymake -# determine if running on a continuous integration server -is_CI = "CI" in os.environ - -# define program data -target = "swtv4" -if sys.platform.lower() == "win32": - target += ".exe" - -# get program dictionary -prog_dict = pymake.usgs_program_data.get_target(target) - -# set up paths -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") -if not os.path.exists(dstpth): - os.makedirs(dstpth, exist_ok=True) - -swtpth = os.path.join(dstpth, prog_dict.dirname) -expth = os.path.join(swtpth, "examples") -deppth = os.path.join(swtpth, "dependencies") - -srcpth = os.path.join(swtpth, prog_dict.srcdir) -epth = 
os.path.abspath(os.path.join(dstpth, target)) - -name_files = sorted( - [ - "4_hydrocoin/seawat.nam", - "5_saltlake/seawat.nam", - "2_henry/1_classic_case1/seawat.nam", - "2_henry/4_VDF_uncpl_Trans/seawat.nam", - "2_henry/5_VDF_DualD_Trans/seawat.nam", - "2_henry/6_age_simulation/henry_mod.nam", - "2_henry/2_classic_case2/seawat.nam", - "2_henry/3_VDF_no_Trans/seawat.nam", - "1_box/case1/seawat.nam", - "1_box/case2/seawat.nam", - "3_elder/seawat.nam", - ] -) -# add path to name_files -for idx, namefile in enumerate(name_files): - name_files[idx] = os.path.join(expth, namefile) -pm = pymake.Pymake(verbose=True) -pm.target = target -pm.appdir = dstpth -pm.double = True +@pytest.fixture(scope="module") +def target(module_tmpdir) -> Path: + name = "swtv4" + ext = ".exe" if system() == "Windows" else "" + return module_tmpdir / f"{name}{ext}" + + +@pytest.fixture(scope="module") +def prog_data(target) -> dict: + return pymake.usgs_program_data.get_target(target.name) + + +@pytest.fixture(scope="module") +def workspace(module_tmpdir, prog_data) -> Path: + return module_tmpdir / prog_data.dirname + + +@pytest.fixture(scope="module") +def pm(module_tmpdir, target) -> pymake.Pymake: + pm = pymake.Pymake(verbose=True) + pm.target = str(target) + pm.appdir = str(module_tmpdir) + pm.double = True + yield pm + # pm.finalize() def edit_namefile(namefile): @@ -69,101 +51,79 @@ def edit_namefile(namefile): f.close() -def clean_up(): - print("Removing test files and directories") - - # finalize pymake object - pm.finalize() - - if os.path.isfile(epth): - print("Removing " + target) - os.remove(epth) - - dirs_temp = [dstpth] - for d in dirs_temp: - if os.path.isdir(d): - shutil.rmtree(d) - - return - - -def run_seawat(fn): - # edit the name files - edit_namefile(fn) - - # run the models - success, buff = flopy.run_model( - epth, os.path.basename(fn), model_ws=os.path.dirname(fn), silent=False - ) - errmsg = f"could not run...{os.path.basename(fn)}" - assert success, errmsg - return - - -def build_seawat_dependency_graphs(): +def build_seawat_dependency_graphs(src_path, dep_path): success = True build_graphs = True - if is_CI: + if is_in_ci(): if "linux" not in sys.platform.lower(): build_graphs = False if build_graphs: - if os.path.exists(epth): - # build dependencies output directory - if not os.path.exists(deppth): - os.makedirs(deppth, exist_ok=True) + # build dependencies output directory + if not os.path.exists(dep_path): + os.makedirs(dep_path, exist_ok=True) - # build dependency graphs - print("building dependency graphs") - pymake.make_plots(srcpth, deppth, verbose=True) + # build dependency graphs + print("building dependency graphs") + # todo support pathlike, not just str? 
+ pymake.make_plots(str(src_path), dep_path, verbose=True) - # test that the dependency figure for the SEAWAT main exists - findf = os.path.join(deppth, "swt_v4.f.png") - success = os.path.isfile(findf) - assert success, f"could not find {findf}" + # test that the dependency figure for the SEAWAT main exists + findf = dep_path / "swt_v4.f.png" + assert findf.is_file(), f"could not find {findf}" assert success, "could not build dependency graphs" - return - +@pytest.mark.dependency(name="download") @pytest.mark.base -def test_download(): - # Remove the existing seawat directory if it exists - if os.path.isdir(swtpth): - shutil.rmtree(swtpth) - - # download the target - pm.download_target(target, download_path=dstpth) +def test_download(pm, module_tmpdir, target): + pm.download_target(target, download_path=module_tmpdir) assert pm.download, f"could not download {target}" +@pytest.mark.dependency(name="build", depends=["download"]) @pytest.mark.base -def test_compile(): +def test_compile(pm, target): assert pm.build() == 0, f"could not compile {target}" +@pytest.mark.dependency(name="test", depends=["build"]) @pytest.mark.regression -@pytest.mark.parametrize("fn", name_files) -def test_seawat(fn): - run_seawat(fn) - - -@pytest.mark.regression -def test_dependency_graphs(): - build_seawat_dependency_graphs() +@pytest.mark.parametrize( + "namefile", + sorted( + [ + "4_hydrocoin/seawat.nam", + "5_saltlake/seawat.nam", + "2_henry/1_classic_case1/seawat.nam", + "2_henry/4_VDF_uncpl_Trans/seawat.nam", + "2_henry/5_VDF_DualD_Trans/seawat.nam", + "2_henry/6_age_simulation/henry_mod.nam", + "2_henry/2_classic_case2/seawat.nam", + "2_henry/3_VDF_no_Trans/seawat.nam", + "1_box/case1/seawat.nam", + "1_box/case2/seawat.nam", + "3_elder/seawat.nam", + ] + ), +) +def test_seawat(namefile, workspace, target): + namefile_path = workspace / "examples" / namefile + edit_namefile(namefile_path) + + success, _ = flopy.run_model( + target, + os.path.basename(namefile_path), + model_ws=os.path.dirname(namefile_path), + silent=False, + ) + assert success, f"could not run...{os.path.basename(namefile_path)}" -@pytest.mark.base +@pytest.mark.dependency(name="graph", depends=["test"]) @pytest.mark.regression -def test_clean_up(): - clean_up() - - -if __name__ == "__main__": - test_download() - test_compile() - for fn in name_files: - run_seawat(fn) - test_dependency_graphs() - test_clean_up() +def test_dependency_graphs(workspace, prog_data): + src_path = workspace / prog_data.srcdir + dep_path = workspace / "dependencies" + build_seawat_dependency_graphs(src_path, dep_path) diff --git a/autotest/test_triangle_makefile.py b/autotest/test_triangle_makefile.py index f430c83f..45579d87 100644 --- a/autotest/test_triangle_makefile.py +++ b/autotest/test_triangle_makefile.py @@ -1,5 +1,4 @@ import os -import shutil import sys import flopy @@ -7,14 +6,9 @@ import pymake -# working directory -dstpth = os.path.join(f"temp_{os.path.basename(__file__).replace('.py', '')}") - @pytest.mark.base -def test_pymake_makefile(): - os.makedirs(dstpth, exist_ok=True) - +def test_pymake_makefile(module_tmpdir): target = "triangle" pm = pymake.Pymake(verbose=True) pm.makefile = True @@ -31,22 +25,22 @@ def test_pymake_makefile(): # change to working directory so triangle download directory is # a subdirectory in the working directory - os.chdir(dstpth) + os.chdir(module_tmpdir) # build triangle and makefile assert ( pymake.build_apps(target, clean=False, pymake_object=pm) == 0 ), f"could not build {target}" - if 
os.path.isfile(os.path.join(dstpth, "makefile")): + if os.path.isfile(os.path.join(module_tmpdir, "makefile")): print("cleaning with GNU make") # clean prior to make print(f"clean {target} with makefile") - success, buff = flopy.run_model( + success, _ = flopy.run_model( "make", None, cargs="clean", - model_ws=dstpth, + model_ws=module_tmpdir, report=True, normal_msg="rm -rf ./triangle", silent=False, @@ -55,10 +49,10 @@ def test_pymake_makefile(): # build triangle with makefile if success: print(f"build {target} with makefile") - success, buff = flopy.run_model( + success, _ = flopy.run_model( "make", None, - model_ws=dstpth, + model_ws=module_tmpdir, report=True, normal_msg="cc -O2 -o triangle ./obj_temp/triangle.o", silent=False, @@ -71,19 +65,5 @@ def test_pymake_makefile(): os.chdir(cwd) assert os.path.isfile( - os.path.join(dstpth, target) + os.path.join(module_tmpdir, target) ), f"could not build {target} with makefile" - - return - - -@pytest.mark.base -def test_clean_up(): - print("Removing test files and directories") - - shutil.rmtree(dstpth) - - -if __name__ == "__main__": - test_pymake_makefile() - test_clean_up() diff --git a/pyproject.toml b/pyproject.toml index a10b6c6d..833738b5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,6 +55,7 @@ test = [ "pytest-benchmark", "pytest-cases", "pytest-cov", + "pytest-dependency", "pytest-dotenv", "pytest-virtualenv", "pytest-xdist",
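The @pytest.mark.dependency markers used throughout these tests come from the pytest-dependency plugin added to the test extras, and the module_tmpdir / function_tmpdir fixtures (presumably provided by modflow_devtools or a local conftest) take the place of the old module-level dstpth globals and explicit test_clean_up() functions: pytest removes its temporary directories itself, and dependent regression tests are skipped rather than erroring when a download or build step fails. A minimal, self-contained sketch of that pattern follows, assuming pytest >= 7 with pytest-dependency installed; the module_tmpdir fixture below is a local stand-in for illustration only, not the fixture the real autotests use.

# Illustrative sketch only -- not part of the patch above. Assumes pytest>=7
# and the pytest-dependency plugin; "module_tmpdir" is a hypothetical local
# stand-in for the module-scoped temporary-directory fixture the tests rely on.
from pathlib import Path

import pytest


@pytest.fixture(scope="module")
def module_tmpdir(tmp_path_factory) -> Path:
    # one shared scratch directory per test module, cleaned up by pytest itself
    return tmp_path_factory.mktemp("example")


@pytest.mark.dependency(name="download")
def test_download(module_tmpdir):
    # placeholder for pm.download_target(...); just creates a marker file
    (module_tmpdir / "dist.zip").touch()
    assert (module_tmpdir / "dist.zip").is_file()


@pytest.mark.dependency(name="build", depends=["download"])
def test_compile(module_tmpdir):
    # pytest-dependency skips this test automatically if test_download failed,
    # so no manual ordering or clean-up logic is needed
    assert (module_tmpdir / "dist.zip").is_file()

Under pytest-xdist, the --dist=loadfile option used in the GCC workflow keeps all tests from one module on the same worker, which helps these per-module dependency chains behave predictably in parallel runs.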