test: require dev exes, warn/skip if download/rebuilt not found #1460

Merged 2 commits on Nov 21, 2023
factor out try_get_target for temp devtools compatibility, refactor targets setup in conftest.py
wpbonelli committed Nov 21, 2023
commit 0490cc0703c6fde73286a564b672f2864f8d2f44
134 changes: 79 additions & 55 deletions autotest/conftest.py
@@ -39,78 +39,102 @@ def should_compare(
return True


_lib_exts = {
"Darwin": ".dylib",
"Linux": ".so",
"Windows": ".dll",
}
_exe_ext, _lib_ext = get_suffixes(sys.platform)
_binaries_path = project_root_path / "bin"
_dl_bin_path = _binaries_path / "downloaded"
_rb_bin_path = _binaries_path / "rebuilt"
_binaries = {
"development": [
("mf6", _binaries_path / f"mf6{_exe_ext}"),
("libmf6", _binaries_path / f"libmf6{_lib_ext}"),
("mf5to6", _binaries_path / f"mf5to6{_exe_ext}"),
("zbud6", _binaries_path / f"zbud6{_exe_ext}"),
],
"downloaded": [
("mf2000", _dl_bin_path / f"mf2000{_exe_ext}"),
("mf2005", _dl_bin_path / f"mf2005dbl{_exe_ext}"),
("mfnwt", _dl_bin_path / f"mfnwtdbl{_exe_ext}"),
("mfusg", _dl_bin_path / f"mfusgdbl{_exe_ext}"),
("mflgr", _dl_bin_path / f"mflgrdbl{_exe_ext}"),
("mf2005s", _dl_bin_path / f"mf2005{_exe_ext}"),
("mt3dms", _dl_bin_path / f"mt3dms{_exe_ext}"),
("crt", _dl_bin_path / f"crt{_exe_ext}"),
("gridgen", _dl_bin_path / f"gridgen{_exe_ext}"),
("mp6", _dl_bin_path / f"mp6{_exe_ext}"),
("mp7", _dl_bin_path / f"mp7{_exe_ext}"),
("swtv4", _dl_bin_path / f"swtv4{_exe_ext}"),
("sutra", _dl_bin_path / f"sutra{_exe_ext}"),
("triangle", _dl_bin_path / f"triangle{_exe_ext}"),
("vs2dt", _dl_bin_path / f"vs2dt{_exe_ext}"),
("zonbudusg", _dl_bin_path / f"zonbudusg{_exe_ext}"),
],
"rebuilt": [
("mf6_regression", _rb_bin_path / f"mf6{_exe_ext}"),
("libmf6_regression", _rb_bin_path / f"libmf6{_lib_ext}"),
("mf5to6_regression", _rb_bin_path / f"mf5to6{_exe_ext}"),
("zbud6_regression", _rb_bin_path / f"zbud6{_exe_ext}"),
],
}


@pytest.fixture(scope="session")
def bin_path() -> Path:
return project_root_path / "bin"
return _binaries_path


@pytest.fixture(scope="session")
def libmf6_path(bin_path) -> Path:
ext = {
"Darwin": ".dylib",
"Linux": ".so",
"Windows": ".dll",
}[platform.system()]
lib_name = bin_path / f"libmf6{ext}"
return lib_name
def libmf6_path() -> Path:
return _binaries_path / f"libmf6{_lib_exts[platform.system()]}"


@pytest.fixture(scope="session")
def targets(bin_path) -> Executables:
exe_ext, lib_ext = get_suffixes(sys.platform)
dl_bin = bin_path / "downloaded"
rb_bin = bin_path / "rebuilt"
targets = dict()

# local development binaries
development = [
("mf6", bin_path / f"mf6{exe_ext}"),
("libmf6", bin_path / f"libmf6{lib_ext}"),
("mf5to6", bin_path / f"mf5to6{exe_ext}"),
("zbud6", bin_path / f"zbud6{exe_ext}"),
]

# downloaded executables
downloaded = [
("mf2000", dl_bin / f"mf2000{exe_ext}"),
("mf2005", dl_bin / f"mf2005dbl{exe_ext}"),
("mfnwt", dl_bin / f"mfnwtdbl{exe_ext}"),
("mfusg", dl_bin / f"mfusgdbl{exe_ext}"),
("mflgr", dl_bin / f"mflgrdbl{exe_ext}"),
("mf2005s", dl_bin / f"mf2005{exe_ext}"),
("mt3dms", dl_bin / f"mt3dms{exe_ext}"),
("crt", dl_bin / f"crt{exe_ext}"),
("gridgen", dl_bin / f"gridgen{exe_ext}"),
("mp6", dl_bin / f"mp6{exe_ext}"),
("mp7", dl_bin / f"mp7{exe_ext}"),
("swtv4", dl_bin / f"swtv4{exe_ext}"),
("sutra", dl_bin / f"sutra{exe_ext}"),
("triangle", dl_bin / f"triangle{exe_ext}"),
("vs2dt", dl_bin / f"vs2dt{exe_ext}"),
("zonbudusg", dl_bin / f"zonbudusg{exe_ext}"),
]

# binaries rebuilt from last release
rebuilt = [
("mf6_regression", rb_bin / f"mf6{exe_ext}"),
("libmf6_regression", rb_bin / f"libmf6{lib_ext}"),
("mf5to6_regression", rb_bin / f"mf5to6{exe_ext}"),
("zbud6_regression", rb_bin / f"zbud6{exe_ext}"),
]
def targets() -> Executables:
d = dict()

# require development binaries
for k, v in development:
for k, v in _binaries["development"]:
assert v.is_file(), f"Couldn't find binary '{k}' expected at: {v}"
targets[k] = v
d[k] = v

# downloaded/rebuilt binaries are optional
for k, v in downloaded + rebuilt:
for k, v in _binaries["downloaded"] + _binaries["rebuilt"]:
if v.is_file():
targets[k] = v
d[k] = v
else:
warn(f"Couldn't find binary '{k}' expected at: {v}")

return Executables(**targets)
return Executables(**d)


def try_get_target(targets: Executables, name: str) -> Path:
"""Try to retrieve the path to a binary. If the binary is a development
target and can't be found, an error is raised. Otherwise (if the binary
is downloaded or rebuilt) the test is skipped. This is to allow testing
without downloaded or rebuilt binaries, e.g. if the network is down."""

try:
# modflow-devtools >= 1.3
exe = targets.get(name)
if exe:
return exe
elif name in dict(_binaries["development"]):
raise ValueError(f"Couldn't find binary '{name}'")
else:
pytest.skip(f"Couldn't find binary '{name}'")
except AttributeError:
# modflow-devtools < 1.3
try:
return targets[name]
except:
if name in dict(_binaries["development"]):
raise ValueError(f"Couldn't find binary '{name}'")
else:
pytest.skip(f"Couldn't find binary 'gridgen'")


@pytest.fixture
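For orientation, a minimal sketch of how a test module is expected to consume the refactored fixture and helper, mirroring the test_gwf_csub_distypes.py change below (the test name and body are illustrative only, not part of this commit):

from conftest import try_get_target

def test_example(function_tmpdir, targets):
    # development binaries are required by the targets fixture, so plain
    # attribute access (or targets["mf6"] on older modflow-devtools) is safe
    mf6 = targets.mf6
    # downloaded/rebuilt binaries are optional; the helper skips the test
    # instead of failing if the binary is not present
    gridgen = try_get_target(targets, "gridgen")
    assert mf6.is_file() and gridgen.is_file()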
6 changes: 3 additions & 3 deletions autotest/test_gwf_csub_distypes.py
@@ -5,6 +5,7 @@
import pytest
from flopy.utils.gridgen import Gridgen

from conftest import try_get_target
from framework import TestFramework
from simulation import TestSimulation

@@ -425,17 +426,16 @@ def eval_zdis(sim):
+ f"z-displacement at time {totim}"
)

return


@pytest.mark.parametrize(
"idx, name",
list(enumerate(ex)),
)
def test_mf6model(idx, name, function_tmpdir, targets):
gridgen = try_get_target(targets, "gridgen")
ws = function_tmpdir
test = TestFramework()
test.build(lambda i, w: build_model(i, w, targets.gridgen), idx, ws)
test.build(lambda i, w: build_model(i, w, gridgen), idx, ws)
test.run(
TestSimulation(
name=name,
2 changes: 1 addition & 1 deletion autotest/test_gwf_returncodes.py
@@ -271,5 +271,5 @@ def compiler_argv(dir, exe):
),
)
def test_main(fn, function_tmpdir, targets):
mf6 = targets.as_dict()["mf6"]
mf6 = targets.mf6
eval(fn)(function_tmpdir, mf6)
20 changes: 8 additions & 12 deletions autotest/test_gwt_mt3dms_p01.py
@@ -27,6 +27,8 @@
import numpy as np
import pytest

from conftest import try_get_target

testgroup = "mt3dms_p01"


@@ -472,18 +474,12 @@ def p01mf6(
return sim, conc


def get_binaries(targets) -> Tuple[Path, Path]:
mf6 = targets["mf6"]
mf2005 = targets.get("mf2005s")
mt3dms = targets.get("mt3dms")

assert mf6, f"Couldn't find binary 'mf6'"
if not mf2005:
pytest.skip(f"Couldn't find binary 'mf2005s'")
if not mt3dms:
pytest.skip(f"Couldn't find binary 'mt3dms'")

return mf6, mf2005, mt3dms
def get_binaries(targets) -> Tuple[Path, Path, Path]:
return (
targets.mf6,
try_get_target(targets, "mf2005s"),
try_get_target(targets, "mt3dms"),
)


def test_mt3dmsp01a(function_tmpdir, targets):
3 changes: 2 additions & 1 deletion autotest/test_z03_examples.py
@@ -1,4 +1,5 @@
import pytest

from conftest import should_compare
from simulation import TestSimulation

@@ -62,7 +63,7 @@ def test_scenario(function_tmpdir, example_scenario, targets):
workspace = function_tmpdir / model_name
sim = TestSimulation(
name=model_name,
exe_dict=targets.as_dict(),
exe_dict=targets,
mf6_regression=True,
cmp_verbose=False,
make_comparison=should_compare(
21 changes: 15 additions & 6 deletions autotest/test_z03_largetestmodels.py
@@ -1,13 +1,22 @@
import pytest

from conftest import should_compare
from simulation import TestSimulation

excluded_models = []
excluded_comparisons = {
"test1004_mvlake_laksfr_tr": ["6.4.1",],
"test1004_mvlake_lak_tr": ["6.4.1",],
"test1003_MNW2_Fig28": ["6.2.1",],
"test1001_Peterson": ["6.2.1",],
"test1004_mvlake_laksfr_tr": [
"6.4.1",
],
"test1004_mvlake_lak_tr": [
"6.4.1",
],
"test1003_MNW2_Fig28": [
"6.2.1",
],
"test1001_Peterson": [
"6.2.1",
],
}


@@ -23,13 +32,13 @@ def test_model(

if name in excluded_models:
pytest.skip(f"Excluding large mf6 model '{name}'")

if "dev" in name and "not developmode" in markers:
pytest.skip(f"Skipping large mf6 model '{name}' (develop mode only)")

sim = TestSimulation(
name=name,
exe_dict=targets.as_dict(),
exe_dict=targets,
mf6_regression=not original_regression,
cmp_verbose=False,
make_comparison=should_compare(name, excluded_comparisons, targets),