Skip to content

Commit

Permalink
Update CI tests
Browse files — browse the repository at this point in the history
  • Loading branch information
gmaze committed Oct 1, 2024
1 parent 642aadd commit 706a7da
Show file tree
Hide file tree
Showing 4 changed files with 33 additions and 12 deletions.
2 changes: 1 addition & 1 deletion argopy/tests/test_fetchers_data_erddap.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,5 +213,5 @@ def test_fetching_cached(self, mocked_erddapserver, cached_fetcher):
@pytest.mark.parametrize("parallel_fetcher", VALID_PARALLEL_ACCESS_POINTS,
indirect=True,
ids=VALID_PARALLEL_ACCESS_POINTS_IDS)
def test_fetching_parallel(self, mocked_erddapserver, parallel_fetcher):
def test_fetching_parallel_thread(self, mocked_erddapserver, parallel_fetcher):
assert_fetcher(mocked_erddapserver, parallel_fetcher, cacheable=False)
4 changes: 2 additions & 2 deletions argopy/tests/test_fetchers_data_erddap_bgc.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ def cached_fetcher(self, request):
@pytest.fixture
def parallel_fetcher(self, request):
""" Fixture to create a parallel ERDDAP data fetcher for a given dataset and access point """
fetcher_args, access_point = self._setup_fetcher(request, parallel="erddap")
fetcher_args, access_point = self._setup_fetcher(request, parallel="thread")
yield create_fetcher(fetcher_args, access_point)

def teardown_class(self):
Expand All @@ -229,7 +229,7 @@ def test_fetching_cached(self, mocked_erddapserver, cached_fetcher):
@pytest.mark.parametrize("parallel_fetcher", VALID_PARALLEL_ACCESS_POINTS,
indirect=True,
ids=VALID_PARALLEL_ACCESS_POINTS_IDS)
def test_fetching_parallel(self, mocked_erddapserver, parallel_fetcher):
def test_fetching_parallel_thread(self, mocked_erddapserver, parallel_fetcher):
assert_fetcher(mocked_erddapserver, parallel_fetcher, cacheable=False)

@pytest.mark.parametrize("measured", [None, 'all', 'DOXY'],
Expand Down
23 changes: 22 additions & 1 deletion argopy/tests/test_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
log = logging.getLogger("argopy.tests.options")


def test_invalid_option_name():
def test_invalid_opt_name():
    with pytest.raises(ValueError):
        argopy.set_options(not_a_valid_options=True)

Expand Down Expand Up @@ -103,3 +103,24 @@ def test_opt_trust_env():
argopy.set_options(trust_env='toto')
with pytest.raises(ValueError):
argopy.set_options(trust_env=0)


@pytest.mark.parametrize("method", [
    True,
    False,
    'thread',
    'process',
    # client
], indirect=False)
def test_opt_parallel(method):
    with argopy.set_options(parallel=method):
        assert OPTIONS['parallel'] == method


@pytest.mark.parametrize("method", [
    2,
    'dummy',
], indirect=False)
def test_invalid_opt_parallel(method):
    with pytest.raises(OptionValueError):
        argopy.set_options(parallel=method)
16 changes: 8 additions & 8 deletions argopy/tests/test_stores_fsspec.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def test_open_dataset(self):
ncfile = os.path.sep.join([self.ftproot, "dac/aoml/5900446/5900446_prof.nc"])
assert isinstance(self.fs.open_dataset(ncfile), xr.Dataset)

params = [(m, p, c) for m in ["seq", "thread", "process"] for p in [True, False] for c in [True, False]]
params = [(m, p, c) for m in ["sequential", "thread", "process"] for p in [True, False] for c in [True, False]]
ids_params = ["method=%s, progress=%s, concat=%s" % (p[0], p[1], p[2]) for p in params]
@pytest.mark.parametrize("params", params, indirect=False, ids=ids_params)
def test_open_mfdataset(self, params):
Expand All @@ -114,7 +114,7 @@ def test_open_mfdataset(self, params):
else:
assert is_list_of_datasets(ds)

params = [(m) for m in ["seq", "thread", "invalid"]]
params = [(m) for m in ["sequential", "thread", "invalid"]]
ids_params = ["method=%s" % (p) for p in params]
@pytest.mark.parametrize("params", params, indirect=False, ids=ids_params)
def test_open_mfdataset_error(self, params):
Expand Down Expand Up @@ -190,7 +190,7 @@ class Test_HttpStore:
# Parameters for multiple files opening
mf_params_nc = [
(m, p, c)
for m in ["seq", "thread", "process"]
for m in ["sequential", "thread", "process"]
for p in [True, False]
for c in [True, False]
]
Expand All @@ -202,7 +202,7 @@ class Test_HttpStore:
repo + "ftp/dac/csiro/5900865/profiles/D5900865_002.nc",
]

mf_params_js = [(m, p) for m in ["seq", "thread", "process"] for p in [True, False]]
mf_params_js = [(m, p) for m in ["sequential", "thread", "process"] for p in [True, False]]
mf_params_js_ids = ["method=%s, progress=%s" % (p[0], p[1]) for p in mf_params_js]
mf_js = [
"https://api.ifremer.fr/argopy/data/ARGO-FULL.json",
Expand Down Expand Up @@ -316,7 +316,7 @@ def test_open_mfdataset(self, params):
else:
assert is_list_of_datasets(ds)

params = [(m) for m in ["seq", "thread", "invalid"]]
params = [(m) for m in ["sequential", "thread", "invalid"]]
ids_params = ["method=%s" % (p) for p in params]

@pytest.mark.parametrize("params", params, indirect=False, ids=ids_params)
Expand Down Expand Up @@ -372,7 +372,7 @@ def test_open_mfjson(self, params):
)
assert is_list_of_dicts(lst)

params = [(m) for m in ["seq", "thread", "invalid"]]
params = [(m) for m in ["sequential", "thread", "invalid"]]
ids_params = ["method=%s" % (p) for p in params]

@pytest.mark.parametrize("params", params, indirect=False, ids=ids_params)
Expand Down Expand Up @@ -488,7 +488,7 @@ def test_open_dataset_error(self, store):

params = [
(m, p, c)
for m in ["seq", "process"]
for m in ["sequential", "process"]
for p in [True, False]
for c in [True, False]
]
Expand Down Expand Up @@ -522,7 +522,7 @@ def test(this_params):

test(params)

params = [(m) for m in ["seq", "process", "invalid"]]
params = [(m) for m in ["sequential", "process", "invalid"]]
ids_params = ["method=%s" % (p) for p in params]

@pytest.mark.parametrize("params", params, indirect=False, ids=ids_params)
Expand Down

0 comments on commit 706a7da

Please sign in to comment.