Commit

Merge pull request #209 from euroargodev/release0.0.11 [skip-ci]
Prepare for "v0.1.11 Rooster release 🐓"
gmaze authored Apr 13, 2022
2 parents 4480503 + e28161a commit c3810ac
Showing 14 changed files with 63 additions and 55 deletions.
2 changes: 1 addition & 1 deletion argopy/data_fetchers/proto.py
@@ -96,7 +96,7 @@ def _cname(self) -> str:

@property
def sha(self) -> str:
""" Returns a unique SHA for a specifc cname / fetcher implementation"""
""" Returns a unique SHA for a specific cname / fetcher implementation"""
path = "%s-%s" % (self.definition, self.cname())
return hashlib.sha256(path.encode()).hexdigest()
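For reference, the fingerprint returned here is just a SHA-256 of the "definition-cname" string; a minimal sketch of the same idea outside the class (the definition and cname values below are made up for illustration):

    import hashlib

    definition = "Ifremer erddap Argo data fetcher"   # hypothetical fetcher definition
    cname = "phy;WMO6902746"                          # hypothetical cname for a float access point
    path = "%s-%s" % (definition, cname)
    print(hashlib.sha256(path.encode()).hexdigest())  # 64-character hex digest, unique per pair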

2 changes: 1 addition & 1 deletion argopy/plot/dashboards.py
@@ -117,7 +117,7 @@ def open_dashboard(
* "ea", "data": the `Euro-Argo data selection dashboard <https://dataselection.euro-argo.eu>`_
* "meta": the `Euro-Argo fleet monitoring dashboard <https://fleetmonitoring.euro-argo.eu>`_
* "op", "ocean-ops": the `Ocean-OPS Argo dashboard <https://www.ocean-ops.org/board?t=argo>`_
* "bgc": the `Argo-BGC specific dashbaord <https://maps.biogeochemical-argo.com/bgcargo>`_
* "bgc": the `Argo-BGC specific dashboard <https://maps.biogeochemical-argo.com/bgcargo>`_
* "argovis": the `Colorado Argovis dashboard <https://argovis.colorado.edu>`_
url_only: bool, optional, default: False
If set to True, will only return the URL toward the dashboard
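As a quick illustration of the options documented above, a hedged usage sketch (float 5904797 / cycle 12 is the same example used in the what's-new entry below; url_only is assumed to be accepted by the argopy.dashboard shortcut as documented here):

    import argopy

    argopy.dashboard(5904797, 12, type='bgc')                       # open the BGC dashboard for this float/cycle
    url = argopy.dashboard(5904797, 12, type='bgc', url_only=True)  # only build the URL, do not open it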
8 changes: 4 additions & 4 deletions argopy/tests/test_fetchers_data_gdac.py
@@ -93,7 +93,7 @@ def core(fargs, apts):
return fct_safe_to_server_errors(core)(fetcher_args, access_point, xfail=xfail)


def assert_fetcher(this_fetcher, cachable=False):
def assert_fetcher(this_fetcher, cacheable=False):
"""Assert a data fetcher.
This should be used by all tests
@@ -102,7 +102,7 @@ def assert_fetcher(this_fetcher, cachable=False):
assert is_list_of_strings(this_fetcher.uri)
assert (this_fetcher.N_RECORDS >= 1) # Make sure we loaded the index file content
assert (this_fetcher.N_FILES >= 1) # Make sure we found results
if cachable:
if cacheable:
assert is_list_of_strings(this_fetcher.cachepath)


@@ -184,7 +184,7 @@ def test_hosts_invalid(self, ftp_host):
def test_fetching(self, _make_a_fetcher):
@safe_to_server_errors
def test(this_fetcher):
assert_fetcher(this_fetcher, cachable=False)
assert_fetcher(this_fetcher, cacheable=False)
test(_make_a_fetcher)

# @skip_for_debug
@@ -193,7 +193,7 @@ def test_fetching_cached(self, _make_a_cached_fetcher):
@safe_to_server_errors
def test(this_fetcher):
# Assert the fetcher (this trigger data fetching, hence caching as well):
assert_fetcher(this_fetcher, cachable=True)
assert_fetcher(this_fetcher, cacheable=True)

# Make sure we can clear the cache:
this_fetcher.clear_cache()
8 changes: 4 additions & 4 deletions argopy/tests/test_fetchers_index_gdac.py
@@ -78,13 +78,13 @@ def core(fargs, apts):
return fct_safe_to_server_errors(core)(fetcher_args, access_point, xfail=xfail)


def assert_fetcher(this_fetcher, cachable=False):
def assert_fetcher(this_fetcher, cacheable=False):
""" Assert structure of a fetcher """
assert isinstance(this_fetcher.to_dataframe(), pd.core.frame.DataFrame)
# assert is_list_of_strings(this_fetcher.uri)
assert (this_fetcher.N_RECORDS >= 1) # Make sure we loaded the index file content
assert (this_fetcher.N_FILES >= 1) # Make sure we found results
if cachable:
if cacheable:
assert is_list_of_strings(this_fetcher.cachepath)


@@ -167,7 +167,7 @@ def test_hosts_invalid(self, ftp_host):
def test_fetching(self, _make_a_fetcher):
@safe_to_server_errors
def test(this_fetcher):
assert_fetcher(this_fetcher, cachable=False)
assert_fetcher(this_fetcher, cacheable=False)
test(_make_a_fetcher)

# @skip_for_debug
@@ -176,7 +176,7 @@ def test_fetching_cached(self, _make_a_cached_fetcher):
@safe_to_server_errors
def test(this_fetcher):
# Assert the fetcher (this trigger data fetching, hence caching as well):
assert_fetcher(this_fetcher, cachable=True)
assert_fetcher(this_fetcher, cacheable=True)

# Make sure we can clear the cache:
this_fetcher.clear_cache()
18 changes: 9 additions & 9 deletions argopy/tests/test_stores.py
@@ -170,7 +170,7 @@ def test_nocache(self):
with pytest.raises(FileSystemHasNoCache):
fs.cachepath("dummy_uri")

def test_cachable(self):
def test_cacheable(self):
fs = httpstore(cache=True)
assert isinstance(fs.fs, fsspec.implementations.cached.WholeFileCacheFileSystem)
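For context, a minimal sketch of what this renamed test exercises (assuming the store API shown in this file):

    import fsspec.implementations.cached
    from argopy.stores import httpstore

    fs = httpstore(cache=True)
    # With cache=True the underlying fsspec filesystem caches whole files locally:
    assert isinstance(fs.fs, fsspec.implementations.cached.WholeFileCacheFileSystem)
    # fs.cachepath(uri) returns the local cache file(s) once a uri has been fetched;
    # without cache=True it raises FileSystemHasNoCache (see test_nocache above).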

@@ -265,7 +265,7 @@ def test_nocache(self):
with pytest.raises(FileSystemHasNoCache):
fs.cachepath("dummy_uri")

def test_cachable(self):
def test_cacheable(self):
fs = memorystore(cache=True)
assert isinstance(fs.fs, fsspec.implementations.cached.WholeFileCacheFileSystem)

@@ -519,20 +519,20 @@ def a_search(self, request):
srch = request.param[1]
yield run_a_search(self.new_idx, {'host': host, 'cache': True}, srch)

def assert_index(self, this_idx, cachable=False):
def assert_index(self, this_idx, cacheable=False):
assert hasattr(this_idx, 'index')
assert this_idx.shape[0] == this_idx.index.shape[0]
assert this_idx.N_RECORDS == this_idx.index.shape[0]
assert is_list_of_strings(this_idx.uri_full_index) and len(this_idx.uri_full_index) == this_idx.N_RECORDS
if cachable:
if cacheable:
assert is_list_of_strings(this_idx.cachepath('index'))

def assert_search(self, this_idx, cachable=False):
def assert_search(self, this_idx, cacheable=False):
assert hasattr(this_idx, 'search')
assert this_idx.N_MATCH == this_idx.search.shape[0]
assert this_idx.N_FILES == this_idx.N_MATCH
assert is_list_of_strings(this_idx.uri) and len(this_idx.uri) == this_idx.N_MATCH
if cachable:
if cacheable:
assert is_list_of_strings(this_idx.cachepath('search'))

# @skip_this
@@ -572,7 +572,7 @@ def new_idx():
def test_search(self, a_search):
@safe_to_server_errors
def test(this_searched_store):
self.assert_search(this_searched_store, cachable=False)
self.assert_search(this_searched_store, cacheable=False)
test(a_search)

# @skip_this
@@ -606,14 +606,14 @@ def test_to_dataframe_search(self):

def test_caching_index(self):
idx = self.new_idx(cache=True)
self.assert_index(idx, cachable=True)
self.assert_index(idx, cacheable=True)

# @skip_this
def test_caching_search(self):
idx = self.new_idx(cache=True)
wmo = [s['wmo'] for s in valid_searches if 'wmo' in s.keys()][0]
idx.search_wmo(wmo)
self.assert_search(idx, cachable=True)
self.assert_search(idx, cacheable=True)

# @skip_this
def test_read_wmo(self):
3 changes: 2 additions & 1 deletion argopy/utilities.py
@@ -576,6 +576,7 @@ def show_versions(file=sys.stdout, conda=False): # noqa: C901
("dask", lambda mod: mod.__version__), # This could go away from requirements ?
("toolz", lambda mod: mod.__version__),
("gsw", lambda mod: mod.__version__), # Used by xarray accessor to compute new variables
("aiohttp", lambda mod: mod.__version__),
]),
'ext.misc': sorted([
("pyarrow", lambda mod: mod.__version__),
@@ -2142,7 +2143,7 @@ def load(self, errors: str = "ignore"):


def argo_split_path(this_path): # noqa C901
""" Split path from a GDAC ftp style Argo netcdf file and return informations
""" Split path from a GDAC ftp style Argo netcdf file and return information
>>> argo_split_path('coriolis/6901035/profiles/D6901035_001D.nc')
>>> argo_split_path('https://data-argo.ifremer.fr/dac/csiro/5903939/profiles/D5903939_103.nc')
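Both changes to this file are easy to check interactively; a minimal sketch (assuming these helpers stay importable as shown, and without guessing the exact structure of the value returned by argo_split_path):

    import argopy
    from argopy.utilities import argo_split_path

    argopy.show_versions()   # now also reports the aiohttp version
    parts = argo_split_path('coriolis/6901035/profiles/D6901035_001D.nc')
    print(parts)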
2 changes: 1 addition & 1 deletion ci/requirements/py3.7-dev.yml
@@ -22,7 +22,6 @@ dependencies:
- numpy=1.21.5
- pandas=1.3.5
- packaging=21.3
- pip=22.0.3
- pytest=7.0.1
- seaborn=0.11.2
- setuptools=59.8.0
@@ -32,6 +31,7 @@ dependencies:
- ipywidgets=7.6.5
- pyarrow=4.0.1
# Not showing up with argopy.show_versions():
- pip=22.0.3
- cfgrib=0.9.10
- black=22.1.0
- flake8=4.0.1
3 changes: 2 additions & 1 deletion ci/requirements/py3.7-min.yml
@@ -32,4 +32,5 @@ dependencies:
- ipywidgets=7.5
- pandas=1.1
- numpy=1.18
- scipy=1.5
- scipy=1.5
- packaging=20.4
35 changes: 20 additions & 15 deletions ci/requirements/py3.8-dev.yml
@@ -3,34 +3,39 @@ channels:
- conda-forge
dependencies:
- python=3.8
- xarray=2022.3.0
- scipy=1.8.0
- scikit-learn=1.0.2
- netcdf4=1.5.8

- aiohttp=3.8.1
- dask=2022.02.1
- toolz=0.11.2
- erddapy=1.2.0
- fsspec=2022.02.0
- gsw=3.4.0
- aiohttp=3.8.1
- bottleneck=1.3.4
- cartopy=0.20.2
- cftime=1.5.2
- netcdf4=1.5.8
- packaging=21.3
- scipy=1.8.0
- scikit-learn=1.0.2
- toolz=0.11.2
- xarray=2022.3.0

- distributed=2022.2.1
- pyarrow=4.0.1
- tqdm=4.63.0

- ipython=8.0.1
- cartopy=0.20.2
- ipykernel=6.9.1
- ipywidgets=7.6.5
- matplotlib=3.5.1
- seaborn=0.11.2

- bottleneck=1.3.4
- cftime=1.5.2
- numpy=1.22.2
- pandas=1.4.1
- packaging=21.3
- pip=22.0.3
- pytest=7.0.1
- seaborn=0.11.2
- setuptools=59.8.0
- zarr=2.11.0
- tqdm=4.63.0
- ipykernel=6.9.1
- ipywidgets=7.6.5
- pyarrow=4.0.1

# Not showing up with argopy.show_versions():
- cfgrib=0.9.10
- black=22.1.0
3 changes: 2 additions & 1 deletion ci/requirements/py3.8-min.yml
@@ -32,4 +32,5 @@ dependencies:
- ipywidgets=7.5
- pandas=1.1
- numpy=1.18
- scipy=1.5
- scipy=1.5
- packaging=20.4
4 changes: 2 additions & 2 deletions docs/requirements.txt
@@ -16,7 +16,7 @@ sphinx-book-theme
# sphinx-bootstrap-theme

xarray>=0.16.1
scipy>=1.1.0
scipy>=1.5
scikit-learn>=0.23.2
dask>=2021.9.1
distributed>=2.30.0
@@ -26,7 +26,7 @@ distributed>=2.30.0
erddapy>=0.7.2
gsw>=3.4.0

fsspec>=0.7.4, <2022.3.1
fsspec>=0.7.4 #, <2022.3.1
aiohttp>=3.7.4

#matplotlib>=3.3.2
8 changes: 4 additions & 4 deletions docs/whats-new.rst
@@ -7,8 +7,8 @@ What's New

|pypi dwn| |conda dwn|

v0.1.XX (X XXX. 2022)
---------------------
v0.1.11 (13 Apr. 2022)
----------------------

**Features and front-end API**

@@ -27,7 +27,7 @@ v0.1.XX (X XXX. 2022)
.. note::

The new ``gdac`` fetcher uses Argo index to determine which profile files to load. Hence, this fetcher may show poor performances when used with a ``region`` access point. Don't hesitate to check :ref:`Performances` to try to improve performances, otherwise, we recommand to use a webAPI access point (``erddap`` or ``argovis``).
The new ``gdac`` fetcher uses Argo index to determine which profile files to load. Hence, this fetcher may show poor performances when used with a ``region`` access point. Don't hesitate to check :ref:`Performances` to try to improve performances, otherwise, we recommend to use a webAPI access point (``erddap`` or ``argovis``).
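For instance, a minimal usage sketch of this new access point (6902746 is an arbitrary float, and the standard DataFetcher front-end is assumed):

.. code-block:: python

    import argopy
    ds = argopy.DataFetcher(src='gdac').float(6902746).to_xarray()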

.. warning::

@@ -63,7 +63,7 @@ We added the Ocean-OPS (former JCOMMOPS) dashboard for all floats and the Argo-B
# or
argopy.dashboard(5904797, 12, type='bgc')
- New utility function :class:`argopy.utilities.ArgoNVSReferenceTables` to retrieve Argo Reference Tables. (:commit:`cc8fdbe132874b71b35203053626cc29ae7d19c4`) by `G. Maze <http://www.github.com/gmaze>`_.
- **New utility function :class:`argopy.utilities.ArgoNVSReferenceTables` to retrieve Argo Reference Tables**. (:commit:`cc8fdbe132874b71b35203053626cc29ae7d19c4`) by `G. Maze <http://www.github.com/gmaze>`_.

.. code-block:: python
20 changes: 10 additions & 10 deletions requirements.txt
@@ -1,11 +1,11 @@
xarray>=0.15.1
scipy>=1.1.0
scikit-learn<2.0 # Using 'preprocessing.LabelEncoder()' in xarray accessor, used by filters
netCDF4<1.5.9
dask>=2.9 # This could go away ?
xarray>=0.16
scipy>=1.5
scikit-learn>=0.23 #<2.0 # Using 'preprocessing.LabelEncoder()' in xarray accessor, used by filters
netCDF4>=1.5.3 #<1.5.9
dask>=2.3 # This could go away ?
toolz>=0.8.2
erddapy>=0.6 # This could go away ?
fsspec>=0.7.4
gsw<=3.4.0 # Used by xarray accessor to compute new variables, so not necessary to core functionnalities
aiohttp>=3.6.2
packaging>= 20.4 # Using 'version' to make API compatible with several fsspec releases
erddapy>=0.7 # This could go away ?
fsspec>=0.8
gsw<=3.4.0 # Used by xarray accessor to compute new variables, so not necessary to core functionalities
aiohttp>=3.7
packaging>=20.4 # Using 'version' to make API compatible with several fsspec releases
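The packaging pin above exists because argopy compares installed versions at runtime; a generic sketch of that kind of guard (illustrative only, not argopy's actual code):

    import fsspec
    from packaging import version

    if version.parse(fsspec.__version__) < version.parse("0.8"):
        raise RuntimeError("argopy requires fsspec>=0.8")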
2 changes: 1 addition & 1 deletion setup.py
@@ -10,7 +10,7 @@

setuptools.setup(
name='argopy',
version='0.1.10',
version='0.1.11',
author="argopy Developers",
author_email="gmaze@ifremer.fr",
description="A python library for Argo data beginners and experts",
