From baa3033e5997582bd84bace65b02bbae2757cfbd Mon Sep 17 00:00:00 2001
From: Guillaume Maze
Date: Mon, 19 Aug 2024 15:17:19 +0200
Subject: [PATCH 01/23] Update whats-new.rst

---
 docs/whats-new.rst | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/docs/whats-new.rst b/docs/whats-new.rst
index 312f28fa..288d56af 100644
--- a/docs/whats-new.rst
+++ b/docs/whats-new.rst
@@ -8,8 +8,8 @@ What's New

 |pypi dwn| |conda dwn|

-Coming up next
---------------
+v0.1.16 (xx Aug. 2024)
+----------------------

 **Features and front-end API**

@@ -35,7 +35,7 @@ Coming up next

 **Internals**

-- Update :class:`argopy.ArgoNVSReferenceTables` to handle new NVS server output format
+- Update :class:`argopy.ArgoNVSReferenceTables` to handle new NVS server output format. (:pr:`378`) by `G. Maze `_

 - Pin upper bound on xarray < 2024.3 to fix failing upstream tests because of ``AttributeError: 'ScipyArrayWrapper' object has no attribute 'oindex'``, `reported here `_. (:pr:`326`) by `G. Maze `_

@@ -50,7 +50,6 @@ Coming up next

 - Fix for fsspec > 2023.10.0. (:pr:`318`) by `G. Maze `_.

-
 v0.1.15 (12 Dec. 2023)
 ----------------------

From 0c227bf22754fb480b23765273658df091a8efb0 Mon Sep 17 00:00:00 2001
From: Guillaume Maze
Date: Mon, 19 Aug 2024 15:24:41 +0200
Subject: [PATCH 02/23] Update setup.py

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 5187feae..6c7c3320 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@

 setuptools.setup(
     name="argopy",
-    version="0.1.15",
+    version="0.1.16",
     author="argopy Developers",
     author_email="gmaze@ifremer.fr",
     description="A python library for Argo data beginners and experts",

From e52a9ea47d0c7442df03c7ab0a5c575e8f4f3f49 Mon Sep 17 00:00:00 2001
From: Guillaume Maze
Date: Mon, 19 Aug 2024 15:24:48 +0200
Subject: [PATCH 03/23] Codespell

---
 .codespellrc                      | 2 +-
 CODE_OF_CONDUCT.md                | 2 +-
 argopy/stores/argo_index_proto.py | 2 +-
 argopy/stores/filesystems.py      | 2 +-
 docs/impact.rst                   | 6 +++---
 docs/whats-new.rst                | 4 ++--
 docs/why.rst                      | 2 +-
 7 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/.codespellrc b/.codespellrc
index a11a40ac..439f6ea7 100644
--- a/.codespellrc
+++ b/.codespellrc
@@ -1,5 +1,5 @@
 [codespell]
-skip = *.nc,*.ipynb,./local_work,./float_source,./binder,./.github,*.log,./.git,./docs/_build,./docs/_static,./argopy/tests/test_data,./build,./docs/mycache_folder,./docs/examples/cache_bgc
+skip = *.nc,*.ipynb,./local_work,./float_source,./binder,./.github,*.log,./.git,./docs/_build,./docs/_static,./argopy/tests/test_data,./build,./docs/mycache_folder,./docs/examples/cache_bgc,./argopy/static/assets/*.json
 count =
 quiet-level = 3
 ignore-words-list = PRES, pres, idel
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b71b0dde..905f2f94 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -6,7 +6,7 @@
 In the interest of fostering an open and welcoming environment, we as
 contributors and maintainers pledge to making participation in our project and
 our community a harassment-free experience for everyone, regardless of age, body
 size, disability, ethnicity, sex characteristics, gender identity and expression,
-level of experience, education, socio-economic status, nationality, personal
+level of experience, education, socioeconomic status, nationality, personal
 appearance, race, religion, or sexual identity and orientation.
## Our Standards diff --git a/argopy/stores/argo_index_proto.py b/argopy/stores/argo_index_proto.py index d62c9dfa..d3481467 100644 --- a/argopy/stores/argo_index_proto.py +++ b/argopy/stores/argo_index_proto.py @@ -457,7 +457,7 @@ def _write(self, fs, path, obj, fmt="pq"): if isinstance(fs, memorystore): fs.fs.touch(this_path) # Fix for https://github.com/euroargodev/argopy/issues/345 # fs.fs.touch(this_path) # Fix for https://github.com/euroargodev/argopy/issues/345 - # This is an f* mistery to me, why do we need 2 calls to trigger file creation FOR REAL ???? + # This is an f* mystery to me, why do we need 2 calls to trigger file creation FOR REAL ???? # log.debug("memorystore touched this path before open context: '%s'" % this_path) with fs.open(this_path, "wb") as handle: write_this[fmt](obj, handle) diff --git a/argopy/stores/filesystems.py b/argopy/stores/filesystems.py index b5031da9..b668fa76 100644 --- a/argopy/stores/filesystems.py +++ b/argopy/stores/filesystems.py @@ -259,7 +259,7 @@ def cachepath(self, uri: str, errors: str = "raise"): ) def _clear_cache_item(self, uri): - """Remove medadata and file for fsspec cache uri""" + """Remove metadata and file for fsspec cache uri""" fn = os.path.join(self.fs.storage[-1], "cache") self.fs.load_cache() # Read set of stored blocks from file and populate self.cached_files cache = self.cached_files[-1] diff --git a/docs/impact.rst b/docs/impact.rst index 51202c7d..9ba9b19b 100644 --- a/docs/impact.rst +++ b/docs/impact.rst @@ -4,19 +4,19 @@ Impact of argopy Papers & proceedings mentioning argopy -------------------------------------- -- Bartlett, Jenna, "An investigation of geostationary satellite imagery to compare developing and non-developing African easterly waves" (2022). Theses and Dissertations. 5600. https://scholarsjunction.msstate.edu/td/5600 +- Bartlett, Jenna, "An investigation of geostationary satellite imagery to compare developing and non-developing African easterly waves" (2022). Thesis and Dissertations. 5600. https://scholarsjunction.msstate.edu/td/5600 - Chafik, et.al, "The Faroe-Shetland Channel Jet: Structure, Variability, and Driving Mechanisms", 2023, JGR Oceans, https://doi.org/10.1029/2022JC019083 - Dan E. Kelley, Jaimie Harbin, Clark Richards, "argoFloats: An R Package for Analyzing Argo Data", 2021 Frontiers in Marine Science, https://doi.org/10.3389/fmars.2021.635922 -- de Solo, Sofia M., "What makes a hurricane fall apart? A multi-platform assessment of tropical cyclone weakening By" (2021). Theses and Dissertations. 5274. https://scholarsjunction.msstate.edu/td/5274 +- de Solo, Sofia M., "What makes a hurricane fall apart? A multi-platform assessment of tropical cyclone weakening By" (2021). Thesis and Dissertations. 5274. https://scholarsjunction.msstate.edu/td/5274 - Dunnington et al., (2021). argodata: An R interface to oceanographic data from the International Argo Program. Journal of Open Source Software, 6(68), 3659, https://doi.org/10.21105/joss.03659 - Gonzalez A., "The Argo Online School: An e-learning tool to get started with Argo" (2023), The Journal of Open Source Education (Under review) -- Huda, Md Nurul, "Machine Learning for Improvement of Ocean Data Resolution for Weather Forecasting and Climatological Research" (2023). Theses and Dissertations, Virginia Tech, http://hdl.handle.net/10919/116504 +- Huda, Md Nurul, "Machine Learning for Improvement of Ocean Data Resolution for Weather Forecasting and Climatological Research" (2023). 
Thesis and Dissertations, Virginia Tech, http://hdl.handle.net/10919/116504 - Steinberg, J. M., Piecuch, C. G., Hamlington, B. D., Thompson, P. R., & Coats, S. (2024). Influence of deep-ocean warming on coastal sea-level decadal trends in the Gulf of Mexico. Journal of Geophysical Research: Oceans, 129, e2023JC019681. https://doi.org/10.1029/2023JC019681 diff --git a/docs/whats-new.rst b/docs/whats-new.rst index 288d56af..26ddb9b8 100644 --- a/docs/whats-new.rst +++ b/docs/whats-new.rst @@ -173,7 +173,7 @@ v0.1.14 (29 Sep. 2023) - New utility class :class:`utils.MonitoredThreadPoolExecutor` to handle parallelization with a multi-threading Pool that provide a notebook or terminal computation progress dashboard. This class is used by the httpstore open_mfdataset method for erddap requests. -- New utilites to handle a collection of datasets: :func:`utils.drop_variables_not_in_all_datasets` will drop variables that are not in all datasets (the lowest common denominator) and :func:`utils.fill_variables_not_in_all_datasets` will add empty variables to dataset so that all the collection have the same data_vars and coords. These functions are used by stores to concat/merge a collection of datasets (chunks). +- New utilities to handle a collection of datasets: :func:`utils.drop_variables_not_in_all_datasets` will drop variables that are not in all datasets (the lowest common denominator) and :func:`utils.fill_variables_not_in_all_datasets` will add empty variables to dataset so that all the collection have the same data_vars and coords. These functions are used by stores to concat/merge a collection of datasets (chunks). - :func:`related.load_dict` now relies on :class:`ArgoNVSReferenceTables` instead of static pickle files. @@ -288,7 +288,7 @@ v0.1.14rc2 (27 Jul. 2023) - New utility class :class:`utils.MonitoredThreadPoolExecutor` to handle parallelization with a multi-threading Pool that provide a notebook or terminal computation progress dashboard. This class is used by the httpstore open_mfdataset method for erddap requests. -- New utilites to handle a collection of datasets: :func:`utils.drop_variables_not_in_all_datasets` will drop variables that are not in all datasets (the lowest common denominator) and :func:`utils.fill_variables_not_in_all_datasets` will add empty variables to dataset so that all the collection have the same data_vars and coords. These functions are used by stores to concat/merge a collection of datasets (chunks). +- New utilities to handle a collection of datasets: :func:`utils.drop_variables_not_in_all_datasets` will drop variables that are not in all datasets (the lowest common denominator) and :func:`utils.fill_variables_not_in_all_datasets` will add empty variables to dataset so that all the collection have the same data_vars and coords. These functions are used by stores to concat/merge a collection of datasets (chunks). - :func:`related.load_dict` now relies on :class:`ArgoNVSReferenceTables` instead of static pickle files. diff --git a/docs/why.rst b/docs/why.rst index 315671bb..a24c7803 100644 --- a/docs/why.rst +++ b/docs/why.rst @@ -6,7 +6,7 @@ Why argopy ? Surprisingly, the Argo community never provided its user base with a Python software to easily access and manipulate Argo measurements: **argopy** aims to fill this gap. 
-Despite, or because, its tremendous success in data management and in developping good practices and well calibrated procedures [ADMT]_, the Argo dataset is very complex: with thousands of different variables, tens of reference tables and a `user manual `_ more than 100 pages long:
+Despite, or because, its tremendous success in data management and in developing good practices and well calibrated procedures [ADMT]_, the Argo dataset is very complex: with thousands of different variables, tens of reference tables and a `user manual `_ more than 100 pages long:
 **argopy** aims to help you navigate this complex realm.

 For non-experts of the Argo dataset, it has become rather complicated to get access to Argo measurements.

From 755de3c908a56a7c8dc1ea8407b3e733859d566c Mon Sep 17 00:00:00 2001
From: Guillaume Maze
Date: Mon, 19 Aug 2024 15:50:59 +0200
Subject: [PATCH 04/23] black + flake8

---
 .flake8                                       |   3 +-
 argopy/data_fetchers/argovis_data.py          |  74 +++--
 argopy/data_fetchers/erddap_data.py           |  37 ++-
 argopy/data_fetchers/gdacftp_data.py          |   4 +-
 argopy/plot/plot.py                           | 307 ++++++++++-------
 argopy/plot/utils.py                          |   8 +-
 argopy/related/argo_documentation.py          |   2 +-
 argopy/stores/argo_index_proto.py             |   2 +-
 argopy/stores/argo_index_proto_s3.py          |   5 +-
 argopy/utils/__init__.py                      |  14 +-
 argopy/utils/checkers.py                      |   2 +-
 argopy/utils/format.py                        |  65 ++--
 argopy/utils/locals.py                        |  25 +-
 argopy/xarray.py                              | 312 ++++++++++--------
 .../working-with-argo-data/owc_workflow_eg.py |   3 +-
 15 files changed, 497 insertions(+), 366 deletions(-)

diff --git a/.flake8 b/.flake8
index c9f9129c..845125b6 100644
--- a/.flake8
+++ b/.flake8
@@ -10,7 +10,8 @@ ignore =
     E501,
     # line break before binary operator
     W503
-
+    # whitespace before ':' (https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#e203)
+    E203
 exclude =
     # No need to traverse our git directory
     .git,
diff --git a/argopy/data_fetchers/argovis_data.py b/argopy/data_fetchers/argovis_data.py
index 0fc4b6b9..a8362b0e 100644
--- a/argopy/data_fetchers/argovis_data.py
+++ b/argopy/data_fetchers/argovis_data.py
@@ -1,9 +1,3 @@
-#!/bin/env python
-# -*coding: UTF-8 -*-
-#
-# Argo data fetcher for Argovis.
-# - import numpy as np import pandas as pd import xarray as xr @@ -23,7 +17,7 @@ access_points = ["wmo", "box"] exit_formats = ["xarray"] dataset_ids = ["phy"] # First is default -api_server = "https://argovis-api.colorado.edu" +api_server = "https://argovis-api.colorado.edu" api_server_check = "https://argovis-api.colorado.edu/ping" log = logging.getLogger("argopy.argovis.data") @@ -58,7 +52,7 @@ def __init__( chunks: str = "auto", chunks_maxsize: dict = {}, api_timeout: int = 0, - **kwargs + **kwargs, ): """Instantiate an Argovis Argo data loader @@ -95,7 +89,7 @@ def __init__( "cachedir": cachedir, "timeout": timeout, # "size_policy": "head", # deprecated - "client_kwargs": {"headers": {'x-argokey': OPTIONS['argovis_api_key']}}, + "client_kwargs": {"headers": {"x-argokey": OPTIONS["argovis_api_key"]}}, } self.fs = kwargs["fs"] if "fs" in kwargs else httpstore(**self.store_opts) @@ -134,9 +128,12 @@ def __repr__(self): summary = [""] summary.append("Name: %s" % self.definition) summary.append("API: %s" % api_server) - api_key = self.fs.fs.client_kwargs['headers']['x-argokey'] - if api_key == DEFAULT['argovis_api_key']: - summary.append("API KEY: '%s' (get a free key at https://argovis-keygen.colorado.edu)" % api_key) + api_key = self.fs.fs.client_kwargs["headers"]["x-argokey"] + if api_key == DEFAULT["argovis_api_key"]: + summary.append( + "API KEY: '%s' (get a free key at https://argovis-keygen.colorado.edu)" + % api_key + ) else: summary.append("API KEY: '%s'" % api_key) summary.append("Domain: %s" % format_oneline(self.cname())) @@ -286,24 +283,32 @@ def json2dataframe(self, profiles): for profile in data: # construct metadata dictionary that will be repeated for each level metadict = { - 'date': profile['timestamp'], - 'date_qc': profile['timestamp_argoqc'], - 'lat': profile['geolocation']['coordinates'][1], - 'lon': profile['geolocation']['coordinates'][0], - 'cycle_number': profile['cycle_number'], - 'DATA_MODE': profile['data_info'][2][0][1], - 'DIRECTION': profile['profile_direction'], - 'platform_number': profile['_id'].split('_')[0], - 'position_qc': profile['geolocation_argoqc'], - 'index': 0 + "date": profile["timestamp"], + "date_qc": profile["timestamp_argoqc"], + "lat": profile["geolocation"]["coordinates"][1], + "lon": profile["geolocation"]["coordinates"][0], + "cycle_number": profile["cycle_number"], + "DATA_MODE": profile["data_info"][2][0][1], + "DIRECTION": profile["profile_direction"], + "platform_number": profile["_id"].split("_")[0], + "position_qc": profile["geolocation_argoqc"], + "index": 0, } # construct a row for each level in the profile - for i in range(len(profile['data'][profile['data_info'][0].index('pressure')])): + for i in range( + len(profile["data"][profile["data_info"][0].index("pressure")]) + ): row = { - 'temp': profile['data'][profile['data_info'][0].index('temperature')][i], - 'pres': profile['data'][profile['data_info'][0].index('pressure')][i], - 'psal': profile['data'][profile['data_info'][0].index('salinity')][i], - **metadict + "temp": profile["data"][ + profile["data_info"][0].index("temperature") + ][i], + "pres": profile["data"][profile["data_info"][0].index("pressure")][ + i + ], + "psal": profile["data"][profile["data_info"][0].index("salinity")][ + i + ], + **metadict, } rows.append(row) df = pd.DataFrame(rows) @@ -375,8 +380,8 @@ def to_xarray(self, errors: str = "ignore"): ds.attrs["Fetched_from"] = self.server try: ds.attrs["Fetched_by"] = getpass.getuser() - except: - ds.attrs["Fetched_by"] = 'anonymous' + except: # noqa: E722 + 
ds.attrs["Fetched_by"] = "anonymous" ds.attrs["Fetched_date"] = pd.to_datetime("now", utc=True).strftime("%Y/%m/%d") ds.attrs["Fetched_constraints"] = self.cname() ds.attrs["Fetched_uri"] = self.uri @@ -435,9 +440,9 @@ def init(self, WMO=[], CYC=None, **kwargs): def get_url(self, wmo: int, cyc: int = None) -> str: """Return path toward the source file of a given wmo/cyc pair""" if cyc is None: - return f'{self.server}/argo?platform={str(wmo)}&data=pressure,temperature,salinity' + return f"{self.server}/argo?platform={str(wmo)}&data=pressure,temperature,salinity" else: - return f'{self.server}/argo?id={str(wmo)}_{str(cyc).zfill(3)}&data=pressure,temperature,salinity' + return f"{self.server}/argo?id={str(wmo)}_{str(cyc).zfill(3)}&data=pressure,temperature,salinity" @property def uri(self): @@ -488,10 +493,7 @@ def init(self, box: list, **kwargs): def get_url(self): """Return the URL used to download data""" - shape = [ - [self.BOX[0], self.BOX[2]], # ll - [self.BOX[1], self.BOX[3]] # ur - ] + shape = [[self.BOX[0], self.BOX[2]], [self.BOX[1], self.BOX[3]]] # ll # ur strShape = str(shape).replace(" ", "") url = self.server + "/argo?data=pressure,temperature,salinity&box=" + strShape url += "&startDate={}".format( @@ -558,4 +560,4 @@ def uri(self): for box in boxes: urls.append(Fetch_box(box=box, ds=self.dataset_id).get_url()) - return self.url_encode(urls) \ No newline at end of file + return self.url_encode(urls) diff --git a/argopy/data_fetchers/erddap_data.py b/argopy/data_fetchers/erddap_data.py index b1419810..b6716664 100644 --- a/argopy/data_fetchers/erddap_data.py +++ b/argopy/data_fetchers/erddap_data.py @@ -19,7 +19,6 @@ from abc import abstractmethod import getpass from typing import Union -import fnmatch from aiohttp import ClientResponseError import logging @@ -27,8 +26,10 @@ from ..utils.format import format_oneline from ..stores import httpstore from ..errors import ErddapServerError, DataNotFound -from ..stores import indexstore_pd as ArgoIndex # make sure we work with the Pandas index store -from ..utils import is_list_of_strings, to_list,Chunker +from ..stores import ( + indexstore_pd as ArgoIndex, +) # make sure we work with the Pandas index store +from ..utils import is_list_of_strings, to_list, Chunker from .proto import ArgoDataFetcherProto @@ -172,11 +173,15 @@ def __init__( # noqa: C901 # This will be used to: # - retrieve the list of BGC variables to ask the erddap server # - get _data_mode information because we can't get it from the server - self.indexfs = kwargs['indexfs'] if 'indexfs' in kwargs else ArgoIndex( - index_file='argo_synthetic-profile_index.txt', # the only available in the erddap - cache=kwargs['cache_index'] if 'cache_index' in kwargs else cache, - cachedir=cachedir, - timeout=timeout, + self.indexfs = ( + kwargs["indexfs"] + if "indexfs" in kwargs + else ArgoIndex( + index_file="argo_synthetic-profile_index.txt", # the only available in the erddap + cache=kwargs["cache_index"] if "cache_index" in kwargs else cache, + cachedir=cachedir, + timeout=timeout, + ) ) # To handle bugs in the erddap server, we need the list of parameters on the server: @@ -616,7 +621,9 @@ def N_POINTS(self) -> int: "Erddap server can't return ncHeader for this url. 
" ) - def post_process(self, this_ds, add_dm: bool = True, URI: list = None): # noqa: C901 + def post_process( + self, this_ds, add_dm: bool = True, URI: list = None + ): # noqa: C901 """Post-process a xarray.DataSet created from a netcdf erddap response This method can also be applied on a regular dataset to re-enforce format compliance @@ -668,8 +675,8 @@ def post_process(self, this_ds, add_dm: bool = True, URI: list = None): # noqa: # In the case of a parallel download, this is a trick to preserve the chunk uri in the chunk dataset: # (otherwise all chunks have the same list of uri) - Fetched_url = this_ds.attrs.get('Fetched_url', False) - Fetched_constraints = this_ds.attrs.get('Fetched_constraints', False) + Fetched_url = this_ds.attrs.get("Fetched_url", False) + Fetched_constraints = this_ds.attrs.get("Fetched_constraints", False) # Finally overwrite erddap attributes with those from argopy: this_ds.attrs = {} @@ -685,12 +692,14 @@ def post_process(self, this_ds, add_dm: bool = True, URI: list = None): # noqa: this_ds.attrs["Fetched_from"] = self.erddap.server try: this_ds.attrs["Fetched_by"] = getpass.getuser() - except: - this_ds.attrs["Fetched_by"] = 'anonymous' + except: # noqa: E722 + this_ds.attrs["Fetched_by"] = "anonymous" this_ds.attrs["Fetched_date"] = pd.to_datetime("now", utc=True).strftime( "%Y/%m/%d" ) - this_ds.attrs["Fetched_constraints"] = self.cname() if not Fetched_constraints else Fetched_constraints + this_ds.attrs["Fetched_constraints"] = ( + self.cname() if not Fetched_constraints else Fetched_constraints + ) this_ds.attrs["Fetched_uri"] = URI if not Fetched_url else Fetched_url this_ds = this_ds[np.sort(this_ds.data_vars)] diff --git a/argopy/data_fetchers/gdacftp_data.py b/argopy/data_fetchers/gdacftp_data.py index aae219d7..10442340 100644 --- a/argopy/data_fetchers/gdacftp_data.py +++ b/argopy/data_fetchers/gdacftp_data.py @@ -283,7 +283,7 @@ def _preprocess_multiprof(self, ds): ds.attrs["Fetched_from"] = self.server try: ds.attrs["Fetched_by"] = getpass.getuser() - except: + except: # noqa: E722 ds.attrs["Fetched_by"] = 'anonymous' ds.attrs["Fetched_date"] = pd.to_datetime("now", utc=True).strftime("%Y/%m/%d") ds.attrs["Fetched_constraints"] = self.cname() @@ -352,7 +352,7 @@ def to_xarray(self, errors: str = "ignore"): ds.attrs["Fetched_from"] = self.server try: ds.attrs["Fetched_by"] = getpass.getuser() - except: + except: # noqa: E722 ds.attrs["Fetched_by"] = 'anonymous' ds.attrs["Fetched_date"] = pd.to_datetime("now", utc=True).strftime("%Y/%m/%d") ds.attrs["Fetched_constraints"] = self.cname() diff --git a/argopy/plot/plot.py b/argopy/plot/plot.py index 8db1b22e..fd5486ca 100644 --- a/argopy/plot/plot.py +++ b/argopy/plot/plot.py @@ -43,34 +43,39 @@ log = logging.getLogger("argopy.plot.plot") -def open_sat_altim_report(WMO: Union[str, list] = None, embed: Union[str, None] = "dropdown", **kwargs): - """ Insert the CLS Satellite Altimeter Report figure in notebook cell +def open_sat_altim_report( + WMO: Union[str, list] = None, embed: Union[str, None] = "dropdown", **kwargs +): + """Insert the CLS Satellite Altimeter Report figure in notebook cell - This is the method called when using the facade fetcher methods ``plot``:: + This is the method called when using the facade fetcher methods ``plot``:: - DataFetcher().float(6902745).plot('qc_altimetry') + DataFetcher().float(6902745).plot('qc_altimetry') - Parameters - ---------- - WMO: int or list - The float WMO to display. By default, this is set to None and will insert the general dashboard. 
- embed: str, default='dropdown' - Set the embedding method. If set to None, simply return the list of urls to figures. - Possible values are: ``dropdown``, ``slide`` and ``list``. + Parameters + ---------- + WMO: int or list + The float WMO to display. By default, this is set to None and will insert the general dashboard. + embed: str, default='dropdown' + Set the embedding method. If set to None, simply return the list of urls to figures. + Possible values are: ``dropdown``, ``slide`` and ``list``. - Returns - ------- - list of Image with ``list`` embed or a dict with URLs + Returns + ------- + list of Image with ``list`` embed or a dict with URLs - Notes - ----- - Requires IPython to work as expected. If IPython is not available only URLs are returned. + Notes + ----- + Requires IPython to work as expected. If IPython is not available only URLs are returned. """ - warnUnless(has_ipython, "requires IPython to work as expected, only URLs are returned otherwise") + warnUnless( + has_ipython, + "requires IPython to work as expected, only URLs are returned otherwise", + ) - if 'api_server' in kwargs: - api_server = kwargs['api_server'] + if "api_server" in kwargs: + api_server = kwargs["api_server"] else: api_server = "https://data-argo.ifremer.fr" @@ -79,7 +84,10 @@ def open_sat_altim_report(WMO: Union[str, list] = None, embed: Union[str, None] urls = [] urls_dict = {} for this_wmo in WMOs: - url = "%s/etc/argo-ast9-item13-AltimeterComparison/figures/%i.png" % (api_server, this_wmo) + url = "%s/etc/argo-ast9-item13-AltimeterComparison/figures/%i.png" % ( + api_server, + this_wmo, + ) log.debug(url) if has_ipython and embed == "list": urls.append(Image(url, embed=True)) @@ -90,19 +98,25 @@ def open_sat_altim_report(WMO: Union[str, list] = None, embed: Union[str, None] # Prepare rendering: if has_ipython and embed is not None: if has_ipywidgets and embed == "dropdown": + def f(Float): return Image(url=urls_dict[int(Float)]) + return ipywidgets.interact(f, Float=[str(wmo) for wmo in WMOs]) elif has_ipywidgets and embed == "slide": + def f(Float): return Image(url=urls[Float]) + return ipywidgets.interact( f, Float=ipywidgets.IntSlider(min=0, max=len(urls) - 1, step=1) ) elif embed == "list": return display(*urls) else: - raise ValueError("Invalid value for 'embed' argument. Must be: 'dropdown', 'slide', 'list' or None") + raise ValueError( + "Invalid value for 'embed' argument. 
Must be: 'dropdown', 'slide', 'list' or None" + ) else: return urls_dict @@ -117,7 +131,7 @@ def plot_trajectory( with_seaborn: bool = has_seaborn, **kwargs ): - """ Plot trajectories for an Argo index dataframe + """Plot trajectories for an Argo index dataframe This function is called by the Data and Index fetchers method 'plot' with the 'trajectory' option:: @@ -161,9 +175,18 @@ def plot_trajectory( # Set up the figure and axis: defaults = {"figsize": (10, 6), "dpi": 90} if with_cartopy: - opts = {**defaults, **{'x': 'longitude', 'y': 'latitude', 'hue': 'wmo', - 'traj': True, 'legend': add_legend, 'set_global': set_global, - 'cmap': palette}} + opts = { + **defaults, + **{ + "x": "longitude", + "y": "latitude", + "hue": "wmo", + "traj": True, + "legend": add_legend, + "set_global": set_global, + "cmap": palette, + }, + } opts = {**opts, **kwargs} return scatter_map(df, **opts) else: @@ -238,7 +261,7 @@ def bar_plot( with_seaborn: bool = has_seaborn, **kwargs ): - """ Create a bar plot for an Argo index dataframe + """Create a bar plot for an Argo index dataframe This is the method called when using the facade fetcher methods ``plot`` with the ``dac`` or ``profiler`` arguments:: @@ -284,24 +307,24 @@ def bar_plot( def scatter_map( # noqa: C901 - data: Union[xr.Dataset, pd.core.frame.DataFrame], - x: Union[str] = None, - y: Union[str] = None, - hue: Union[str] = None, - markersize: int = 36, - markeredgesize: float = 0.5, - markeredgecolor: str = 'default', - cmap: Union[str] = None, - traj: bool = True, - traj_axis: Union[str] = None, - traj_color: str = 'default', - legend: bool = True, - legend_title: str = 'default', - legend_location: Union[str, int] = 0, - cbar: bool = False, - cbarlabels: Union[str, list] = 'auto', - set_global: bool = False, - **kwargs + data: Union[xr.Dataset, pd.core.frame.DataFrame], + x: Union[str] = None, + y: Union[str] = None, + hue: Union[str] = None, + markersize: int = 36, + markeredgesize: float = 0.5, + markeredgecolor: str = "default", + cmap: Union[str] = None, + traj: bool = True, + traj_axis: Union[str] = None, + traj_color: str = "default", + legend: bool = True, + legend_title: str = "default", + legend_location: Union[str, int] = 0, + cbar: bool = False, + cbarlabels: Union[str, list] = "auto", + set_global: bool = False, + **kwargs ): """Try-to-be generic function to create a scatter plot on a map from **argopy** :class:`xarray.Dataset` or :class:`pandas.DataFrame` data @@ -393,40 +416,48 @@ def scatter_map( # noqa: C901 if isinstance(data, xr.Dataset) and data.argo._type == "point": # data = data.argo.point2profile(drop=True) - raise InvalidDatasetStructure('Function only available to a collection of profiles') + raise InvalidDatasetStructure( + "Function only available to a collection of profiles" + ) # Try to guess the default hue, i.e. 
name for WMO: def guess_trajvar(data): - for v in ['WMO', 'PLATFORM_NUMBER']: + for v in ["WMO", "PLATFORM_NUMBER"]: if v.lower() in data: return v.lower() if v.upper() in data: return v.upper() - raise ValueError("Can't guess the variable name for default hue/trajectory grouping (WMO)") + raise ValueError( + "Can't guess the variable name for default hue/trajectory grouping (WMO)" + ) + hue = guess_trajvar(data) if hue is None else hue if isinstance(data, xr.Dataset) and data.argo.N_LEVELS > 1: - warnings.warn("More than one N_LEVELS found in this dataset, scatter_map will use the first level only") + warnings.warn( + "More than one N_LEVELS found in this dataset, scatter_map will use the first level only" + ) data = data.isel(N_LEVELS=0) # Try to guess the colormap to use as a function of the 'hue' variable: def guess_cmap(hue): if hue.lower() in ArgoColors().list_valid_known_colormaps: cmap = hue.lower() - elif 'qc' in hue.lower(): - cmap = 'qc' - elif 'mode' in hue.lower(): - cmap = 'data_mode' - elif 'status_code' in hue.lower(): - cmap = 'deployment_status' + elif "qc" in hue.lower(): + cmap = "qc" + elif "mode" in hue.lower(): + cmap = "data_mode" + elif "status_code" in hue.lower(): + cmap = "deployment_status" else: - cmap = STYLE['palette'] + cmap = STYLE["palette"] return cmap + cmap = guess_cmap(hue) if cmap is None else cmap # Try to guess the x and y variables: def guess_xvar(data): - for v in ['lon', 'long', 'longitude', 'x']: + for v in ["lon", "long", "longitude", "x"]: if v.lower() in data: return v.lower() if v.upper() in data: @@ -434,15 +465,18 @@ def guess_xvar(data): if isinstance(data, xr.Dataset): for v in data.coords: - if '_CoordinateAxisType' in data[v].attrs and data[v].attrs['_CoordinateAxisType'] == 'Lon': + if ( + "_CoordinateAxisType" in data[v].attrs + and data[v].attrs["_CoordinateAxisType"] == "Lon" + ): return v - if 'axis' in data[v].attrs and data[v].attrs['axis'] == 'X': + if "axis" in data[v].attrs and data[v].attrs["axis"] == "X": return v raise ValueError("Can't guess the variable name for longitudes") def guess_yvar(data): - for v in ['lat', 'lati', 'latitude', 'y']: + for v in ["lat", "lati", "latitude", "y"]: if v.lower() in data: return v.lower() if v.upper() in data: @@ -450,28 +484,36 @@ def guess_yvar(data): if isinstance(data, xr.Dataset): for v in data.coords: - if '_CoordinateAxisType' in data[v].attrs and data[v].attrs['_CoordinateAxisType'] == 'Lat': + if ( + "_CoordinateAxisType" in data[v].attrs + and data[v].attrs["_CoordinateAxisType"] == "Lat" + ): return v - if 'axis' in data[v].attrs and data[v].attrs['axis'] == 'Y': + if "axis" in data[v].attrs and data[v].attrs["axis"] == "Y": return v raise ValueError("Can't guess the variable name for latitudes") + x = guess_xvar(data) if x is None else x y = guess_yvar(data) if y is None else y # Adjust legend title: - if legend_title == 'default': + if legend_title == "default": legend_title = str(hue) # Load Argo colors: - nHue = len(data.groupby(hue).first()) if isinstance(data, pd.DataFrame) else len(data.groupby(hue)) + nHue = ( + len(data.groupby(hue).first()) + if isinstance(data, pd.DataFrame) + else len(data.groupby(hue)) + ) mycolors = ArgoColors(cmap, nHue) COLORS = mycolors.COLORS - if markeredgecolor == 'default': - markeredgecolor = COLORS['DARKBLUE'] + if markeredgecolor == "default": + markeredgecolor = COLORS["DARKBLUE"] - if traj_color == 'default': + if traj_color == "default": traj_color = markeredgecolor # Try to guess the trajectory grouping variable, i.e. 
name for WMO @@ -482,7 +524,13 @@ def guess_yvar(data): subplot_kw = {"projection": ccrs.PlateCarree()} fig, ax = plt.subplots(**{**defaults, **kwargs}, subplot_kw=subplot_kw) - ax.add_feature(land_feature, color=COLORS['BLUE'], edgecolor=COLORS['CYAN'], linewidth=.1, alpha=0.3) + ax.add_feature( + land_feature, + color=COLORS["BLUE"], + edgecolor=COLORS["CYAN"], + linewidth=0.1, + alpha=0.3, + ) # vmin = data[hue].min() if vmin == 'auto' else vmin # vmax = data[hue].max() if vmax == 'auto' else vmax @@ -490,48 +538,58 @@ def guess_yvar(data): patches = [] for k, [name, group] in enumerate(data.groupby(hue)): if mycolors.registered and name not in mycolors.lookup: - log.info("Found '%s' values not available in the '%s' colormap" % (name, mycolors.definition['name'])) + log.info( + "Found '%s' values not available in the '%s' colormap" + % (name, mycolors.definition["name"]) + ) else: scatter_opts = { - 'color': mycolors.lookup[name] if mycolors.registered else mycolors.cmap(k), - 'label': "%s: %s" % (name, mycolors.ticklabels[name]) if mycolors.registered else name, - 'zorder': 10, - 'sizes': [markersize], - 'edgecolor': markeredgecolor, - 'linewidths': markeredgesize, + "color": mycolors.lookup[name] + if mycolors.registered + else mycolors.cmap(k), + "label": "%s: %s" % (name, mycolors.ticklabels[name]) + if mycolors.registered + else name, + "zorder": 10, + "sizes": [markersize], + "edgecolor": markeredgecolor, + "linewidths": markeredgesize, } if isinstance(data, pd.DataFrame) and not legend: - scatter_opts['legend'] = False # otherwise Pandas will add a legend even if we set legend=False - sc = group.plot.scatter( - x=x, y=y, - ax=ax, - **scatter_opts - ) + scatter_opts[ + "legend" + ] = False # otherwise Pandas will add a legend even if we set legend=False + sc = group.plot.scatter(x=x, y=y, ax=ax, **scatter_opts) patches.append(sc) if cbar: - if cbarlabels == 'auto': + if cbarlabels == "auto": cbarlabels = None - mycolors.cbar(ticklabels=cbarlabels, - ax=ax, - cax=sc, - fraction=0.03, label=legend_title) + mycolors.cbar( + ticklabels=cbarlabels, ax=ax, cax=sc, fraction=0.03, label=legend_title + ) if traj: for k, [name, group] in enumerate(data.groupby(traj_axis)): - ax.plot(group[x], group[y], - color=traj_color, - linewidth=0.5, - label="_nolegend_", - zorder=2, - ) + ax.plot( + group[x], + group[y], + color=traj_color, + linewidth=0.5, + label="_nolegend_", + zorder=2, + ) if set_global: ax.set_global() - latlongrid(ax, dx="auto", dy="auto", - label_style_arg={'color': COLORS['BLUE'], 'fontsize': 10}, - **{"color": COLORS['BLUE'], "alpha": 0.7}) + latlongrid( + ax, + dx="auto", + dy="auto", + label_style_arg={"color": COLORS["BLUE"], "fontsize": 10}, + **{"color": COLORS["BLUE"], "alpha": 0.7} + ) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) @@ -546,54 +604,55 @@ def guess_yvar(data): ) for spine in ax.spines.values(): - spine.set_edgecolor(COLORS['DARKBLUE']) + spine.set_edgecolor(COLORS["DARKBLUE"]) - ax.set_title('') + ax.set_title("") return fig, ax -def scatter_plot(ds: xr.Dataset, - this_param, - this_x='TIME', - this_y='PRES', - figsize=(18, 6), - cmap=None, - vmin=None, - vmax=None, - s=4, - bgcolor='lightgrey', - ): +def scatter_plot( + ds: xr.Dataset, + this_param, + this_x="TIME", + this_y="PRES", + figsize=(18, 6), + cmap=None, + vmin=None, + vmax=None, + s=4, + bgcolor="lightgrey", +): """A quick-and-dirty parameter scatter plot for one variable""" warnUnless(has_mpl, "requires matplotlib installed") if cmap is None: - cmap = 
mpl.colormaps['gist_ncar'] + cmap = mpl.colormaps["gist_ncar"] def get_vlabel(this_ds, this_v): attrs = this_ds[this_v].attrs - if 'standard_name' in attrs: - name = attrs['standard_name'] - elif 'long_name' in attrs: - name = attrs['long_name'] + if "standard_name" in attrs: + name = attrs["standard_name"] + elif "long_name" in attrs: + name = attrs["long_name"] else: name = this_v - units = attrs['units'] if 'units' in attrs else None + units = attrs["units"] if "units" in attrs else None return "%s\n[%s]" % (name, units) if units else name # Read variables for the plot: x, y = ds[this_x], ds[this_y] if "INTERPOLATED" in this_y: - x_bounds, y_bounds = np.meshgrid(x, y, indexing='ij') + x_bounds, y_bounds = np.meshgrid(x, y, indexing="ij") c = ds[this_param] # fig, ax = plt.subplots(dpi=90, figsize=figsize) - if vmin == 'attrs': - vmin = c.attrs['valid_min'] if 'valid_min' in c.attrs else None - if vmax == 'attrs': - vmax = c.attrs['valid_max'] if 'valid_max' in c.attrs else None + if vmin == "attrs": + vmin = c.attrs["valid_min"] if "valid_min" in c.attrs else None + if vmax == "attrs": + vmax = c.attrs["valid_max"] if "valid_max" in c.attrs else None if vmin is None: vmin = np.percentile(c, 10) if vmax is None: @@ -605,21 +664,21 @@ def get_vlabel(this_ds, this_v): m = ax.scatter(x, y, c=c, cmap=cmap, s=s, vmin=vmin, vmax=vmax) ax.set_facecolor(bgcolor) - cbar = fig.colorbar(m, shrink=0.9, extend='both', ax=ax) + cbar = fig.colorbar(m, shrink=0.9, extend="both", ax=ax) cbar.ax.set_ylabel(get_vlabel(ds, this_param), rotation=90) ylim = ax.get_ylim() - if 'PRES' in this_y: + if "PRES" in this_y: ax.invert_yaxis() y_bottom, y_top = np.max(ylim), np.min(ylim) else: y_bottom, y_top = ylim - if this_x == 'CYCLE_NUMBER': + if this_x == "CYCLE_NUMBER": ax.set_xlim([np.min(ds[this_x]) - 1, np.max(ds[this_x]) + 1]) - elif this_x == 'TIME': + elif this_x == "TIME": ax.set_xlim([np.min(ds[this_x]), np.max(ds[this_x])]) - if 'PRES' in this_y: + if "PRES" in this_y: ax.set_ylim([y_bottom, 0]) # diff --git a/argopy/plot/utils.py b/argopy/plot/utils.py index ef1d2665..24561a42 100644 --- a/argopy/plot/utils.py +++ b/argopy/plot/utils.py @@ -22,11 +22,11 @@ def _importorskip(modname): if has_mpl: - import matplotlib as mpl - import matplotlib.pyplot as plt + import matplotlib as mpl # noqa: F401 + import matplotlib.pyplot as plt # noqa: F401 import matplotlib.ticker as mticker - import matplotlib.cm as cm - import matplotlib.colors as mcolors + import matplotlib.cm as cm # noqa: F401 + import matplotlib.colors as mcolors # noqa: F401 if has_cartopy: diff --git a/argopy/related/argo_documentation.py b/argopy/related/argo_documentation.py index 95e8fc74..bc8a5a4f 100644 --- a/argopy/related/argo_documentation.py +++ b/argopy/related/argo_documentation.py @@ -49,7 +49,7 @@ def parse(self, file): try: with self.fs.open(file, 'r', encoding="utf-8") as f: TXTlines = f.readlines() - except: + except: # noqa: E722 with self.fs.open(file, 'r', encoding="latin-1") as f: TXTlines = f.readlines() diff --git a/argopy/stores/argo_index_proto.py b/argopy/stores/argo_index_proto.py index d3481467..6d7fa312 100644 --- a/argopy/stores/argo_index_proto.py +++ b/argopy/stores/argo_index_proto.py @@ -26,7 +26,7 @@ except ModuleNotFoundError: pass -from .argo_index_proto_s3 import search_s3 +# from .argo_index_proto_s3 import search_s3 log = logging.getLogger("argopy.stores.index") diff --git a/argopy/stores/argo_index_proto_s3.py b/argopy/stores/argo_index_proto_s3.py index 7ce7e778..a9d04015 100644 --- 
a/argopy/stores/argo_index_proto_s3.py +++ b/argopy/stores/argo_index_proto_s3.py @@ -17,6 +17,7 @@ HAS_PYARROW = True except ModuleNotFoundError: HAS_PYARROW = False + class pa: @property def Table(self): @@ -83,7 +84,7 @@ def __init__(self): try: access_key = self.fs._request_signer._credentials.get_frozen_credentials().access_key log.debug("Found AWS Credentials for access_key='%s'" % access_key) - except: + except: # noqa: E722 pass else: self.fs = boto3.client('s3', config=Config(signature_version=UNSIGNED)) @@ -160,7 +161,7 @@ def query(self, sql_expression: str) -> str: "CompressionType": self.CompressionType}, OutputSerialization={"CSV": {}}, ) - except: + except: # noqa: E722 # log.debug(boto3.set_stream_logger('botocore', level='DEBUG')) raise diff --git a/argopy/utils/__init__.py b/argopy/utils/__init__.py index 5c597e8f..5afa936d 100644 --- a/argopy/utils/__init__.py +++ b/argopy/utils/__init__.py @@ -1,4 +1,4 @@ -from .checkers import ( +from .checkers import ( # noqa: F401 is_box, is_indexbox, is_list_of_strings, @@ -30,15 +30,21 @@ from .monitored_threadpool import MyThreadPoolExecutor as MonitoredThreadPoolExecutor from .chunking import Chunker from .accessories import Registry, float_wmo -from .locals import ( +from .locals import ( # noqa: F401 show_versions, show_options, modified_environ, - get_sys_info, # noqa: F401 + get_sys_info, # noqa: F401 netcdf_and_hdf5_versions, # noqa: F401 ) from .monitors import monitor_status, badge, fetch_status # noqa: F401 -from .geo import wmo2box, wrap_longitude, conv_lon, toYearFraction, YearFraction_to_datetime +from .geo import ( + wmo2box, + wrap_longitude, + conv_lon, + toYearFraction, + YearFraction_to_datetime, +) from .compute import linear_interpolation_remap, groupby_remap from .transform import ( fill_variables_not_in_all_datasets, diff --git a/argopy/utils/checkers.py b/argopy/utils/checkers.py index 3775b09e..9bc5b778 100644 --- a/argopy/utils/checkers.py +++ b/argopy/utils/checkers.py @@ -698,4 +698,4 @@ def has_aws_credentials(): client = boto3.client('s3') return client._request_signer._credentials is not None else: - raise Exception("boto3 is not available !") \ No newline at end of file + raise Exception("boto3 is not available !") diff --git a/argopy/utils/format.py b/argopy/utils/format.py index ae72701d..c2845a7a 100644 --- a/argopy/utils/format.py +++ b/argopy/utils/format.py @@ -209,24 +209,33 @@ def erddapuri2fetchobj(uri: str) -> dict: """Given an Ifremer ERDDAP URI, return a dictionary with BOX or WMO or (WMO, CYC) fetcher arguments""" params = parse_qs(uri) result = {} - if 'longitude>' in params.keys(): + if "longitude>" in params.keys(): # Recreate the box definition: - box = [float(params['longitude>'][0]), float(params['longitude<'][0]), - float(params['latitude>'][0]), float(params['latitude<'][0]), - float(params['pres>'][0]), float(params['pres<'][0])] + box = [ + float(params["longitude>"][0]), + float(params["longitude<"][0]), + float(params["latitude>"][0]), + float(params["latitude<"][0]), + float(params["pres>"][0]), + float(params["pres<"][0]), + ] if "time>" in params.keys(): - box.append(pd.to_datetime(float(params['time>'][0]), unit='s').strftime("%Y-%m-%d")) - box.append(pd.to_datetime(float(params['time<'][0]), unit='s').strftime("%Y-%m-%d")) - result['box'] = box - elif 'platform_number' in params: - wmo = params['platform_number'][0].replace("~","").replace("\"","").split("|") + box.append( + pd.to_datetime(float(params["time>"][0]), unit="s").strftime("%Y-%m-%d") + ) + box.append( + 
pd.to_datetime(float(params["time<"][0]), unit="s").strftime("%Y-%m-%d") + ) + result["box"] = box + elif "platform_number" in params: + wmo = params["platform_number"][0].replace("~", "").replace('"', "").split("|") wmo = check_wmo(wmo) - result['wmo'] = wmo - if 'cycle_number' in params: - cyc = params['cycle_number'][0].replace("~","").replace("\"","").split("|") + result["wmo"] = wmo + if "cycle_number" in params: + cyc = params["cycle_number"][0].replace("~", "").replace('"', "").split("|") cyc = check_cyc(cyc) - result['cyc'] = cyc - if len(result.keys())==0: + result["cyc"] = cyc + if len(result.keys()) == 0: raise ValueError("This is not a typical Argo Ifremer Erddap uri") else: return result @@ -240,25 +249,27 @@ def _is_url(self, url): return parsed.scheme and parsed.netloc def __init__(self, obj): - if hasattr(obj, 'BOX'): + if hasattr(obj, "BOX"): self.BOX = obj.BOX - elif hasattr(obj, 'WMO'): + elif hasattr(obj, "WMO"): self.WMO = obj.WMO - if hasattr(obj, 'CYC'): + if hasattr(obj, "CYC"): self.CYC = obj.CYC elif self._is_url(obj) and "/tabledap/" in obj: obj = erddapuri2fetchobj(obj) - if 'box' in obj.keys(): - self.BOX = obj['box'] - elif 'wmo' in obj.keys(): - self.WMO = obj['wmo'] - if 'cyc' in obj.keys(): - self.CYC = obj['cyc'] + if "box" in obj.keys(): + self.BOX = obj["box"] + elif "wmo" in obj.keys(): + self.WMO = obj["wmo"] + if "cyc" in obj.keys(): + self.CYC = obj["cyc"] else: - raise ValueError("This class is only available with Erddap uri string requests or an ArgoDataFetcherProto instance") + raise ValueError( + "This class is only available with Erddap uri string requests or an ArgoDataFetcherProto instance" + ) def _format(self, x, typ: str) -> str: - """ string formatting helper """ + """string formatting helper""" if typ == "lon": if x < 0: x = 360.0 + x @@ -276,7 +287,7 @@ def __repr__(self): @property def cname(self) -> str: - """ Fetcher one line string definition helper """ + """Fetcher one line string definition helper""" cname = "?" 
if hasattr(self, "BOX"): @@ -325,4 +336,4 @@ def cname(self) -> str: if hasattr(self, "dataset_id"): cname = self.dataset_id + ";" + cname - return cname \ No newline at end of file + return cname diff --git a/argopy/utils/locals.py b/argopy/utils/locals.py index dccf3b83..393041ef 100644 --- a/argopy/utils/locals.py +++ b/argopy/utils/locals.py @@ -80,7 +80,7 @@ def netcdf_and_hdf5_versions(): def get_version(module_name): - ver = '-' + ver = "-" try: ver = module_name.__version__ except AttributeError: @@ -112,7 +112,6 @@ def show_versions(file=sys.stdout, conda=False): # noqa: C901 "core": sorted( [ ("argopy", get_version), - ("xarray", get_version), ("scipy", get_version), ("netCDF4", get_version), @@ -178,9 +177,11 @@ def show_versions(file=sys.stdout, conda=False): # noqa: C901 ("sphinx", get_version), ] ), - 'pip': sorted([ - ("pytest-reportlog", get_version), - ]) + "pip": sorted( + [ + ("pytest-reportlog", get_version), + ] + ), } DEPS_blob = {} @@ -189,18 +190,10 @@ def show_versions(file=sys.stdout, conda=False): # noqa: C901 deps_blob = list() for modname, ver_f in deps: try: - if modname in sys.modules: - mod = sys.modules[modname] - else: - mod = importlib.import_module(modname) + ver = ver_f(modname) + deps_blob.append((modname, ver)) except Exception: - deps_blob.append((modname, "-")) - else: - try: - ver = ver_f(modname) - deps_blob.append((modname, ver)) - except Exception: - deps_blob.append((modname, "installed")) + deps_blob.append((modname, "installed")) DEPS_blob[level] = deps_blob print("\nSYSTEM", file=file) diff --git a/argopy/xarray.py b/argopy/xarray.py index ccd2e594..096e05b8 100644 --- a/argopy/xarray.py +++ b/argopy/xarray.py @@ -34,33 +34,33 @@ class ArgoAccessor: """Class registered under scope ``argo`` to access a :class:`xarray.Dataset` object. 
- Examples - -------- - - Ensure all variables are of the Argo required dtype with: - >>> ds.argo.cast_types() - - Convert a collection of points into a collection of profiles: - >>> ds.argo.point2profile() - - Convert a collection of profiles to a collection of points: - >>> ds.argo.profile2point() - - Filter measurements according to data mode: - >>> ds.argo.filter_data_mode() - - Filter measurements according to QC flag values: - >>> ds.argo.filter_qc(QC_list=[1, 2], QC_fields='all') - - Filter variables according OWC salinity calibration requirements: - >>> ds.argo.filter_scalib_pres(force='default') - - Interpolate measurements on pressure levels: - >>> ds.argo.inter_std_levels(std_lev=[10., 500., 1000.]) - - Group and reduce measurements by pressure bins: - >>> ds.argo.groupby_pressure_bins(bins=[0, 200., 500., 1000.]) - - Compute and add additional variables to the dataset: - >>> ds.argo.teos10(vlist='PV') - - Preprocess data for OWC salinity calibration: - >>> ds.argo.create_float_source("output_folder") - - """ + Examples + -------- + - Ensure all variables are of the Argo required dtype with: + >>> ds.argo.cast_types() + - Convert a collection of points into a collection of profiles: + >>> ds.argo.point2profile() + - Convert a collection of profiles to a collection of points: + >>> ds.argo.profile2point() + - Filter measurements according to data mode: + >>> ds.argo.filter_data_mode() + - Filter measurements according to QC flag values: + >>> ds.argo.filter_qc(QC_list=[1, 2], QC_fields='all') + - Filter variables according OWC salinity calibration requirements: + >>> ds.argo.filter_scalib_pres(force='default') + - Interpolate measurements on pressure levels: + >>> ds.argo.inter_std_levels(std_lev=[10., 500., 1000.]) + - Group and reduce measurements by pressure bins: + >>> ds.argo.groupby_pressure_bins(bins=[0, 200., 500., 1000.]) + - Compute and add additional variables to the dataset: + >>> ds.argo.teos10(vlist='PV') + - Preprocess data for OWC salinity calibration: + >>> ds.argo.create_float_source("output_folder") + + """ def __init__(self, xarray_obj): - """ Init """ + """Init""" self._obj = xarray_obj self._added = list() # Will record all new variables added by argo # self._register = collections.OrderedDict() # Will register mutable instances of sub-modules like 'plot' @@ -85,7 +85,9 @@ def __init__(self, xarray_obj): elif "PRES" in self._vars: self._mode = "standard" else: - raise InvalidDatasetStructure("Argo dataset structure not recognised (no PRES nor PRES_ADJUSTED") + raise InvalidDatasetStructure( + "Argo dataset structure not recognised (no PRES nor PRES_ADJUSTED" + ) def __repr__(self): # import xarray.core.formatting as xrf @@ -176,7 +178,7 @@ def _add_history(self, txt): self._obj.attrs["history"] = txt def _where(self, cond, other=xr.core.dtypes.NA, drop: bool = False): - """ where that preserve dtypes of Argo fields + """where that preserve dtypes of Argo fields Parameters ---------- @@ -198,7 +200,7 @@ def _where(self, cond, other=xr.core.dtypes.NA, drop: bool = False): return this def cast_types(self, **kwargs): # noqa: C901 - """ Make sure variables are of the appropriate types according to Argo """ + """Make sure variables are of the appropriate types according to Argo""" ds = self._obj return cast_Argo_variable_type(ds, **kwargs) @@ -206,14 +208,14 @@ def cast_types(self, **kwargs): # noqa: C901 def _dummy_argo_uid(self): if self._type == "point": return xr.DataArray( - self.uid( - self._obj["PLATFORM_NUMBER"].values, - self._obj["CYCLE_NUMBER"].values, 
- self._obj["DIRECTION"].values, - ), - dims="N_POINTS", - coords={"N_POINTS": self._obj["N_POINTS"]}, - name="dummy_argo_uid", + self.uid( + self._obj["PLATFORM_NUMBER"].values, + self._obj["CYCLE_NUMBER"].values, + self._obj["DIRECTION"].values, + ), + dims="N_POINTS", + coords={"N_POINTS": self._obj["N_POINTS"]}, + name="dummy_argo_uid", ) else: raise InvalidDatasetStructure( @@ -221,7 +223,7 @@ def _dummy_argo_uid(self): ) def uid(self, wmo_or_uid, cyc=None, direction=None): - """ UID encoder/decoder + """UID encoder/decoder Parameters ---------- @@ -242,21 +244,22 @@ def uid(self, wmo_or_uid, cyc=None, direction=None): >>> wmo, cyc, drc = uid(unique_float_profile_id) # Decode """ + def encode_direction(x): - y = np.where(x == 'A', 1, x.astype(object)) - y = np.where(y == 'D', -1, y.astype(object)) + y = np.where(x == "A", 1, x.astype(object)) + y = np.where(y == "D", -1, y.astype(object)) try: return y.astype(int) except ValueError: - raise ValueError('x has un-expected values') + raise ValueError("x has un-expected values") def decode_direction(x): x = np.array(x) if np.any(np.unique(np.abs(x)) != 1): - raise ValueError('x has un-expected values') - y = np.where(x == 1, 'A', x) - y = np.where(y == '-1', 'D', y) - return y.astype('= len(this["N_POINTS"]) if N_LEVELS == 1: - log.debug("point2profile: This dataset has a single vertical level, thus final variables will only have a N_PROF " - "dimension and no N_LEVELS") + log.debug( + "point2profile: This dataset has a single vertical level, thus final variables will only have a N_PROF " + "dimension and no N_LEVELS" + ) # Store the initial set of coordinates: coords_list = list(this.coords) @@ -406,7 +439,10 @@ def fillvalue(da): try: count[i_prof, iv] = len(np.unique(prof[vname])) except Exception: - log.error("point2profile: An error happened when dealing with the '%s' data variable" % vname) + log.error( + "point2profile: An error happened when dealing with the '%s' data variable" + % vname + ) raise # Variables with a unique value for each profiles: @@ -455,7 +491,7 @@ def fillvalue(da): y = new_ds[vname].values x = prof[vname].values try: - y[i_prof, 0: len(x)] = x + y[i_prof, 0 : len(x)] = x except Exception: print(vname, "input", x.shape, "output", y[i_prof, :].shape) raise @@ -468,14 +504,18 @@ def fillvalue(da): # Restore coordinate variables: new_ds = new_ds.set_coords([c for c in coords_list if c in new_ds]) - new_ds['N_PROF'] = np.arange(N_PROF) - if 'N_LEVELS' in new_ds['LATITUDE'].dims: - new_ds['LATITUDE'] = new_ds['LATITUDE'].isel(N_LEVELS=0) # Make sure LAT is (N_PROF) and not (N_PROF, N_LEVELS) - new_ds['LONGITUDE'] = new_ds['LONGITUDE'].isel(N_LEVELS=0) + new_ds["N_PROF"] = np.arange(N_PROF) + if "N_LEVELS" in new_ds["LATITUDE"].dims: + new_ds["LATITUDE"] = new_ds["LATITUDE"].isel( + N_LEVELS=0 + ) # Make sure LAT is (N_PROF) and not (N_PROF, N_LEVELS) + new_ds["LONGITUDE"] = new_ds["LONGITUDE"].isel(N_LEVELS=0) # Misc formatting new_ds = new_ds.sortby("TIME") - new_ds = new_ds.argo.cast_types() if not drop else cast_Argo_variable_type(new_ds) + new_ds = ( + new_ds.argo.cast_types() if not drop else cast_Argo_variable_type(new_ds) + ) new_ds = new_ds[np.sort(new_ds.data_vars)] new_ds.encoding = self.encoding # Preserve low-level encoding information new_ds.attrs = self.attrs # Preserve original attributes @@ -485,7 +525,7 @@ def fillvalue(da): return new_ds def profile2point(self): - """ Convert a collection of profiles to a collection of points + """Convert a collection of profiles to a collection of points A "point" 
is a single location for measurements in space and time A "point" is localised as unique UID based on WMO, CYCLE_NUMBER and DIRECTION variable values. @@ -539,7 +579,7 @@ def profile2point(self): def filter_data_mode( # noqa: C901 self, keep_error: bool = True, errors: str = "raise" ): - """ Filter variables according to their data mode + """Filter variables according to their data mode This filter applies to and @@ -590,7 +630,7 @@ def safe_where_eq(xds, key, value): xds = xds.drop_vars("TIME") xds = xds.where(xds[key] == value, drop=True) xds["TIME"] = xr.DataArray( - np.empty((len(xds["N_POINTS"]),), dtype='datetime64[ns]'), + np.empty((len(xds["N_POINTS"]),), dtype="datetime64[ns]"), dims="N_POINTS", attrs=TIME.attrs, ) @@ -598,9 +638,9 @@ def safe_where_eq(xds, key, value): return xds def ds_split_datamode(xds): - """ Create one dataset for each of the data_mode + """Create one dataset for each of the data_mode - Split full dataset into 3 datasets + Split full dataset into 3 datasets """ # Real-time: argo_r = safe_where_eq(xds, "DATA_MODE", "R") @@ -631,7 +671,7 @@ def ds_split_datamode(xds): def fill_adjusted_nan(this_ds, vname): """Fill in the adjusted field with the non-adjusted wherever it is NaN - Ensure to have values even for bad QC data in delayed mode + Ensure to have values even for bad QC data in delayed mode """ ii = this_ds.where(np.isnan(this_ds[vname + "_ADJUSTED"]), drop=1)[ "N_POINTS" @@ -642,13 +682,13 @@ def fill_adjusted_nan(this_ds, vname): return this_ds def merge_arrays(this_argo_r, this_argo_a, this_argo_d, this_vname): - """ Merge one variable from 3 DataArrays + """Merge one variable from 3 DataArrays - Based on xarray merge function with ’no_conflicts’: only values - which are not null in all datasets must be equal. The returned - dataset then contains the combination of all non-null values. + Based on xarray merge function with ’no_conflicts’: only values + which are not null in all datasets must be equal. The returned + dataset then contains the combination of all non-null values. - Return a xarray.DataArray + Return a xarray.DataArray """ def merge_this(a1, a2, a3): @@ -686,7 +726,7 @@ def merge_this(a1, a2, a3): ######### ds = self._obj if "DATA_MODE" not in ds: - if errors == 'raise': + if errors == "raise": raise InvalidDatasetStructure( "Method only available for dataset with a 'DATA_MODE' variable " ) @@ -753,7 +793,7 @@ def merge_this(a1, a2, a3): def filter_qc( # noqa: C901 self, QC_list=[1, 2], QC_fields="all", drop=True, mode="all", mask=False ): - """ Filter data set according to QC values + """Filter data set according to QC values Filter the dataset to keep points where ``all`` or ``any`` of the QC fields has a value in the list of integer QC flags. @@ -815,7 +855,9 @@ def filter_qc( # noqa: C901 ) if len(QC_fields) == 0: - this.argo._add_history("Variables selected according to QC (but found no QC variables)") + this.argo._add_history( + "Variables selected according to QC (but found no QC variables)" + ) return this log.debug( @@ -852,7 +894,7 @@ def filter_qc( # noqa: C901 return this_mask def filter_scalib_pres(self, force: str = "default", inplace: bool = True): - """ Filter variables according to OWC salinity calibration software requirements + """Filter variables according to OWC salinity calibration software requirements By default, this filter will return a dataset with raw PRES, PSAL and TEMP; and if PRES is adjusted, PRES variable will be replaced by PRES_ADJUSTED. 
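For context, the OWC-oriented filters reformatted in this patch are designed to chain on a fetched dataset. A minimal sketch, assuming network access and using an illustrative float WMO:

    >>> from argopy import DataFetcher
    >>> ds = DataFetcher().float(6902746).to_xarray()  # a collection of points
    >>> ds = ds.argo.filter_data_mode()                # resolve R/A/D variables per data mode
    >>> ds = ds.argo.filter_qc(QC_list=[1, 2])         # keep good and probably-good points
    >>> ds = ds.argo.filter_scalib_pres()              # keep the PRES/PSAL/TEMP set OWC expects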
@@ -966,11 +1008,13 @@ def filter_researchmode(self) -> xr.Dataset: # Apply filter this = this.argo.filter_data_mode(errors="ignore") - if 'DATA_MODE' in this.data_vars: - this = this.where(this['DATA_MODE'] == 'D', drop=True) + if "DATA_MODE" in this.data_vars: + this = this.where(this["DATA_MODE"] == "D", drop=True) this = this.argo.filter_qc(QC_list=1) - if 'PRES_ERROR' in this.data_vars: # PRES_ADJUSTED_ERROR was renamed PRES_ERROR by filter_data_mode - this = this.where(this['PRES_ERROR'] < 20, drop=True) + if ( + "PRES_ERROR" in this.data_vars + ): # PRES_ADJUSTED_ERROR was renamed PRES_ERROR by filter_data_mode + this = this.where(this["PRES_ERROR"] < 20, drop=True) # Manage output: if to_profile: @@ -982,10 +1026,8 @@ def filter_researchmode(self) -> xr.Dataset: this = this.argo.cast_types() return this - def interp_std_levels(self, - std_lev: list or np.array, - axis: str = 'PRES'): - """ Interpolate measurements to standard pressure levels + def interp_std_levels(self, std_lev: list or np.array, axis: str = "PRES"): + """Interpolate measurements to standard pressure levels Parameters ---------- @@ -1075,13 +1117,17 @@ def interp_std_levels(self, z_regridded_dim="Z_LEVELS", ) ds_out[dv].attrs = this_dsp[dv].attrs # Preserve attributes - if 'long_name' in ds_out[dv].attrs: - ds_out[dv].attrs['long_name'] = "Interpolated %s" % ds_out[dv].attrs['long_name'] + if "long_name" in ds_out[dv].attrs: + ds_out[dv].attrs["long_name"] = ( + "Interpolated %s" % ds_out[dv].attrs["long_name"] + ) ds_out = ds_out.rename({"remapped": "%s_INTERPOLATED" % axis}) ds_out["%s_INTERPOLATED" % axis].attrs = this_dsp[axis].attrs if "long_name" in ds_out["%s_INTERPOLATED" % axis].attrs: - ds_out["%s_INTERPOLATED" % axis].attrs['long_name'] = "Standard %s levels" % axis + ds_out["%s_INTERPOLATED" % axis].attrs["long_name"] = ( + "Standard %s levels" % axis + ) for sv in solovars: ds_out[sv] = this_dsp[sv] @@ -1109,7 +1155,7 @@ def groupby_pressure_bins( squeeze: bool = True, merge: bool = True, ): - """ Group measurements by pressure bins + """Group measurements by pressure bins This method can be used to subsample and align an irregular dataset (pressure not being similar in all profiles) on a set of pressure bins. 
The output dataset could then be used to perform statistics along the ``N_PROF`` dimension @@ -1193,7 +1239,7 @@ def groupby_pressure_bins( bins = bins[np.where(h > 0)] def replace_i_level_values(this_da, this_i_level, new_values_along_profiles): - """ Convenience fct to update only one level of a ["N_PROF", "N_LEVELS"] xr.DataArray""" + """Convenience fct to update only one level of a ["N_PROF", "N_LEVELS"] xr.DataArray""" if this_da.dims == ("N_PROF", "N_LEVELS"): values = this_da.values values[:, this_i_level] = new_values_along_profiles @@ -1203,11 +1249,11 @@ def replace_i_level_values(this_da, this_i_level, new_values_along_profiles): return this_da def nanmerge(x, y): - """ Merge two 1D array + """Merge two 1D array - Given 2 arrays x, y of 1 dimension, return a new array with: - - x values where x is not NaN - - y values where x is NaN + Given 2 arrays x, y of 1 dimension, return a new array with: + - x values where x is not NaN + - y values where x is NaN """ z = x.copy() for i, v in enumerate(x): @@ -1217,12 +1263,10 @@ def nanmerge(x, y): merged_is_nan = lambda l1, l2: len( # noqa: E731 np.unique(np.where(np.isnan(l1.values + l2.values))) - ) == len( - l1 - ) + ) == len(l1) def merge_bin_matching_levels(this_ds: xr.Dataset) -> xr.Dataset: - """ Levels merger of type 'bins' value + """Levels merger of type 'bins' value Merge pair of lines with the following pattern: nan, VAL, VAL, nan, VAL, VAL @@ -1264,7 +1308,7 @@ def merge_bin_matching_levels(this_ds: xr.Dataset) -> xr.Dataset: return new_ds def merge_all_matching_levels(this_ds: xr.Dataset) -> xr.Dataset: - """ Levels merger + """Levels merger Merge any pair of levels with a "matching" pattern like this: VAL, VAL, VAL, nan, nan, VAL, nan, nan, @@ -1315,7 +1359,7 @@ def merge_all_matching_levels(this_ds: xr.Dataset) -> xr.Dataset: dv for dv in list(this_dsp.data_vars) if set(["N_LEVELS", "N_PROF"]) == set(this_dsp[dv].dims) - and dv not in DATA_TYPES['data']['str'] + and dv not in DATA_TYPES["data"]["str"] ] else: datavars = [ @@ -1324,7 +1368,7 @@ def merge_all_matching_levels(this_ds: xr.Dataset) -> xr.Dataset: if set(["N_LEVELS", "N_PROF"]) == set(this_dsp[dv].dims) and "QC" not in dv and "ERROR" not in dv - and dv not in DATA_TYPES['data']['str'] + and dv not in DATA_TYPES["data"]["str"] ] # All other variables: @@ -1388,7 +1432,7 @@ def teos10( # noqa: C901 vlist: list = ["SA", "CT", "SIG0", "N2", "PV", "PTEMP"], inplace: bool = True, ): - """ Add TEOS10 variables to the dataset + """Add TEOS10 variables to the dataset By default, adds: 'SA', 'CT' Other possible variables: 'SIG0', 'N2', 'PV', 'PTEMP', 'SOUND_SPEED' @@ -1607,7 +1651,7 @@ def create_float_source( do_compression: bool = True, debug_output: bool = False, ): - """ Preprocess data for OWC software calibration + """Preprocess data for OWC software calibration This method can create a FLOAT SOURCE file (i.e. the .mat file that usually goes into /float_source/) for OWC software. 
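Continuing the sketch, the binning and TEOS-10 helpers touched in this hunk can be exercised as follows; keyword names come from the signatures visible above, while the bin edges and variable list are arbitrary examples (``teos10`` requires the optional gsw dependency):

.. code-block:: python

    import numpy as np

    # Sub-sample an irregular profile collection onto common pressure bins:
    bins = np.arange(0.0, 500.0, 50.0)
    ds_binned = dsp.argo.groupby_pressure_bins(bins=bins)

    # Derive TEOS-10 variables (vlist values taken from the default
    # signature above):
    ds_teos10 = ds.argo.teos10(vlist=["SA", "CT", "SIG0"], inplace=False)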
The FLOAT SOURCE file is saved as: @@ -1763,7 +1807,7 @@ def preprocess_one_float( select: str = "deep", debug_output: bool = False, ): - """ Run the entire preprocessing on a given dataset with one float data """ + """Run the entire preprocessing on a given dataset with one float data""" # Add potential temperature: if "PTEMP" not in this_one: @@ -1991,7 +2035,10 @@ def list_N_PROF_variables(self, uid=False): try: count[i_prof, iv] = len(np.unique(prof[vname])) except Exception as e: - print("An error happened when dealing with the '%s' data variable" % vname) + print( + "An error happened when dealing with the '%s' data variable" + % vname + ) raise (e) # Variables with a single unique value for each profile: @@ -2005,8 +2052,8 @@ def list_N_PROF_variables(self, uid=False): def list_WMO_CYC(self): """Given a dataset, return a list with all possible (PLATFORM_NUMBER, CYCLE_NUMBER) tuple""" profiles = [] - for wmo, grp in self._obj.groupby('PLATFORM_NUMBER'): - [profiles.append((wmo, cyc)) for cyc in np.unique(grp['CYCLE_NUMBER'])] + for wmo, grp in self._obj.groupby("PLATFORM_NUMBER"): + [profiles.append((wmo, cyc)) for cyc in np.unique(grp["CYCLE_NUMBER"])] return profiles @@ -2017,18 +2064,19 @@ def open_Argo_dataset(filename_or_obj): class ArgoEngine(BackendEntrypoint): - """ Backend for Argo netCDF files based on the xarray netCDF4 engine + """Backend for Argo netCDF files based on the xarray netCDF4 engine - It can open any Argo ".nc" files with 'Argo' in their global attribute 'Conventions'. + It can open any Argo ".nc" files with 'Argo' in their global attribute 'Conventions'. - But it will not be detected as valid backend for netcdf files, so make - sure to specify ``engine="argo"`` in :func:`xarray.open_dataset`. + But it will not be detected as valid backend for netcdf files, so make + sure to specify ``engine="argo"`` in :func:`xarray.open_dataset`. 
- Examples - -------- - >>> import xarray as xr - >>> ds = xr.open_dataset("dac/aoml/1901393/1901393_prof.nc", engine='argo') + Examples + -------- + >>> import xarray as xr + >>> ds = xr.open_dataset("dac/aoml/1901393/1901393_prof.nc", engine='argo') """ + description = "Open Argo netCDF files (.nc)" url = "https://argopy.readthedocs.io/en/latest/generated/argopy.xarray.ArgoEngine.html#argopy.xarray.ArgoEngine" @@ -2050,7 +2098,7 @@ def guess_can_open(self, filename_or_obj): except TypeError: return False if ext in {".nc"}: - attrs = xr.open_dataset(filename_or_obj, engine='netcdf4').attrs - return 'Conventions' in attrs and 'Argo' in attrs['Conventions'] + attrs = xr.open_dataset(filename_or_obj, engine="netcdf4").attrs + return "Conventions" in attrs and "Argo" in attrs["Conventions"] else: return False diff --git a/docs/user-guide/working-with-argo-data/owc_workflow_eg.py b/docs/user-guide/working-with-argo-data/owc_workflow_eg.py index 47f9dfda..e74bb0a0 100644 --- a/docs/user-guide/working-with-argo-data/owc_workflow_eg.py +++ b/docs/user-guide/working-with-argo-data/owc_workflow_eg.py @@ -1,4 +1,5 @@ -import os, shutil +import os +import shutil from pathlib import Path import pyowc as owc From c1fc7c01058457e9050e40ec360024036ed040d5 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Mon, 19 Aug 2024 15:51:12 +0200 Subject: [PATCH 05/23] Delete utilities.py --- argopy/__init__.py | 2 - argopy/utilities.py | 304 -------------------------------------------- 2 files changed, 306 deletions(-) delete mode 100644 argopy/utilities.py diff --git a/argopy/__init__.py b/argopy/__init__.py index 3eef4a01..824dbf70 100644 --- a/argopy/__init__.py +++ b/argopy/__init__.py @@ -29,7 +29,6 @@ # Other Import # from . import utils # noqa: E402 -from . import utilities # noqa: E402 # being deprecated until 0.1.15, then remove from . import stores # noqa: E402 from . import errors # noqa: E402 from . import plot # noqa: E402 @@ -72,7 +71,6 @@ "ArgoDOI", # Class # Submodules: - "utilities", # being deprecated until 0.1.15, then remove # "utils", "errors", "plot", diff --git a/argopy/utilities.py b/argopy/utilities.py deleted file mode 100644 index a2f232ff..00000000 --- a/argopy/utilities.py +++ /dev/null @@ -1,304 +0,0 @@ -import warnings -import importlib -import inspect -from functools import wraps - -warnings.filterwarnings("default", category=DeprecationWarning, module=__name__) - - -def refactored(func1): - - rel = importlib.import_module('argopy.related') - utils = importlib.import_module('argopy.utils') - in_related = hasattr(rel, func1.__name__) - func2 = getattr(rel, func1.__name__) if in_related else getattr(utils, func1.__name__) - - func1_type = 'function' - if inspect.isclass(func1): - func1_type = 'class' - - func2_loc = 'utils' - if in_related: - func2_loc = 'related' - - msg = "The 'argopy.utilities.{name}' {ftype} has moved to 'argopy.{where}.{name}'. \ -You're seeing this message because you called '{name}' imported from 'argopy.utilities'. \ -Please update your script to import '{name}' from 'argopy.{where}'. \ -After 0.1.15, importing 'utilities' will raise an error." 
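For readers skimming this removal, the shim being deleted follows a standard warn-then-forward pattern; here is a self-contained sketch of that pattern with simplified names, not the argopy implementation verbatim:

.. code-block:: python

    import warnings
    from functools import wraps


    def moved_to(new_func, new_home):
        """Build a decorator that warns, then forwards to a relocated function.

        `new_func` is the relocated implementation, `new_home` its new module.
        """
        def decorator(old_func):
            @wraps(old_func)
            def wrapper(*args, **kwargs):
                warnings.warn(
                    "'%s' has moved to '%s.%s', please update your imports"
                    % (old_func.__name__, new_home, old_func.__name__),
                    category=DeprecationWarning,
                    stacklevel=2,
                )
                return new_func(*args, **kwargs)
            return wrapper
        return decorator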
- - @wraps(func1) - def decorator(*args, **kwargs): - # warnings.simplefilter('always', DeprecationWarning) - warnings.warn( - msg.format(name=func1.__name__, ftype=func1_type, where=func2_loc), - category=DeprecationWarning, - stacklevel=2 - ) - # warnings.simplefilter('default', DeprecationWarning) - return func2(*args, **kwargs) - - return decorator - -# Argo related dataset and Meta-data fetchers - -@refactored -class TopoFetcher: - pass - -@refactored -class ArgoDocs: - pass - -@refactored -class ArgoNVSReferenceTables: - pass - -@refactored -class OceanOPSDeployments: - pass - -@refactored -def get_coriolis_profile_id(*args, **kwargs): - pass - -@refactored -def get_ea_profile_page(*args, **kwargs): - pass - -@refactored -def load_dict(*args, **kwargs): - pass - -@refactored -def mapp_dict(*args, **kwargs): - pass - -# Checkers -@refactored -def is_box(*args, **kwargs): - pass - -@refactored -def is_indexbox(*args, **kwargs): - pass - -@refactored -def is_list_of_strings(*args, **kwargs): - pass - -@refactored -def is_list_of_dicts(*args, **kwargs): - pass - -@refactored -def is_list_of_datasets(*args, **kwargs): - pass - -@refactored -def is_list_equal(*args, **kwargs): - pass - -@refactored -def check_wmo(*args, **kwargs): - pass - -@refactored -def is_wmo(*args, **kwargs): - pass - -@refactored -def check_cyc(*args, **kwargs): - pass - -@refactored -def is_cyc(*args, **kwargs): - pass - -@refactored -def check_index_cols(*args, **kwargs): - pass - -@refactored -def check_gdac_path(*args, **kwargs): - pass - -@refactored -def isconnected(*args, **kwargs): - pass - -@refactored -def isalive(*args, **kwargs): - pass - -@refactored -def isAPIconnected(*args, **kwargs): - pass - -@refactored -def erddap_ds_exists(*args, **kwargs): - pass - -@refactored -def urlhaskeyword(*args, **kwargs): - pass - - -# Data type casting: - -@refactored -def to_list(*args, **kwargs): - pass - -@refactored -def cast_Argo_variable_type(*args, **kwargs): - pass - -from .utils.casting import DATA_TYPES - -# Decorators - -@refactored -def deprecated(*args, **kwargs): - pass - -@refactored -def doc_inherit(*args, **kwargs): - pass - -# Lists: - -@refactored -def list_available_data_src(*args, **kwargs): - pass - -@refactored -def list_available_index_src(*args, **kwargs): - pass - -@refactored -def list_standard_variables(*args, **kwargs): - pass - -@refactored -def list_multiprofile_file_variables(*args, **kwargs): - pass - -# Cache management: -@refactored -def clear_cache(*args, **kwargs): - pass - -@refactored -def lscache(*args, **kwargs): - pass - -# Computation and performances: -@refactored -class Chunker: - pass - -# Accessories classes (specific objects): -@refactored -class float_wmo: - pass - -@refactored -class Registry: - pass - -# Locals (environments, versions, systems): -@refactored -def get_sys_info(*args, **kwargs): - pass - -@refactored -def netcdf_and_hdf5_versions(*args, **kwargs): - pass - -@refactored -def show_versions(*args, **kwargs): - pass - -@refactored -def show_options(*args, **kwargs): - pass - -@refactored -def modified_environ(*args, **kwargs): - pass - - -# Monitors -@refactored -def badge(*args, **kwargs): - pass - -@refactored -class fetch_status: - pass - -@refactored -class monitor_status: - pass - -# Geo (space/time data utilities) -@refactored -def toYearFraction(*args, **kwargs): - pass - -@refactored -def YearFraction_to_datetime(*args, **kwargs): - pass - -@refactored -def wrap_longitude(*args, **kwargs): - pass - -@refactored -def wmo2box(*args, **kwargs): - pass 
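In practice, scripts that hit these shims only needed their imports updated; the target modules below are inferred from the routing logic of ``refactored`` above, where the metadata fetchers resolve to ``argopy.related`` and the remaining helpers to ``argopy.utils``:

.. code-block:: python

    # Before (goes through the shims above and emits a DeprecationWarning):
    # from argopy.utilities import check_wmo, ArgoNVSReferenceTables

    # After (direct imports from the relocated modules):
    from argopy.utils import check_wmo
    from argopy.related import ArgoNVSReferenceTables

    wmos = check_wmo(6902746)  # WMOs validated and returned as a list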
- -# Computation with datasets: -@refactored -def linear_interpolation_remap(*args, **kwargs): - pass - -@refactored -def groupby_remap(*args, **kwargs): - pass - -# Manipulate datasets: -@refactored -def drop_variables_not_in_all_datasets(*args, **kwargs): - pass - -@refactored -def fill_variables_not_in_all_datasets(*args, **kwargs): - pass - -# Formatters: -@refactored -def format_oneline(*args, **kwargs): - pass - -@refactored -def argo_split_path(*args, **kwargs): - pass - - -# Loggers -@refactored -def warnUnless(*args, **kwargs): - pass - -@refactored -def log_argopy_callerstack(*args, **kwargs): - pass - -if __name__ == "argopy.utilities": - warnings.warn( - "The 'argopy.utilities' has moved to 'argopy.utils'. After 0.1.15, importing 'utilities' " - "will raise an error. Please update your script.", - category=DeprecationWarning, - stacklevel=2, - ) From 7a98bc132e5015230e39303adef54e54b5aa9ae9 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Mon, 19 Aug 2024 16:48:35 +0200 Subject: [PATCH 06/23] Add support for python 3.10, drop 3.8 --- .github/workflows/pytests-upstream.yml | 2 +- .github/workflows/pytests.yml | 2 +- argopy/utils/locals.py | 15 ++++++- ci/envs_manager | 7 +++ ci/requirements/py3.10-all-free.yml | 61 ++++++++++++++++++++++++++ ci/requirements/py3.10-all-pinned.yml | 61 ++++++++++++++++++++++++++ ci/requirements/py3.10-core-free.yml | 61 ++++++++++++++++++++++++++ ci/requirements/py3.10-core-pinned.yml | 61 ++++++++++++++++++++++++++ cli/show_versions | 30 +++++++------ setup.py | 1 - 10 files changed, 282 insertions(+), 19 deletions(-) create mode 100644 ci/requirements/py3.10-all-free.yml create mode 100644 ci/requirements/py3.10-all-pinned.yml create mode 100644 ci/requirements/py3.10-core-free.yml create mode 100644 ci/requirements/py3.10-core-pinned.yml diff --git a/.github/workflows/pytests-upstream.yml b/.github/workflows/pytests-upstream.yml index d98bc65f..d21612bc 100644 --- a/.github/workflows/pytests-upstream.yml +++ b/.github/workflows/pytests-upstream.yml @@ -74,7 +74,7 @@ jobs: strategy: fail-fast: true matrix: - python-version: ["3.8", "3.9"] + python-version: ["3.9", "3.10"] os: ["ubuntu-latest", "macos-latest", "windows-latest"] steps: diff --git a/.github/workflows/pytests.yml b/.github/workflows/pytests.yml index 43d77dec..71aadc12 100644 --- a/.github/workflows/pytests.yml +++ b/.github/workflows/pytests.yml @@ -52,7 +52,7 @@ jobs: max-parallel: 12 fail-fast: false matrix: - python-version: ["3.8", "3.9"] + python-version: ["3.9", "3.10"] os: ["ubuntu-latest", "windows-latest", "macos-latest"] experimental: [false] diff --git a/argopy/utils/locals.py b/argopy/utils/locals.py index 393041ef..30026fab 100644 --- a/argopy/utils/locals.py +++ b/argopy/utils/locals.py @@ -8,6 +8,7 @@ from importlib.metadata import version import contextlib import copy +import shutil from ..options import OPTIONS @@ -79,6 +80,17 @@ def netcdf_and_hdf5_versions(): return [("libhdf5", libhdf5_version), ("libnetcdf", libnetcdf_version)] +def cli_version(cli_name): + try: + a = subprocess.run([cli_name, '--version'], capture_output=True) + return a.stdout.decode().strip("\n").replace(cli_name, '').strip() + except: + if shutil.which(cli_name): + return "installed" + else: + return "-" + + def get_version(module_name): ver = "-" try: @@ -163,9 +175,8 @@ def show_versions(file=sys.stdout, conda=False): # noqa: C901 ("bottleneck", get_version), ("cftime", get_version), ("cfgrib", get_version), - ("conda", get_version), + ("codespell", cli_version), ("flake8", 
get_version), - ("nc_time_axis", get_version), ("numpy", get_version), # will come with xarray and pandas ("pandas", get_version), # will come with xarray ("pip", get_version), diff --git a/ci/envs_manager b/ci/envs_manager index c1668504..61014404 100755 --- a/ci/envs_manager +++ b/ci/envs_manager @@ -35,6 +35,13 @@ declare -A ENV_LIST=( ['argopy-py39-core-pinned']="requirements/py3.9-core-pinned.yml" ['argopy-py39-core-free']="requirements/py3.9-core-free.yml" + +['argopy-py310-all-free']="requirements/py3.10-all-free.yml" +['argopy-py310-all-pinned']="requirements/py3.10-all-pinned.yml" + +['argopy-py310-core-free']="requirements/py3.10-core-free.yml" +['argopy-py310-core-pinned']="requirements/py3.10-core-pinned.yml" + ) diff --git a/ci/requirements/py3.10-all-free.yml b/ci/requirements/py3.10-all-free.yml new file mode 100644 index 00000000..895c2103 --- /dev/null +++ b/ci/requirements/py3.10-all-free.yml @@ -0,0 +1,61 @@ +name: argopy-tests +channels: + - conda-forge +dependencies: + - python = 3.10.14 + +# CORE: + - aiohttp + - decorator + - erddapy + - fsspec + - netCDF4 + - packaging + - requests + - scipy + - toolz + - xarray + +# EXT.UTIL: + - boto3 + - gsw + - s3fs + - tqdm + - zarr + +# EXT.PERF: + - dask + - distributed + - h5netcdf + - pyarrow + +# EXT.PLOT: + - IPython + - cartopy + - ipykernel + - ipywidgets + - matplotlib + - pyproj + - seaborn + +# DEV: + - aiofiles + - black + - bottleneck + - cfgrib + - cftime + - codespell + - flake8 + - numpy + - pandas + - pip + - pytest + - pytest-cov + - pytest-env + - pytest-localftpserver + - setuptools +# - sphinx + +# PIP: + - pip: + - pytest-reportlog diff --git a/ci/requirements/py3.10-all-pinned.yml b/ci/requirements/py3.10-all-pinned.yml new file mode 100644 index 00000000..dda50ec4 --- /dev/null +++ b/ci/requirements/py3.10-all-pinned.yml @@ -0,0 +1,61 @@ +name: argopy-tests +channels: + - conda-forge +dependencies: + - python = 3.10.14 + +# CORE: + - aiohttp = 3.10.4 + - decorator = 5.1.1 + - erddapy = 2.2.0 + - fsspec = 2024.6.1 + - netCDF4 = 1.7.1 + - packaging = 24.1 + - requests = 2.32.3 + - scipy = 1.14.0 + - toolz = 0.12.1 + - xarray = 2024.7.0 + +# EXT.UTIL: + - boto3 = 1.35.0 + - gsw = 3.6.19 + - s3fs = 0.4.2 + - tqdm = 4.66.5 + - zarr = 2.18.2 + +# EXT.PERF: + - dask = 2024.8.1 + - distributed = 2024.8.1 + - h5netcdf = 1.3.0 + - pyarrow = 17.0.0 + +# EXT.PLOT: + - IPython = 8.26.0 + - cartopy = 0.23.0 + - ipykernel = 6.29.5 + - ipywidgets = 8.1.3 + - matplotlib = 3.9.2 + - pyproj = 3.6.1 + - seaborn = 0.13.2 + +# DEV: + - aiofiles = 24.1.0 + - black = 24.8.0 + - bottleneck = 1.4.0 + - cfgrib = 0.9.14.0 + - cftime = 1.6.4 + - codespell = 2.3.0 + - flake8 = 7.1.1 + - numpy = 2.1.0 + - pandas = 2.2.2 + - pip = 24.2 + - pytest = 8.3.2 + - pytest-cov = 5.0.0 + - pytest-env = 1.1.3 + - pytest-localftpserver = 0.0.0 + - setuptools = 72.1.0 +# - sphinx = - + +# PIP: + - pip: + - pytest-reportlog == 0.4.0 \ No newline at end of file diff --git a/ci/requirements/py3.10-core-free.yml b/ci/requirements/py3.10-core-free.yml new file mode 100644 index 00000000..0e805b99 --- /dev/null +++ b/ci/requirements/py3.10-core-free.yml @@ -0,0 +1,61 @@ +name: argopy-tests +channels: + - conda-forge +dependencies: + - python = 3.10.14 + +# CORE: + - aiohttp + - decorator + - erddapy + - fsspec + - netCDF4 + - packaging + - requests + - scipy + - toolz + - xarray + +# EXT.UTIL: +# - boto3 +# - gsw +# - s3fs +# - tqdm +# - zarr + +# EXT.PERF: +# - dask +# - distributed +# - h5netcdf +# - pyarrow + +# EXT.PLOT: +# - IPython +# - cartopy +# 
- ipykernel +# - ipywidgets +# - matplotlib +# - pyproj +# - seaborn + +# DEV: + - aiofiles + - black + - bottleneck + - cfgrib + - cftime + - codespell + - flake8 + - numpy + - pandas + - pip + - pytest + - pytest-cov + - pytest-env + - pytest-localftpserver + - setuptools +# - sphinx + +# PIP: + - pip: + - pytest-reportlog \ No newline at end of file diff --git a/ci/requirements/py3.10-core-pinned.yml b/ci/requirements/py3.10-core-pinned.yml new file mode 100644 index 00000000..bc7ac77c --- /dev/null +++ b/ci/requirements/py3.10-core-pinned.yml @@ -0,0 +1,61 @@ +name: argopy-tests +channels: + - conda-forge +dependencies: + - python = 3.10.14 + +# CORE: + - aiohttp = 3.10.4 + - decorator = 5.1.1 + - erddapy = 2.2.0 + - fsspec = 2024.6.1 + - netCDF4 = 1.7.1 + - packaging = 24.1 + - requests = 2.32.3 + - scipy = 1.14.0 + - toolz = 0.12.1 + - xarray = 2024.7.0 + +# EXT.UTIL: +# - boto3 = 1.35.0 +# - gsw = 3.6.19 +# - s3fs = 0.4.2 +# - tqdm = 4.66.5 +# - zarr = 2.18.2 + +# EXT.PERF: +# - dask = 2024.8.1 +# - distributed = 2024.8.1 +# - h5netcdf = 1.3.0 +# - pyarrow = 17.0.0 + +# EXT.PLOT: +# - IPython = 8.26.0 +# - cartopy = 0.23.0 +# - ipykernel = 6.29.5 +# - ipywidgets = 8.1.3 +# - matplotlib = 3.9.2 +# - pyproj = 3.6.1 +# - seaborn = 0.13.2 + +# DEV: + - aiofiles = 24.1.0 + - black = 24.8.0 + - bottleneck = 1.4.0 + - cfgrib = 0.9.14.0 + - cftime = 1.6.4 + - codespell = 2.3.0 + - flake8 = 7.1.1 + - numpy = 2.1.0 + - pandas = 2.2.2 + - pip = 24.2 + - pytest = 8.3.2 + - pytest-cov = 5.0.0 + - pytest-env = 1.1.3 + - pytest-localftpserver = 0.0.0 + - setuptools = 72.1.0 +# - sphinx = - + +# PIP: + - pip: + - pytest-reportlog == 0.4.0 \ No newline at end of file diff --git a/cli/show_versions b/cli/show_versions index 52e5abbe..7709809b 100755 --- a/cli/show_versions +++ b/cli/show_versions @@ -9,6 +9,7 @@ import locale import argparse import setuptools from importlib.metadata import version +import shutil def get_sys_info(): @@ -79,6 +80,16 @@ def netcdf_and_hdf5_versions(): return [("libhdf5", libhdf5_version), ("libnetcdf", libnetcdf_version)] +def cli_version(cli_name): + try: + a = subprocess.run([cli_name, '--version'], capture_output=True) + return a.stdout.decode().strip("\n").replace(cli_name, '').strip() + except: + if shutil.which(cli_name): + return "installed" + else: + return "-" + def get_version(module_name): ver = '-' try: @@ -151,9 +162,8 @@ def show_versions(file=sys.stdout, conda=False, free=False, core=False): # noqa ("bottleneck", get_version), ("cftime", get_version), ("cfgrib", get_version), - ("conda", get_version), + ("codespell", cli_version), ("flake8", get_version), - ("nc_time_axis", get_version), ("numpy", get_version), # will come with xarray and pandas ("pandas", get_version), # will come with xarray ("pip", get_version), @@ -176,19 +186,11 @@ def show_versions(file=sys.stdout, conda=False, free=False, core=False): # noqa deps_blob = list() for (modname, ver_f) in deps: try: - if modname in sys.modules: - mod = sys.modules[modname] - else: - mod = importlib.import_module(modname) + ver = ver_f(modname) + deps_blob.append((modname, ver)) except Exception: - deps_blob.append((modname, '-')) - else: - try: - ver = ver_f(modname) - deps_blob.append((modname, ver)) - except Exception: - # raise ValueError("Can't get version for '%s'" % modname) - deps_blob.append((modname, "installed")) + # raise ValueError("Can't get version for '%s'" % modname) + deps_blob.append((modname, "installed")) DEPS_blob[level] = deps_blob # Print: diff --git a/setup.py b/setup.py index 
6c7c3320..4fb7aaa1 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,6 @@ package_data={"argopy": ["static/assets/*", "static/css/*"]}, install_requires=requirements, classifiers=[ - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Topic :: Scientific/Engineering", From a3df8851374e686718af58f1fbfca1f367bc98d3 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 10:27:18 +0200 Subject: [PATCH 07/23] Improve show_versions following support for py 3.10 --- argopy/utils/locals.py | 34 +++++++++++++++++++++++++++++++--- cli/show_versions | 32 +++++++++++++++++++++++++++++--- 2 files changed, 60 insertions(+), 6 deletions(-) diff --git a/argopy/utils/locals.py b/argopy/utils/locals.py index 30026fab..737373f3 100644 --- a/argopy/utils/locals.py +++ b/argopy/utils/locals.py @@ -9,9 +9,20 @@ import contextlib import copy import shutil +import json from ..options import OPTIONS +PIP_INSTALLED = {} +try: + reqs = subprocess.check_output([sys.executable, '-m', 'pip', 'list', '--format', 'json']) + reqs = json.loads(reqs.decode()) + [PIP_INSTALLED.update({mod['name']: mod['version']}) for mod in reqs] +except: + pass + + + def get_sys_info(): """Returns system information as a dict""" @@ -86,23 +97,40 @@ def cli_version(cli_name): return a.stdout.decode().strip("\n").replace(cli_name, '').strip() except: if shutil.which(cli_name): - return "installed" + return "- # installed" else: return "-" +def pip_version(pip_name): + version = '-' + for name in [pip_name, pip_name.replace("_", "-"), pip_name.replace("-", "_")]: + if name in PIP_INSTALLED: + version = PIP_INSTALLED[name] + return version + + def get_version(module_name): - ver = "-" + ver = '-' try: ver = module_name.__version__ except AttributeError: try: ver = version(module_name) except importlib.metadata.PackageNotFoundError: - pass + try: + ver = pip_version(module_name) + except: + try: + ver = cli_version(module_name) + except: + pass + if sum([int(v == '0') for v in ver.split(".")]) == len(ver.split(".")): + ver = '-' return ver + def show_versions(file=sys.stdout, conda=False): # noqa: C901 """Print the versions of argopy and its dependencies diff --git a/cli/show_versions b/cli/show_versions index 7709809b..f25b2396 100755 --- a/cli/show_versions +++ b/cli/show_versions @@ -10,6 +10,15 @@ import argparse import setuptools from importlib.metadata import version import shutil +import json + +PIP_INSTALLED = {} +try: + reqs = subprocess.check_output([sys.executable, '-m', 'pip', 'list', '--format', 'json']) + reqs = json.loads(reqs.decode()) + [PIP_INSTALLED.update({mod['name']: mod['version']}) for mod in reqs] +except: + pass def get_sys_info(): @@ -86,10 +95,19 @@ def cli_version(cli_name): return a.stdout.decode().strip("\n").replace(cli_name, '').strip() except: if shutil.which(cli_name): - return "installed" + return "- # installed" else: return "-" + +def pip_version(pip_name): + version = '-' + for name in [pip_name, pip_name.replace("_", "-"), pip_name.replace("-", "_")]: + if name in PIP_INSTALLED: + version = PIP_INSTALLED[name] + return version + + def get_version(module_name): ver = '-' try: @@ -98,7 +116,15 @@ def get_version(module_name): try: ver = version(module_name) except importlib.metadata.PackageNotFoundError: - pass + try: + ver = pip_version(module_name) + except: + try: + ver = cli_version(module_name) + except: + pass + if sum([int(v == '0') for v in ver.split(".")]) == len(ver.split(".")): + ver = '-' return ver @@ -175,7 
+201,7 @@ def show_versions(file=sys.stdout, conda=False, free=False, core=False): # noqa ("sphinx", get_version), ]), 'pip': sorted([ - ("pytest_reportlog", get_version), + ("pytest_reportlog", pip_version), ]) } From f4fb3833c61f3e61260d5c16545471be6981f2bb Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 10:28:34 +0200 Subject: [PATCH 08/23] More 3.10 CI tests support --- .github/workflows/pytests-upstream.yml | 2 +- .github/workflows/pytests.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pytests-upstream.yml b/.github/workflows/pytests-upstream.yml index d21612bc..7cde501c 100644 --- a/.github/workflows/pytests-upstream.yml +++ b/.github/workflows/pytests-upstream.yml @@ -200,7 +200,7 @@ jobs: strategy: fail-fast: true matrix: - python-version: ["3.8", "3.9"] + python-version: ["3.9", "3.10"] os: ["ubuntu-latest", "macos-latest", "windows-latest"] steps: diff --git a/.github/workflows/pytests.yml b/.github/workflows/pytests.yml index 71aadc12..23afc8f5 100644 --- a/.github/workflows/pytests.yml +++ b/.github/workflows/pytests.yml @@ -174,7 +174,7 @@ jobs: max-parallel: 12 fail-fast: false matrix: - python-version: ["3.8", "3.9"] + python-version: ["3.9", "3.10"] os: ["ubuntu-latest", "macos-latest", "windows-latest"] experimental: [false] From 844b4150b9e2b6b212b3afda1b62dd47a9c79ce0 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 10:41:10 +0200 Subject: [PATCH 09/23] fix 3.10 env --- ci/requirements/py3.10-all-pinned.yml | 2 +- ci/requirements/py3.10-core-pinned.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/requirements/py3.10-all-pinned.yml b/ci/requirements/py3.10-all-pinned.yml index dda50ec4..ba481d36 100644 --- a/ci/requirements/py3.10-all-pinned.yml +++ b/ci/requirements/py3.10-all-pinned.yml @@ -52,7 +52,7 @@ dependencies: - pytest = 8.3.2 - pytest-cov = 5.0.0 - pytest-env = 1.1.3 - - pytest-localftpserver = 0.0.0 + - pytest-localftpserver - setuptools = 72.1.0 # - sphinx = - diff --git a/ci/requirements/py3.10-core-pinned.yml b/ci/requirements/py3.10-core-pinned.yml index bc7ac77c..58a008df 100644 --- a/ci/requirements/py3.10-core-pinned.yml +++ b/ci/requirements/py3.10-core-pinned.yml @@ -52,7 +52,7 @@ dependencies: - pytest = 8.3.2 - pytest-cov = 5.0.0 - pytest-env = 1.1.3 - - pytest-localftpserver = 0.0.0 + - pytest-localftpserver - setuptools = 72.1.0 # - sphinx = - From 860f3ec0345c8dca4b4e63d2679924c50bc072f7 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 10:41:34 +0200 Subject: [PATCH 10/23] Update casting.py - delete deprecated `cast_types` --- argopy/utils/casting.py | 195 ---------------------------------------- 1 file changed, 195 deletions(-) diff --git a/argopy/utils/casting.py b/argopy/utils/casting.py index 00cf60e0..1119debb 100644 --- a/argopy/utils/casting.py +++ b/argopy/utils/casting.py @@ -20,201 +20,6 @@ DATA_TYPES = json.load(f) -@deprecated("The 'cast_types' utility is deprecated since 0.1.13. It's been replaced by 'cast_Argo_variable_type'. Calling it will raise an error after argopy 0.1.15") -def cast_types(ds): # noqa: C901 - """Make sure variables are of the appropriate types according to Argo - - #todo: This is hard coded, but should be retrieved from an API somewhere. - Should be able to handle all possible variables encountered in the Argo dataset. 
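The deprecation notice above names the replacement, so migrating away from the deleted function is a one-line change; a sketch follows, reusing the sample file path from the ArgoEngine docstring earlier in this series:

.. code-block:: python

    import xarray as xr
    from argopy.utils import cast_Argo_variable_type

    ds = xr.open_dataset("dac/aoml/1901393/1901393_prof.nc")  # any Argo file
    ds = cast_Argo_variable_type(ds)  # replaces the deprecated cast_types(ds)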
- - Parameter - --------- - :class:`xarray.DataSet` - - Returns - ------- - :class:`xarray.DataSet` - """ - - list_str = [ - "PLATFORM_NUMBER", - "DATA_MODE", - "DIRECTION", - "DATA_CENTRE", - "DATA_TYPE", - "FORMAT_VERSION", - "HANDBOOK_VERSION", - "PROJECT_NAME", - "PI_NAME", - "STATION_PARAMETERS", - "DATA_CENTER", - "DC_REFERENCE", - "DATA_STATE_INDICATOR", - "PLATFORM_TYPE", - "FIRMWARE_VERSION", - "POSITIONING_SYSTEM", - "PROFILE_PRES_QC", - "PROFILE_PSAL_QC", - "PROFILE_TEMP_QC", - "PARAMETER", - "SCIENTIFIC_CALIB_EQUATION", - "SCIENTIFIC_CALIB_COEFFICIENT", - "SCIENTIFIC_CALIB_COMMENT", - "HISTORY_INSTITUTION", - "HISTORY_STEP", - "HISTORY_SOFTWARE", - "HISTORY_SOFTWARE_RELEASE", - "HISTORY_REFERENCE", - "HISTORY_QCTEST", - "HISTORY_ACTION", - "HISTORY_PARAMETER", - "VERTICAL_SAMPLING_SCHEME", - "FLOAT_SERIAL_NO", - "SOURCE", - "EXPOCODE", - "QCLEVEL", - ] - list_int = [ - "PLATFORM_NUMBER", - "WMO_INST_TYPE", - "WMO_INST_TYPE", - "CYCLE_NUMBER", - "CONFIG_MISSION_NUMBER", - ] - list_datetime = [ - "REFERENCE_DATE_TIME", - "DATE_CREATION", - "DATE_UPDATE", - "JULD", - "JULD_LOCATION", - "SCIENTIFIC_CALIB_DATE", - "HISTORY_DATE", - "TIME", - ] - - def fix_weird_bytes(x): - x = x.replace(b"\xb1", b"+/-") - return x - - fix_weird_bytes = np.vectorize(fix_weird_bytes) - - def cast_this(da, type): - """Low-level casting of DataArray values""" - try: - da.values = da.values.astype(type) - da.attrs["casted"] = 1 - except Exception: - msg = ( - "Oops! %s occurred. Fail to cast <%s> into %s for: %s. Encountered unique values: %s" - % (sys.exc_info()[0], str(da.dtype), type, da.name, str(np.unique(da))) - ) - log.debug(msg) - return da - - def cast_this_da(da): - """Cast any DataArray""" - v = da.name - da.attrs["casted"] = 0 - if v in list_str and da.dtype == "O": # Object - if v in ["SCIENTIFIC_CALIB_COEFFICIENT"]: - da.values = fix_weird_bytes(da.values) - da = cast_this(da, str) - - if v in list_int: # and da.dtype == 'O': # Object - da = cast_this(da, np.int32) - - if v in list_datetime and da.dtype == "O": # Object - if ( - "conventions" in da.attrs - and da.attrs["conventions"] == "YYYYMMDDHHMISS" - ): - if da.size != 0: - if len(da.dims) <= 1: - val = da.astype(str).values.astype("U14") - # This should not happen, but still ! That's real world data - val[val == " "] = "nan" - da.values = pd.to_datetime(val, format="%Y%m%d%H%M%S") - else: - s = da.stack(dummy_index=da.dims) - val = s.astype(str).values.astype("U14") - # This should not happen, but still ! 
That's real world data - val[val == ""] = "nan" - val[val == " "] = "nan" - # - s.values = pd.to_datetime(val, format="%Y%m%d%H%M%S") - da.values = s.unstack("dummy_index") - da = cast_this(da, "datetime64[s]") - else: - da = cast_this(da, "datetime64[s]") - - elif v == "SCIENTIFIC_CALIB_DATE": - da = cast_this(da, str) - s = da.stack(dummy_index=da.dims) - s.values = pd.to_datetime(s.values, format="%Y%m%d%H%M%S") - da.values = (s.unstack("dummy_index")).values - da = cast_this(da, "datetime64[s]") - - if "QC" in v and "PROFILE" not in v and "QCTEST" not in v: - if da.dtype == "O": # convert object to string - da = cast_this(da, str) - - # Address weird string values: - # (replace missing or nan values by a '0' that will be cast as an integer later - - if da.dtype == " Date: Tue, 20 Aug 2024 10:56:51 +0200 Subject: [PATCH 11/23] xarray < 2024.3 while https://github.com/pydata/xarray/issues/8909 is not solved --- ci/requirements/py3.10-all-free.yml | 2 +- ci/requirements/py3.10-core-free.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/requirements/py3.10-all-free.yml b/ci/requirements/py3.10-all-free.yml index 895c2103..47da7b4b 100644 --- a/ci/requirements/py3.10-all-free.yml +++ b/ci/requirements/py3.10-all-free.yml @@ -14,7 +14,7 @@ dependencies: - requests - scipy - toolz - - xarray + - xarray < 2024.3 # while https://github.com/pydata/xarray/issues/8909 is not solved # EXT.UTIL: - boto3 diff --git a/ci/requirements/py3.10-core-free.yml b/ci/requirements/py3.10-core-free.yml index 0e805b99..94f62622 100644 --- a/ci/requirements/py3.10-core-free.yml +++ b/ci/requirements/py3.10-core-free.yml @@ -14,7 +14,7 @@ dependencies: - requests - scipy - toolz - - xarray + - xarray < 2024.3 # while https://github.com/pydata/xarray/issues/8909 is not solved # EXT.UTIL: # - boto3 From 90e66251c6b43963140dca790e5b671e868df52b Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 11:00:11 +0200 Subject: [PATCH 12/23] xarray < 2024.3 in pinned env --- ci/requirements/py3.10-all-pinned.yml | 4 ++-- ci/requirements/py3.10-core-pinned.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/ci/requirements/py3.10-all-pinned.yml b/ci/requirements/py3.10-all-pinned.yml index ba481d36..f20410d2 100644 --- a/ci/requirements/py3.10-all-pinned.yml +++ b/ci/requirements/py3.10-all-pinned.yml @@ -14,7 +14,7 @@ dependencies: - requests = 2.32.3 - scipy = 1.14.0 - toolz = 0.12.1 - - xarray = 2024.7.0 + - xarray = 2024.2.0 # EXT.UTIL: - boto3 = 1.35.0 @@ -46,7 +46,7 @@ dependencies: - cftime = 1.6.4 - codespell = 2.3.0 - flake8 = 7.1.1 - - numpy = 2.1.0 + - numpy = 1.26.4 - pandas = 2.2.2 - pip = 24.2 - pytest = 8.3.2 diff --git a/ci/requirements/py3.10-core-pinned.yml b/ci/requirements/py3.10-core-pinned.yml index 58a008df..95e4e481 100644 --- a/ci/requirements/py3.10-core-pinned.yml +++ b/ci/requirements/py3.10-core-pinned.yml @@ -14,7 +14,7 @@ dependencies: - requests = 2.32.3 - scipy = 1.14.0 - toolz = 0.12.1 - - xarray = 2024.7.0 + - xarray = 2024.2.0 # EXT.UTIL: # - boto3 = 1.35.0 @@ -46,7 +46,7 @@ dependencies: - cftime = 1.6.4 - codespell = 2.3.0 - flake8 = 7.1.1 - - numpy = 2.1.0 + - numpy = 1.26.4 - pandas = 2.2.2 - pip = 24.2 - pytest = 8.3.2 From c062ee06453e96b51335c6408093bda178ee4b32 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 14:16:24 +0200 Subject: [PATCH 13/23] Update rtd env --- ci/envs_manager | 3 +- .../{py3.9-docs.yml => py3.9-docs-dev.yml} | 4 +- ci/requirements/py3.9-docs-rtd.yml | 59 ++++++++++++++++ 
docs/requirements.txt | 70 ++++++++++--------- 4 files changed, 101 insertions(+), 35 deletions(-) rename ci/requirements/{py3.9-docs.yml => py3.9-docs-dev.yml} (97%) create mode 100644 ci/requirements/py3.9-docs-rtd.yml diff --git a/ci/envs_manager b/ci/envs_manager index 61014404..6b9846e3 100755 --- a/ci/envs_manager +++ b/ci/envs_manager @@ -19,7 +19,8 @@ source ~/miniconda3/etc/profile.d/conda.sh # LIST ALL AVAILABLE ENVIRONMENTS AND THE ASSOCIATED YAML FILE ######################### declare -A ENV_LIST=( -['argopy-docs']="requirements/py3.9-docs.yml" +['argopy-docs-dev']="requirements/py3.9-docs-dev.yml" +['argopy-docs-rtd']="requirements/py3.9-docs-rtd.yml" ['argopy-py37-all-free']="requirements/py3.7-all-free.yml" ['argopy-py37-core-free']="requirements/py3.7-core-free.yml" diff --git a/ci/requirements/py3.9-docs.yml b/ci/requirements/py3.9-docs-dev.yml similarity index 97% rename from ci/requirements/py3.9-docs.yml rename to ci/requirements/py3.9-docs-dev.yml index 28f58ecb..9f883003 100644 --- a/ci/requirements/py3.9-docs.yml +++ b/ci/requirements/py3.9-docs-dev.yml @@ -1,4 +1,4 @@ -name: argopy-docs +name: argopy-docs-dev channels: - conda-forge dependencies: @@ -18,6 +18,7 @@ dependencies: - cartopy - cmocean - cftime + - cfgrib - decorator - distributed - ipython @@ -40,7 +41,6 @@ dependencies: - sphinx-book-theme - pydata-sphinx-theme - codespell - - cfgrib - black - flake8 - pytest-cov diff --git a/ci/requirements/py3.9-docs-rtd.yml b/ci/requirements/py3.9-docs-rtd.yml new file mode 100644 index 00000000..6efcae9c --- /dev/null +++ b/ci/requirements/py3.9-docs-rtd.yml @@ -0,0 +1,59 @@ +name: argopy-docs-rtd +channels: + - conda-forge +dependencies: + - python=3.9 + - xarray=2024.2.0 + - scipy + - netcdf4 + - dask +# - toolz + - erddapy + - fsspec + - gsw + - aiohttp +# - bottleneck + - s3fs + - boto3 +# - cartopy +# - cmocean + - cftime + - cfgrib + - decorator + - distributed + - ipython + - matplotlib +# - numpy + - pandas +# - packaging + - pip +# - pytest +# - seaborn +# - setuptools + - zarr + - tqdm + - ipykernel + - ipywidgets + - nbsphinx + - jinja2 + - sphinx-issues + - sphinx_rtd_theme + - sphinx-book-theme + - pydata-sphinx-theme +# - codespell +# - black +# - flake8 +# - pytest-cov +# - pytest-env + - pip: + - Sphinx + - numpydoc + - readthedocs-sphinx-ext + - sphinx-autosummary-accessors + - pydocstyle + - sphinx-tabs + - sphinxext-rediraffe +# - sphinxext-opengraph + - git+https://github.com/sphinx-contrib/googleanalytics + - sphinx-copybutton + - sphinx-design diff --git a/docs/requirements.txt b/docs/requirements.txt index d1a64f19..6976ab22 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,54 +1,60 @@ +xarray==2024.2.0 +scipy>=1.5 +netcdf4>=1.5.4 +dask>=2021.9.1 +# toolz>=0.8.2 +erddapy>=0.7.2 +fsspec>=0.7.4 #, <2022.3.1 +gsw>=3.4.0 +aiohttp>=3.7.4 +# bottleneck>=1.3.2 +s3fs +boto3 +cftime>=1.5.1 +cfgrib>=0.9.9.1 +decorator +distributed>=2.30.0 +ipython<=8.26.0 +matplotlib>=3.4.0 +# numpy +pandas<3.0 +# packaging pip>=23.1 -# pkgconfig +# pytest +# seaborn>=0.11.0 +# setuptools +zarr>=2.4.0 +tqdm>=4.50.2 ipykernel>=6.22 -ipython<=8.26.0 ipywidgets>=8.0 -urllib3<2 - nbsphinx>=0.9 -numpydoc>=1.1.0 jinja2>=3.0.2 sphinx_issues>=1.2.0 -gitpython>=3.1.2 -sphinx-autosummary-accessors>=0.1.2 -pydata-sphinx-theme>=0.4.3 -readthedocs-sphinx-ext sphinx-rtd-theme sphinx-book-theme # sphinx-bootstrap-theme +pydata-sphinx-theme>=0.4.3 +Sphinx +numpydoc>=1.1.0 +readthedocs-sphinx-ext +sphinx-autosummary-accessors>=0.1.2 +pydocstyle sphinx-tabs 
+sphinxext-rediraffe +# sphinxext-opengraph git+https://github.com/sphinx-contrib/googleanalytics sphinx-copybutton sphinx-design -sphinxext-rediraffe -xarray>=0.16.1,<2024.3 -pandas<3.0 -scipy>=1.5 -scikit-learn>=0.23.2 -dask>=2021.9.1 -distributed>=2.30.0 -# bottleneck>=1.3.2 -# toolz>=0.8.2 -erddapy>=0.7.2 -gsw>=3.4.0 +# pkgconfig +# urllib3<2 -s3fs -boto3 -decorator +# gitpython>=3.1.2 -fsspec>=0.7.4 #, <2022.3.1 -aiohttp>=3.7.4 +# scikit-learn>=0.23.2 -matplotlib>=3.4.0 #cartopy>=0.18.0 -#seaborn>=0.11.0 #geos>=0.2 # cmocean -zarr>=2.4.0 -netcdf4>=1.5.4 -cftime>=1.5.1 -cfgrib>=0.9.9.1 -tqdm>=4.50.2 From b937fbd6c72874671baeb49e00ee99d1a35159f7 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 14:16:27 +0200 Subject: [PATCH 14/23] Update impact.rst --- docs/impact.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/impact.rst b/docs/impact.rst index 9ba9b19b..4e1c83db 100644 --- a/docs/impact.rst +++ b/docs/impact.rst @@ -14,6 +14,8 @@ Papers & proceedings mentioning argopy - Dunnington et al., (2021). argodata: An R interface to oceanographic data from the International Argo Program. Journal of Open Source Software, 6(68), 3659, https://doi.org/10.21105/joss.03659 +- Elipot S. , P Miron, M Curcic, K Santana, R Lumpkin (2024). Clouddrift: a Python package to accelerate the use of Lagrangian data for atmospheric, oceanic, and climate sciences. Journal of Open Source Software, 9(99), 6742, https://joss.theoj.org/papers/10.21105/joss.06742 + - Gonzalez A., "The Argo Online School: An e-learning tool to get started with Argo" (2023), The Journal of Open Source Education (Under review) - Huda, Md Nurul, "Machine Learning for Improvement of Ocean Data Resolution for Weather Forecasting and Climatological Research" (2023). Thesis and Dissertations, Virginia Tech, http://hdl.handle.net/10919/116504 @@ -22,6 +24,8 @@ Papers & proceedings mentioning argopy - Zhang, Y. (2023). Python Data Analysis Techniques in Administrative Information Integration Management System. In: Atiquzzaman, M., Yen, N.Y., Xu, Z. (eds) Proceedings of the 4th International Conference on Big Data Analytics for Cyber-Physical System in Smart City - Volume 2. BDCPS 2022. Lecture Notes on Data Engineering and Communications Technologies, vol 168. Springer, Singapore. 
https://doi.org/10.1007/978-981-99-1157-8_35 + + Other interesting mentions -------------------------- From 3c25411bd2e236f2335c8a5f14633689ef2b0e10 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 15:26:47 +0200 Subject: [PATCH 15/23] Update readthedocs.yml --- readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readthedocs.yml b/readthedocs.yml index 269de29c..dc94528c 100644 --- a/readthedocs.yml +++ b/readthedocs.yml @@ -12,7 +12,7 @@ build: # image: latest os: "ubuntu-22.04" tools: - python: "3.8" + python: "3.9" python: install: From cfe17db9328ee72f3f218ca06adbb7b6b0014425 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 15:48:57 +0200 Subject: [PATCH 16/23] Remove py 3.8 env files --- ci/requirements/py3.8-all-free.yml | 59 -------------------------- ci/requirements/py3.8-all-pinned.yml | 61 --------------------------- ci/requirements/py3.8-core-free.yml | 60 -------------------------- ci/requirements/py3.8-core-pinned.yml | 60 -------------------------- 4 files changed, 240 deletions(-) delete mode 100644 ci/requirements/py3.8-all-free.yml delete mode 100644 ci/requirements/py3.8-all-pinned.yml delete mode 100644 ci/requirements/py3.8-core-free.yml delete mode 100644 ci/requirements/py3.8-core-pinned.yml diff --git a/ci/requirements/py3.8-all-free.yml b/ci/requirements/py3.8-all-free.yml deleted file mode 100644 index ea4d0e8e..00000000 --- a/ci/requirements/py3.8-all-free.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: argopy-tests -channels: - - conda-forge -dependencies: - - python = 3.8 - -# CORE: - - aiohttp - - decorator - - erddapy - - fsspec - - netCDF4 - - packaging - - requests - - scipy - - toolz - - xarray - -# EXT.UTIL: - - boto3 - - gsw - - s3fs - - tqdm - - zarr - -# EXT.PERF: - - dask - - distributed - - pyarrow - -# EXT.PLOT: - - IPython - - cartopy - - ipykernel - - ipywidgets <= 7.8.0 - - matplotlib - - pyproj - - seaborn - -# DEV: - - aiofiles - - black - - bottleneck - - cfgrib - - cftime -# - conda - - flake8 -# - nc-time-axis - - numpy - - pandas - - pip - - pytest - - pytest-cov - - pytest-env - - pytest-localftpserver - - setuptools - - - pip: - - pytest-reportlog \ No newline at end of file diff --git a/ci/requirements/py3.8-all-pinned.yml b/ci/requirements/py3.8-all-pinned.yml deleted file mode 100644 index 56ab3b01..00000000 --- a/ci/requirements/py3.8-all-pinned.yml +++ /dev/null @@ -1,61 +0,0 @@ -name: argopy-tests -channels: - - conda-forge -dependencies: - - python = 3.8 - -# CORE: - - aiohttp = 3.8.6 - - decorator = 5.1.1 - - erddapy = 2.2.0 - - fsspec = 2023.9.2 - - netCDF4 = 1.6.4 - - packaging = 23.2 - - requests = 2.31.0 - - scipy = 1.10.1 - - toolz = 0.12.0 - - xarray = 2023.1.0 - -# EXT.UTIL: - - boto3 = 1.28.17 - - gsw = 3.6.17 - - s3fs = 2023.9.2 - - tqdm = 4.66.1 - - zarr = 2.13.3 - -# EXT.PERF: - - dask = 2023.5.0 - - distributed = 2023.5.0 - - pyarrow = 13.0.0 - -# EXT.PLOT: - - IPython = 8.12.2 - - cartopy = 0.21.1 - - ipykernel = 6.25.2 - - ipywidgets = 7.8.0 - - matplotlib = 3.7.3 - - pyproj = 3.5.0 - - seaborn = 0.13.0 - -# DEV: - - aiofiles = 23.1.0 - - black = 23.9.1 - - bottleneck = 1.3.7 - - cfgrib = 0.9.10.4 - - cftime = 1.6.2 -# - conda = - - - flake8 = 6.1.0 -# - nc-time-axis = - - - numpy = 1.24.4 - - pandas = 1.5.3 - - pip = 23.2.1 - - pytest = 7.4.2 - - pytest-cov = 4.1.0 - - pytest-env = 1.0.1 - - pytest-localftpserver = 1.1.4 - - setuptools = 68.2.2 -# - sphinx = - - -# PIP: - - pip: - - pytest-reportlog == 0.4.0 diff --git 
a/ci/requirements/py3.8-core-free.yml b/ci/requirements/py3.8-core-free.yml deleted file mode 100644 index 29d57f76..00000000 --- a/ci/requirements/py3.8-core-free.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: argopy-tests -channels: - - conda-forge -dependencies: - - python = 3.8 - -# CORE: - - aiohttp - - decorator - - erddapy - - fsspec - - netCDF4 - - packaging - - requests - - scipy - - toolz - - xarray - -# EXT.UTIL: -# - boto3 -# - gsw -# - s3fs -# - tqdm -# - zarr - -# EXT.PERF: -# - dask -# - distributed -# - pyarrow - -# EXT.PLOT: -# - IPython -# - cartopy -# - ipykernel -# - ipywidgets -# - matplotlib -# - pyproj -# - seaborn - -# DEV: - - aiofiles - - black - - bottleneck - - cfgrib - - cftime -# - conda - - flake8 -# - nc-time-axis - - numpy - - pandas - - pip - - pytest - - pytest-cov - - pytest-env - - pytest-localftpserver - - setuptools -# - sphinx - - - pip: - - pytest-reportlog \ No newline at end of file diff --git a/ci/requirements/py3.8-core-pinned.yml b/ci/requirements/py3.8-core-pinned.yml deleted file mode 100644 index 3eb19957..00000000 --- a/ci/requirements/py3.8-core-pinned.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: argopy-tests -channels: - - conda-forge -dependencies: - - python = 3.8 - -# CORE: - - aiohttp = 3.8.6 - - decorator = 5.1.1 - - erddapy = 2.2.0 - - fsspec = 2023.9.2 - - netCDF4 = 1.6.4 - - packaging = 23.2 - - requests = 2.31.0 - - scipy = 1.10.1 - - toolz = 0.12.0 - - xarray = 2023.1.0 - -# EXT.UTIL: -# - boto3 = 1.28.17 -# - gsw = 3.6.17 -# - s3fs = 2023.9.2 -# - tqdm = 4.66.1 -# - zarr = 2.13.3 - -# EXT.PERF: -# - dask = 2023.5.0 -# - distributed = 2023.5.0 -# - pyarrow = 13.0.0 - -# EXT.PLOT: -# - IPython = 8.12.2 -# - cartopy = 0.21.1 -# - ipykernel = 6.25.2 -# - ipywidgets = 7.8.0 -# - matplotlib = 3.7.3 -# - pyproj = 3.5.0 -# - seaborn = 0.13.0 - -# DEV: - - aiofiles = 23.1.0 - - black = 23.9.1 - - bottleneck = 1.3.7 - - cfgrib = 0.9.10.4 - - cftime = 1.6.2 -# - conda = - - - flake8 = 6.1.0 -# - nc-time-axis = - - - numpy = 1.24.4 - - pandas = 1.5.3 - - pip = 23.2.1 - - pytest = 7.4.2 - - pytest-cov = 4.1.0 - - pytest-env = 1.0.1 - - pytest-localftpserver = 1.1.4 - - setuptools = 68.2.2 -# - sphinx = - - - - pip: - - pytest-reportlog == 0.4.0 From 808e3fee62dd1e5719f509e97c076af6974e64f9 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Tue, 20 Aug 2024 15:49:02 +0200 Subject: [PATCH 17/23] Update requirements.txt --- docs/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 6976ab22..778227aa 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -10,8 +10,8 @@ aiohttp>=3.7.4 # bottleneck>=1.3.2 s3fs boto3 -cftime>=1.5.1 -cfgrib>=0.9.9.1 +cftime # >=1.5.1 +cfgrib # >=0.9.9.1 decorator distributed>=2.30.0 ipython<=8.26.0 From c8593357c40e7abd9ec19247d0ca9d92db30cf74 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Wed, 21 Aug 2024 07:33:14 +0200 Subject: [PATCH 18/23] Update requirements.txt --- docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 778227aa..e8926d1d 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -16,7 +16,7 @@ decorator distributed>=2.30.0 ipython<=8.26.0 matplotlib>=3.4.0 -# numpy +numpy==1.26.4 pandas<3.0 # packaging pip>=23.1 From 2554876505c69534e3e82bd1a42fd8532f9bd3f4 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Wed, 21 Aug 2024 07:59:26 +0200 Subject: [PATCH 19/23] Upgrade py3.9 pinned versions --- 
ci/requirements/py3.9-all-free.yml | 10 +-- ci/requirements/py3.9-all-pinned.yml | 84 +++++++++++++------------ ci/requirements/py3.9-core-free.yml | 10 +-- ci/requirements/py3.9-core-pinned.yml | 90 ++++++++++++++------------- 4 files changed, 101 insertions(+), 93 deletions(-) diff --git a/ci/requirements/py3.9-all-free.yml b/ci/requirements/py3.9-all-free.yml index 35286507..5aad7e8a 100644 --- a/ci/requirements/py3.9-all-free.yml +++ b/ci/requirements/py3.9-all-free.yml @@ -2,7 +2,7 @@ name: argopy-tests channels: - conda-forge dependencies: - - python=3.9 + - python = 3.9.19 # CORE: - aiohttp @@ -14,7 +14,7 @@ dependencies: - requests - scipy - toolz - - xarray<2024.3 + - xarray # EXT.UTIL: - boto3 @@ -35,6 +35,7 @@ dependencies: - ipykernel - ipywidgets - matplotlib + - pyproj - seaborn # DEV: @@ -43,9 +44,8 @@ dependencies: - bottleneck - cfgrib - cftime -# - conda + - codespell - flake8 -# - nc-time-axis - numpy - pandas - pip @@ -54,6 +54,8 @@ dependencies: - pytest-env - pytest-localftpserver - setuptools +# - sphinx +# PIP: - pip: - pytest-reportlog diff --git a/ci/requirements/py3.9-all-pinned.yml b/ci/requirements/py3.9-all-pinned.yml index e94f6fb3..62ca9fa9 100644 --- a/ci/requirements/py3.9-all-pinned.yml +++ b/ci/requirements/py3.9-all-pinned.yml @@ -2,58 +2,60 @@ name: argopy-tests channels: - conda-forge dependencies: - - python=3.9 + - python = 3.9.19 # CORE: - - aiohttp = 3.8.5 + - aiohttp = 3.10.4 - decorator = 5.1.1 - erddapy = 2.2.0 - - fsspec = 2023.9.2 - - netCDF4 = 1.6.4 - - packaging = 23.1 - - requests = 2.31.0 - - scipy = 1.11.3 - - toolz = 0.12.0 - - xarray = 2023.9.0 + - fsspec = 2024.6.1 + - netCDF4 = 1.7.1 + - packaging = 24.1 + - requests = 2.32.3 + - scipy = 1.13.1 + - toolz = 0.12.1 + - xarray = 2024.2.0 # EXT.UTIL: - - boto3 = 1.28.17 - - gsw = 3.6.17 - - s3fs = 2023.9.2 - - tqdm = 4.66.1 - - zarr = 2.16.1 + - boto3 = 1.35.0 + - gsw = 3.6.19 + - s3fs = 0.4.2 + - tqdm = 4.66.5 + - zarr = 2.18.2 # EXT.PERF: - - dask = 2023.9.2 - - distributed = 2023.9.2 - - h5netcdf = 1.2.0 - - pyarrow = 13.0.0 + - dask = 2024.8.0 + - distributed = 2024.8.0 + - h5netcdf = 1.3.0 + - pyarrow = 17.0.0 # EXT.PLOT: - - IPython = 8.15.0 - - cartopy = 0.22.0 - - ipykernel = 6.25.2 - - ipywidgets = 8.1.1 - - matplotlib = 3.8.0 - - seaborn = 0.12.2 + - IPython = 8.18.1 + - cartopy = 0.23.0 + - ipykernel = 6.29.5 + - ipywidgets = 8.1.3 + - matplotlib = 3.9.2 + - pyproj = 3.6.1 + - seaborn = 0.13.2 # DEV: - - aiofiles = 23.1.0 - - black = 23.9.1 - - bottleneck = 1.3.7 - - cfgrib = 0.9.10.4 - - cftime = 1.6.2 -# - conda = - - - flake8 = 6.1.0 -# - nc-time-axis = - - - numpy = 1.26.0 - - pandas = 2.1.1 - - pip = 23.2.1 - - pytest = 7.4.2 - - pytest-cov = 4.1.0 - - pytest-env = 1.0.1 - - pytest-localftpserver = 1.1.4 - - setuptools = 68.2.2 + - aiofiles = 24.1.0 + - black = 24.8.0 + - bottleneck = 1.4.0 + - cfgrib = 0.9.14.0 + - cftime = 1.6.4 + - codespell + - flake8 = 7.1.1 + - numpy = 1.26.4 + - pandas = 2.2.2 + - pip = 24.2 + - pytest = 8.3.2 + - pytest-cov = 5.0.0 + - pytest-env = 1.1.3 + - pytest-localftpserver + - setuptools = 72.1.0 +# - sphinx = - +# PIP: - pip: - - pytest-reportlog==0.1.2 + - pytest-reportlog == 0.4.0 diff --git a/ci/requirements/py3.9-core-free.yml b/ci/requirements/py3.9-core-free.yml index 771eec21..21410bcf 100644 --- a/ci/requirements/py3.9-core-free.yml +++ b/ci/requirements/py3.9-core-free.yml @@ -2,7 +2,7 @@ name: argopy-tests channels: - conda-forge dependencies: - - python=3.9 + - python = 3.9.19 # CORE: - aiohttp @@ -14,7 +14,7 @@ dependencies: - 
requests - scipy - toolz - - xarray<2024.3 + - xarray # EXT.UTIL: # - boto3 @@ -35,6 +35,7 @@ dependencies: # - ipykernel # - ipywidgets # - matplotlib +# - pyproj # - seaborn # DEV: @@ -43,9 +44,8 @@ dependencies: - bottleneck - cfgrib - cftime -# - conda + - codespell - flake8 -# - nc-time-axis - numpy - pandas - pip @@ -54,6 +54,8 @@ dependencies: - pytest-env - pytest-localftpserver - setuptools +# - sphinx +# PIP: - pip: - pytest-reportlog diff --git a/ci/requirements/py3.9-core-pinned.yml b/ci/requirements/py3.9-core-pinned.yml index cd6bd5f5..47051c8b 100644 --- a/ci/requirements/py3.9-core-pinned.yml +++ b/ci/requirements/py3.9-core-pinned.yml @@ -2,58 +2,60 @@ name: argopy-tests channels: - conda-forge dependencies: - - python=3.9 + - python = 3.9.19 # CORE: - - aiohttp = 3.8.5 + - aiohttp = 3.10.4 - decorator = 5.1.1 - erddapy = 2.2.0 - - fsspec = 2023.9.2 - - netCDF4 = 1.6.4 - - packaging = 23.1 - - requests = 2.31.0 - - scipy = 1.11.3 - - toolz = 0.12.0 - - xarray = 2023.9.0 + - fsspec = 2024.6.1 + - netCDF4 = 1.7.1 + - packaging = 24.1 + - requests = 2.32.3 + - scipy = 1.13.1 + - toolz = 0.12.1 + - xarray = 2024.2.0 -# # EXT.UTIL: -# - boto3 = 1.28.17 -# - gsw = 3.6.17 -# - s3fs = 2023.9.2 -# - tqdm = 4.66.1 -# - zarr = 2.16.1 +# EXT.UTIL: +# - boto3 = 1.35.0 +# - gsw = 3.6.19 +# - s3fs = 0.4.2 +# - tqdm = 4.66.5 +# - zarr = 2.18.2 -# # EXT.PERF: -# - dask = 2023.9.2 -# - distributed = 2023.9.2 -# - h5netcdf = 1.2.0 -# - pyarrow = 13.0.0 +# EXT.PERF: +# - dask = 2024.8.0 +# - distributed = 2024.8.0 +# - h5netcdf = 1.3.0 +# - pyarrow = 17.0.0 -# # EXT.PLOT: -# - IPython = 8.15.0 -# - cartopy = 0.22.0 -# - ipykernel = 6.25.2 -# - ipywidgets = 8.1.1 -# - matplotlib = 3.8.0 -# - seaborn = 0.12.2 +# EXT.PLOT: +# - IPython = 8.18.1 +# - cartopy = 0.23.0 +# - ipykernel = 6.29.5 +# - ipywidgets = 8.1.3 +# - matplotlib = 3.9.2 +# - pyproj = 3.6.1 +# - seaborn = 0.13.2 # DEV: - - aiofiles = 23.1.0 - - black = 23.9.1 - - bottleneck = 1.3.7 - - cfgrib = 0.9.10.4 - - cftime = 1.6.2 -# - conda = - - - flake8 = 6.1.0 -# - nc-time-axis = - - - numpy = 1.26.0 - - pandas = 2.1.1 - - pip = 23.2.1 - - pytest = 7.4.2 - - pytest-cov = 4.1.0 - - pytest-env = 1.0.1 - - pytest-localftpserver = 1.1.4 - - setuptools = 68.2.2 + - aiofiles = 24.1.0 + - black = 24.8.0 + - bottleneck = 1.4.0 + - cfgrib = 0.9.14.0 + - cftime = 1.6.4 + - codespell + - flake8 = 7.1.1 + - numpy = 1.26.4 + - pandas = 2.2.2 + - pip = 24.2 + - pytest = 8.3.2 + - pytest-cov = 5.0.0 + - pytest-env = 1.1.3 + - pytest-localftpserver + - setuptools = 72.1.0 +# - sphinx = - +# PIP: - pip: - - pytest-reportlog==0.1.2 + - pytest-reportlog == 0.4.0 From 98cd7b618b43df94de8fe83888e870f6cd85c13a Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Wed, 21 Aug 2024 08:00:11 +0200 Subject: [PATCH 20/23] Misc --- ci/envs_manager | 9 --------- docs/whats-new.rst | 4 +++- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/ci/envs_manager b/ci/envs_manager index 6b9846e3..de576f40 100755 --- a/ci/envs_manager +++ b/ci/envs_manager @@ -22,15 +22,6 @@ declare -A ENV_LIST=( ['argopy-docs-dev']="requirements/py3.9-docs-dev.yml" ['argopy-docs-rtd']="requirements/py3.9-docs-rtd.yml" -['argopy-py37-all-free']="requirements/py3.7-all-free.yml" -['argopy-py37-core-free']="requirements/py3.7-core-free.yml" - -['argopy-py38-all-free']="requirements/py3.8-all-free.yml" -['argopy-py38-all-pinned']="requirements/py3.8-all-pinned.yml" - -['argopy-py38-core-free']="requirements/py3.8-core-free.yml" 
-['argopy-py38-core-pinned']="requirements/py3.8-core-pinned.yml" - ['argopy-py39-all-free']="requirements/py3.9-all-free.yml" ['argopy-py39-all-pinned']="requirements/py3.9-all-pinned.yml" diff --git a/docs/whats-new.rst b/docs/whats-new.rst index 26ddb9b8..49c4a459 100644 --- a/docs/whats-new.rst +++ b/docs/whats-new.rst @@ -13,7 +13,7 @@ v0.1.16 (xx Aug. 2024) **Features and front-end API** -- **Support for AWS S3 index files**. This support is experimental and is primarily made available for benchmarking as part of the `ADMT working group on Argo cloud format activities `_. The `ADMT working group discussion items are listed here `_. Both CORE and BGC index files are supported. The new :class:`ArgoIndex` not only support access to the AWS S3 index files but also implement improved performances for search methods on WMO and cycle numbers, using :class:`boto3.client.select_object_content` SQL queries. Indeed, the ``https`` and ``ftp`` default GDAC server index files are downloaded and loaded in memory before being searched. With ``s3``, index files can directly be queried on the server using SQL syntax; the full index is not necessarily downloaded. (:pr:`326`) by `G. Maze `_ +- **Support for AWS S3 index files**. This support is experimental and is primarily made available for benchmarking as part of the `ADMT working group on Argo cloud format activities `_. The `ADMT working group discussion items are listed here `_. Both CORE and BGC index files are supported. The new :class:`ArgoIndex` not only support access to the AWS S3 index files but also implement improved performances for search methods on WMO and cycle numbers, using :class:`boto3.client.select_object_content` SQL queries. Indeed, the ``https`` and ``ftp`` default GDAC server index files are downloaded and loaded in memory before being searched. But with ``s3``, index files can directly be queried on the server using SQL syntax; the full index is not necessarily downloaded. (:pr:`326`) by `G. Maze `_ .. code-block:: python @@ -35,6 +35,8 @@ v0.1.16 (xx Aug. 2024) **Internals** +- Drop support for Python 3.8, add support for Python 3.10. (:pr:`379`) by `G. Maze `_ + - Update :class:`argopy.ArgoNVSReferenceTables` to handle new NVS server output format. (:pr:`378`) by `G. Maze `_ - Pin upper bound on xarray < 2024.3 to fix failing upstream tests because of ``AttributeError: 'ScipyArrayWrapper' object has no attribute 'oindex'``, `reported here `_. (:pr:`326`) by `G. Maze `_ From 74b54a5e7ff7738a1668cad02951f6e196be8370 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Wed, 21 Aug 2024 08:15:56 +0200 Subject: [PATCH 21/23] Update install.rst --- docs/install.rst | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/install.rst b/docs/install.rst index daf3b775..5669da97 100644 --- a/docs/install.rst +++ b/docs/install.rst @@ -32,18 +32,20 @@ Required dependencies - aiohttp - erddapy -- fsspec < 2023.12.0 (more at :issue:`317`) +- fsspec - netCDF4 - scipy - toolz -- xarray +- xarray < 2024.3.0 (because of `this issue `_) - requests +- decorator +- packaging Note that Erddapy_ is required because `erddap `_ is the default data fetching backend. Requirement dependencies details can be found `here `_. 
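To complement the release note above, here is a sketch of the experimental S3 index access it describes; the host value is an illustrative assumption for an S3 GDAC mirror, not an endpoint prescribed by this patch (see the ADMT working group links in the note for actual benchmark endpoints):

.. code-block:: python

    from argopy import ArgoIndex

    # With an s3 host, WMO/cycle searches can be pushed server-side as SQL
    # (boto3 select_object_content) instead of downloading the full index:
    idx = ArgoIndex(
        host="s3://argo-gdac-sandbox/pub",  # illustrative S3 mirror
        index_file="ar_index_global_prof.txt",
    )
    idx.search_wmo(6902746)   # WMO used here purely for illustration
    df = idx.to_dataframe()   # materialize the search results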
-The **argopy** software is `continuously tested `_ under latest OS (Linux, Mac OS and Windows) and with python versions 3.8 and 3.9 +The **argopy** software is `continuously tested `_ under latest OS (Linux, Mac OS and Windows) and with python versions 3.9 and 3.10 Optional dependencies --------------------- From d8a25d34617560c72ec8bff151d069c1f67b1f05 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Thu, 22 Aug 2024 16:12:27 +0200 Subject: [PATCH 22/23] Restore use of mocked server --- argopy/tests/test_fetchers_data_erddap.py | 2 +- argopy/tests/test_fetchers_data_erddap_bgc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/argopy/tests/test_fetchers_data_erddap.py b/argopy/tests/test_fetchers_data_erddap.py index a71a7a3c..e52da841 100644 --- a/argopy/tests/test_fetchers_data_erddap.py +++ b/argopy/tests/test_fetchers_data_erddap.py @@ -19,7 +19,7 @@ log = logging.getLogger("argopy.tests.data.erddap") -USE_MOCKED_SERVER = False +USE_MOCKED_SERVER = True """ List access points to be tested for each datasets: phy and ref. diff --git a/argopy/tests/test_fetchers_data_erddap_bgc.py b/argopy/tests/test_fetchers_data_erddap_bgc.py index b636d59c..1a5cbfbe 100644 --- a/argopy/tests/test_fetchers_data_erddap_bgc.py +++ b/argopy/tests/test_fetchers_data_erddap_bgc.py @@ -20,7 +20,7 @@ log = logging.getLogger("argopy.tests.data.erddap") -USE_MOCKED_SERVER = False +USE_MOCKED_SERVER = True """ List access points to be tested for each datasets: bgc. From 17e2b52a9f642f6bc161bfb7daeed65986133669 Mon Sep 17 00:00:00 2001 From: Guillaume Maze Date: Fri, 23 Aug 2024 14:31:29 +0200 Subject: [PATCH 23/23] Pin xarray < 2024.3 --- ci/requirements/py3.9-all-free.yml | 2 +- ci/requirements/py3.9-core-free.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/requirements/py3.9-all-free.yml b/ci/requirements/py3.9-all-free.yml index 5aad7e8a..8219167e 100644 --- a/ci/requirements/py3.9-all-free.yml +++ b/ci/requirements/py3.9-all-free.yml @@ -14,7 +14,7 @@ dependencies: - requests - scipy - toolz - - xarray + - xarray < 2024.3 # while https://github.com/pydata/xarray/issues/8909 is not solved # EXT.UTIL: - boto3 diff --git a/ci/requirements/py3.9-core-free.yml b/ci/requirements/py3.9-core-free.yml index 21410bcf..c38813de 100644 --- a/ci/requirements/py3.9-core-free.yml +++ b/ci/requirements/py3.9-core-free.yml @@ -14,7 +14,7 @@ dependencies: - requests - scipy - toolz - - xarray + - xarray < 2024.3 # while https://github.com/pydata/xarray/issues/8909 is not solved # EXT.UTIL: # - boto3
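Finally, the dependency-version machinery reworked across this series (the importlib, pip, and CLI fallbacks of patches 06 and 07) surfaces to end users through one public call:

.. code-block:: python

    import argopy

    argopy.show_versions()  # system info plus one version line per dependency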