From b9bcb234de4afb973b4767c113603f3084f05633 Mon Sep 17 00:00:00 2001
From: J-M
Date: Mon, 2 Sep 2024 10:11:29 +1000
Subject: [PATCH] Trying to circumvent the numpy/netCDF4 runtime warning

See https://github.com/csiro-hydroinformatics/efts-io/issues/4
---
 src/efts_io/__init__.py    |  2 +-
 src/efts_io/conventions.py | 28 ++++++++++++++++--------
 src/efts_io/dimensions.py  |  2 +-
 src/efts_io/variables.py   |  2 +-
 src/efts_io/wrapper.py     |  2 +-
 tests/test_create.py       | 44 ++++++++++++++++++++++++++++++++++++++
 tests/test_read_file.py    |  6 +-----
 7 files changed, 68 insertions(+), 18 deletions(-)
 create mode 100644 tests/test_create.py

diff --git a/src/efts_io/__init__.py b/src/efts_io/__init__.py
index 9569ccf..dc5a53d 100644
--- a/src/efts_io/__init__.py
+++ b/src/efts_io/__init__.py
@@ -5,6 +5,6 @@
 
 from __future__ import annotations
 
-import netCDF4  # noqa: I001
+# import netCDF4
 
 __all__: list[str] = []
diff --git a/src/efts_io/conventions.py b/src/efts_io/conventions.py
index 8d31d3a..71c86b7 100644
--- a/src/efts_io/conventions.py
+++ b/src/efts_io/conventions.py
@@ -1,9 +1,9 @@
 """Naming conventions for the EFTS netCDF file format."""
 
 from datetime import datetime
-from typing import Iterable, List, Optional, Union
+from typing import Any, Iterable, List, Optional, Union
 
-import netCDF4 as nc  # noqa: N813
+# import netCDF4 as nc
 import numpy as np
 import pandas as pd
 import xarray as xr
@@ -123,20 +123,30 @@ def check_index_found(
         )
 
 
+# MdDatasetsType = Union[nc.Dataset, xr.Dataset, xr.DataArray]
+MdDatasetsType = Union[xr.Dataset, xr.DataArray]
+
+
+def _is_nc_dataset(d: Any) -> bool:
+    # Have to disable using directly netCDF4 for now due to issue #4
+    return False
+    # return isinstance(d, nc.Dataset)
+
+
 def _has_required_dimensions(
-    d: Union[nc.Dataset, xr.Dataset, xr.DataArray],
+    d: MdDatasetsType,
     mandatory_dimensions: Iterable[str],
 ) -> bool:
-    if isinstance(d, nc.Dataset):
+    if _is_nc_dataset(d):
         return set(d.dimensions.keys()) == set(mandatory_dimensions)
     return set(d.dims.keys()) == set(mandatory_dimensions)
 
 
-def has_required_stf2_dimensions(d: Union[nc.Dataset, xr.Dataset, xr.DataArray]) -> bool:
+def has_required_stf2_dimensions(d: MdDatasetsType) -> bool:
     return _has_required_dimensions(d, mandatory_netcdf_dimensions)
 
 
-def has_required_xarray_dimensions(d: Union[nc.Dataset, xr.Dataset, xr.DataArray]) -> bool:
+def has_required_xarray_dimensions(d: MdDatasetsType) -> bool:
     return _has_required_dimensions(d, mandatory_xarray_dimensions)
 
 
@@ -145,8 +155,8 @@ def _has_all_members(tested: Iterable[str], reference: Iterable[str]) -> bool:
     return set(tested).intersection(r) == r
 
 
-def has_required_global_attributes(d: Union[nc.Dataset, xr.Dataset, xr.DataArray]) -> bool:
-    if isinstance(d, nc.Dataset):
+def has_required_global_attributes(d: MdDatasetsType) -> bool:
+    if _is_nc_dataset(d):
         a = d.ncattrs()
         tested = set(a)
     else:
@@ -155,7 +165,7 @@ def has_required_global_attributes(d: Union[nc.Dataset, xr.Dataset, xr.DataArray
     return _has_all_members(tested, mandatory_global_attributes)
 
 
-def has_required_variables(d: Union[nc.Dataset, xr.Dataset, xr.DataArray]) -> bool:
+def has_required_variables(d: MdDatasetsType) -> bool:
     a = d.variables.keys()
     tested = set(a)
     # Note: even if xarray, we do not need to check for the 'data_vars' attribute here.
diff --git a/src/efts_io/dimensions.py b/src/efts_io/dimensions.py
index 399f733..d297423 100644
--- a/src/efts_io/dimensions.py
+++ b/src/efts_io/dimensions.py
@@ -3,7 +3,7 @@
 from datetime import datetime
 from typing import Any, Dict, Optional, Tuple, Union
 
-import netCDF4  # noqa: F401
+# import netCDF4
 import numpy as np
 import pandas as pd
 
diff --git a/src/efts_io/variables.py b/src/efts_io/variables.py
index 667a1b8..2a72bf3 100644
--- a/src/efts_io/variables.py
+++ b/src/efts_io/variables.py
@@ -2,7 +2,7 @@
 
 from typing import Any, Dict, List, Optional
 
-import netCDF4  # noqa: F401
+# import netCDF4
 import numpy as np
 import pandas as pd
 
diff --git a/src/efts_io/wrapper.py b/src/efts_io/wrapper.py
index df19cd3..c32bdc4 100644
--- a/src/efts_io/wrapper.py
+++ b/src/efts_io/wrapper.py
@@ -3,7 +3,7 @@
 import os
 from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
 
-import netCDF4  # noqa: F401
+# import netCDF4
 import numpy as np
 import pandas as pd
 import xarray as xr
diff --git a/tests/test_create.py b/tests/test_create.py
new file mode 100644
index 0000000..12faef0
--- /dev/null
+++ b/tests/test_create.py
@@ -0,0 +1,44 @@
+# import netCDF4
+import numpy as np
+import pandas as pd
+
+
+def test_create_new_efts():
+    import efts_io.wrapper as wrap
+
+    issue_times = pd.date_range("2010-01-01", periods=31, freq="D")
+    station_ids = ["a", "b"]
+    lead_times = np.arange(start=1, stop=4, step=1)
+    lead_time_tstep = "hours"
+    ensemble_size = 10
+    station_names = None
+    nc_attributes = None
+    latitudes = None
+    longitudes = None
+    areas = None
+    d = wrap.xr_efts(
+        issue_times,
+        station_ids,
+        lead_times,
+        lead_time_tstep,
+        ensemble_size,
+        station_names,
+        nc_attributes,
+        latitudes,
+        longitudes,
+        areas,
+    )
+    from efts_io.conventions import (
+        has_required_global_attributes,
+        has_required_variables,
+        has_required_xarray_dimensions,
+    )
+
+    assert has_required_xarray_dimensions(d)
+    assert has_required_global_attributes(d)
+    assert has_required_variables(d)
+
+
+if __name__ == "__main__":
+    # test_read_thing()
+    test_create_new_efts()
diff --git a/tests/test_read_file.py b/tests/test_read_file.py
index aab3e74..09ce3ab 100644
--- a/tests/test_read_file.py
+++ b/tests/test_read_file.py
@@ -1,16 +1,12 @@
 import os
 
+# import netCDF4
 import numpy as np
 import pandas as pd
 
 from efts_io.wrapper import EftsDataSet
 
 pkg_dir = os.path.join(os.path.dirname(__file__), "..")
-# sys.path.insert(0, pkg_dir)
-
-# from efts_io.attributes import *
-# from efts_io.dimensions import *
-# from efts_io.wrapper import *
 
 variable_names = ["variable_1", "variable_2"]
 stations_ids = [123, 456]
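
A possible follow-up once the numpy/netCDF4 warning in issue #4 is resolved: instead of commenting the netCDF4 import out at module level and having _is_nc_dataset return a hard-coded False, the helper could import netCDF4 lazily and restore the original isinstance check. This is only a minimal sketch, not part of this patch, and it assumes the warning is triggered by the module-level import rather than by the isinstance test itself:

from typing import Any


def _is_nc_dataset(d: Any) -> bool:
    # Lazy import: importing efts_io no longer pulls in netCDF4, so the
    # numpy/netCDF4 runtime warning (issue #4) is only encountered by callers
    # that actually pass a netCDF4.Dataset. Falls back to False when netCDF4
    # is not installed.
    try:
        import netCDF4 as nc  # noqa: N813
    except ImportError:
        return False
    return isinstance(d, nc.Dataset)

With such a guard, the _has_required_dimensions / has_required_global_attributes branches above could keep working for netCDF4 datasets while xarray-only environments remain unaffected.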