Skip to content

Commit

Permalink
Work around the numpy/netCDF4 runtime warning (issue #4) by avoiding direct netCDF4 imports
Browse files Browse the repository at this point in the history
  • Loading branch information
jmp75 committed Sep 2, 2024
1 parent 436839f commit b9bcb23
Show file tree
Hide file tree
Showing 7 changed files with 68 additions and 18 deletions.
2 changes: 1 addition & 1 deletion src/efts_io/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,6 @@

from __future__ import annotations

import netCDF4 # noqa: I001
# import netCDF4

__all__: list[str] = []
28 changes: 19 additions & 9 deletions src/efts_io/conventions.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
"""Naming conventions for the EFTS netCDF file format."""

from datetime import datetime
from typing import Iterable, List, Optional, Union
from typing import Any, Iterable, List, Optional, Union

import netCDF4 as nc # noqa: N813
# import netCDF4 as nc
import numpy as np
import pandas as pd
import xarray as xr
Expand Down Expand Up @@ -123,20 +123,30 @@ def check_index_found(
)


# Dataset-like types accepted by the convention-checking helpers below.
# netCDF4.Dataset was removed from the union as a workaround for issue #4
# (numpy/netCDF4 runtime warning); the original alias is kept for reference:
# MdDatasetsType = Union[nc.Dataset, xr.Dataset, xr.DataArray]
MdDatasetsType = Union[xr.Dataset, xr.DataArray]


def _is_nc_dataset(d: Any) -> bool:
# Have to disable using directly netCDF4 for now due to issue #4
return False
# return isinstance(d, nc.Dataset)


def _has_required_dimensions(
    d: MdDatasetsType,
    mandatory_dimensions: Iterable[str],
) -> bool:
    """Return True if `d` has exactly the dimension names in `mandatory_dimensions`.

    Supports xarray objects via `.dims`; the netCDF4 branch is kept for
    when direct netCDF4 support is re-enabled (see `_is_nc_dataset`).
    """
    if _is_nc_dataset(d):
        # netCDF4.Dataset exposes its dimensions as the `dimensions` mapping.
        return set(d.dimensions.keys()) == set(mandatory_dimensions)
    # xarray exposes dimensions via `dims`.
    # NOTE(review): for xr.DataArray, `dims` is a tuple (no `.keys()`) —
    # this path presumably only ever receives xr.Dataset; confirm with callers.
    return set(d.dims.keys()) == set(mandatory_dimensions)


def has_required_stf2_dimensions(d: MdDatasetsType) -> bool:
    """Return True if `d` has exactly the mandatory STF 2.0 netCDF dimensions."""
    return _has_required_dimensions(d, mandatory_netcdf_dimensions)


def has_required_xarray_dimensions(d: MdDatasetsType) -> bool:
    """Return True if `d` has exactly the mandatory xarray dimensions."""
    return _has_required_dimensions(d, mandatory_xarray_dimensions)


Expand All @@ -145,8 +155,8 @@ def _has_all_members(tested: Iterable[str], reference: Iterable[str]) -> bool:
return set(tested).intersection(r) == r


def has_required_global_attributes(d: Union[nc.Dataset, xr.Dataset, xr.DataArray]) -> bool:
if isinstance(d, nc.Dataset):
def has_required_global_attributes(d: MdDatasetsType) -> bool:
if _is_nc_dataset(d):
a = d.ncattrs()
tested = set(a)
else:
Expand All @@ -155,7 +165,7 @@ def has_required_global_attributes(d: Union[nc.Dataset, xr.Dataset, xr.DataArray
return _has_all_members(tested, mandatory_global_attributes)


def has_required_variables(d: Union[nc.Dataset, xr.Dataset, xr.DataArray]) -> bool:
def has_required_variables(d: MdDatasetsType) -> bool:
a = d.variables.keys()
tested = set(a)
# Note: even if xarray, we do not need to check for the 'data_vars' attribute here.
Expand Down
2 changes: 1 addition & 1 deletion src/efts_io/dimensions.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from datetime import datetime
from typing import Any, Dict, Optional, Tuple, Union

import netCDF4 # noqa: F401
# import netCDF4
import numpy as np
import pandas as pd

Expand Down
2 changes: 1 addition & 1 deletion src/efts_io/variables.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from typing import Any, Dict, List, Optional

import netCDF4 # noqa: F401
# import netCDF4
import numpy as np
import pandas as pd

Expand Down
2 changes: 1 addition & 1 deletion src/efts_io/wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import os
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union

import netCDF4 # noqa: F401
# import netCDF4
import numpy as np
import pandas as pd
import xarray as xr
Expand Down
44 changes: 44 additions & 0 deletions tests/test_create.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# import netCDF4
import numpy as np
import pandas as pd


def test_create_new_efts():
    """Build a new in-memory EFTS dataset and verify it meets the conventions."""
    import efts_io.wrapper as wrap
    from efts_io.conventions import (
        has_required_global_attributes,
        has_required_variables,
        has_required_xarray_dimensions,
    )

    d = wrap.xr_efts(
        pd.date_range("2010-01-01", periods=31, freq="D"),  # issue_times
        ["a", "b"],  # station_ids
        np.arange(start=1, stop=4, step=1),  # lead_times
        "hours",  # lead_time_tstep
        10,  # ensemble_size
        None,  # station_names
        None,  # nc_attributes
        None,  # latitudes
        None,  # longitudes
        None,  # areas
    )

    for convention_check in (
        has_required_xarray_dimensions,
        has_required_global_attributes,
        has_required_variables,
    ):
        assert convention_check(d)


if __name__ == "__main__":
    # test_read_thing()
    test_create_new_efts()
6 changes: 1 addition & 5 deletions tests/test_read_file.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,12 @@
import os

# import netCDF4
import numpy as np
import pandas as pd

from efts_io.wrapper import EftsDataSet

pkg_dir = os.path.join(os.path.dirname(__file__), "..")
# sys.path.insert(0, pkg_dir)

# from efts_io.attributes import *
# from efts_io.dimensions import *
# from efts_io.wrapper import *

variable_names = ["variable_1", "variable_2"]
stations_ids = [123, 456]
Expand Down

0 comments on commit b9bcb23

Please sign in to comment.