diff --git a/.github/scripts/generate_yml_env_fixed_py.py b/.github/scripts/generate_yml_env_fixed_py.py
new file mode 100644
index 000000000..08922e3ca
--- /dev/null
+++ b/.github/scripts/generate_yml_env_fixed_py.py
@@ -0,0 +1,55 @@
+from __future__ import annotations
+
+import argparse
+
+import yaml  # type: ignore
+
+
+def environment_yml_nopy(fn_env: str, py_version: str, add_deps: list[str] | None = None) -> None:
+    """
+    Generate a temporary environment-ci-py3.XX.yml file forcing the Python version, for continuous integration setup.
+
+    :param fn_env: Filename path to environment.yml
+    :param py_version: Python version to force.
+    :param add_deps: Additional dependencies to solve for directly (for instance, graphviz fails with mamba update).
+    """
+
+    # Load the yml as dictionary
+    yaml_env = yaml.safe_load(open(fn_env))
+    conda_dep_env = list(yaml_env["dependencies"])
+
+    # Force python version
+    conda_dep_env_forced_py = ["python=" + py_version if "python" in dep else dep for dep in conda_dep_env]
+
+    # Optionally, add other dependencies
+    if add_deps is not None:
+        conda_dep_env_forced_py.extend(add_deps)
+
+    # Copy back to new yaml dict
+    yaml_out = yaml_env.copy()
+    yaml_out["dependencies"] = conda_dep_env_forced_py
+
+    with open("environment-ci-py" + py_version + ".yml", "w") as outfile:
+        yaml.dump(yaml_out, outfile, default_flow_style=False)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Generate an environment file for CI with a fixed Python version.")
+    parser.add_argument("fn_env", metavar="fn_env", type=str, help="Path to the generic environment file.")
+    parser.add_argument(
+        "--pyv",
+        dest="py_version",
+        default="3.9",
+        type=str,
+        help="Python version to force.",
+    )
+    parser.add_argument(
+        "--add",
+        dest="add_deps",
+        default=None,
+        type=str,
+        help="Comma-separated list of additional dependencies to add.",
+    )
+    args = parser.parse_args()
+    add_deps = args.add_deps.split(",") if args.add_deps is not None else None
+    environment_yml_nopy(fn_env=args.fn_env, py_version=args.py_version, add_deps=add_deps)
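For context, here is a rough sketch of how the new script is exercised by the workflow change further below; the Python version is purely illustrative, standing in for the workflow's "matrix.python-version" value:

```bash
# Illustrative invocation, with 3.10 standing in for matrix.python-version:
python .github/scripts/generate_yml_env_fixed_py.py --pyv 3.10 --add "graphviz" "environment.yml"
# The script pins the "python" entry of environment.yml to "python=3.10", appends "graphviz",
# and writes the result to environment-ci-py3.10.yml in the working directory, which the workflow
# then applies with: mamba env update -n xdem-dev -f environment-ci-py3.10.yml
```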
- """ - - # Load the yml as dictionary - yaml_env = yaml.safe_load(open(fn_env)) - conda_dep_env = list(yaml_env["dependencies"]) - - if isinstance(conda_dep_env[-1], dict): - pip_dep_env = list(conda_dep_env.pop()["pip"]) - else: - pip_dep_env = ["None"] - - conda_dep_env_without_python = [dep for dep in conda_dep_env if "python" not in dep] - - # Join the lists - joined_list_conda_dep = " ".join(conda_dep_env_without_python) - joined_list_pip_dep = " ".join(pip_dep_env) - - # Print to be captured in bash - if print_dep == "both": - print(joined_list_conda_dep) - print(joined_list_pip_dep) - elif print_dep == "conda": - print(joined_list_conda_dep) - elif print_dep == "pip": - print(joined_list_pip_dep) - else: - raise ValueError('The argument "print_dep" can only be "conda", "pip" or "both".') - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Get environment list without python version.") - parser.add_argument("fn_env", metavar="fn_env", type=str, help="Path to the environment file.") - parser.add_argument( - "--p", - dest="print_dep", - default="both", - type=str, - help="Whether to print conda dependencies, pip ones, or both.", - ) - - args = parser.parse_args() - environment_yml_nopy(fn_env=args.fn_env, print_dep=args.print_dep) diff --git a/.github/workflows/python-tests.yml b/.github/workflows/python-tests.yml index d3ed51f2f..4872797e7 100644 --- a/.github/workflows/python-tests.yml +++ b/.github/workflows/python-tests.yml @@ -51,20 +51,16 @@ jobs: CACHE_NUMBER: 0 # Increase this value to reset cache if environment.yml has not changed id: cache - # The trick below is necessary because the generic environment file does not specify a Python version, and only - # "conda env update" can be used to update with an environment file, which upgrades the Python version + # The trick below is necessary because the generic environment file does not specify a Python version, and ONLY + # "conda env update" CAN BE USED WITH CACHING, which upgrades the Python version when using the base environment # (we add "graphviz" from dev-environment to solve all dependencies at once, at graphviz relies on image # processing packages very much like geo-packages; not a problem for docs, dev installs where all is done at once) - name: Install base environment with a fixed Python version if: steps.cache.outputs.cache-hit != 'true' run: | mamba install pyyaml python=${{ matrix.python-version }} - pkgs_conda_base=`python .github/scripts/get_yml_env_nopy.py "environment.yml" --p "conda"` - pkgs_pip_base=`python .github/scripts/get_yml_env_nopy.py "environment.yml" --p "pip"` - mamba install python=${{ matrix.python-version }} $pkgs_conda_base graphviz - if [[ "$pkgs_pip_base" != "None" ]]; then - pip install $pkgs_pip_base - fi + python .github/scripts/generate_yml_env_fixed_py.py --pyv ${{ matrix.python-version }} --add "graphviz" "environment.yml" + mamba env update -n xdem-dev -f environment-ci-py${{ matrix.python-version }}.yml - name: Install project run: pip install -e . --no-dependencies diff --git a/doc/source/api.md b/doc/source/api.md index e84698a90..9a3144358 100644 --- a/doc/source/api.md +++ b/doc/source/api.md @@ -60,8 +60,8 @@ documentation. 
Raster.width Raster.count Raster.count_on_disk - Raster.indexes - Raster.indexes_on_disk + Raster.bands + Raster.bands_on_disk Raster.res Raster.bounds Raster.dtypes diff --git a/geoutils/projtools.py b/geoutils/projtools.py index 21d846f8e..1f234a5d5 100644 --- a/geoutils/projtools.py +++ b/geoutils/projtools.py @@ -244,7 +244,7 @@ def align_bounds( def reproject_points( - points: list[list[float]] | tuple[list[float], list[float]] | NDArrayNum, in_crs: CRS, out_crs: CRS + points: list[list[float]] | list[float] | tuple[list[float], list[float]] | NDArrayNum, in_crs: CRS, out_crs: CRS ) -> tuple[list[float], list[float]]: """ Reproject a set of point from input_crs to output_crs. @@ -269,7 +269,7 @@ def reproject_points( def reproject_to_latlon( - points: list[list[float]] | NDArrayNum, in_crs: CRS, round_: int = 8 + points: list[list[float]] | list[float] | NDArrayNum, in_crs: CRS, round_: int = 8 ) -> tuple[list[float], list[float]]: """ Reproject a set of point from in_crs to lat/lon. diff --git a/geoutils/raster/raster.py b/geoutils/raster/raster.py index 3d93f680f..26816b958 100644 --- a/geoutils/raster/raster.py +++ b/geoutils/raster/raster.py @@ -986,8 +986,8 @@ def raster_equal(self, other: object) -> bool: - The raster's transform, crs and nodata values. """ - if not isinstance(other, type(self)): # TODO: Possibly add equals to SatelliteImage? - return NotImplemented + if not isinstance(other, Raster): # TODO: Possibly add equals to SatelliteImage? + raise NotImplementedError("Equality with other object than Raster not supported by raster_equal.") return all( [ np.array_equal(self.data.data, other.data.data, equal_nan=True), @@ -2541,7 +2541,7 @@ def save( save_data = self.data # If the raster is a mask, convert to uint8 before saving and force nodata to 255 - if save_data.dtype == bool: + if self.data.dtype == bool: save_data = save_data.astype("uint8") nodata = 255 @@ -3744,12 +3744,17 @@ def reproject( memory_limit=memory_limit, ) - # Transform back to a boolean array + # Transform output back to a boolean array output._data = output.data.astype(bool) # type: ignore + # Transform self back to boolean array + self._data = self.data.astype(bool) # type: ignore + if inplace: self._transform = output._transform # type: ignore self._crs = output._crs # type: ignore + # Little trick to force the shape, same as in Raster.reproject() + self._data = output._data # type: ignore self.data = output._data # type: ignore return None else: @@ -3820,7 +3825,14 @@ def polygonize( # Convert to unsigned integer and pass to parent method self._data = self.data.astype("uint8") # type: ignore - return super().polygonize(target_values=target_values) + + # Get output from parent method + output = super().polygonize(target_values=target_values) + + # Convert array back to boolean + self._data = self.data.astype(bool) # type: ignore + + return output def proximity( self, @@ -3841,12 +3853,11 @@ def proximity( # warnings.warn("In-value converted to 1 for polygonizing boolean mask.") # target_values = [1] - # Convert to unsigned integer and pass to parent method - self._data = self.data.astype("uint8") # type: ignore - # Need to cast output to Raster before computing proximity, as output will not be boolean # (super() would instantiate Mask() again) - raster = Raster({"data": self.data, "transform": self.transform, "crs": self.crs, "nodata": self.nodata}) + raster = Raster( + {"data": self.data.astype("uint8"), "transform": self.transform, "crs": self.crs, "nodata": self.nodata} + ) return 
raster.proximity( vector=vector, target_values=target_values, @@ -3933,6 +3944,10 @@ def proximity_from_vector_or_raster( # 1/ First, if there is a vector input, we rasterize the geometry type # (works with .boundary that is a LineString (.exterior exists, but is a LinearRing) if vector is not None: + + # TODO: Only when using centroid... Maybe we should leave this operation to the user anyway? + warnings.filterwarnings("ignore", message="Geometry is in a geographic CRS.*") + # We create a geodataframe with the geometry type boundary_shp = gpd.GeoDataFrame(geometry=vector.ds.__getattr__(geometry_type), crs=vector.crs) # We mask the pixels that make up the geometry type diff --git a/pyproject.toml b/pyproject.toml index 48deb4831..9b3f2de21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ fallback_version = "0.0.1" target_version = ['py36'] [tool.pytest.ini_options] -addopts = "--doctest-modules" +addopts = "--doctest-modules -W error::UserWarning" testpaths = [ "tests", "geoutils" diff --git a/tests/test_doc.py b/tests/test_doc.py index 28aa90d10..66fbbadfa 100644 --- a/tests/test_doc.py +++ b/tests/test_doc.py @@ -2,6 +2,7 @@ import os import platform import shutil +import warnings import sphinx.cmd.build @@ -13,6 +14,11 @@ class TestDocs: def test_build(self) -> None: """Try building the documentation and see if it works.""" + # Ignore all warnings raised in the documentation + # (some UserWarning are shown on purpose in certain examples, so they shouldn't make the test fail, + # and most other warnings are for Sphinx developers, not meant to be seen by us; or we can check on RTD) + warnings.filterwarnings("ignore") + # Building the doc fails on Windows for the CLI section if (platform.system() == "Linux") or (platform.system() == "Darwin"): diff --git a/tests/test_geoviewer.py b/tests/test_geoviewer.py index 166a4f755..efc337668 100644 --- a/tests/test_geoviewer.py +++ b/tests/test_geoviewer.py @@ -4,6 +4,7 @@ import os import sys +import warnings import matplotlib.pyplot as plt import pytest @@ -40,9 +41,14 @@ def test_geoviewer_valid_1band(capsys, monkeypatch, filename, option): # type: # To avoid having the plots popping up during execution monkeypatch.setattr(plt, "show", lambda: None) + # The everest example will raise errors when setting a nodata value that exists + if "B4" in os.path.basename(filename) and len(option) > 0 and option[0] == "-nodata": + warnings.filterwarnings("ignore", category=UserWarning, message="New nodata value found in the data array.*") + # To not get exception when testing generic functions such as --help try: geoviewer.main([filename, *option]) + plt.close() except SystemExit: pass @@ -81,6 +87,10 @@ def test_geoviewer_invalid_1band(capsys, monkeypatch, filename, args): # type: # To avoid having the plots popping up during execution monkeypatch.setattr(plt, "show", lambda: None) + # The everest example will raise errors when setting a nodata value that exists + if "B4" in os.path.basename(filename) and len(args) > 0 and args[0] == "-nodata": + warnings.filterwarnings("ignore", category=UserWarning, message="New nodata value found in the data array.*") + # To not get exception when testing generic functions such as --help option, error = args with pytest.raises(error): @@ -108,9 +118,14 @@ def test_geoviewer_valid_3band(capsys, monkeypatch, filename, option): # type: # To avoid having the plots popping up during execution monkeypatch.setattr(plt, "show", lambda: None) + # The everest RGB example will raise errors when setting a nodata 
value that exists + if "RGB" in os.path.basename(filename) and len(option) > 0 and option[0] == "-nodata": + warnings.filterwarnings("ignore", category=UserWarning, message="New nodata value found in the data array.*") + # To not get exception when testing generic functions such as --help try: geoviewer.main([filename, *option]) + plt.close() except SystemExit: pass diff --git a/tests/test_multiraster.py b/tests/test_multiraster.py index ffefe7bec..6e974346d 100644 --- a/tests/test_multiraster.py +++ b/tests/test_multiraster.py @@ -16,7 +16,7 @@ from geoutils.raster import RasterType -class stack_merge_images: +class StackMergeImages: """ Test cases for stacking and merging images Split an image with some overlap, then stack/merge it, and validate bounds and shape. @@ -26,6 +26,9 @@ class stack_merge_images: def __init__( self, image: str, cls: Callable[[str], RasterType] = gu.Raster, different_crs: pyproj.CRS | None = None ) -> None: + + warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, nodata must be set.*") + img = cls(examples.get_path(image)) self.img = img @@ -66,22 +69,22 @@ def __init__( @pytest.fixture def images_1d(): # type: ignore - return stack_merge_images("everest_landsat_b4") + return StackMergeImages("everest_landsat_b4") @pytest.fixture def images_different_crs(): # type: ignore - return stack_merge_images("everest_landsat_b4", different_crs=4326) + return StackMergeImages("everest_landsat_b4", different_crs=4326) @pytest.fixture def sat_images(): # type: ignore - return stack_merge_images("everest_landsat_b4", cls=gu.SatelliteImage) + return StackMergeImages("everest_landsat_b4", cls=gu.SatelliteImage) @pytest.fixture def images_3d(): # type: ignore - return stack_merge_images("everest_landsat_rgb") + return StackMergeImages("everest_landsat_rgb") class TestMultiRaster: @@ -99,7 +102,7 @@ def test_stack_rasters(self, rasters) -> None: # type: ignore # Silence the reprojection warning for default nodata value warnings.filterwarnings("ignore", category=UserWarning, message="New nodata value found in the data array.*") - warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, dst_nodata must be set.*") + warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, nodata must be set.*") # Merge the two overlapping DEMs and check that output bounds and shape is correct if rasters.img1.count > 1: @@ -169,7 +172,7 @@ def test_merge_rasters(self, rasters) -> None: # type: ignore # Silence the reprojection warning for default nodata value warnings.filterwarnings("ignore", category=UserWarning, message="New nodata value found in the data array.*") - warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, dst_nodata must be set.*") + warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, nodata must be set.*") # Ignore warning already checked in test_stack_rasters if rasters.img1.count > 1: @@ -230,7 +233,7 @@ def test_load_multiple_overlap(self, raster_paths: list[str]) -> None: for k, rst in enumerate(output_rst): assert rst.is_loaded rst2 = gu.Raster(raster_paths[k]) - assert rst == rst2 + assert rst.raster_equal(rst2) # - Test that with crop=True and ref_grid=None, rasters are cropped only in area of overlap - # output_rst = gu.raster.load_multiple_rasters(raster_paths, crop=True, ref_grid=None) @@ -284,7 +287,7 @@ def test_load_multiple_no_overlap(self, raster_paths: list[str]) -> None: for k, rst in enumerate(output_rst): assert 
rst.is_loaded rst2 = gu.Raster(raster_paths[k]) - assert rst == rst2 + assert rst.raster_equal(rst2) # - With crop=True -> should raise a warning - # with pytest.warns(UserWarning, match="Intersection is void, returning unloaded rasters."): diff --git a/tests/test_raster.py b/tests/test_raster.py index 859cd1bd3..d8c5b8ddb 100644 --- a/tests/test_raster.py +++ b/tests/test_raster.py @@ -37,6 +37,8 @@ def run_gdal_proximity( # Rasterio strongly recommends against importing gdal along rio, so this is done here instead. from osgeo import gdal, gdalconst + gdal.UseExceptions() + # Initiate empty GDAL raster for proximity output drv = gdal.GetDriverByName("MEM") proxy_ds = drv.Create("", input_raster.shape[1], input_raster.shape[0], 1, gdal.GetDataTypeByName("Float32")) @@ -396,7 +398,7 @@ def test_to_xarray(self, example: str): @pytest.mark.parametrize("nodata_init", [None, "type_default"]) # type: ignore @pytest.mark.parametrize( - "dtype", ["uint8", "int8", "uint16", "int16", "uint32", "int32", "float32", "float64", "longdouble"] + "dtype", ["uint8", "int8", "uint16", "int16", "uint32", "int32", "float32", "float64"] ) # type: ignore def test_data_setter(self, dtype: str, nodata_init: str | None) -> None: """ @@ -1074,12 +1076,10 @@ def test_crop(self, data: list[str]) -> None: # Check that bound reprojection is done automatically if the CRS differ with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", category=UserWarning, message="For reprojection, dst_nodata must be set.*" - ) + warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, nodata must be set.*") - r_cropped_reproj = r_cropped.reproject(crs=3857) - r_cropped3 = r.crop(r_cropped_reproj) + r_cropped_reproj = r_cropped.reproject(crs=3857) + r_cropped3 = r.crop(r_cropped_reproj) # Original CRS bounds can be deformed during transformation, but result should be equivalent to this r_cropped4 = r.crop(crop_geom=r_cropped_reproj.get_bounds_projected(out_crs=r.crs)) @@ -1132,11 +1132,9 @@ def test_crop(self, data: list[str]) -> None: crop_geom[3] - rand_float * abs(r.res[1]), ] - # Filter warning about dst_nodata not set in reprojection (because match_extent triggers reproject) + # Filter warning about nodata not set in reprojection (because match_extent triggers reproject) with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", category=UserWarning, message="For reprojection, dst_nodata must be set.*" - ) + warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, nodata must be set.*") r_cropped = r.crop(crop_geom2, mode="match_extent") assert list(r_cropped.bounds) == crop_geom2 @@ -1145,11 +1143,9 @@ def test_crop(self, data: list[str]) -> None: abs(np.array(r.res) - np.array(r_cropped.res)) < np.array(r.res) / np.array(r_cropped.shape)[::-1] ) - # Filter warning about dst_nodata not set in reprojection (because match_extent triggers reproject) + # Filter warning about nodata not set in reprojection (because match_extent triggers reproject) with warnings.catch_warnings(): - warnings.filterwarnings( - "ignore", category=UserWarning, message="For reprojection, dst_nodata must be set.*" - ) + warnings.filterwarnings("ignore", category=UserWarning, message="For reprojection, nodata must be set.*") r_cropped2 = r.crop(r_cropped, mode="match_extent") assert r_cropped2.raster_equal(r_cropped) @@ -1850,10 +1846,11 @@ def test_xy2ij_and_interp(self) -> None: y = 3101000.0 i, j = r.xy2ij(x, y) val = r.interp_points([(x, y)], order=1)[0] - assert img[int(i), 
int(j)] == val + val_img = img[int(i[0]), int(j[0])] + assert val_img == val # Finally, check that interp convert to latlon - lat, lon = gu.projtools.reproject_to_latlon([[x], [y]], in_crs=r.crs) + lat, lon = gu.projtools.reproject_to_latlon([x, y], in_crs=r.crs) val_latlon = r.interp_points([(lat, lon)], order=1, input_latlon=True)[0] assert val == pytest.approx(val_latlon, abs=0.0001) @@ -1874,8 +1871,8 @@ def test_value_at_coords(self) -> None: itest0 = 120 jtest0 = 451 # This is the center of the pixel - xtest0 = 496975 - ytest0 = 3099095 + xtest0 = 496975.0 + ytest0 = 3099095.0 # Verify coordinates match indexes x_out, y_out = r.ij2xy(itest0, jtest0, offset="center") @@ -1896,7 +1893,7 @@ def test_value_at_coords(self) -> None: # -- Tests 2: check arguments work as intended -- # 1/ Lat-lon argument check by getting the coordinates of our last test point - lat, lon = reproject_to_latlon(points=[[xtest0], [ytest0]], in_crs=r.crs) + lat, lon = reproject_to_latlon(points=[xtest0, ytest0], in_crs=r.crs) z_val_2 = r.value_at_coords(lon, lat, latlon=True) assert z_val == z_val_2 @@ -2196,7 +2193,7 @@ def test_nodata_setter(self, example: str) -> None: with warnings.catch_warnings(): # Ignore warning that nodata value is already used in the raster data warnings.filterwarnings( - "ignore", category=UserWarning, message="For reprojection, dst_nodata must be set.*" + "ignore", category=UserWarning, message="New nodata value found in the data array.*" ) r.set_nodata(_default_nodata(r.dtypes[0])) r_copy.nodata = _default_nodata(r.dtypes[0]) @@ -2660,6 +2657,9 @@ def test_polygonize(self, example: str) -> None: def test_proximity_against_gdal(self, distunits: str, target_values: list[float] | None, raster: gu.Raster) -> None: """Test that proximity matches the results of GDAL for any parameter.""" + # TODO: When adding new rasters for tests, specify warning only for Landsat + warnings.filterwarnings("ignore", message="Setting default nodata -99999 to mask non-finite values *") + # We generate proximity with GDAL and GeoUtils gdal_proximity = run_gdal_proximity(raster, target_values=target_values, distunits=distunits) # We translate distunits GDAL option into its GeoUtils equivalent @@ -2827,7 +2827,7 @@ def test_init(self, example: str) -> None: """Test that Mask subclass initialization function as intended.""" # A warning should be raised when the raster is a multi-band - if "rgb" not in os.path.basename(example): + if "RGB" not in os.path.basename(example): mask = gu.Mask(example) else: with pytest.warns( @@ -2854,6 +2854,8 @@ def test_init(self, example: str) -> None: def test_repr_str(self, example: str) -> None: """Test the representation of a raster works""" + warnings.filterwarnings("ignore", message="Multi-band raster provided to create a Mask*") + # For data not loaded by default r = gu.Mask(example) @@ -2962,6 +2964,13 @@ def test_reproject(self, mask: gu.Mask) -> None: # Check instance is respected assert isinstance(mask_reproj, gu.Mask) + # Check the dtype of the original mask was properly reconverted + assert mask.data.dtype == bool + + # Check inplace behaviour works + mask_tmp = mask.copy() + mask_tmp.reproject(grid_size=(100, 100), force_source_nodata=2, inplace=True) + assert mask_tmp.raster_equal(mask_reproj) # This should be equivalent to converting the array to uint8, reprojecting, converting back mask_uint8 = mask.astype("uint8") @@ -2988,6 +2997,13 @@ def test_crop(self, mask: gu.Mask) -> None: # Check if instance is respected assert isinstance(mask_cropped, gu.Mask) + # Check 
the dtype of the original mask was properly reconverted + assert mask.data.dtype == bool + + # Check inplace behaviour works + mask_tmp = mask.copy() + mask_tmp.crop(crop_geom, inplace=True) + assert mask_tmp.raster_equal(mask_cropped) # - Test cropping each side by a random integer of pixels - # rand_int = np.random.randint(1, min(mask.shape) - 1) @@ -3022,10 +3038,10 @@ def test_crop(self, mask: gu.Mask) -> None: # Test inplace mask_orig = mask.copy() - mask.crop(crop_geom2, inplace=True) - assert list(mask.bounds) == crop_geom2 - assert np.array_equal(mask_orig.data[rand_int:, :].data, mask.data, equal_nan=True) - assert np.array_equal(mask_orig.data[rand_int:, :].mask, mask.data.mask) + mask_orig.crop(crop_geom2, inplace=True) + assert list(mask_orig.bounds) == crop_geom2 + assert np.array_equal(mask.data[rand_int:, :].data, mask_orig.data, equal_nan=True) + assert np.array_equal(mask.data[rand_int:, :].mask, mask_orig.data.mask) # Run with match_extent, check that inplace or not yields the same result @@ -3040,6 +3056,8 @@ def test_crop(self, mask: gu.Mask) -> None: def test_polygonize(self, mask: gu.Mask) -> None: # Run default vect = mask.polygonize() + # Check the dtype of the original mask was properly reconverted + assert mask.data.dtype == bool # Check the output is cast into a vector assert isinstance(vect, gu.Vector) @@ -3056,6 +3074,8 @@ def test_polygonize(self, mask: gu.Mask) -> None: def test_proximity(self, mask: gu.Mask) -> None: # Run default rast = mask.proximity() + # Check the dtype of the original mask was properly reconverted + assert mask.data.dtype == bool # Check that output is cast back into a raster assert isinstance(rast, gu.Raster) @@ -3072,8 +3092,19 @@ def test_save(self, mask: gu.Mask) -> None: # Save file to temporary file, with defaults opts temp_file = os.path.join(temp_dir.name, "test.tif") mask.save(temp_file) - saved = gu.Raster(temp_file) - assert mask.astype("uint8").raster_equal(saved) + saved = gu.Mask(temp_file) + + # TODO: Generalize raster_equal for masks? 
+ + # Check all attributes are equal + assert all( + [ + np.ma.allequal(saved.data, mask.data), + saved.transform == mask.transform, + saved.crs == mask.crs, + saved.nodata == mask.nodata, + ] + ) # Clean up temporary folder - fails on Windows try: @@ -3797,6 +3828,9 @@ def test_array_ufunc_1nin_1nout(self, ufunc_str: str, nodata_init: None | str, d nodata: int | None = _default_nodata(dtype) else: nodata = None + warnings.filterwarnings( + "ignore", category=UserWarning, message="Setting default nodata -99999 to mask non-finite values*" + ) # Create Raster ma1 = np.ma.masked_array(data=self.arr1.astype(dtype), mask=self.mask1) @@ -3900,7 +3934,9 @@ def test_array_ufunc_2nin_1nout( # Catch warnings with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=RuntimeWarning) - warnings.filterwarnings("ignore", category=UserWarning) + warnings.filterwarnings( + "ignore", category=UserWarning, message="Setting default nodata -99999 to mask non-finite values*" + ) # Check if both our input dtypes are possible on this ufunc, if yes check that outputs are identical if com_dtype_tuple in [(np.dtype(t[0]), np.dtype(t[1])) for t in ufunc.types]: # noqa diff --git a/tests/test_vector.py b/tests/test_vector.py index 51b18ab98..bc7be27dd 100644 --- a/tests/test_vector.py +++ b/tests/test_vector.py @@ -486,7 +486,7 @@ def test_buffer_metric(self) -> None: assert two_squares_geographic_buffered_reproj.ds.area.values[0] == pytest.approx(expected_area, abs=0.01) # And this time, it is the reprojected GeoDataFrame that should almost match (within a tolerance of 10e-06) - assert all(direct_gpd_buffer.ds.geom_almost_equals(two_squares_geographic_buffered_reproj.ds)) + assert all(direct_gpd_buffer.ds.geom_equals_exact(two_squares_geographic_buffered_reproj.ds, tolerance=10e-6)) def test_buffer_without_overlap(self, monkeypatch) -> None: # type: ignore """ @@ -499,7 +499,9 @@ def test_buffer_without_overlap(self, monkeypatch) -> None: # type: ignore # Check with buffers that should not overlap # ------------------------------------------ buffer_size = 2 - buffer = two_squares.buffer_without_overlap(buffer_size, metric=False) + # We force metric = False, so buffer should raise a GeoPandas warning + with pytest.warns(UserWarning, match="Geometry is in a geographic CRS.*"): + buffer = two_squares.buffer_without_overlap(buffer_size, metric=False) # Output should be of same size as input and same geometry type assert len(buffer.ds) == len(two_squares.ds) @@ -527,7 +529,9 @@ def test_buffer_without_overlap(self, monkeypatch) -> None: # type: ignore # Case 2 - Check with buffers that overlap -> this case is actually not the expected result ! 
# ------------------------------- buffer_size = 5 - buffer = two_squares.buffer_without_overlap(buffer_size, metric=False) + # We force metric = False, so buffer should raise a GeoPandas warning + with pytest.warns(UserWarning, match="Geometry is in a geographic CRS.*"): + buffer = two_squares.buffer_without_overlap(buffer_size, metric=False) # Output should be of same size as input and same geometry type assert len(buffer.ds) == len(two_squares.ds) @@ -557,6 +561,10 @@ def test_buffer_without_overlap(self, monkeypatch) -> None: # type: ignore two_squares.buffer_without_overlap(buffer_size, plot=True) +class NeedToImplementWarning(FutureWarning): + """Warning to remember to implement new GeoPandas methods""" + + class TestGeoPandasMethods: # Use two synthetic vectors poly = Polygon([(10, 10), (11, 10), (11, 11), (10, 11)]) @@ -711,7 +719,9 @@ def test_geopandas_coverage(self) -> None: list_missing = [method for method in covered_methods if method not in self.all_declared] if len(list_missing) != 0: - warnings.warn(f"New GeoPandas methods are not implemented in GeoUtils: {list_missing}") + warnings.warn( + f"New GeoPandas methods are not implemented in GeoUtils: {list_missing}", NeedToImplementWarning + ) @pytest.mark.parametrize("method", nongeo_methods + geo_methods) # type: ignore def test_overridden_funcs_args(self, method: str) -> None: @@ -731,18 +741,18 @@ def test_overridden_funcs_args(self, method: str) -> None: # Check that all positional arguments are the same if argspec_upstream.args != argspec_geoutils.args: - warnings.warn("Argument of GeoPandas method not consistent in GeoUtils.") + warnings.warn("Argument of GeoPandas method not consistent in GeoUtils.", NeedToImplementWarning) # Check that the *args and **kwargs argument are declared consistently if argspec_upstream.varargs != argspec_geoutils.varargs: - warnings.warn("Argument of GeoPandas method not consistent in GeoUtils.") + warnings.warn("Argument of GeoPandas method not consistent in GeoUtils.", NeedToImplementWarning) if argspec_upstream.varkw != argspec_geoutils.varkw: - warnings.warn("Argument of GeoPandas method not consistent in GeoUtils.") + warnings.warn("Argument of GeoPandas method not consistent in GeoUtils.", NeedToImplementWarning) # Check that default argument values are the same if argspec_upstream.defaults != argspec_geoutils.defaults: - warnings.warn("Default argument of GeoPandas method not consistent in GeoUtils.") + warnings.warn("Default argument of GeoPandas method not consistent in GeoUtils.", NeedToImplementWarning) @pytest.mark.parametrize("vector", [synthvec1, synthvec2, realvec1, realvec2]) # type: ignore @pytest.mark.parametrize("method", nongeo_properties) # type: ignore