prep v0.0.1
nicolasK committed Nov 10, 2023
1 parent d2c15df commit 5985f90
Showing 8 changed files with 33 additions and 90 deletions.
11 changes: 6 additions & 5 deletions CHANGELOG.md
@@ -11,22 +11,23 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- `intersects` argument in search/datacube now supports wkt/geojson/geopandas
- `common_band_names` default set to True in datacube creation. It uses the new Assets Mapper to select the most suitable bands according to user needs.
- `clear_cover` in the datacube method when using the `earthdatastore.Auth` method.
- `clear_cover` argument in the datacube method when using the `earthdatastore.Auth` method.
- `datasets.load_pivot()` to load a GeoDataFrame of a pivot in Nebraska (alternates between corn and soy from one year to the next).
- `preload_mask` in the authenticated datacube method, set to `True` by default to load the mask in memory when enough virtual memory is available.
- Several tests to check and validate code.
- Better performances for cloud mask statistics by checking data type (still not dask friendly)
- Better performances for cloud mask statistics by checking data type.
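
Taken together, the `intersects`, `clear_cover`, and `preload_mask` entries above translate to a call roughly like the following. This is a minimal sketch only: the collection name, date range, and module paths are assumptions, not part of this commit.

```python
from earthdaily import earthdatastore, datasets

eds = earthdatastore.Auth()    # credentials picked up from the environment (assumption)
pivot = datasets.load_pivot()  # GeoDataFrame of a pivot field in Nebraska

cube = eds.datacube(
    "sentinel-2-l2a",                 # hypothetical collection name
    assets=["red", "green", "blue"],
    intersects=pivot,                 # also accepts WKT strings and GeoJSON dicts
    datetime="2022-06/2022-09",       # hypothetical date range
    mask_with="native",
    clear_cover=50,                   # keep only items with at least 50% clear pixels
    preload_mask=True,                # load the cloud mask eagerly if it fits in RAM
)
```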

### Changed

- masks statistics are not anymore suffixed with the cloudmask type : `clear_percent`and `clear_pixels`. Warns with a DeprecationWarning.
- masks statistics are not anymore suffixed with the cloudmask type : `clear_percent` and `clear_pixels`. Warns with a DeprecationWarning.
- all queries in `post_query` must return True to keep the item. If a key doesn't exist, the result is considered False (instead of failing).
- default chunks are now `x=512` and `y=512`for odc-stac
- default chunks are now `x=512` and `y=512` for odc-stac.
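
The `post_query` rule above is an all-of filter with missing keys treated as failures. A generic sketch of that semantics (not the library's actual implementation):

```python
def keep_item(item_properties: dict, post_query: dict) -> bool:
    # An item is kept only if every condition holds; a property missing from
    # the item counts as False instead of raising.
    for key, condition in post_query.items():
        if key not in item_properties or not condition(item_properties[key]):
            return False
    return True

keep_item({"eo:cloud_cover": 12}, {"eo:cloud_cover": lambda v: v < 50})  # True
keep_item({}, {"eo:cloud_cover": lambda v: v < 50})                      # False (missing key)
```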

### Fixed

- search `post_query` does not block if some properties are not available on all items.
- some scale/offsets were not supported due to missing scale/offsets from previous assets.
- typo in landsat `qa_pixel`
- issues when computing datacube using Landsat cloudmask (`qa_pixel`).
- `intersects` now supports several geometries and no longer forces selection of the first index.


22 changes: 14 additions & 8 deletions earthdaily/earthdatastore/__init__.py
Expand Up @@ -3,7 +3,7 @@
import operator
import os
import warnings

import psutil
import geopandas as gpd
import pandas as pd
import requests
@@ -377,15 +377,16 @@ def datacube(
collections: str | list,
datetime=None,
assets: None | list | dict = None,
intersects: gpd.GeoDataFrame = None,
intersects: (gpd.GeoDataFrame, str, dict) = None,
bbox=None,
mask_with: None | str = None,
mask_with: (None, str) = None,
mask_statistics: bool | int = False,
clear_cover: (int, float) = None,
prefer_alternate: (str, False) = "download",
search_kwargs: dict = {},
add_default_scale_factor: bool = True,
common_band_names=True,
preload_mask=True,
**kwargs,
) -> xr.Dataset:
if mask_with and common_band_names:
@@ -450,11 +451,13 @@
groupby_date="max",
epsg=xr_datacube.rio.crs.to_epsg(),
resolution=xr_datacube.rio.resolution()[0],
prefer_alternate="download",
)
prefer_alternate="download"
)
xr_datacube["time"] = xr_datacube.time.astype("M8[s]")
acm_datacube["time"] = acm_datacube.time.astype("M8[s]")

acm_datacube = cube_utils._match_xy_dims(acm_datacube, xr_datacube)
if preload_mask and psutil.virtual_memory().available > acm_datacube.nbytes:
acm_datacube = acm_datacube.load()
mask_kwargs.update(acm_datacube=acm_datacube)
else:
mask_assets = mask._native_mask_asset_mapping[collections]
@@ -471,8 +474,11 @@
bbox=bbox,
assets=[mask_assets],
resampling=0,
**kwargs,
)
**kwargs
)
clouds_datacube = cube_utils._match_xy_dims(clouds_datacube, xr_datacube)
if preload_mask and psutil.virtual_memory().available > clouds_datacube.nbytes:
clouds_datacube = clouds_datacube.load()
xr_datacube = xr.merge(
(xr_datacube, clouds_datacube), compat="override"
)
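
Both `preload_mask` branches above use the same guard: load the lazy cloud-mask datacube eagerly only when it fits in available virtual memory. A standalone sketch of that pattern, assuming an xarray/dask-backed dataset (the helper name is illustrative):

```python
import psutil
import xarray as xr


def maybe_preload(ds: xr.Dataset, preload: bool = True) -> xr.Dataset:
    # Pull the lazily-loaded dataset into memory only if it fits in the
    # currently available virtual memory; otherwise keep it lazy.
    if preload and psutil.virtual_memory().available > ds.nbytes:
        return ds.load()
    return ds
```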
9 changes: 7 additions & 2 deletions earthdaily/earthdatastore/cube_utils/__init__.py
@@ -11,6 +11,11 @@
import rioxarray as rxr
import xarray as xr

def _match_xy_dims(src, dst):
    if src.dims != dst.dims:
        src = src.rio.reproject_match(dst)
    return src


def _bbox_to_intersects(bbox):
    if isinstance(bbox, str):
@@ -35,7 +40,7 @@ def _autofix_unfrozen_coords_dtype(ds):
return ds


def _cube_odc(items_collection, assets=None, times=None, **kwargs):
def _cube_odc(items_collection, assets=None, times=None, dtype='float32', **kwargs):
from odc import stac

if "epsg" in kwargs:
@@ -50,7 +55,7 @@ def _cube_odc(items_collection, assets=None, times=None, **kwargs):
items_collection,
bands=assets,
preserve_original_order=True,
dtype="float32",
dtype=dtype,
groupby=None,
**kwargs,
)
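
The hard-coded `float32` becomes a `dtype` parameter, so mask bands can keep their native integer type. For reference, `odc.stac.load` accepts the same keyword directly; the STAC endpoint and collection below are placeholders:

```python
import pystac_client
from odc import stac

# Placeholder STAC endpoint and collection name.
catalog = pystac_client.Client.open("https://example.com/stac")
items = list(catalog.search(collections=["landsat-c2l2-sr"], max_items=4).items())

qa_cube = stac.load(
    items,
    bands=["qa_pixel"],
    dtype="uint16",             # keep the QA bitmask as integers
    chunks=dict(x=512, y=512),  # matches the new default chunking
)
```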
3 changes: 2 additions & 1 deletion earthdaily/earthdatastore/mask/__init__.py
@@ -162,6 +162,7 @@ def compute_clear_pixels(cloudmask_array, labels, labels_are_clouds=False):
cloudmask_array.time,
desc="Clear coverage statistics",
unit="item",
disable=True
)
)

@@ -223,7 +224,7 @@ def landsat_qa_pixel(self, add_mask_var=False, mask_statistics=False):

def _landsat_qa_pixel_convert(self):
for time in self._obj.time:
data = self._obj["qa_pixel"].loc[dict(time=time)].data.compute()
data = self._obj["qa_pixel"].loc[dict(time=time)].data
data_f = data.flatten()
clm = QA_PIXEL_cloud_detection(data_f[~np.isnan(data_f)])
clm = np.where(clm == 0, np.nan, clm)
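
The `qa_pixel` handling relies on the Landsat Collection 2 QA bitmask. A generic decoding sketch, independent of the library's own `QA_PIXEL_cloud_detection` helper (whose internals are not shown here):

```python
import numpy as np


def qa_pixel_is_cloudy(qa: np.ndarray) -> np.ndarray:
    # Landsat Collection 2 QA_PIXEL bits: 1 = dilated cloud, 2 = cirrus,
    # 3 = cloud, 4 = cloud shadow. Flag a pixel if any of them is set.
    # Assumes an integer-typed QA array (NaNs removed beforehand).
    cloud_bits = (1 << 1) | (1 << 2) | (1 << 3) | (1 << 4)
    return (qa.astype("uint16") & cloud_bits) > 0
```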
2 changes: 1 addition & 1 deletion examples/compare_scale_s2.py
@@ -36,7 +36,7 @@ def get_cube(rescale=True):
assets=["red", "green", "blue"],
mask_with="native", # same as scl
clear_cover=50, # at least 50% of the polygon must be clear
rescale=rescale )
rescale=rescale)
return pivot_cube


3 changes: 1 addition & 2 deletions examples/field_evolution.py
@@ -36,8 +36,7 @@
assets=["red", "green", "blue"],
mask_with="ag_cloud_mask", # same as scl
# mask_statistics=True, # as you ask `clear_cover`, it will force computing mask_statistics
clear_cover=100
)
clear_cover=50)

pivot_cube.clear_percent.plot.scatter(x="time")

69 changes: 0 additions & 69 deletions examples/field_evolution_landsat.py

This file was deleted.

4 changes: 2 additions & 2 deletions examples/venus_cube_mask.py
@@ -72,6 +72,6 @@
)
print(venus_datacube)

venus_datacube.isel(time=slice(29, 32), x=slice(4000, 4500), y=slice(4000, 4500))[
venus_datacube.isel(time=slice(29, 31), x=slice(4000, 4500), y=slice(4000, 4500))[
["red", "green", "blue"]
].to_array(dim="band").plot.imshow(col="time", vmin=0, vmax=0.33)
].to_array(dim="band").plot.imshow(col="time", vmin=0, vmax=0.30)
