update 0.0.1-rc5
setup.py update
tests and data
nicolasK committed Nov 2, 2023
1 parent e98c410 commit 4748c49
Showing 16 changed files with 141 additions and 74 deletions.
4 changes: 3 additions & 1 deletion CHANGELOG.md
@@ -5,12 +5,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).


## [0.0.1-rc5] 2023-11-02
## [0.0.1-rc5]

### Added

- `common_band_names` default set to True in datacube creation. It uses the new Assets Mapper to select the most suitable assets for the band names the user requests.
- `clear_cover` argument in the datacube method when using the `earthdatastore.Auth` method.
- `datasets.load_pivot()` to load a GeoDataFrame of a pivot in Nebraska (alternates between corn and soy from year to year); see the usage sketch below.
- Several tests to check and validate code.
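A minimal usage sketch of the features above (the `clear_cover` value, collection, and datetime are illustrative assumptions, not part of this release note):

```python
# Sketch only: exercises the additions listed above.
from earthdaily import earthdatastore, datasets

eds = earthdatastore.Auth()    # credentials read from environment variables
pivot = datasets.load_pivot()  # GeoDataFrame of the Nebraska pivot

datacube = eds.datacube(
    "sentinel-2-l2a",
    assets=["blue", "green", "red"],  # common band names, resolved by the Assets Mapper
    intersects=pivot,
    datetime="2023-07-01",
    clear_cover=50,  # assumption: keep only acquisitions with at least 50% clear pixels
)
```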

### Changed

2 changes: 1 addition & 1 deletion earthdaily/__init__.py
@@ -1,3 +1,3 @@
from earthdaily import earthdatastore
from . import earthdatastore, datasets

__version__ = "0.0.1-rc5"
29 changes: 29 additions & 0 deletions earthdaily/datasets/__init__.py
@@ -0,0 +1,29 @@
import os
import geopandas as gpd

__pathFile = os.path.dirname(os.path.realpath(__file__))


def load_pivot(to_wkt: bool = False, to_geojson: bool = False):
    """
    A pivot located in Nebraska.
    Parameters
    ----------
    to_wkt : bool, optional
        Returns the pivot as a WKT string. The default is False.
    to_geojson : bool, optional
        Returns the pivot as a GeoJSON string. The default is False.
    Returns
    -------
    pivot : str or GeoDataFrame
        The pivot as a GeoDataFrame, or as a WKT/GeoJSON string depending on the flags.
    """
    pivot = gpd.read_file(os.path.join(__pathFile, f"data{os.path.sep}pivot.geojson"))
    if to_wkt:
        pivot = pivot.to_wkt()["geometry"].iloc[0]
    if to_geojson:
        pivot = pivot.to_json()
    return pivot
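For reference, a quick usage sketch of the helper above (return types follow the flags in the docstring):

```python
from earthdaily import datasets

gdf = datasets.load_pivot()                     # geopandas.GeoDataFrame
wkt = datasets.load_pivot(to_wkt=True)          # WKT string of the pivot geometry
geojson = datasets.load_pivot(to_geojson=True)  # GeoJSON string
```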
Empty file.
File renamed without changes.
5 changes: 3 additions & 2 deletions earthdaily/earthdatastore/cube_utils/__init__.py
@@ -43,7 +43,7 @@ def _cube_odc(items_collection, assets=None, times=None, **kwargs):
    if "resampling" in kwargs:
        if isinstance(kwargs["resampling"], int):
            kwargs["resampling"] = Resampling(kwargs["resampling"]).name
    chunks = kwargs.get("chunks", dict(x="256", y="256", time="auto"))
    chunks = kwargs.get("chunks", dict(x="auto", y="auto", time="auto"))
    kwargs.pop("chunks", None)

    ds = stac.load(
@@ -55,6 +55,7 @@ def _cube_odc(items_collection, assets=None, times=None, **kwargs):
        groupby=None,
        **kwargs,
    )
    ds = ds.chunk(dict(x=256, y=256))

    return ds
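The hunk above loads with dask "auto" chunks and then normalizes to fixed 256x256 spatial blocks. A minimal standalone illustration of that pattern with a synthetic dataset (not part of the library):

```python
import numpy as np
import xarray as xr

# Synthetic stand-in for the datacube returned by stac.load.
ds = xr.Dataset(
    {"blue": (("time", "y", "x"), np.zeros((2, 1024, 1024), dtype="float32"))}
).chunk(dict(x="auto", y="auto", time="auto"))  # let dask pick the initial chunking

ds = ds.chunk(dict(x=256, y=256))  # then re-chunk to fixed 256x256 spatial blocks
print(ds["blue"].chunks)           # ((2,), (256, 256, 256, 256), (256, 256, 256, 256))
```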

@@ -110,7 +111,7 @@ def datacube(
    )
    if common_band_names and not isinstance(assets, dict):
        aM = AssetMapper()
        assets = aM.map_collection_bands(items_collection[0].collection_id, assets)
        assets = aM.map_collection_assets(items_collection[0].collection_id, assets)

    if isinstance(assets, dict):
        assets_keys = list(assets.keys())
35 changes: 19 additions & 16 deletions earthdaily/earthdatastore/cube_utils/asset_mapper/__init__.py
@@ -19,31 +19,34 @@ def _collection_exists(self, collection, raise_warning=False):
            )
        return exists

    def map_collection_bands(self, collection, bands):
        if isinstance(bands, (dict | None)):
            return bands
    def collection_spectral_assets(self, collection):
        return self.collection_mapping(collection)

    def map_collection_assets(self, collection, assets):
        if isinstance(assets, (dict | None)):
            return assets
        if not self._collection_exists(collection):
            return bands
            return assets

        # HANDLE LIST TO DICT CONVERSION
        if isinstance(bands, list):
            bands = {band: band for band in bands}
        if isinstance(assets, list):
            assets = {asset: asset for asset in assets}

        output_bands = {}
        output_assets = {}

        config = self.collection_mapping(collection)

        # Try to map each band
        for band in bands:
            if band in config[0]:
                output_bands[config[0][band]] = band
            # No band found with specified key (common band name)
        # Try to map each asset
        for asset in assets:
            if asset in config[0]:
                output_assets[config[0][asset]] = asset
            # No asset found with specified key (common asset name)
            else:
                # Looking for band matching the specified value (asset name)
                # Looking for asset matching the specified value (asset name)
                matching_assets = [
                    key for key, value in config[0].items() if value == band
                    key for key, value in config[0].items() if value == asset
                ]

                if matching_assets:
                    output_bands[band] = band
        return output_bands
                    output_assets[asset] = asset
        return output_assets
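A short sketch of how the renamed mapper behaves. The import path is assumed from the package layout shown in this commit, and the expected results mirror the assertions in tests/test_assetmapper.py:

```python
# Assumed import path based on the package layout in this commit.
from earthdaily.earthdatastore.cube_utils.asset_mapper import AssetMapper

aM = AssetMapper()

# Common names are mapped to the collection's asset keys; unknown names are dropped.
mapped = aM.map_collection_assets(
    "sentinel-2-l2a", ["blue", "green", "red", "rededge74", "missing_band"]
)
print(list(mapped.keys()))  # ['blue', 'green', 'red', 'rededge2'] per the tests

# dicts (and None) are returned unchanged.
print(aM.map_collection_assets("sentinel-2-l2a", {"source": "target"}))  # {'source': 'target'}
```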
4 changes: 2 additions & 2 deletions examples/compare_scale_s2.py
@@ -8,7 +8,7 @@
# Import libraries
# -------------------------------------------

from earthdaily import earthdatastore
from earthdaily import earthdatastore, datasets
import geopandas as gpd
from matplotlib import pyplot as plt

@@ -17,7 +17,7 @@
# -------------------------------------------

# load geojson
pivot = gpd.read_file("pivot.geojson")
pivot = datasets.load_pivot()

##############################################################################
# Init earthdatastore with env params
8 changes: 0 additions & 8 deletions examples/county_steel.geojson

This file was deleted.

4 changes: 2 additions & 2 deletions examples/field_evolution.py
@@ -8,7 +8,7 @@
# Import libraries
# -------------------------------------------

from earthdaily import earthdatastore
from earthdaily import earthdatastore, datasets
import geopandas as gpd
from matplotlib import pyplot as plt

@@ -17,7 +17,7 @@
# -------------------------------------------

# load geojson
pivot = gpd.read_file("pivot.geojson")
pivot = datasets.load_pivot()

##############################################################################
# Init earthdatastore with env params
4 changes: 2 additions & 2 deletions examples/first_steps_create_datacube.py
@@ -13,14 +13,14 @@
# Import libraries
# -------------------------------------------

from earthdaily import earthdatastore
from earthdaily import earthdatastore, datasets
import geopandas as gpd
from matplotlib import pyplot as plt

##########################
# Loading geometry

geometry = gpd.read_file("pivot.geojson")
geometry = datasets.load_pivot()

##########################
# Init earthdaily and check available assets
4 changes: 2 additions & 2 deletions examples/multisensors_cube.py
@@ -13,14 +13,14 @@
from matplotlib import pyplot as plt
from rasterio.enums import Resampling

from earthdaily import earthdatastore
from earthdaily import earthdatastore, datasets

##############################################################################
# Import libraries
# -------------------------------------------

eds = earthdatastore.Auth()
polygon = gpd.read_file("pivot.geojson")
polygon = datasets.load_pivot()
# 500x500m
polygon.geometry = (
    polygon.geometry.to_crs(epsg=3857).centroid.buffer(500).to_crs(epsg=4326)
8 changes: 4 additions & 4 deletions setup.py
@@ -9,7 +9,7 @@

setup(
    name="earthdaily",
    packages=find_packages(),
    packages=['earthdaily'],
    version=version,
    description="earthdaily: easy authentication, search and retrieval of Earth Data Store collections data",
    author="EarthDaily Agro",
@@ -23,24 +23,24 @@
        "matplotlib",
        "joblib",
        "psutil",
        "xarray",
        "pandas",
        "geopandas",
        "rasterio",
        "pystac-client",
        "pystac",
        "requests",
        "xarray",
        "rioxarray",
        "h5netcdf ",
        "netcdf4",
        "pystac",
        "stackstac",
        "odc-stac",
        "tqdm",
        "python-dotenv",
        "rich",
        "python-dotenv",
    ],
    include_package_data=True,
    package_data={"": ['*.geojson']},
    license="MIT",
    zip_safe=False,
    keywords=["Earth Data Store", "earthdaily", "earthdailyagro", "stac"],
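As a quick sanity check of the `include_package_data` / `package_data` settings above (the path layout follows `earthdaily/datasets/__init__.py` in this commit):

```python
# Assumed check: confirm the bundled pivot.geojson ships with an installed copy.
import os

from earthdaily import datasets

geojson_path = os.path.join(os.path.dirname(datasets.__file__), "data", "pivot.geojson")
print(os.path.exists(geojson_path))  # expected True when package data is included
```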
8 changes: 4 additions & 4 deletions tests/test_assetmapper.py
@@ -10,21 +10,21 @@ def setUp(self):
    def test_unknow_collection(self):
        collection = "earthdaily-unknow-collection"
        assets = ["blue", "green", "red", "lambda"]
        self.assertEqual(self.aM.map_collection_bands(collection, assets), assets)
        self.assertEqual(self.aM.map_collection_assets(collection, assets), assets)
        with self.assertRaises(NotImplementedError):
            self.aM._collection_exists(collection, raise_warning=True)

    def test_return_same_dict(self):
        collection = "sentinel-2-l2a"
        assets = {"key": "value", "source": "target", "sensorasset": "myoutputband"}
        self.assertEqual(self.aM.map_collection_bands(collection, assets), assets)
        self.assertEqual(self.aM.map_collection_assets(collection, assets), assets)

    def test_sentinel2(self):
        collection = "sentinel-2-l2a"
        assets = ["blue", "green", "red", "rededge74", "missing_band"]
        assets_s2 = ["blue", "green", "red", "rededge2"]
        self.assertEqual(
            list(self.aM.map_collection_bands(collection, assets).keys()), assets_s2
            list(self.aM.map_collection_assets(collection, assets).keys()), assets_s2
        )

    def test_venus_rededge(self):
@@ -36,7 +36,7 @@ def test_venus_rededge(self):
        }

        self.assertEqual(
            list(self.aM.map_collection_bands(collection, rededges.keys()).keys()),
            list(self.aM.map_collection_assets(collection, rededges.keys()).keys()),
            list(rededges.values()),
        )

71 changes: 51 additions & 20 deletions tests/test_simple_datacube.py
@@ -6,8 +6,9 @@
class TestEarthDataStore(unittest.TestCase):
    def setUp(self):
        self.eds = earthdaily.earthdatastore.Auth()
        self.pivot = earthdaily.datasets.load_pivot()

    def test_venus(self):
    def test_rescale_on_venus(self):
        collection = "venus-l2a"
        theia_location = "MEAD"
        max_cloud_cover = 20
@@ -16,27 +17,57 @@ def test_venus(self):
            "theia:location": {"eq": theia_location},
            "eo:cloud_cover": {"lt": max_cloud_cover},
        }

        items = self.eds.search(collection, query=query, max_items=1)
        crs = items[0].properties['proj:epsg']
        gsd = items[0].properties['gsd']


        datacube = self.eds.datacube(collection, assets=['image_file_SRE_B3'], search_kwargs=dict(query=query, max_items=1),resolution=gsd,crs=crs)

        self.assertEqual(datacube.rio.width,9374)
        self.assertEqual(datacube.rio.height,10161)
        self.assertEqual(datacube.time.size,1)
        blue = datacube['image_file_SRE_B3'].isel(x=5000,y=5000,time=0).data.compute()
        self.assertEqual(blue,0.028999999999999998)



        items = self.eds.search(collection, query=query, max_items=1)
        crs = items[0].properties["proj:epsg"]
        gsd = items[0].properties["gsd"]

        bands_info = (
            items[0].assets["image_file_SRE_B3"].extra_fields["raster:bands"][0]
        )
        scale, offset = bands_info["scale"], bands_info["offset"]
        for rescale in True, False:
            datacube = self.eds.datacube(
                collection,
                assets=["image_file_SRE_B3"],
                rescale=rescale,
                search_kwargs=dict(query=query, max_items=1),
                resolution=gsd,
                crs=crs,
            )

            # self.assertEqual(datacube.rio.width,9374)
            # self.assertEqual(datacube.rio.height,10161)
            self.assertEqual(datacube.time.size, 1)
            blue = (
                datacube["image_file_SRE_B3"]
                .isel(x=4000, y=4000, time=0)
                .data.compute()
            )
            if rescale is False:
                blue = blue * scale + offset
            self.assertEqual(blue, 0.136)

    def test_sentinel1(self):
        # TODO : implement s1
        collection = "sentinel-1-rtc"

        # datacube = self.eds.datacube(collection, bbox=bbox)
        datacube = self.eds.datacube(
            collection,
            assets=["vh", "vv"],
            intersects=self.pivot,
            datetime="2022-01",
        )
        self.assertEqual(list(datacube.data_vars.keys()), ["vh", "vv"])

    def test_sentinel2(self):
        collection = "sentinel-2-l2a"
        datacube = self.eds.datacube(
            collection,
            assets=["blue", "green", "red"],
            intersects=self.pivot,
            datetime="2023-07-01",
        )
        self.assertEqual(list(datacube.data_vars.keys()), ["blue", "green", "red"])


if __name__ == "__main__":
    unittest.main()
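For context, the rescale assertion in `test_rescale_on_venus` applies the STAC `raster:bands` scale and offset by hand when `rescale=False`. A minimal arithmetic sketch (values are illustrative, not taken from a real item):

```python
# Illustrative only: digital number to surface reflectance via raster:bands metadata.
dn = 136                    # raw pixel value (assumed)
scale, offset = 0.001, 0.0  # assumed raster:bands scale/offset
reflectance = dn * scale + offset
print(reflectance)          # 0.136, the value asserted in the test above
```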
