merge develop
mathleur committed Mar 4, 2024
2 parents 69250db + e7fb3b2 commit 5670e4b
Showing 58 changed files with 232 additions and 356 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yaml
@@ -155,8 +155,8 @@ jobs:
pip install setuptools wheel twine
- name: Build and publish
env:
-TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
-TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+TWINE_USERNAME: "__token__"
+TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
run: |
python setup.py sdist
twine upload dist/*
2 changes: 1 addition & 1 deletion docs/requirements.txt
@@ -1,3 +1,3 @@
-jinja2<3.1.0
+jinja2>=3.1.3
Markdown<3.2
mkdocs>=1.0
20 changes: 10 additions & 10 deletions examples/requirements_examples.txt
@@ -1,13 +1,13 @@
-r ../requirements.txt
-r ../tests/requirements_test.txt

-matplotlib==3.6.2
-matplotlib-inline==0.1.6
-Pillow==9.3.0
-Shapely==1.8.5.post1
-shp==1.0.2
-Fiona==1.8.22
-geopandas==0.12.2
-plotly==5.11.0
-pyshp==2.3.1
-cfgrib==0.9.10.3
+matplotlib
+matplotlib-inline
+Pillow
+Shapely
+shp
+Fiona
+geopandas
+plotly
+pyshp
+cfgrib
1 change: 1 addition & 0 deletions performance/fdb_performance_3D.py
@@ -20,6 +20,7 @@ def setup_method(self, method):
"date": {"transformation": {"merge": {"with": "time", "linkers": [" ", "00"]}}},
"step": {"transformation": {"type_change": "int"}},
"levelist": {"transformation": {"type_change": "int"}},
"longitude": {"transformation": {"cyclic": [0, 360]}},
}
self.config = {"class": "od", "expver": "0001", "levtype": "sfc"}
self.fdbdatacube = FDBDatacube(self.config, axis_options=self.options)
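The new "longitude" entry in the axis options declares the axis cyclic over [0, 360]. As a point of reference, wrapping a value into a cyclic range is plain modular arithmetic — a minimal sketch of that idea (the function name is illustrative, not part of polytope's API):

```python
def wrap_to_cyclic_range(value: float, low: float = 0.0, high: float = 360.0) -> float:
    """Map a value onto the cyclic interval [low, high) by wrapping it around the period."""
    period = high - low
    return low + (value - low) % period

# Out-of-range longitudes wrap around instead of falling outside the datacube:
print(wrap_to_cyclic_range(-10.0))  # 350.0
print(wrap_to_cyclic_range(370.0))  # 10.0
print(wrap_to_cyclic_range(180.0))  # 180.0
```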
13 changes: 7 additions & 6 deletions polytope/datacube/backends/datacube.py
@@ -33,6 +33,7 @@ def __init__(self, axis_options=None, datacube_options=None):
self.treated_axes = []
self.nearest_search = {}
self._axes = None
self.transformed_axes = []

@abstractmethod
def get(self, requests: IndexTree) -> Any:
@@ -64,9 +65,7 @@ def _create_axes(self, name, values, transformation_type_key, transformation_opt

# first need to change the values so that we have right type
values = transformation.change_val_type(axis_name, values)
-if self._axes is None:
-DatacubeAxis.create_standard(axis_name, values, self)
-elif axis_name not in self._axes.keys():
+if self._axes is None or axis_name not in self._axes.keys():
+DatacubeAxis.create_standard(axis_name, values, self)
# add transformation tag to axis, as well as transformation options for later
setattr(self._axes[axis_name], has_transform[transformation_type_key], True) # where has_transform is a
@@ -81,16 +80,16 @@ def _check_and_add_axes(self, options, name, values):

def _add_all_transformation_axes(self, options, name, values):
for transformation_type_key in options.keys():
if transformation_type_key != "cyclic":
self.transformed_axes.append(name)
self._create_axes(name, values, transformation_type_key, options)

def _check_and_add_axes(self, options, name, values):
if options is not None:
self._add_all_transformation_axes(options, name, values)
else:
if name not in self.blocked_axes:
-if self._axes is None:
-DatacubeAxis.create_standard(name, values, self)
-elif name not in self._axes.keys():
+if self._axes is None or name not in self._axes.keys():
+DatacubeAxis.create_standard(name, values, self)

def has_index(self, path: DatacubePath, axis, index):
@@ -114,6 +113,7 @@ def get_indices(self, path: DatacubePath, axis, lower, upper, method=None):
"""
path = self.fit_path(path)
indexes = axis.find_indexes(path, self)
# TODO: this could also be handled by axis/transformations?
search_ranges = axis.remap([lower, upper])
original_search_ranges = axis.to_intervals([lower, upper])
# Find the offsets for each interval in the requested range, which we will need later
@@ -133,6 +133,7 @@ def get_indices(self, path: DatacubePath, axis, lower, upper, method=None):

def _look_up_datacube(self, search_ranges, search_ranges_offset, indexes, axis, method):
idx_between = []
# TODO: maybe this can all go inside find_indices_between for the different cyclic and other transformations
for i in range(len(search_ranges)):
r = search_ranges[i]
offset = search_ranges_offset[i]
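For context on the `search_ranges` / offset bookkeeping used by `get_indices` and `_look_up_datacube`: a request on a cyclic axis may cross the wrap-around point, in which case it has to be split into pieces that lie inside the axis range, each remembering the offset that maps it back to the coordinates the caller asked for. A rough sketch of that idea, assuming a [0, 360) cyclic axis — not the datacube code itself:

```python
import math

def split_cyclic_range(lower: float, upper: float, low: float = 0.0, high: float = 360.0):
    """Split a requested [lower, upper] into sub-ranges inside the cyclic axis range
    [low, high), each paired with the offset that restores the original coordinates."""
    period = high - low
    pieces = []
    start = lower
    while start < upper:
        offset = math.floor((start - low) / period) * period  # shift of this piece
        wrapped_start = start - offset                        # now inside [low, high)
        end = min(upper, offset + high)                       # stop at the wrap point
        pieces.append(((wrapped_start, end - offset), offset))
        start = end
    return pieces

# A request crossing the wrap point splits into two in-range pieces:
# [((350.0, 360.0), 0.0), ((0.0, 10.0), 360.0)]
print(split_cyclic_range(350.0, 370.0))
```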
23 changes: 20 additions & 3 deletions polytope/datacube/backends/xarray.py
@@ -1,5 +1,6 @@
from copy import deepcopy

import numpy as np
import xarray as xr

from .datacube import Datacube, IndexTree
@@ -45,9 +46,25 @@ def get(self, requests: IndexTree):
path_copy = deepcopy(path)
for key in path_copy:
axis = self._axes[key]
(path, unmapped_path) = axis.unmap_to_datacube(path, unmapped_path)
# TODO: here do nearest point search
path = self.fit_path(path)
key_value_path = {key: path_copy[key]}
# (path, unmapped_path) = axis.unmap_to_datacube(path, unmapped_path)
(key_value_path, path, unmapped_path) = axis.unmap_path_key(key_value_path, path, unmapped_path)
path.update(key_value_path)
path.update(unmapped_path)

unmapped_path = {}
for key in path.keys():
if key not in self.dataarray.dims:
path.pop(key)
if key not in self.dataarray.coords.dtypes:
unmapped_path.update({key: path[key]})
path.pop(key)
for key in self.dataarray.coords.dtypes:
key_dtype = self.dataarray.coords.dtypes[key]
if key_dtype.type is np.str_ and key in path.keys():
unmapped_path.update({key: path[key]})
path.pop(key)

subxarray = self.dataarray.sel(path, method="nearest")
subxarray = subxarray.sel(unmapped_path)
value = subxarray.item()
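In the reworked `get`, dimension coordinates that can be matched by nearest-neighbour lookup stay in `path`, while anything else — including coordinates with a string (`np.str_`) dtype — is moved into `unmapped_path` and selected exactly in a second `.sel` call. This matches xarray's behaviour: `sel(..., method="nearest")` needs a numeric index to compute distances, so non-numeric coordinates have to be looked up by exact label. A small standalone illustration with toy data (not the polytope datacube):

```python
import numpy as np
import xarray as xr

da = xr.DataArray(
    np.arange(6.0).reshape(2, 3),
    dims=["param", "latitude"],
    coords={"param": ["2t", "10u"], "latitude": [0.0, 0.5, 1.0]},
)

# Nearest-neighbour selection works on the numeric coordinate...
nearest = da.sel({"latitude": 0.49}, method="nearest")

# ...while the string coordinate is selected by exact label in a second step.
value = nearest.sel({"param": "2t"}).item()
print(value)  # 1.0
```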
13 changes: 8 additions & 5 deletions polytope/datacube/datacube_axis.py
@@ -1,3 +1,4 @@
import bisect
from abc import ABC, abstractmethod
from copy import deepcopy
from typing import Any, List
@@ -70,10 +71,9 @@ def _remap_val_to_axis_range(self, value):
return value

def find_indices_between(self, index_ranges, low, up, datacube, method=None):
# TODO: add method for snappping
indexes_between_ranges = []
for indexes in index_ranges:
-if self.name in datacube.complete_axes:
+if self.name in datacube.complete_axes and self.name not in datacube.transformed_axes:
# Find the range of indexes between lower and upper
# https://pandas.pydata.org/docs/reference/api/pandas.Index.searchsorted.html
# Assumes the indexes are already sorted (could sort to be sure) and monotonically increasing
@@ -91,14 +91,16 @@ def find_indices_between(self, index_ranges, low, up, datacube, method=None):
indexes_between_ranges.append(indexes_between)
else:
if method == "surrounding" or method == "nearest":
-start = indexes.index(low)
-end = indexes.index(up)
+start = bisect.bisect_left(indexes, low)
+end = bisect.bisect_right(indexes, up)
start = max(start - 1, 0)
end = min(end + 1, len(indexes))
indexes_between = indexes[start:end]
indexes_between_ranges.append(indexes_between)
else:
-indexes_between = [i for i in indexes if low <= i <= up]
+lower_idx = bisect.bisect_left(indexes, low)
+upper_idx = bisect.bisect_right(indexes, up)
+indexes_between = indexes[lower_idx:upper_idx]
indexes_between_ranges.append(indexes_between)
return indexes_between_ranges

@@ -129,6 +131,7 @@ def __init__(self):
self.name = None
self.tol = 1e-12
self.range = None
# TODO: Maybe here, store transformations as a dico instead
self.transformations = []
self.type = 0

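The `surrounding`/`nearest` branch of `find_indices_between` now locates its slice with `bisect` rather than `list.index`, so the requested bounds no longer have to be exact members of the index list, and the slice is widened by one position on each side. A minimal sketch of the pattern on a plain sorted list (illustrative values):

```python
import bisect

indexes = [0.0, 0.5, 1.0, 1.5, 2.0]
low, up = 0.7, 1.2  # bounds need not be exact members of the list

# Plain range selection: everything with low <= value <= up.
lower_idx = bisect.bisect_left(indexes, low)
upper_idx = bisect.bisect_right(indexes, up)
print(indexes[lower_idx:upper_idx])  # [1.0]

# "surrounding"/"nearest": widen the slice by one index on each side.
start = max(lower_idx - 1, 0)
end = min(upper_idx + 1, len(indexes))
print(indexes[start:end])  # [0.5, 1.0, 1.5]
```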
2 changes: 2 additions & 0 deletions polytope/datacube/transformations/datacube_cyclic/__init__.py
@@ -0,0 +1,2 @@
from .cyclic_axis_decorator import *
from .datacube_cyclic import *
@@ -1,4 +1,3 @@
import bisect
import math
from copy import deepcopy
from typing import List
@@ -113,61 +112,6 @@ def remap(range: List):
ranges.append([low - cls.tol, up + cls.tol])
return ranges

old_find_indexes = cls.find_indexes

def find_indexes(path, datacube):
return old_find_indexes(path, datacube)

old_unmap_path_key = cls.unmap_path_key

def unmap_path_key(key_value_path, leaf_path, unwanted_path):
value = key_value_path[cls.name]
for transform in cls.transformations:
if isinstance(transform, DatacubeAxisCyclic):
if cls.name == transform.name:
new_val = _remap_val_to_axis_range(value)
key_value_path[cls.name] = new_val
key_value_path, leaf_path, unwanted_path = old_unmap_path_key(key_value_path, leaf_path, unwanted_path)
return (key_value_path, leaf_path, unwanted_path)

old_unmap_to_datacube = cls.unmap_to_datacube

def unmap_to_datacube(path, unmapped_path):
(path, unmapped_path) = old_unmap_to_datacube(path, unmapped_path)
return (path, unmapped_path)

old_find_indices_between = cls.find_indices_between

def find_indices_between(index_ranges, low, up, datacube, method=None):
update_range()
indexes_between_ranges = []

if method != "surrounding" or method != "nearest":
return old_find_indices_between(index_ranges, low, up, datacube, method)
else:
for indexes in index_ranges:
if cls.name in datacube.complete_axes:
start = indexes.searchsorted(low, "left")
end = indexes.searchsorted(up, "right")
else:
start = bisect.bisect_left(indexes, low)
end = bisect.bisect_right(indexes, up)

if start - 1 < 0:
index_val_found = indexes[-1:][0]
indexes_between_ranges.append([index_val_found])
if end + 1 > len(indexes):
index_val_found = indexes[:2][0]
indexes_between_ranges.append([index_val_found])
start = max(start - 1, 0)
end = min(end + 1, len(indexes))
if cls.name in datacube.complete_axes:
indexes_between = indexes[start:end].to_list()
else:
indexes_between = indexes[start:end]
indexes_between_ranges.append(indexes_between)
return indexes_between_ranges

def offset(range):
# We first unpad the range by the axis tolerance to make sure that
# we find the wanted range of the cyclic axis since we padded by the axis tolerance before.
@@ -180,10 +124,6 @@ def offset(range):
cls.to_intervals = to_intervals
cls.remap = remap
cls.offset = offset
cls.find_indexes = find_indexes
cls.unmap_to_datacube = unmap_to_datacube
cls.find_indices_between = find_indices_between
cls.unmap_path_key = unmap_path_key
cls._remap_val_to_axis_range = _remap_val_to_axis_range

return cls
@@ -0,0 +1,2 @@
from .datacube_mappers import *
from .mapper_axis_decorator import *
@@ -1,6 +1,3 @@
import bisect

from ....utility.list_tools import bisect_left_cmp, bisect_right_cmp
from .datacube_mappers import DatacubeMapper


@@ -18,35 +15,6 @@ def find_indexes(path, datacube):
first_val = path[transformation._mapped_axes()[0]]
return transformation.second_axis_vals(first_val)

old_unmap_to_datacube = cls.unmap_to_datacube

def unmap_to_datacube(path, unmapped_path):
(path, unmapped_path) = old_unmap_to_datacube(path, unmapped_path)
for transform in cls.transformations:
if isinstance(transform, DatacubeMapper):
if cls.name == transform._mapped_axes()[0]:
# if we are on the first axis, then need to add the first val to unmapped_path
first_val = path.get(cls.name, None)
path.pop(cls.name, None)
if cls.name not in unmapped_path:
# if for some reason, the unmapped_path already has the first axis val, then don't update
unmapped_path[cls.name] = first_val
if cls.name == transform._mapped_axes()[1]:
# if we are on the second axis, then the val of the first axis is stored
# inside unmapped_path so can get it from there
second_val = path.get(cls.name, None)
path.pop(cls.name, None)
first_val = unmapped_path.get(transform._mapped_axes()[0], None)
unmapped_path.pop(transform._mapped_axes()[0], None)
# if the first_val was not in the unmapped_path, then it's still in path
if first_val is None:
first_val = path.get(transform._mapped_axes()[0], None)
path.pop(transform._mapped_axes()[0], None)
if first_val is not None and second_val is not None:
unmapped_idx = transform.unmap(first_val, second_val)
unmapped_path[transform.old_axis] = unmapped_idx
return (path, unmapped_path)

old_unmap_path_key = cls.unmap_path_key

def unmap_path_key(key_value_path, leaf_path, unwanted_path):
@@ -65,44 +33,7 @@ def unmap_path_key(key_value_path, leaf_path, unwanted_path):
key_value_path[transform.old_axis] = unmapped_idx
return (key_value_path, leaf_path, unwanted_path)

def find_indices_between(index_ranges, low, up, datacube, method=None):
# TODO: add method for snappping
indexes_between_ranges = []
for transform in cls.transformations:
if isinstance(transform, DatacubeMapper):
transformation = transform
if cls.name in transformation._mapped_axes():
for idxs in index_ranges:
if method == "surrounding" or method == "nearest":
axis_reversed = transform._axis_reversed[cls.name]
if not axis_reversed:
start = bisect.bisect_left(idxs, low)
end = bisect.bisect_right(idxs, up)
else:
# TODO: do the custom bisect
end = bisect_left_cmp(idxs, low, cmp=lambda x, y: x > y) + 1
start = bisect_right_cmp(idxs, up, cmp=lambda x, y: x > y)
start = max(start - 1, 0)
end = min(end + 1, len(idxs))
indexes_between = idxs[start:end]
indexes_between_ranges.append(indexes_between)
else:
axis_reversed = transform._axis_reversed[cls.name]
if not axis_reversed:
lower_idx = bisect.bisect_left(idxs, low)
upper_idx = bisect.bisect_right(idxs, up)
indexes_between = idxs[lower_idx:upper_idx]
else:
# TODO: do the custom bisect
end_idx = bisect_left_cmp(idxs, low, cmp=lambda x, y: x > y) + 1
start_idx = bisect_right_cmp(idxs, up, cmp=lambda x, y: x > y)
indexes_between = idxs[start_idx:end_idx]
indexes_between_ranges.append(indexes_between)
return indexes_between_ranges

cls.find_indexes = find_indexes
cls.unmap_to_datacube = unmap_to_datacube
cls.find_indices_between = find_indices_between
cls.unmap_path_key = unmap_path_key

return cls
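What survives of the mapper decorator is `find_indexes` plus `unmap_path_key`: a mapper exposes a pair of mapped axes (for example latitude and longitude) on top of a single flat axis in the stored data, and once both values are known it folds them back into one index via `transform.unmap(first_val, second_val)`. A toy sketch of that two-axes-to-flat-index idea for a regular grid — illustrative only, not the `DatacubeMapper` API:

```python
# Toy mapper over a regular 3 x 4 grid: the datacube stores one flat axis,
# while requests are phrased on two mapped axes (lat, lon).
LATS = [60.0, 30.0, 0.0]
LONS = [0.0, 90.0, 180.0, 270.0]

def second_axis_vals(lat: float) -> list:
    """Longitudes available on a given latitude row (constant for a regular grid)."""
    return LONS

def unmap(lat: float, lon: float) -> int:
    """Fold a (lat, lon) pair back into the index on the flat, unmapped axis."""
    return LATS.index(lat) * len(LONS) + LONS.index(lon)

print(unmap(30.0, 180.0))  # 6  -> row 1 * 4 + column 2
```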
@@ -0,0 +1,5 @@
from .healpix import *
from .local_regular import *
from .octahedral import *
from .reduced_ll import *
from .regular import *
2 changes: 2 additions & 0 deletions polytope/datacube/transformations/datacube_merger/__init__.py
@@ -0,0 +1,2 @@
from .datacube_merger import *
from .merger_axis_decorator import *
