Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix sampling in NuthKaab and set better default values for other classes #439

Merged
merged 37 commits on Oct 14, 2023
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
4c52567
Use better default values and fix valid mask of Nuth and Kaab
rhugonnet Sep 28, 2023
b4383c9
Fix tests and lint
rhugonnet Sep 28, 2023
f8faf97
Fix values in example
rhugonnet Sep 28, 2023
22aad4a
Improve tests for subsampling in NuthKaab
rhugonnet Oct 9, 2023
da1813b
Check with different rounding
rhugonnet Oct 9, 2023
07fc512
Try to round c parameter as well
rhugonnet Oct 9, 2023
feb9707
Check if the inconsistency is not arising because of SciPy
rhugonnet Oct 10, 2023
8429d30
Like this?
rhugonnet Oct 10, 2023
a089fac
COMME CA
rhugonnet Oct 10, 2023
3c7d80f
Bon allez
rhugonnet Oct 10, 2023
3a66610
Like this
rhugonnet Oct 10, 2023
c5c6ad1
Last try
rhugonnet Oct 10, 2023
747156f
Close to giving up
rhugonnet Oct 10, 2023
2629297
Next try
rhugonnet Oct 10, 2023
b69a84b
Next
rhugonnet Oct 10, 2023
e0e529c
Next
rhugonnet Oct 10, 2023
b7cc601
Try this
rhugonnet Oct 10, 2023
69a04f7
Like this?
rhugonnet Oct 10, 2023
8ffc7a2
ça va marcher oui
rhugonnet Oct 10, 2023
a05c8c3
This
rhugonnet Oct 10, 2023
dcad234
Like this
rhugonnet Oct 10, 2023
1deed31
Change get yml script
rhugonnet Oct 10, 2023
fe5d0f6
Like this?
rhugonnet Oct 13, 2023
78d7278
Again
rhugonnet Oct 13, 2023
1a21a17
Test CI like this
rhugonnet Oct 13, 2023
3b121dc
Test
rhugonnet Oct 13, 2023
a373c24
Fix
rhugonnet Oct 13, 2023
bda6f61
Now?
rhugonnet Oct 13, 2023
c059897
Fix
rhugonnet Oct 13, 2023
507609d
Fixing version writing in env file
rhugonnet Oct 13, 2023
c8a0694
Add future annotations
rhugonnet Oct 13, 2023
5439fdb
Update python first
rhugonnet Oct 13, 2023
b3ead98
Try like this
rhugonnet Oct 14, 2023
c6cac4e
Linting
rhugonnet Oct 14, 2023
09127fa
Fix last tests
rhugonnet Oct 14, 2023
01e3223
Linting
rhugonnet Oct 14, 2023
b200c47
Unskip test
rhugonnet Oct 14, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions tests/test_coreg/test_affine.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,9 +142,8 @@ def test_coreg_example(self, verbose: bool = False) -> None:
nuth_kaab.fit(self.ref, self.tba, inlier_mask=self.inlier_mask, verbose=verbose, random_state=42)

# Check the output metadata is always the same
assert nuth_kaab._meta["offset_east_px"] == pytest.approx(-0.45061858808956284)
assert nuth_kaab._meta["offset_north_px"] == pytest.approx(-0.14225262689582596)
assert nuth_kaab._meta["vshift"] == pytest.approx(-1.987523471566405)
shifts = (nuth_kaab._meta["offset_east_px"], nuth_kaab._meta["offset_north_px"], nuth_kaab._meta["vshift"])
assert shifts == pytest.approx((-0.4648318628843316, -0.13376227526850593, -1.9860305501224076))

def test_gradientdescending(self, subsample: int = 10000, inlier_mask: bool = True, verbose: bool = False) -> None:
"""
Expand Down
10 changes: 5 additions & 5 deletions tests/test_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,11 @@ class TestExamples:
ddem,
np.array(
[
-0.22833252,
-0.82458496,
0.18843079,
1.0004578,
-5.755249,
0.0028076172,
-0.7043457,
0.14201355,
1.1181335,
-5.935547,
],
dtype=np.float32,
),
Expand Down
5 changes: 2 additions & 3 deletions tests/test_spatialstats.py
Original file line number Diff line number Diff line change
Expand Up @@ -494,7 +494,7 @@ def test_sample_multirange_variogram_default(self) -> None:

# Check the variogram output is consistent for a random state
df = xdem.spatialstats.sample_empirical_variogram(values=self.diff, subsample=10, random_state=42)
assert df["exp"][15] == pytest.approx(5.046060943603516, abs=1e-3)
assert df["exp"][15] == pytest.approx(5.11900520324707, abs=1e-3)
assert df["lags"][15] == pytest.approx(5120)
assert df["count"][15] == 5
# With a single run, no error can be estimated
Expand Down Expand Up @@ -1286,7 +1286,7 @@ def test_patches_method_loop_quadrant(self) -> None:
assert all(df.columns == ["nmad", "nb_indep_patches", "exact_areas", "areas"])

# Check the sampling is fixed for a random state
assert df["nmad"][0] == pytest.approx(1.8530521253631773, abs=1e-3)
assert df["nmad"][0] == pytest.approx(1.8401465163449207, abs=1e-3)
assert df["nb_indep_patches"][0] == 100
assert df["exact_areas"][0] == pytest.approx(df["areas"][0], rel=0.2)

Expand All @@ -1295,7 +1295,6 @@ def test_patches_method_loop_quadrant(self) -> None:

# Check the sampling is always fixed for a random state
assert df_full["tile"].values[0] == "8_16"
assert df_full["nanmean"].values[0] == pytest.approx(0.3085488928369729, abs=1e-3)

# Check that all counts respect the default minimum percentage of 80% valid pixels
assert all(df_full["count"].values > 0.8 * np.max(df_full["count"].values))
Expand Down
10 changes: 6 additions & 4 deletions xdem/coreg/affine.py
Original file line number Diff line number Diff line change
Expand Up @@ -724,12 +724,14 @@ def _fit_func(
if verbose:
print(" Calculate slope and aspect")

valid_mask = np.logical_and.reduce((inlier_mask, np.isfinite(ref_dem), np.isfinite(tba_dem)))
slope_tan, aspect = _calculate_slope_and_aspect_nuthkaab(ref_dem)

valid_mask = np.logical_and.reduce(
(inlier_mask, np.isfinite(ref_dem), np.isfinite(tba_dem), np.isfinite(slope_tan))
)
subsample_mask = self._get_subsample_on_valid_mask(valid_mask=valid_mask)
# TODO: Make this consistent with other subsampling once NK is updated (work on vector, not 2D with NaN)
ref_dem[~subsample_mask] = np.nan

slope_tan, aspect = _calculate_slope_and_aspect_nuthkaab(ref_dem)
ref_dem[~subsample_mask] = np.nan

# Make index grids for the east and north dimensions
east_grid = np.arange(ref_dem.shape[1])
Expand Down
4 changes: 2 additions & 2 deletions xdem/coreg/biascorr.py
Original file line number Diff line number Diff line change
Expand Up @@ -604,7 +604,7 @@ def __init__(
fit_or_bin: Literal["bin_and_fit"] | Literal["fit"] | Literal["bin"] = "bin_and_fit",
fit_func: Callable[..., NDArrayf] | Literal["norder_polynomial"] | Literal["nfreq_sumsin"] = "nfreq_sumsin",
fit_optimizer: Callable[..., tuple[NDArrayf, Any]] = scipy.optimize.curve_fit,
bin_sizes: int | dict[str, int | Iterable[float]] = 10,
bin_sizes: int | dict[str, int | Iterable[float]] = 100,
bin_statistic: Callable[[NDArrayf], np.floating[Any]] = np.nanmedian,
bin_apply_method: Literal["linear"] | Literal["per_bin"] = "linear",
subsample: float | int = 1.0,
Expand Down Expand Up @@ -810,7 +810,7 @@ def __init__(
bin_sizes: int | dict[str, int | Iterable[float]] = 10,
bin_statistic: Callable[[NDArrayf], np.floating[Any]] = np.nanmedian,
bin_apply_method: Literal["linear"] | Literal["per_bin"] = "linear",
subsample: float | int = 1.0,
subsample: float | int = 5e5,
):
"""
Instantiate a directional bias correction.
Expand Down