Merge branch '418-normalisation-du-risk-par-intervalle-global-externe' into 'release'

Resolve "Normalisation du risk par intervalle global externe"

See merge request 3d/PandoraBox/pandora!361
lecontm committed Jul 31, 2024
2 parents 4db6bc8 + a6005a0 commit 1fd5eb7
Showing 5 changed files with 75 additions and 32 deletions.
22 changes: 2 additions & 20 deletions pandora/cost_volume_confidence/ambiguity.py
@@ -144,14 +144,14 @@ def confidence_prediction(
         # If activated, ambiguity normalization with percentile
         if self._normalization:
             if "global_disparity" in img_left.attrs:
-                ambiguity = self.normalize_with_extremum(ambiguity, img_left)
+                ambiguity = self.normalize_with_extremum(ambiguity, img_left, nbr_etas=self._nbr_etas)
                 logging.info(
                     "You are not using ambiguity normalization by percentile; \n"
                     "you are in a specific case with the instantiation of global_disparity."
                 )
             # in case of cross correlation
             elif "global_disparity" in img_right.attrs:
-                ambiguity = self.normalize_with_extremum(ambiguity, img_right)
+                ambiguity = self.normalize_with_extremum(ambiguity, img_right, nbr_etas=self._nbr_etas)
             else:
                 ambiguity = self.normalize_with_percentile(ambiguity)

@@ -179,24 +179,6 @@ def normalize_with_percentile(self, ambiguity: np.ndarray) -> np.ndarray:

         return (norm_amb - np.min(norm_amb)) / (np.max(norm_amb) - np.min(norm_amb))

-    def normalize_with_extremum(self, ambiguity: np.ndarray, dataset: xr.Dataset) -> np.ndarray:
-        """
-        Normalize ambiguity with extremum
-        :param ambiguity: ambiguity
-        :type ambiguity: 2D np.ndarray (row, col) dtype = float32
-        :param dataset: Dataset image
-        :tye dataset: xarray.Dataset
-        :return: the normalized ambiguity
-        :rtype: 2D np.ndarray (row, col) dtype = float32
-        """
-        norm_amb = np.copy(ambiguity)
-        global_disp_max = dataset.attrs["global_disparity"][1]
-        global_disp_min = dataset.attrs["global_disparity"][0]
-        max_norm = (global_disp_max - global_disp_min) * self._nbr_etas
-
-        return norm_amb / max_norm

     @staticmethod
     @njit(
         "f4[:, :](f4[:, :, :], f8[:], i8, i8[:, :, :],f4[:])",
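For reference, the extremum normalization removed here (and re-added on the parent class in cost_volume_confidence.py below) boils down to one division by the largest reachable value. A minimal standalone sketch with illustrative values, not code from the repository:

    import numpy as np

    ambiguity = np.ones((4, 4), dtype=np.float32)
    global_disp_min, global_disp_max = -2, 2  # dataset.attrs["global_disparity"]
    nbr_etas = 2                              # np.arange(0.0, 0.2, 0.1).size

    # Divide by the upper bound used by the method:
    # disparity extent times the number of eta samples
    normalized = ambiguity / ((global_disp_max - global_disp_min) * nbr_etas)
    # every cell is now 1 / ((2 - (-2)) * 2) = 0.125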
21 changes: 21 additions & 0 deletions pandora/cost_volume_confidence/cost_volume_confidence.py
@@ -112,6 +112,27 @@ def confidence_prediction(
         :return: None
         """

+    @staticmethod
+    def normalize_with_extremum(confidence: np.ndarray, dataset: xr.Dataset, nbr_etas: int) -> np.ndarray:
+        """
+        Normalize confidence with extremum
+        :param confidence: confidence
+        :type confidence: 2D np.ndarray (row, col) dtype = float32
+        :param dataset: Dataset image
+        :type dataset: xarray.Dataset
+        :param nbr_etas: size of etas
+        :type nbr_etas: int
+        :return: the normalized confidence
+        :rtype: 2D np.ndarray (row, col) dtype = float32
+        """
+        norm_confidence = np.copy(confidence)
+        global_disp_max = dataset.attrs["global_disparity"][1]
+        global_disp_min = dataset.attrs["global_disparity"][0]
+        max_norm = (global_disp_max - global_disp_min) * nbr_etas
+
+        return norm_confidence / max_norm
+
     @staticmethod
     def allocate_confidence_map(
         name_confidence_measure: str,
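A minimal usage sketch of the new shared helper, assuming only what the diff shows (a static method on AbstractCostVolumeConfidence reading a "global_disparity" (min, max) attribute from the dataset); the tiny dataset below is hypothetical:

    import numpy as np
    import xarray as xr

    import pandora.cost_volume_confidence as confidence

    # Hypothetical dataset carrying only the attribute the helper needs
    dataset = xr.Dataset(attrs={"global_disparity": (-2, 2)})
    conf_map = np.ones((4, 4), dtype=np.float32)

    # Static method, so both the ambiguity and risk subclasses can reuse it
    normalized = confidence.AbstractCostVolumeConfidence.normalize_with_extremum(conf_map, dataset, 2)
    # -> every cell equals 1 / ((2 - (-2)) * 2) = 0.125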
21 changes: 16 additions & 5 deletions pandora/cost_volume_confidence/risk.py
@@ -22,7 +22,7 @@
"""
This module contains functions for estimating the risk.
"""

import logging
import os
import warnings
from typing import Dict, Tuple, Union
@@ -141,6 +141,16 @@ def confidence_prediction(
         _, sampled_ambiguity = ambiguity_.compute_ambiguity_and_sampled_ambiguity(  # type: ignore
             cv["cost_volume"].data, self._etas, self._nbr_etas, grids, disparity_range
         )
+
+        if "global_disparity" in img_left.attrs:
+            sampled_ambiguity = self.normalize_with_extremum(sampled_ambiguity, img_left, self._nbr_etas)
+            logging.info(
+                "You are using normalization by \n a specific case with the instantiation of global_disparity"
+            )
+        # in case of cross correlation
+        elif "global_disparity" in img_right.attrs:
+            sampled_ambiguity = self.normalize_with_extremum(sampled_ambiguity, img_right, self._nbr_etas)
+
         # Computes risk using numba in parallel for memory and computation time optimization
         risk_max, risk_min = self.compute_risk(
             cv["cost_volume"].data,
@@ -215,13 +225,14 @@ def compute_risk(
            # Normalized cost volume for one point
            normalized_cv = (cv[row, col, :] - min_cost) / (max_cost - min_cost)
            # Mask nan to -inf to later discard values out of [min; min + eta]

            idx_disp_min = np.searchsorted(disparity_range, grids[0][row, col])
            idx_disp_max = np.searchsorted(disparity_range, grids[1][row, col]) + 1

-            normalized_cv[idx_disp_min:idx_disp_max][
-                np.isnan(normalized_cv[idx_disp_min:idx_disp_max])
-            ] = -np.inf
+            nan_in_normalized_cv = np.isnan(normalized_cv)
+
+            normalized_cv[idx_disp_min:idx_disp_max][nan_in_normalized_cv[idx_disp_min:idx_disp_max]] = -np.inf
+            normalized_cv[:idx_disp_min][nan_in_normalized_cv[:idx_disp_min]] = np.inf
+            normalized_cv[idx_disp_max:][nan_in_normalized_cv[idx_disp_max:]] = np.inf

            normalized_cv = np.repeat(normalized_cv, nbr_etas)
            # Initialize all disparities
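The revised masking distinguishes NaNs inside the per-pixel disparity interval (still sent to -inf, as the surrounding comment describes) from NaNs outside it (now sent to +inf, so they can no longer enter the [min; min + eta] band). A small sketch of the same three assignments on a made-up 1-D cost slice (all values hypothetical):

    import numpy as np

    disparity_range = np.arange(-4.0, 5.0)  # full range, 9 candidate disparities
    normalized_cv = np.array([np.nan, 0.2, 0.4, np.nan, 0.0, 0.5, 1.0, np.nan, 0.8], dtype=np.float32)
    disp_min, disp_max = -2.0, 2.0          # per-pixel bounds, as read from grids

    idx_disp_min = np.searchsorted(disparity_range, disp_min)
    idx_disp_max = np.searchsorted(disparity_range, disp_max) + 1

    nan_in_normalized_cv = np.isnan(normalized_cv)

    # NaN at index 3 (inside [idx_disp_min, idx_disp_max)) becomes -inf;
    # NaNs at indices 0 and 7 (outside the interval) become +inf
    normalized_cv[idx_disp_min:idx_disp_max][nan_in_normalized_cv[idx_disp_min:idx_disp_max]] = -np.inf
    normalized_cv[:idx_disp_min][nan_in_normalized_cv[:idx_disp_min]] = np.inf
    normalized_cv[idx_disp_max:][nan_in_normalized_cv[idx_disp_max:]] = np.inf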
2 changes: 1 addition & 1 deletion tests/test_confidence/test_ambiguity.py
@@ -218,7 +218,7 @@ def test_normalize_with_extremum(create_img_for_confidence):
     ambiguity = np.ones((4, 4))

     # normalize_with_extremum function to test
-    amb_test = ambiguity_.normalize_with_extremum(ambiguity, left_im)
+    amb_test = ambiguity_.normalize_with_extremum(ambiguity, left_im, ambiguity_._nbr_etas)

     # create ground truth
     nbr_etas = np.arange(0.0, 0.2, 0.1).shape[0]
41 changes: 35 additions & 6 deletions tests/test_confidence/test_risk.py
@@ -26,6 +26,7 @@
 import pandora.cost_volume_confidence as confidence
 from pandora import matching_cost
 from pandora.criteria import validity_mask
+from pandora import img_tools


 def test_compute_risk():
@@ -182,17 +183,17 @@ def test_compute_risk_with_subpix(create_images):
     gt_risk_max = np.array(
         [
             [4.0, 3.3714285, 2.9285715, 4.0],
-            [3.8285713, 3.8428571, 2.3, 4.0],
-            [3.1857142, 1.5142857, 3.7142856, 3.5142858],
+            [1.657143, 3.8428571, 2.3, 4.0],
+            [1.1857142, 1.5142857, 3.7142856, 3.5142858],
             [4.0, 3.2857144, 3.7428572, 3.942857],
         ],
         dtype=np.float32,
     )
     gt_risk_min = np.array(
         [
             [0.8142857, 0.0, 0.0, 1.5714285],
-            [2.1714287, 0.3, 0.0, 1.3714286],
-            [2.0, 0.0, 0.8857143, 0.0],
+            [0.0, 0.3, 0.0, 1.3714286],
+            [0.0, 0.0, 0.8857143, 0.0],
             [0.14285715, 0.0, 0.14285715, 0.27142859],
         ],
         dtype=np.float32,
@@ -226,10 +227,10 @@ def test_compute_risk_with_variable_disparity(
     risk_ = confidence.AbstractCostVolumeConfidence(**{"confidence_method": "risk", "eta_max": 0.2, "eta_step": 0.1})

     gt_risk_max = np.array(
-        [[2.0, 1.5, 1.5, 1.0], [2.0, 1.0, 1.5, 2.0], [2.0, 2.0, 1.0, 2.0], [2.0, 1.5, 1.5, 1.0]], dtype=np.float32
+        [[2.0, 1.5, 1.5, 1.0], [2.0, 1.0, 1.5, 2.0], [1.0, 1.0, 0.0, 1.0], [1.0, 1.5, 1.5, 1.0]], dtype=np.float32
     )
     gt_risk_min = np.array(
-        [[0.0, 0.5, 0.5, 0.0], [1.0, 0.0, 0.5, 0.0], [1.0, 1.0, 1.0, 1.0], [1.0, 0.5, 0.5, 0.0]], dtype=np.float32
+        [[0.0, 0.5, 0.5, 0.0], [1.0, 0.0, 0.5, 0.0], [0.0, 0.0, 0.0, 0.0], [0.0, 0.5, 0.5, 0.0]], dtype=np.float32
     )

     etas = np.arange(0.0, 0.5, 0.3)
@@ -412,3 +413,31 @@ def test_compute_risk_and_sampled_risk_with_variable_disparity(
     # Check if the calculated sampled risks are equal to the ground truth (same shape and all elements equals)
     np.testing.assert_allclose(gt_sampled_risk_max, sampled_risk_max, rtol=1e-06)
     np.testing.assert_allclose(gt_sampled_risk_min, sampled_risk_min, rtol=1e-06)
+
+
+def test_normalize_with_extremum(create_img_for_confidence):
+    """
+    Test the normalize_with_extremum function
+    """
+
+    # create data
+    left_im, _ = create_img_for_confidence
+
+    # Add tiles disparity
+    left_im.attrs["disp_min"] = 0
+    left_im.attrs["disp_max"] = 1
+
+    # Add global disparity
+    left_im = img_tools.add_global_disparity(left_im, -2, 2)
+
+    risk_ = confidence.AbstractCostVolumeConfidence(**{"confidence_method": "risk", "eta_max": 0.2, "eta_step": 0.1})
+    sampled_ambiguity = np.ones((4, 4))
+
+    # normalize_with_extremum function to test
+    sampled_ambiguity_test = risk_.normalize_with_extremum(sampled_ambiguity, left_im, risk_._nbr_etas)
+
+    # create ground truth
+    nbr_etas = np.arange(0.0, 0.2, 0.1).shape[0]
+    sampled_ambiguity_vt = np.copy(sampled_ambiguity) / ((2 - (-2)) * nbr_etas)
+
+    np.testing.assert_array_equal(sampled_ambiguity_test, sampled_ambiguity_vt)
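
The new test can be run on its own with the standard pytest node-id syntax:

    pytest tests/test_confidence/test_risk.py::test_normalize_with_extremum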
