Commit 762b525
Fix precision issue in TestClipIntensityPercentiles3D (Project-MONAI#7808)
Fixes Project-MONAI#7797

### Description

Ensure the same dtype is used when computing the expected values in the tests, to avoid precision issues.

### Types of changes

- [x] Non-breaking change (fix or new feature that would not break existing functionality).
- [ ] Breaking change (fix or new feature that would cause existing functionality to change).
- [ ] New tests added to cover the changes.
- [ ] Integration tests passed locally by running `./runtests.sh -f -u --net --coverage`.
- [ ] Quick tests passed locally by running `./runtests.sh --quick --unittests --disttests`.
- [ ] In-line docstrings updated.
- [ ] Documentation updated, tested `make html` command in the `docs/` folder.

---------

Signed-off-by: YunLiu <55491388+KumoLiu@users.noreply.github.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent 94ab632 commit 762b525
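Context for the precision issue: the old tests computed the expected percentiles directly on the raw input (NumPy array or tensor, depending on the parameterization), while this change computes them on a `convert_to_tensor`-converted input so the expected values follow the same percentile path as the transform. The sketch below is a rough, standalone illustration (plain NumPy/PyTorch rather than the MONAI helpers; the shapes and numbers are purely illustrative) of why mixing the two backends can exceed a tight tolerance.

```python
# Standalone sketch, not the MONAI code path: the same 95th percentile computed by
# NumPy on the raw float32 array and by torch on the tensor version of the same data.
import numpy as np
import torch

rng = np.random.default_rng(0)
im = rng.random((1, 32, 32, 32)).astype(np.float32)

p_np = float(np.percentile(im, 95))                           # NumPy path (percentile in 0-100)
p_torch = torch.quantile(torch.from_numpy(im), 0.95).item()   # torch path (quantile in 0-1)

# The two results are typically close but not identical in float32,
# which is enough to trip a tight rtol when the test and the transform
# take different backends.
print(p_np, p_torch, abs(p_np - p_torch))
```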

File tree: 2 files changed (+67, -63 lines)

tests/test_clip_intensity_percentiles.py (+44, -31)

@@ -18,74 +18,92 @@
 from monai.transforms import ClipIntensityPercentiles
 from monai.transforms.utils import soft_clip
 from monai.transforms.utils_pytorch_numpy_unification import clip, percentile
+from monai.utils.type_conversion import convert_to_tensor
 from tests.utils import TEST_NDARRAYS, NumpyImageTestCase2D, NumpyImageTestCase3D, assert_allclose


+def test_hard_clip_func(im, lower, upper):
+    im_t = convert_to_tensor(im)
+    if lower is None:
+        upper = percentile(im_t, upper)
+    elif upper is None:
+        lower = percentile(im_t, lower)
+    else:
+        lower, upper = percentile(im_t, (lower, upper))
+    return clip(im_t, lower, upper)
+
+
+def test_soft_clip_func(im, lower, upper):
+    im_t = convert_to_tensor(im)
+    if lower is None:
+        upper = percentile(im_t, upper)
+    elif upper is None:
+        lower = percentile(im_t, lower)
+    else:
+        lower, upper = percentile(im_t, (lower, upper))
+    return soft_clip(im_t, minv=lower, maxv=upper, sharpness_factor=1.0, dtype=torch.float32)
+
+
 class TestClipIntensityPercentiles2D(NumpyImageTestCase2D):

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_hard_clipping_two_sided(self, p):
         hard_clipper = ClipIntensityPercentiles(upper=95, lower=5)
         im = p(self.imt)
         result = hard_clipper(im)
-        lower, upper = percentile(im, (5, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 95)
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_hard_clipping_one_sided_high(self, p):
         hard_clipper = ClipIntensityPercentiles(upper=95, lower=None)
         im = p(self.imt)
         result = hard_clipper(im)
-        lower, upper = percentile(im, (0, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 0, 95)
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_hard_clipping_one_sided_low(self, p):
         hard_clipper = ClipIntensityPercentiles(upper=None, lower=5)
         im = p(self.imt)
         result = hard_clipper(im)
-        lower, upper = percentile(im, (5, 100))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 100)
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_soft_clipping_two_sided(self, p):
         soft_clipper = ClipIntensityPercentiles(upper=95, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper(im)
-        lower, upper = percentile(im, (5, 95))
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=upper, dtype=torch.float32)
-        # the rtol is set to 1e-6 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, 5, 95)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_soft_clipping_one_sided_high(self, p):
         soft_clipper = ClipIntensityPercentiles(upper=95, lower=None, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper(im)
-        upper = percentile(im, 95)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=None, maxv=upper, dtype=torch.float32)
-        # the rtol is set to 5e-5 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, None, 95)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_soft_clipping_one_sided_low(self, p):
         soft_clipper = ClipIntensityPercentiles(upper=None, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper(im)
-        lower = percentile(im, 5)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=None, dtype=torch.float32)
-        # the rtol is set to 1e-6 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, 5, None)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_channel_wise(self, p):
         clipper = ClipIntensityPercentiles(upper=95, lower=5, channel_wise=True)
         im = p(self.imt)
         result = clipper(im)
-        for i, c in enumerate(im):
+        im_t = convert_to_tensor(self.imt)
+        for i, c in enumerate(im_t):
             lower, upper = percentile(c, (5, 95))
             expected = clip(c, lower, upper)
             assert_allclose(result[i], p(expected), type_test="tensor", rtol=1e-4, atol=0)
@@ -118,35 +136,31 @@ def test_hard_clipping_two_sided(self, p):
         hard_clipper = ClipIntensityPercentiles(upper=95, lower=5)
         im = p(self.imt)
         result = hard_clipper(im)
-        lower, upper = percentile(im, (5, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 95)
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_hard_clipping_one_sided_high(self, p):
         hard_clipper = ClipIntensityPercentiles(upper=95, lower=None)
         im = p(self.imt)
         result = hard_clipper(im)
-        lower, upper = percentile(im, (0, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 0, 95)
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_hard_clipping_one_sided_low(self, p):
         hard_clipper = ClipIntensityPercentiles(upper=None, lower=5)
         im = p(self.imt)
         result = hard_clipper(im)
-        lower, upper = percentile(im, (5, 100))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 100)
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_soft_clipping_two_sided(self, p):
         soft_clipper = ClipIntensityPercentiles(upper=95, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper(im)
-        lower, upper = percentile(im, (5, 95))
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=upper, dtype=torch.float32)
+        expected = test_soft_clip_func(im, 5, 95)
         # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

@@ -155,27 +169,26 @@ def test_soft_clipping_one_sided_high(self, p):
         soft_clipper = ClipIntensityPercentiles(upper=95, lower=None, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper(im)
-        upper = percentile(im, 95)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=None, maxv=upper, dtype=torch.float32)
-        # the rtol is set to 5e-5 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, None, 95)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_soft_clipping_one_sided_low(self, p):
         soft_clipper = ClipIntensityPercentiles(upper=None, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper(im)
-        lower = percentile(im, 5)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=None, dtype=torch.float32)
-        # the rtol is set to 1e-6 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, 5, None)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result, p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
     def test_channel_wise(self, p):
         clipper = ClipIntensityPercentiles(upper=95, lower=5, channel_wise=True)
         im = p(self.imt)
         result = clipper(im)
-        for i, c in enumerate(im):
+        im_t = convert_to_tensor(self.imt)
+        for i, c in enumerate(im_t):
             lower, upper = percentile(c, (5, 95))
             expected = clip(c, lower, upper)
             assert_allclose(result[i], p(expected), type_test="tensor", rtol=1e-4, atol=0)
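The in-diff comments attribute the loosened rtol to `logaddexp` (used by `softplus` inside soft clipping) behaving slightly differently across torch and numpy. A hedged, standalone sketch of that kind of float32 discrepancy follows (plain NumPy/PyTorch, not MONAI's `soft_clip`; magnitudes are illustrative only).

```python
# Standalone sketch: softplus(x) = log(1 + exp(x)) = logaddexp(0, x), compared
# between the NumPy and torch implementations in float32.
import numpy as np
import torch

x = np.linspace(-20.0, 20.0, 101, dtype=np.float32)

sp_np = np.logaddexp(np.float32(0.0), x)                                   # NumPy softplus
sp_torch = torch.logaddexp(torch.zeros(x.shape), torch.from_numpy(x)).numpy()  # torch softplus

# Typically a tiny but nonzero maximum difference in float32, which is why the
# tests compare against an expected value computed on the same (torch) path.
print(np.max(np.abs(sp_np - sp_torch)))
```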

tests/test_clip_intensity_percentilesd.py (+23, -32)

@@ -13,14 +13,15 @@

 import unittest

-import torch
 from parameterized import parameterized

 from monai.transforms import ClipIntensityPercentilesd
-from monai.transforms.utils import soft_clip
 from monai.transforms.utils_pytorch_numpy_unification import clip, percentile
+from monai.utils.type_conversion import convert_to_tensor
 from tests.utils import TEST_NDARRAYS, NumpyImageTestCase2D, NumpyImageTestCase3D, assert_allclose

+from .test_clip_intensity_percentiles import test_hard_clip_func, test_soft_clip_func
+

 class TestClipIntensityPercentilesd2D(NumpyImageTestCase2D):

@@ -30,8 +31,7 @@ def test_hard_clipping_two_sided(self, p):
         hard_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=5)
         im = p(self.imt)
         result = hard_clipper({key: im})
-        lower, upper = percentile(im, (5, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 95)
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -40,8 +40,7 @@ def test_hard_clipping_one_sided_high(self, p):
         hard_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=None)
         im = p(self.imt)
         result = hard_clipper({key: im})
-        lower, upper = percentile(im, (0, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 0, 95)
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -50,8 +49,7 @@ def test_hard_clipping_one_sided_low(self, p):
         hard_clipper = ClipIntensityPercentilesd(keys=[key], upper=None, lower=5)
         im = p(self.imt)
         result = hard_clipper({key: im})
-        lower, upper = percentile(im, (5, 100))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 100)
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -60,9 +58,8 @@ def test_soft_clipping_two_sided(self, p):
         soft_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper({key: im})
-        lower, upper = percentile(im, (5, 95))
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=upper, dtype=torch.float32)
-        # the rtol is set to 1e-6 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, 5, 95)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -71,9 +68,8 @@ def test_soft_clipping_one_sided_high(self, p):
         soft_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=None, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper({key: im})
-        upper = percentile(im, 95)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=None, maxv=upper, dtype=torch.float32)
-        # the rtol is set to 5e-5 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, None, 95)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -82,9 +78,8 @@ def test_soft_clipping_one_sided_low(self, p):
         soft_clipper = ClipIntensityPercentilesd(keys=[key], upper=None, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper({key: im})
-        lower = percentile(im, 5)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=None, dtype=torch.float32)
-        # the rtol is set to 1e-6 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, 5, None)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -93,7 +88,8 @@ def test_channel_wise(self, p):
         clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=5, channel_wise=True)
         im = p(self.imt)
         result = clipper({key: im})
-        for i, c in enumerate(im):
+        im_t = convert_to_tensor(self.imt)
+        for i, c in enumerate(im_t):
             lower, upper = percentile(c, (5, 95))
             expected = clip(c, lower, upper)
             assert_allclose(result[key][i], p(expected), type_test="tensor", rtol=1e-3, atol=0)
@@ -132,8 +128,7 @@ def test_hard_clipping_two_sided(self, p):
         hard_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=5)
         im = p(self.imt)
         result = hard_clipper({key: im})
-        lower, upper = percentile(im, (5, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 95)
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -142,8 +137,7 @@ def test_hard_clipping_one_sided_high(self, p):
         hard_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=None)
         im = p(self.imt)
         result = hard_clipper({key: im})
-        lower, upper = percentile(im, (0, 95))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 0, 95)
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -152,8 +146,7 @@ def test_hard_clipping_one_sided_low(self, p):
         hard_clipper = ClipIntensityPercentilesd(keys=[key], upper=None, lower=5)
         im = p(self.imt)
         result = hard_clipper({key: im})
-        lower, upper = percentile(im, (5, 100))
-        expected = clip(im, lower, upper)
+        expected = test_hard_clip_func(im, 5, 100)
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -162,8 +155,7 @@ def test_soft_clipping_two_sided(self, p):
         soft_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper({key: im})
-        lower, upper = percentile(im, (5, 95))
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=upper, dtype=torch.float32)
+        expected = test_soft_clip_func(im, 5, 95)
         # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

@@ -173,9 +165,8 @@ def test_soft_clipping_one_sided_high(self, p):
         soft_clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=None, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper({key: im})
-        upper = percentile(im, 95)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=None, maxv=upper, dtype=torch.float32)
-        # the rtol is set to 5e-5 because the logaddexp function used in softplus is not stable accross torch and numpy
+        expected = test_soft_clip_func(im, None, 95)
+        # the rtol is set to 1e-4 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

     @parameterized.expand([[p] for p in TEST_NDARRAYS])
@@ -184,8 +175,7 @@ def test_soft_clipping_one_sided_low(self, p):
         soft_clipper = ClipIntensityPercentilesd(keys=[key], upper=None, lower=5, sharpness_factor=1.0)
         im = p(self.imt)
         result = soft_clipper({key: im})
-        lower = percentile(im, 5)
-        expected = soft_clip(im, sharpness_factor=1.0, minv=lower, maxv=None, dtype=torch.float32)
+        expected = test_soft_clip_func(im, 5, None)
         # the rtol is set to 1e-6 because the logaddexp function used in softplus is not stable accross torch and numpy
         assert_allclose(result[key], p(expected), type_test="tensor", rtol=1e-4, atol=0)

@@ -195,7 +185,8 @@ def test_channel_wise(self, p):
         clipper = ClipIntensityPercentilesd(keys=[key], upper=95, lower=5, channel_wise=True)
         im = p(self.imt)
         result = clipper({key: im})
-        for i, c in enumerate(im):
+        im_t = convert_to_tensor(im)
+        for i, c in enumerate(im_t):
             lower, upper = percentile(c, (5, 95))
             expected = clip(c, lower, upper)
             assert_allclose(result[key][i], p(expected), type_test="tensor", rtol=1e-4, atol=0)
