Skip to content

Commit 3ad1da2

Browse files
committed
feat(tests): add unit tests
1 parent facf840 commit 3ad1da2

21 files changed

+703
-5
lines changed

requirements.txt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,4 +3,5 @@ numpy==1.25.2
33
open3d==0.17.0
44
octreelib @ git+https://github.com/true-real-michael/octreelib@v0.0.5
55
scikit-learn==1.3.1
6-
PyYAML~=6.0.1
6+
PyYAML~=6.0.1
7+
pytest~=7.4.3

slam/utils/pose_readwriter.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
def write(filepath: str, pose: "Array4x4[float]") -> None:
    """
    Writes a 4x4 pose matrix to a file: one row per line, values separated
    by single spaces, with no trailing newline after the last row.

    Parameters
    ----------
    filepath: str
        Path of the file to write the pose to
    pose: Array4x4[float]
        Matrix with pose values to write
    """
    # NOTE(review): `assert` is stripped under `python -O`; raising ValueError
    # would be sturdier input validation, but the AssertionError contract is
    # kept here so callers' expectations do not change.
    assert pose.shape == (4, 4)

    with open(filepath, "w+") as file:
        for ind, pose_line in enumerate(pose):
            file.write(" ".join(str(x) for x in pose_line))
            # Bug fix: the previous condition `ind != pose.shape[0]` was always
            # true (enumerate yields ind up to shape[0] - 1), so a trailing
            # newline was still emitted after the final row. Newlines are row
            # separators only.
            if ind != pose.shape[0] - 1:
                file.write("\n")

tests/data/poses/correct_pose.txt

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
1 2 3 4
2+
1 2 3 4
3+
1 2 3 4
4+
0 0 0 1

tests/data/poses/incorrect_pose.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
1 2 3 4
2+
1 2
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
dataset:
2+
path: "path/to/dataset"
3+
patches:
4+
start: 0
5+
end: 100
6+
step: 10
7+
iterations: 1
8+
grid:
9+
voxel_edge_length: 8
10+
subdividers:
11+
size: 2
12+
segmenters:
13+
ransac:
14+
threshold: 0.01
15+
initial_points: 6
16+
iterations: 5000
17+
backend:
18+
type: "eigen_factor"
19+
parameters:
20+
iterations_number: 5000
21+
robust_type: HUBER
22+
output:
23+
visualization_path: "output"
24+
optimisation_path: "output"
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
dataset:
2+
path: "path/to/dataset"
3+
patches:
4+
start: 0
5+
end: 100
6+
step: 10
7+
iterations: 1
8+
grid:
9+
voxel_edge_length: 8
10+
subdividers:
11+
size: 2
12+
segmenters:
13+
ransac:
14+
threshold: 0.01
15+
initial_points: 6
16+
iterations: 5000
17+
output:
18+
visualization_path: "output"
19+
optimisation_path: "output"
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
grid:
2+
voxel_edge_length: 8
3+
subdividers:
4+
size: 2
5+
segmenters:
6+
ransac:
7+
threshold: 0.01
8+
initial_points: 6
9+
iterations: 5000
10+
backend:
11+
type: "eigen_factor"
12+
parameters:
13+
iterations_number: 5000
14+
robust_type: HUBER
15+
output:
16+
visualization_path: "output"
17+
optimisation_path: "output"
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
dataset:
2+
path: "path/to/dataset"
3+
grid:
4+
voxel_edge_length: 8
5+
subdividers:
6+
size: 2
7+
segmenters:
8+
ransac:
9+
threshold: 0.01
10+
initial_points: 6
11+
iterations: 5000
12+
backend:
13+
type: "eigen_factor"
14+
parameters:
15+
iterations_number: 5000
16+
robust_type: HUBER
17+
output:
18+
visualization_path: "output"
19+
optimisation_path: "output"
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
dataset:
2+
path: "path/to/dataset"
3+
patches:
4+
start: 0
5+
end: 100
6+
step: 10
7+
iterations: 1
8+
grid:
9+
voxel_edge_length: 8
10+
subdividers:
11+
size: 2
12+
backend:
13+
type: "eigen_factor"
14+
parameters:
15+
iterations_number: 5000
16+
robust_type: HUBER
17+
output:
18+
visualization_path: "output"
19+
optimisation_path: "output"
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
dataset:
2+
path: "path/to/dataset"
3+
patches:
4+
start: 0
5+
end: 100
6+
step: 10
7+
iterations: 1
8+
grid:
9+
voxel_edge_length: 8
10+
segmenters:
11+
ransac:
12+
threshold: 0.01
13+
initial_points: 6
14+
iterations: 5000
15+
backend:
16+
type: "eigen_factor"
17+
parameters:
18+
iterations_number: 5000
19+
robust_type: HUBER
20+
output:
21+
visualization_path: "output"
22+
optimisation_path: "output"

tests/test_cape_segmenter.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
import numpy as np
2+
import pytest
3+
4+
import random
5+
6+
from slam.segmenter import CAPESegmenter
7+
from slam.typing import ArrayNx3
8+
9+
10+
@pytest.mark.parametrize(
11+
"points, correlation, points_size",
12+
[
13+
(
14+
[
15+
np.array(
16+
[random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)]
17+
)
18+
for _ in range(100)
19+
],
20+
100,
21+
100,
22+
),
23+
(
24+
[
25+
np.array(
26+
[random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)]
27+
)
28+
for _ in range(100)
29+
],
30+
0,
31+
0,
32+
),
33+
],
34+
)
35+
def test_cape_segmenter(
36+
points: ArrayNx3[float],
37+
correlation: float,
38+
points_size: int,
39+
):
40+
cape_segmenter = CAPESegmenter(correlation)
41+
actual_points = cape_segmenter(points)
42+
assert len(actual_points) == points_size

tests/test_count_segmenter.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
import numpy as np
2+
import pytest
3+
4+
from slam.segmenter import CountSegmenter
5+
from slam.typing import ArrayNx3
6+
7+
8+
@pytest.mark.parametrize(
9+
"points, count, expected_points",
10+
[
11+
(np.array([[0, 0, 0]]), 0, np.array([[0, 0, 0]])),
12+
(np.array([[0, 0, 0]]), 10, np.empty((0, 3))),
13+
],
14+
)
15+
def test_count_segmenter(
16+
points: ArrayNx3[float],
17+
count: int,
18+
expected_points: ArrayNx3[float],
19+
):
20+
count_segmenter = CountSegmenter(count=count)
21+
actual_points = count_segmenter(points)
22+
assert len(actual_points) == len(expected_points)
23+
assert np.all(actual_points == expected_points)

tests/test_count_subdivider.py

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import numpy as np
2+
import pytest
3+
4+
from slam.subdivider import CountSubdivider
5+
from slam.typing import ArrayNx3
6+
7+
8+
@pytest.mark.parametrize(
9+
"points, count, expected_decision",
10+
[
11+
(
12+
np.array(
13+
[
14+
[0, 0, 0],
15+
[0, 1, 0],
16+
[1, 1, 1],
17+
]
18+
),
19+
1,
20+
True,
21+
),
22+
(
23+
np.array(
24+
[
25+
[0, 0, 0],
26+
[0, 1, 0],
27+
[1, 1, 1],
28+
]
29+
),
30+
5,
31+
False,
32+
),
33+
(
34+
np.empty((0, 3)),
35+
1,
36+
False,
37+
),
38+
],
39+
)
40+
def test_count_subdivider(
41+
points: ArrayNx3[float],
42+
count: float,
43+
expected_decision: bool,
44+
):
45+
count_subdivider = CountSubdivider(count=count)
46+
assert expected_decision == count_subdivider(points)

tests/test_eigen_value_subdivider.py

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import numpy as np
2+
import pytest
3+
4+
import random
5+
6+
from slam.subdivider import EigenValueSubdivider
7+
from slam.typing import ArrayNx3
8+
9+
10+
@pytest.mark.parametrize(
11+
"points, eigen_value, expected_decision",
12+
[
13+
(
14+
[
15+
np.array(
16+
[random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)]
17+
)
18+
for _ in range(100)
19+
],
20+
1,
21+
False,
22+
),
23+
(
24+
[
25+
np.array(
26+
[random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)]
27+
)
28+
for _ in range(100)
29+
],
30+
1000,
31+
False,
32+
),
33+
(
34+
np.empty((0, 3)),
35+
1,
36+
False,
37+
),
38+
],
39+
)
40+
def test_eigen_value_subdivider(
41+
points: ArrayNx3[float],
42+
eigen_value: float,
43+
expected_decision: bool,
44+
):
45+
eigen_value_subdivider = EigenValueSubdivider(value=eigen_value)
46+
assert expected_decision == eigen_value_subdivider(points)

tests/test_empty_voxel_filter.py

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
import numpy as np
2+
import pytest
3+
4+
from slam.filter import EmptyVoxel
5+
from slam.typing import ArrayNx3
6+
7+
8+
@pytest.mark.parametrize(
9+
"points, expected_decision",
10+
[
11+
(np.array([0, 0, 0]), True),
12+
(np.empty(0), False),
13+
],
14+
)
15+
def test_empty_voxel(points: ArrayNx3[float], expected_decision: bool):
16+
empty_voxel_filter = EmptyVoxel()
17+
assert empty_voxel_filter(points) == expected_decision

tests/test_identical_segmenter.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
import numpy as np
2+
import pytest
3+
4+
from slam.segmenter import IdenticalSegmenter
5+
from slam.typing import ArrayNx3
6+
7+
8+
@pytest.mark.parametrize(
9+
"points, expected_points",
10+
[
11+
(np.array([[0, 0, 0]]), np.array([[0, 0, 0]])),
12+
(np.empty(0), np.empty(0)),
13+
],
14+
)
15+
def test_identical_segmenter(
16+
points: ArrayNx3[float],
17+
expected_points: ArrayNx3[float],
18+
):
19+
identical_segmenter = IdenticalSegmenter()
20+
actual_points = identical_segmenter(points)
21+
assert len(actual_points) == len(expected_points)
22+
assert np.all(actual_points == expected_points)

0 commit comments

Comments
 (0)