From 3ad1da2d2ba91ba5c09b4c223a2331296eae16e2 Mon Sep 17 00:00:00 2001
From: Pavel Mokeev <pav3l.mokeev@gmail.com>
Date: Tue, 5 Dec 2023 14:50:39 +0300
Subject: [PATCH 1/4] feat(tests): add unit tests

---
 requirements.txt                              |   3 +-
 slam/utils/pose_readwriter.py                 |  11 +-
 tests/data/poses/correct_pose.txt             |   4 +
 tests/data/poses/incorrect_pose.txt           |   2 +
 .../correct_configuration.yaml                |  24 ++++
 .../incorrect_configuration_backend.yaml      |  19 +++
 .../incorrect_configuration_dataset.yaml      |  17 +++
 .../incorrect_configuration_patches.yaml      |  19 +++
 .../incorrect_configuration_segmenters.yaml   |  19 +++
 .../incorrect_configuration_subdividers.yaml  |  22 +++
 tests/test_cape_segmenter.py                  |  42 ++++++
 tests/test_count_segmenter.py                 |  23 +++
 tests/test_count_subdivider.py                |  46 ++++++
 tests/test_eigen_value_subdivider.py          |  46 ++++++
 tests/test_empty_voxel_filter.py              |  17 +++
 tests/test_identical_segmenter.py             |  22 +++
 tests/test_pose_readwriter.py                 |  97 +++++++++++++
 tests/test_ransac_segmenter.py                |  31 ++++
 tests/test_sequential_pipeline.py             |  64 +++++++++
 tests/test_size_subdivider.py                 |  45 ++++++
 tests/test_yaml_configuration_reader.py       | 135 ++++++++++++++++++
 21 files changed, 703 insertions(+), 5 deletions(-)
 create mode 100644 tests/data/poses/correct_pose.txt
 create mode 100644 tests/data/poses/incorrect_pose.txt
 create mode 100644 tests/data/yaml_configurations/correct_configuration.yaml
 create mode 100644 tests/data/yaml_configurations/incorrect_configuration_backend.yaml
 create mode 100644 tests/data/yaml_configurations/incorrect_configuration_dataset.yaml
 create mode 100644 tests/data/yaml_configurations/incorrect_configuration_patches.yaml
 create mode 100644 tests/data/yaml_configurations/incorrect_configuration_segmenters.yaml
 create mode 100644 tests/data/yaml_configurations/incorrect_configuration_subdividers.yaml
 create mode 100644 tests/test_cape_segmenter.py
 create mode 100644 tests/test_count_segmenter.py
 create mode 100644 tests/test_count_subdivider.py
 create mode 100644 tests/test_eigen_value_subdivider.py
 create mode 100644 tests/test_empty_voxel_filter.py
 create mode 100644 tests/test_identical_segmenter.py
 create mode 100644 tests/test_pose_readwriter.py
 create mode 100644 tests/test_ransac_segmenter.py
 create mode 100644 tests/test_sequential_pipeline.py
 create mode 100644 tests/test_size_subdivider.py
 create mode 100644 tests/test_yaml_configuration_reader.py

diff --git a/requirements.txt b/requirements.txt
index b7701c3..dfe03bd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,4 +3,5 @@ numpy==1.25.2
 open3d==0.17.0
 octreelib @ git+https://github.com/true-real-michael/octreelib@v0.0.5
 scikit-learn==1.3.1
-PyYAML~=6.0.1
\ No newline at end of file
+PyYAML~=6.0.1
+pytest~=7.4.3
\ No newline at end of file
diff --git a/slam/utils/pose_readwriter.py b/slam/utils/pose_readwriter.py
index d1f93eb..913a190 100644
--- a/slam/utils/pose_readwriter.py
+++ b/slam/utils/pose_readwriter.py
@@ -49,8 +49,11 @@ def write(filepath: str, pose: Array4x4[float]) -> None:
         pose: Array4x4[float]
             Matrix with pose values to write
         """
+        assert pose.shape == (4, 4)
+
         with open(filepath, "w+") as file:
-            for pose_line in pose:
-                for value in pose_line:
-                    file.write(f"{value} ")
-                file.write("\n")
+            for ind, pose_line in enumerate(pose):
+                file.write(" ".join(str(x) for x in pose_line))
+
+                if ind != pose.shape[0] - 1:
+                    file.write("\n")
diff --git a/tests/data/poses/correct_pose.txt b/tests/data/poses/correct_pose.txt
new file mode 100644
index 0000000..bd81319
--- /dev/null
+++ b/tests/data/poses/correct_pose.txt
@@ -0,0 +1,4 @@
+1 2 3 4
+1 2 3 4
+1 2 3 4
+0 0 0 1
\ No newline at end of file
diff --git a/tests/data/poses/incorrect_pose.txt b/tests/data/poses/incorrect_pose.txt
new file mode 100644
index 0000000..ec41856
--- /dev/null
+++ b/tests/data/poses/incorrect_pose.txt
@@ -0,0 +1,2 @@
+1 2 3 4
+1 2
\ No newline at end of file
diff --git a/tests/data/yaml_configurations/correct_configuration.yaml b/tests/data/yaml_configurations/correct_configuration.yaml
new file mode 100644
index 0000000..bace78a
--- /dev/null
+++ b/tests/data/yaml_configurations/correct_configuration.yaml
@@ -0,0 +1,24 @@
+dataset:
+  path: "path/to/dataset"
+  patches:
+    start: 0
+    end: 100
+    step: 10
+    iterations: 1
+grid:
+  voxel_edge_length: 8
+subdividers:
+  size: 2
+segmenters:
+  ransac:
+    threshold: 0.01
+    initial_points: 6
+    iterations: 5000
+backend:
+  type: "eigen_factor"
+  parameters:
+    iterations_number: 5000
+    robust_type: HUBER
+output:
+  visualization_path: "output"
+  optimisation_path: "output"
\ No newline at end of file
diff --git a/tests/data/yaml_configurations/incorrect_configuration_backend.yaml b/tests/data/yaml_configurations/incorrect_configuration_backend.yaml
new file mode 100644
index 0000000..ea590ea
--- /dev/null
+++ b/tests/data/yaml_configurations/incorrect_configuration_backend.yaml
@@ -0,0 +1,19 @@
+dataset:
+  path: "path/to/dataset"
+  patches:
+    start: 0
+    end: 100
+    step: 10
+    iterations: 1
+grid:
+  voxel_edge_length: 8
+subdividers:
+  size: 2
+segmenters:
+  ransac:
+    threshold: 0.01
+    initial_points: 6
+    iterations: 5000
+output:
+  visualization_path: "output"
+  optimisation_path: "output"
\ No newline at end of file
diff --git a/tests/data/yaml_configurations/incorrect_configuration_dataset.yaml b/tests/data/yaml_configurations/incorrect_configuration_dataset.yaml
new file mode 100644
index 0000000..e7abc1b
--- /dev/null
+++ b/tests/data/yaml_configurations/incorrect_configuration_dataset.yaml
@@ -0,0 +1,17 @@
+grid:
+  voxel_edge_length: 8
+subdividers:
+  size: 2
+segmenters:
+  ransac:
+    threshold: 0.01
+    initial_points: 6
+    iterations: 5000
+backend:
+  type: "eigen_factor"
+  parameters:
+    iterations_number: 5000
+    robust_type: HUBER
+output:
+  visualization_path: "output"
+  optimisation_path: "output"
\ No newline at end of file
diff --git a/tests/data/yaml_configurations/incorrect_configuration_patches.yaml b/tests/data/yaml_configurations/incorrect_configuration_patches.yaml
new file mode 100644
index 0000000..4759515
--- /dev/null
+++ b/tests/data/yaml_configurations/incorrect_configuration_patches.yaml
@@ -0,0 +1,19 @@
+dataset:
+  path: "path/to/dataset"
+grid:
+  voxel_edge_length: 8
+subdividers:
+  size: 2
+segmenters:
+  ransac:
+    threshold: 0.01
+    initial_points: 6
+    iterations: 5000
+backend:
+  type: "eigen_factor"
+  parameters:
+    iterations_number: 5000
+    robust_type: HUBER
+output:
+  visualization_path: "output"
+  optimisation_path: "output"
\ No newline at end of file
diff --git a/tests/data/yaml_configurations/incorrect_configuration_segmenters.yaml b/tests/data/yaml_configurations/incorrect_configuration_segmenters.yaml
new file mode 100644
index 0000000..063dda0
--- /dev/null
+++ b/tests/data/yaml_configurations/incorrect_configuration_segmenters.yaml
@@ -0,0 +1,19 @@
+dataset:
+  path: "path/to/dataset"
+  patches:
+    start: 0
+    end: 100
+    step: 10
+    iterations: 1
+grid:
+  voxel_edge_length: 8
+subdividers:
+  size: 2
+backend:
+  type: "eigen_factor"
+  parameters:
+    iterations_number: 5000
+    robust_type: HUBER
+output:
+  visualization_path: "output"
+  optimisation_path: "output"
"output" \ No newline at end of file diff --git a/tests/data/yaml_configurations/incorrect_configuration_subdividers.yaml b/tests/data/yaml_configurations/incorrect_configuration_subdividers.yaml new file mode 100644 index 0000000..aafc198 --- /dev/null +++ b/tests/data/yaml_configurations/incorrect_configuration_subdividers.yaml @@ -0,0 +1,22 @@ +dataset: + path: "path/to/dataset" + patches: + start: 0 + end: 100 + step: 10 + iterations: 1 +grid: + voxel_edge_length: 8 +segmenters: + ransac: + threshold: 0.01 + initial_points: 6 + iterations: 5000 +backend: + type: "eigen_factor" + parameters: + iterations_number: 5000 + robust_type: HUBER +output: + visualization_path: "output" + optimisation_path: "output" \ No newline at end of file diff --git a/tests/test_cape_segmenter.py b/tests/test_cape_segmenter.py new file mode 100644 index 0000000..5e4cf9f --- /dev/null +++ b/tests/test_cape_segmenter.py @@ -0,0 +1,42 @@ +import numpy as np +import pytest + +import random + +from slam.segmenter import CAPESegmenter +from slam.typing import ArrayNx3 + + +@pytest.mark.parametrize( + "points, correlation, points_size", + [ + ( + [ + np.array( + [random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)] + ) + for _ in range(100) + ], + 100, + 100, + ), + ( + [ + np.array( + [random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)] + ) + for _ in range(100) + ], + 0, + 0, + ), + ], +) +def test_cape_segmenter( + points: ArrayNx3[float], + correlation: float, + points_size: int, +): + cape_segmenter = CAPESegmenter(correlation) + actual_points = cape_segmenter(points) + assert len(actual_points) == points_size diff --git a/tests/test_count_segmenter.py b/tests/test_count_segmenter.py new file mode 100644 index 0000000..d59d52f --- /dev/null +++ b/tests/test_count_segmenter.py @@ -0,0 +1,23 @@ +import numpy as np +import pytest + +from slam.segmenter import CountSegmenter +from slam.typing import ArrayNx3 + + +@pytest.mark.parametrize( + "points, count, expected_points", + [ + (np.array([[0, 0, 0]]), 0, np.array([[0, 0, 0]])), + (np.array([[0, 0, 0]]), 10, np.empty((0, 3))), + ], +) +def test_count_segmenter( + points: ArrayNx3[float], + count: int, + expected_points: ArrayNx3[float], +): + count_segmenter = CountSegmenter(count=count) + actual_points = count_segmenter(points) + assert len(actual_points) == len(expected_points) + assert np.all(actual_points == expected_points) diff --git a/tests/test_count_subdivider.py b/tests/test_count_subdivider.py new file mode 100644 index 0000000..39303f9 --- /dev/null +++ b/tests/test_count_subdivider.py @@ -0,0 +1,46 @@ +import numpy as np +import pytest + +from slam.subdivider import CountSubdivider +from slam.typing import ArrayNx3 + + +@pytest.mark.parametrize( + "points, count, expected_decision", + [ + ( + np.array( + [ + [0, 0, 0], + [0, 1, 0], + [1, 1, 1], + ] + ), + 1, + True, + ), + ( + np.array( + [ + [0, 0, 0], + [0, 1, 0], + [1, 1, 1], + ] + ), + 5, + False, + ), + ( + np.empty((0, 3)), + 1, + False, + ), + ], +) +def test_count_subdivider( + points: ArrayNx3[float], + count: float, + expected_decision: bool, +): + count_subdivider = CountSubdivider(count=count) + assert expected_decision == count_subdivider(points) diff --git a/tests/test_eigen_value_subdivider.py b/tests/test_eigen_value_subdivider.py new file mode 100644 index 0000000..b089d85 --- /dev/null +++ b/tests/test_eigen_value_subdivider.py @@ -0,0 +1,46 @@ +import numpy as np +import pytest + +import random + +from slam.subdivider import EigenValueSubdivider +from 
+
+
+@pytest.mark.parametrize(
+    "points, eigen_value, expected_decision",
+    [
+        (
+            [
+                np.array(
+                    [random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)]
+                )
+                for _ in range(100)
+            ],
+            1,
+            False,
+        ),
+        (
+            [
+                np.array(
+                    [random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1)]
+                )
+                for _ in range(100)
+            ],
+            1000,
+            False,
+        ),
+        (
+            np.empty((0, 3)),
+            1,
+            False,
+        ),
+    ],
+)
+def test_eigen_value_subdivider(
+    points: ArrayNx3[float],
+    eigen_value: float,
+    expected_decision: bool,
+):
+    eigen_value_subdivider = EigenValueSubdivider(value=eigen_value)
+    assert expected_decision == eigen_value_subdivider(points)
diff --git a/tests/test_empty_voxel_filter.py b/tests/test_empty_voxel_filter.py
new file mode 100644
index 0000000..ea1a3f7
--- /dev/null
+++ b/tests/test_empty_voxel_filter.py
@@ -0,0 +1,17 @@
+import numpy as np
+import pytest
+
+from slam.filter import EmptyVoxel
+from slam.typing import ArrayNx3
+
+
+@pytest.mark.parametrize(
+    "points, expected_decision",
+    [
+        (np.array([0, 0, 0]), True),
+        (np.empty(0), False),
+    ],
+)
+def test_empty_voxel(points: ArrayNx3[float], expected_decision: bool):
+    empty_voxel_filter = EmptyVoxel()
+    assert empty_voxel_filter(points) == expected_decision
diff --git a/tests/test_identical_segmenter.py b/tests/test_identical_segmenter.py
new file mode 100644
index 0000000..8621f58
--- /dev/null
+++ b/tests/test_identical_segmenter.py
@@ -0,0 +1,22 @@
+import numpy as np
+import pytest
+
+from slam.segmenter import IdenticalSegmenter
+from slam.typing import ArrayNx3
+
+
+@pytest.mark.parametrize(
+    "points, expected_points",
+    [
+        (np.array([[0, 0, 0]]), np.array([[0, 0, 0]])),
+        (np.empty(0), np.empty(0)),
+    ],
+)
+def test_identical_segmenter(
+    points: ArrayNx3[float],
+    expected_points: ArrayNx3[float],
+):
+    identical_segmenter = IdenticalSegmenter()
+    actual_points = identical_segmenter(points)
+    assert len(actual_points) == len(expected_points)
+    assert np.all(actual_points == expected_points)
diff --git a/tests/test_pose_readwriter.py b/tests/test_pose_readwriter.py
new file mode 100644
index 0000000..2f2b439
--- /dev/null
+++ b/tests/test_pose_readwriter.py
@@ -0,0 +1,97 @@
+import numpy as np
+import pytest
+
+import os
+
+from slam.typing import ArrayNx4x4
+from slam.utils import OptimisedPoseReadWriter
+
+
+@pytest.mark.parametrize(
+    "pose_path, expected_pose",
+    [
+        (
+            "tests/data/poses/correct_pose.txt",
+            np.array(
+                [
+                    [1, 2, 3, 4],
+                    [1, 2, 3, 4],
+                    [1, 2, 3, 4],
+                    [0, 0, 0, 1],
+                ]
+            ),
+        ),
+    ],
+)
+def test_correct_pose_readwriter_read(
+    pose_path: str,
+    expected_pose: ArrayNx4x4[float],
+):
+    pose_readwriter = OptimisedPoseReadWriter()
+    actual_pose = pose_readwriter.read(pose_path)
+    assert np.all(actual_pose == expected_pose)
+
+
+@pytest.mark.parametrize(
+    "pose_path",
+    [
+        ("tests/data/poses/incorrect_pose.txt"),
+    ],
+)
+def test_incorrect_pose_readwriter_read(
+    pose_path: str,
+):
+    pose_readwriter = OptimisedPoseReadWriter()
+    with pytest.raises(ValueError):
+        pose_readwriter.read(pose_path)
+
+
+@pytest.mark.parametrize(
+    "pose_path, pose_to_write",
+    [
+        (
+            "tests/data/poses/temp1.txt",
+            np.array(
+                [
+                    [1, 2, 3, 4],
+                    [1, 2, 3, 4],
+                    [1, 2, 3, 4],
+                    [0, 0, 0, 1],
+                ]
+            ),
+        )
+    ],
+)
+def test_correct_pose_readwriter_write(
+    pose_path: str,
+    pose_to_write: ArrayNx4x4[float],
+):
+    pose_readwriter = OptimisedPoseReadWriter()
+    pose_readwriter.write(pose_path, pose_to_write)
+
+    actual_pose = pose_readwriter.read(pose_path)
+    os.remove(pose_path)
+
+    assert np.all(actual_pose == pose_to_write)
+
+
+@pytest.mark.parametrize(
+    "pose_path, pose_to_write",
+    [
+        (
+            "tests/data/poses/temp2.txt",
+            np.array(
+                [
+                    [1, 2, 3, 4],
+                ]
+            ),
+        )
+    ],
+)
+def test_incorrect_pose_readwriter_write(
+    pose_path: str,
+    pose_to_write: ArrayNx4x4[float],
+):
+    pose_readwriter = OptimisedPoseReadWriter()
+    with pytest.raises(AssertionError):
+        pose_readwriter.write(pose_path, pose_to_write)
diff --git a/tests/test_ransac_segmenter.py b/tests/test_ransac_segmenter.py
new file mode 100644
index 0000000..b80faa5
--- /dev/null
+++ b/tests/test_ransac_segmenter.py
@@ -0,0 +1,31 @@
+import numpy as np
+import pytest
+
+from slam.segmenter import RansacSegmenter
+from slam.typing import ArrayNx3
+
+
+@pytest.mark.parametrize(
+    "points, threshold, expected_points",
+    [
+        (
+            np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0]]),
+            100,
+            np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0]]),
+        ),
+        (
+            np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [100, 100, 100]]),
+            0.001,
+            np.array([[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1]]),
+        ),
+    ],
+)
+def test_ransac_segmenter(
+    points: ArrayNx3[float],
+    threshold: float,
+    expected_points: ArrayNx3[float],
+):
+    ransac_segmenter = RansacSegmenter(threshold=threshold, initial_points=3)
+    actual_points = ransac_segmenter(points)
+    assert len(actual_points) == len(expected_points)
+    assert np.all(actual_points == expected_points)
diff --git a/tests/test_sequential_pipeline.py b/tests/test_sequential_pipeline.py
new file mode 100644
index 0000000..228038d
--- /dev/null
+++ b/tests/test_sequential_pipeline.py
@@ -0,0 +1,64 @@
+import mrob
+import numpy as np
+import open3d as o3d
+import pytest
+from octreelib.grid import GridConfig, VisualizationConfig
+
+import os
+from typing import List
+
+from slam.backend import Backend, EigenFactorBackend
+from slam.filter import Filter
+from slam.pipeline import SequentialPipeline, SequentialPipelineRuntimeParameters
+from slam.segmenter import CountSegmenter, Segmenter
+from slam.subdivider import CountSubdivider, Subdivider
+from slam.typing import ArrayNx3, ArrayNx4x4
+
+
+@pytest.mark.parametrize(
+    "points," "poses," "subdividers," "filters," "segmenters," "backend,",
+    [
+        (
+            np.array(
+                [
+                    [0, 0, 0],
+                    [1, 1, 1],
+                ]
+            ),
+            [np.eye(4)],
+            [CountSubdivider(10)],
+            [],
+            [CountSegmenter(0)],
+            EigenFactorBackend(
+                poses_number=1, iterations_number=1, robust_type=mrob.HUBER
+            ),
+        ),
+    ],
+)
+def test_sequential_pipeline(
+    points: ArrayNx3[float],
+    poses: ArrayNx4x4[float],
+    subdividers: List[Subdivider],
+    filters: List[Filter],
+    segmenters: List[Segmenter],
+    backend: Backend,
+):
+    sequential_pipeline = SequentialPipeline(
+        point_clouds=[o3d.geometry.PointCloud(o3d.utility.Vector3dVector(points))],
+        poses=poses,
+        subdividers=subdividers,
+        segmenters=segmenters,
+        filters=filters,
+        backend=backend,
+    )
+    output = sequential_pipeline.run(
+        SequentialPipelineRuntimeParameters(
+            grid_configuration=GridConfig(voxel_edge_length=2),
+            visualization_config=VisualizationConfig(
+                filepath=os.devnull,
+            ),
+            initial_point_cloud_number=0,
+        )
+    )
+
+    assert output is not None
diff --git a/tests/test_size_subdivider.py b/tests/test_size_subdivider.py
new file mode 100644
index 0000000..3bb4494
--- /dev/null
+++ b/tests/test_size_subdivider.py
@@ -0,0 +1,45 @@
+import numpy as np
+import pytest
+
+from slam.subdivider import SizeSubdivider
+from slam.typing import ArrayNx3
+
+
+@pytest.mark.parametrize(
+    "points, size, expected_decision",
+    [
+        (
+            np.array(
+                [
+                    [0, 0, 0],
+                    [0, 1, 0],
+                    [1, 1, 1],
+                ]
+            ),
+            0.5,
+            True,
+        ),
+        (
+            np.array(
+                [
+                    [0, 0, 0],
+                    [0.1, 0.1, 0.1],
+                ]
+            ),
+            0.5,
+            False,
+        ),
+        (
+            np.empty((0, 3)),
+            0.5,
+            False,
+        ),
+    ],
+)
+def test_size_subdivider(
+    points: ArrayNx3[float],
+    size: float,
+    expected_decision: bool,
+):
+    size_subdivider = SizeSubdivider(size=size)
+    assert expected_decision == size_subdivider(points)
diff --git a/tests/test_yaml_configuration_reader.py b/tests/test_yaml_configuration_reader.py
new file mode 100644
index 0000000..624ec1b
--- /dev/null
+++ b/tests/test_yaml_configuration_reader.py
@@ -0,0 +1,135 @@
+import mrob
+import pytest
+from octreelib.grid import GridConfig
+
+from typing import List
+
+from slam.backend import Backend, EigenFactorBackend
+from slam.filter import Filter
+from slam.pipeline import YAMLConfigurationReader
+from slam.segmenter import RansacSegmenter, Segmenter
+from slam.subdivider import SizeSubdivider, Subdivider
+
+
+@pytest.mark.parametrize(
+    "yaml_configuration_path, "
+    "debug, "
+    "dataset_path, "
+    "patches_start, "
+    "patches_end, "
+    "patches_step, "
+    "patches_iterations, "
+    "visualization_dir, "
+    "optimisation_dir, "
+    "subdividers, "
+    "filters, "
+    "segmenters, "
+    "grid_configuration, "
+    "backend",
+    [
+        (
+            "tests/data/yaml_configurations/correct_configuration.yaml",
+            False,
+            "path/to/dataset",
+            0,
+            100,
+            10,
+            1,
+            "output",
+            "output",
+            [SizeSubdivider(size=2)],
+            [],
+            [RansacSegmenter(0.01, 6, 5000)],
+            GridConfig(voxel_edge_length=8),
+            EigenFactorBackend(
+                poses_number=10, iterations_number=5000, robust_type=mrob.HUBER
+            ),
+        ),
+    ],
+)
+def test_correct_configuration(
+    yaml_configuration_path: str,
+    debug: bool,
+    dataset_path: str,
+    patches_start: int,
+    patches_end: int,
+    patches_step: int,
+    patches_iterations: int,
+    visualization_dir: str,
+    optimisation_dir: str,
+    subdividers: List[Subdivider],
+    filters: List[Filter],
+    segmenters: List[Segmenter],
+    grid_configuration: GridConfig,
+    backend: Backend,
+):
+    yaml_reader = YAMLConfigurationReader(yaml_configuration_path)
+
+    assert debug == yaml_reader.debug
+    assert dataset_path == yaml_reader.dataset_path
+    assert patches_start == yaml_reader.patches_start
+    assert patches_end == yaml_reader.patches_end
+    assert patches_step == yaml_reader.patches_step
+    assert patches_iterations == yaml_reader.patches_iterations
+    assert len(subdividers) == len(yaml_reader.subdividers)
+    assert len(filters) == len(yaml_reader.filters)
+    assert len(segmenters) == len(yaml_reader.segmenters)
+    assert (
+        grid_configuration.voxel_edge_length
+        == yaml_reader.grid_configuration.voxel_edge_length
+    )
+    actual_backend = yaml_reader.backend(0, 10)
+    for field in ["_poses_number", "_iterations_number"]:
+        assert backend.__dict__[field] == actual_backend.__dict__[field]
+
+
+@pytest.mark.parametrize(
+    "yaml_configuration_path, missed_field",
+    [
+        (
+            "tests/data/yaml_configurations/incorrect_configuration_backend.yaml",
+            "backend_configuration",
+        ),
+        (
+            "tests/data/yaml_configurations/incorrect_configuration_dataset.yaml",
+            "dataset",
+        ),
+        (
+            "tests/data/yaml_configurations/incorrect_configuration_patches.yaml",
+            "patches",
+        ),
+        (
+            "tests/data/yaml_configurations/incorrect_configuration_segmenters.yaml",
+            "segmenters",
+        ),
+        (
+            "tests/data/yaml_configurations/incorrect_configuration_subdividers.yaml",
+            "subdividers",
+        ),
+    ],
+)
+def test_incorrect_yaml_configuration_reader(
+    yaml_configuration_path: str,
+    missed_field: str,
+):
+    yaml_reader = YAMLConfigurationReader(yaml_configuration_path)
+
+    with pytest.raises(ValueError) as excinfo:
+        _ = yaml_reader.debug
+        _ = yaml_reader.dataset_path
+        _ = yaml_reader.patches_start
+        _ = yaml_reader.patches_end
+        _ = yaml_reader.patches_step
+        _ = yaml_reader.patches_iterations
+        _ = yaml_reader.visualization_dir
+        _ = yaml_reader.optimisation_dir
+        _ = yaml_reader.subdividers
+        _ = yaml_reader.filters
+        _ = yaml_reader.segmenters
+        _ = yaml_reader.grid_configuration
+        _ = yaml_reader.backend(0, 1)
+
+    assert (
+        str(excinfo.value) == f"'{missed_field}' must be set"
+        or str(excinfo.value) == f"{missed_field} must be not empty"
+    )

From 3234a27864a1798480af517185a5c27790f338bc Mon Sep 17 00:00:00 2001
From: Pavel Mokeev <pav3l.mokeev@gmail.com>
Date: Tue, 5 Dec 2023 14:55:17 +0300
Subject: [PATCH 2/4] feat(test): add .yaml for tests running

---
 .github/workflows/tests.yaml | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)
 create mode 100644 .github/workflows/tests.yaml

diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
new file mode 100644
index 0000000..1ebb7d8
--- /dev/null
+++ b/.github/workflows/tests.yaml
@@ -0,0 +1,30 @@
+name: Tests
+
+on: [push]
+
+jobs:
+  test-and-lint:
+    strategy:
+      matrix:
+        os: [ ubuntu-22.04 ]
+        python-version: [ "3.10" ]
+
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Install Python${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install -r requirements.txt
+
+      - name: Run pytest
+        run: |
+          python -m pytest tests/

From f2fa427c945abc9b2639e00645a8cd8de64e1be2 Mon Sep 17 00:00:00 2001
From: Pavel Mokeev <pav3l.mokeev@gmail.com>
Date: Tue, 5 Dec 2023 14:57:48 +0300
Subject: [PATCH 3/4] fix(ci): fix k3d requirements

---
 requirements.txt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index dfe03bd..806a377 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,4 +4,5 @@ open3d==0.17.0
 octreelib @ git+https://github.com/true-real-michael/octreelib@v0.0.5
 scikit-learn==1.3.1
 PyYAML~=6.0.1
-pytest~=7.4.3
\ No newline at end of file
+pytest~=7.4.3
+k3d==2.16.0
\ No newline at end of file

From 558ff7ce09f1556aa03a8fe71f1eba8a3fbabb02 Mon Sep 17 00:00:00 2001
From: Pavel Mokeev <75482510+pmokeev@users.noreply.github.com>
Date: Tue, 5 Dec 2023 15:13:49 +0300
Subject: [PATCH 4/4] fix(backend): fix unused_features property (#29)

* fix(backend): fix unused_features property

* fix(ci): fix flake8 issue
---
 slam/backend/mrob_backend.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/slam/backend/mrob_backend.py b/slam/backend/mrob_backend.py
index 3336410..d0356c1 100644
--- a/slam/backend/mrob_backend.py
+++ b/slam/backend/mrob_backend.py
@@ -104,7 +104,11 @@ def __get_unused_features(self) -> List[int]:
         unused_features: List[int]
             IDs list of unused features
         """
-        robust_mask = self._graph.get_eigen_factors_robust_mask()
+        try:
+            robust_mask = self._graph.get_eigen_factors_robust_mask()
+        except AttributeError:
+            return []
+
         unused_features = []
         for voxel_id, plane_id in self._planes.items():