Skip to content

Commit 0fd512d

Browse files
committed
feat(yaml_reader): slightly change yaml configuration
1 parent c7a4e99 commit 0fd512d

17 files changed

+242
-197
lines changed

examples/configurations/hilti.yaml

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,25 @@
11
dataset:
2+
type: "hilti"
23
path: "evaluation/hilti"
34
patches:
45
start: 0
56
end: 29
67
step: 10
78
iterations: 1
8-
grid:
9-
voxel_edge_length: 8
10-
subdividers:
11-
size: 2
12-
segmenters:
13-
ransac:
14-
threshold: 0.01
15-
initial_points: 6
16-
iterations: 5000
17-
backend:
18-
type: "eigen_factor"
19-
parameters:
20-
iterations_number: 5000
21-
robust_type: HUBER
22-
output:
23-
visualization_path: "output/hilti/visualization"
24-
optimisation_path: "output/hilti/optimisation"
25-
debug: true
9+
pipeline:
10+
grid:
11+
voxel_edge_length: 8
12+
subdividers:
13+
size: 1
14+
segmenters:
15+
ransac:
16+
threshold: 0.01
17+
initial_points: 6
18+
iterations: 1000
19+
backend:
20+
type: "eigen_factor"
21+
parameters:
22+
iterations_number: 5000
23+
robust_type: QUADRATIC
24+
output: "output/hilti"
25+
debug: false

examples/configurations/kitti.yaml

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,27 @@
11
dataset:
2+
type: "kitti"
23
path: "evaluation/kitti"
34
patches:
45
start: 0
56
end: 100
67
step: 10
78
iterations: 1
8-
grid:
9-
voxel_edge_length: 16
10-
subdividers:
11-
size: 2
12-
segmenters:
13-
ransac:
14-
threshold: 0.01
15-
initial_points: 6
16-
iterations: 5000
17-
cape:
18-
correlation: 300
19-
backend:
20-
type: "eigen_factor"
21-
parameters:
22-
iterations_number: 5000
23-
robust_type: HUBER
24-
output:
25-
visualization_path: "output/kitti/visualization"
26-
optimisation_path: "output/kitti/optimisation"
9+
pipeline:
10+
grid:
11+
voxel_edge_length: 16
12+
subdividers:
13+
size: 2
14+
segmenters:
15+
ransac:
16+
threshold: 0.01
17+
initial_points: 6
18+
iterations: 5000
19+
cape:
20+
correlation: 300
21+
backend:
22+
type: "eigen_factor"
23+
parameters:
24+
iterations_number: 5000
25+
robust_type: HUBER
26+
output: "output/kitti"
2727
debug: true

examples/pipeline.py

Lines changed: 27 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@
1919
python3 examples/pipeline.py --configuration_path examples/configurations/hilti.yaml
2020
```
2121
"""
22+
from typing import Tuple
23+
2224
import open3d as o3d
2325
from octreelib.grid import VisualizationConfig
2426

@@ -42,6 +44,23 @@
4244
OptimisedPoseReadWriter,
4345
)
4446

47+
48+
def prepare_output_directories(configuration: YAMLConfigurationReader) -> Tuple[str, str]:
49+
if not os.path.exists(configuration.output_directory):
50+
os.makedirs(configuration.output_directory)
51+
52+
poses_dir = os.path.join(configuration.output_directory, "poses")
53+
if not os.path.exists(poses_dir):
54+
os.makedirs(poses_dir)
55+
56+
visualization_dir = os.path.join(configuration.output_directory, "visualization")
57+
if configuration.debug:
58+
if not os.path.join(visualization_dir):
59+
os.makedirs(visualization_dir)
60+
61+
return poses_dir, visualization_dir
62+
63+
4564
if __name__ == "__main__":
4665
parser = argparse.ArgumentParser(prog="Pipeline")
4766
parser.add_argument("--configuration_path", type=str, required=True)
@@ -50,16 +69,18 @@
5069
configuration_reader = YAMLConfigurationReader(args.configuration_path)
5170

5271
dataset_reader = DatasetReader
53-
if "hilti" in configuration_reader.dataset_path.lower():
72+
if "hilti" in configuration_reader.dataset_type.lower():
5473
dataset_reader = HiltiReader()
55-
elif "kitti" in configuration_reader.dataset_path.lower():
74+
elif "kitti" in configuration_reader.dataset_type.lower():
5675
dataset_reader = KittiReader()
57-
elif "nuscenes" in configuration_reader.dataset_path.lower():
76+
elif "nuscenes" in configuration_reader.dataset_type.lower():
5877
dataset_reader = NuscenesReader()
5978
else:
6079
raise ValueError("Unrecognisable type of dataset")
6180
posesWriter = OptimisedPoseReadWriter()
6281

82+
poses_dir, visualization_dir = prepare_output_directories(configuration_reader)
83+
6384
for ind in range(
6485
configuration_reader.patches_start,
6586
configuration_reader.patches_end,
@@ -97,13 +118,14 @@
97118
segmenters=configuration_reader.segmenters,
98119
filters=configuration_reader.filters,
99120
backend=configuration_reader.backend(start, end),
121+
debug=configuration_reader.debug,
100122
)
101123

102124
output = pipeline.run(
103125
SequentialPipelineRuntimeParameters(
104126
grid_configuration=configuration_reader.grid_configuration,
105127
visualization_config=VisualizationConfig(
106-
filepath=f"{configuration_reader.visualization_dir}/{start}-{end - 1}_{iteration_ind}.html"
128+
filepath=f"{visualization_dir}/{start}-{end - 1}_{iteration_ind}.html"
107129
),
108130
initial_point_cloud_number=(end - start) // 2,
109131
)
@@ -141,7 +163,7 @@
141163
for optimised_pose_number in range(start, end):
142164
posesWriter.write(
143165
os.path.join(
144-
configuration_reader.optimisation_dir,
166+
poses_dir,
145167
f"{optimised_pose_number}.txt",
146168
),
147169
poses[optimised_pose_number - start],

slam/backend/mrob_backend.py

Lines changed: 6 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,7 @@ class MROBBackend(Backend):
2525
"""
2626

2727
def __init__(
28-
self,
29-
poses_number: int,
30-
iterations_number: int,
31-
robust_type: int = mrob.HUBER,
28+
self, poses_number: int, iterations_number: int, robust_type: int = mrob.HUBER
3229
) -> None:
3330
self._graph: mrob.FGraph = mrob.FGraph(robust_type)
3431
self._poses_number: int = poses_number
@@ -72,27 +69,19 @@ def process(self, grid: GridBase) -> BackendOutput:
7269
"""
7370
self._init_poses()
7471
self._init_point_clouds(grid)
75-
metrics = [
76-
Metric(name="FGraph initial error", value=self._graph.chi2(True)),
77-
]
72+
metrics = [Metric(name="FGraph initial error", value=self._graph.chi2(True))]
7873
converge_iterations = self._graph.solve(mrob.LM_ELLIPS, self._iterations_number)
7974
while converge_iterations == 0:
8075
print("Optimization didn't converge")
8176
converge_iterations = self._graph.solve(
8277
mrob.LM_ELLIPS, self._iterations_number
8378
)
8479

85-
metrics.append(
86-
Metric(name="Iterations to converge", value=converge_iterations),
87-
)
88-
metrics.append(
89-
Metric(name="chi2", value=self._graph.chi2()),
90-
)
80+
metrics.append(Metric(name="Iterations to converge", value=converge_iterations))
81+
metrics.append(Metric(name="chi2", value=self._graph.chi2()))
9182

9283
return BackendOutput(
93-
self._graph.get_estimated_state(),
94-
metrics,
95-
self.__get_unused_features(),
84+
self._graph.get_estimated_state(), metrics, self.__get_unused_features()
9685
)
9786

9887
def __get_unused_features(self) -> List[int]:
@@ -107,6 +96,7 @@ def __get_unused_features(self) -> List[int]:
10796
try:
10897
robust_mask = self._graph.get_eigen_factors_robust_mask()
10998
except AttributeError:
99+
print("[WARNING] Most likely you are not using robust optimisations")
110100
return []
111101

112102
unused_features = []

slam/pipeline/configuration/reader.py

Lines changed: 35 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,24 @@ def debug(self) -> bool:
6262

6363
return bool(value)
6464

65+
@property
66+
def dataset_type(self) -> str:
67+
"""
68+
Represents dataset type for pose and clouds reading
69+
70+
Returns
71+
-------
72+
dataset_type: str
73+
Dataset type
74+
"""
75+
try:
76+
dataset_configuration = copy.deepcopy(self._configuration["dataset"])
77+
value = dataset_configuration["type"]
78+
except KeyError as e:
79+
raise ValueError(f"{e} must be set")
80+
81+
return value
82+
6583
@property
6684
def dataset_path(self) -> str:
6785
"""
@@ -157,38 +175,20 @@ def patches_iterations(self) -> int:
157175
return int(value)
158176

159177
@property
160-
def visualization_dir(self) -> str:
178+
def output_directory(self) -> str:
161179
"""
162-
Represents visualization directory parameter of pipeline
180+
Represents output directory which contains all products of voxel-based pipeline
163181
164182
Returns
165183
-------
166-
visualization_dir: str
167-
Path to visualisation directory to save
184+
output_directory: str
185+
Path to output directory
168186
"""
169187
try:
170-
output_configuration = copy.deepcopy(self._configuration["output"])
171-
value = output_configuration["visualization_path"]
188+
pipeline_configuration = copy.deepcopy(self._configuration["pipeline"])
189+
value = pipeline_configuration["output"]
172190
except KeyError:
173-
return "output/visualization"
174-
175-
return value
176-
177-
@property
178-
def optimisation_dir(self) -> str:
179-
"""
180-
Represents optimisation directory parameter of pipeline
181-
182-
Returns
183-
-------
184-
optimisation_dir: str
185-
Path to optimisation poses directory to save
186-
"""
187-
try:
188-
output_configuration = copy.deepcopy(self._configuration["output"])
189-
value = output_configuration["optimisation_path"]
190-
except KeyError:
191-
return "output/optimisation"
191+
return "output"
192192

193193
return value
194194

@@ -203,9 +203,8 @@ def subdividers(self) -> List[Subdivider]:
203203
Subdividers list
204204
"""
205205
try:
206-
subdividers_configuration = copy.deepcopy(
207-
self._configuration["subdividers"]
208-
)
206+
pipeline_configuration = copy.deepcopy(self._configuration["pipeline"])
207+
subdividers_configuration = pipeline_configuration["subdividers"]
209208
except KeyError:
210209
raise ValueError("subdividers must be not empty")
211210

@@ -218,9 +217,7 @@ def subdividers(self) -> List[Subdivider]:
218217

219218
for name in subdividers_configuration.keys():
220219
name = name.lower()
221-
subdividers.append(
222-
subdividers_names[name](subdividers_configuration[name]),
223-
)
220+
subdividers.append(subdividers_names[name](subdividers_configuration[name]))
224221

225222
return subdividers
226223

@@ -247,7 +244,8 @@ def segmenters(self) -> List[Segmenter]:
247244
Segmenters list
248245
"""
249246
try:
250-
segmenters_configuration = copy.deepcopy(self._configuration["segmenters"])
247+
pipeline_configuration = copy.deepcopy(self._configuration["pipeline"])
248+
segmenters_configuration = pipeline_configuration["segmenters"]
251249
except KeyError:
252250
raise ValueError("segmenters must be not empty")
253251

@@ -277,7 +275,8 @@ def grid_configuration(self) -> GridConfig:
277275
Grid configuration
278276
"""
279277
try:
280-
grid_configuration = copy.deepcopy(self._configuration["grid"])
278+
pipeline_configuration = copy.deepcopy(self._configuration["pipeline"])
279+
grid_configuration = pipeline_configuration["grid"]
281280
except KeyError:
282281
raise ValueError("grid_configuration must be not empty")
283282

@@ -300,14 +299,12 @@ def backend(self, start: int, end: int) -> Backend:
300299
Backend of pipeline
301300
"""
302301
try:
303-
backend_configuration = copy.deepcopy(self._configuration["backend"])
302+
pipeline_configuration = copy.deepcopy(self._configuration["pipeline"])
303+
backend_configuration = pipeline_configuration["backend"]
304304
except KeyError:
305305
raise ValueError("backend_configuration must be not empty")
306306

307-
backend_names = {
308-
"eigen_factor": EigenFactorBackend,
309-
"bareg": BaregBackend,
310-
}
307+
backend_names = {"eigen_factor": EigenFactorBackend, "bareg": BaregBackend}
311308
robust_types = {
312309
"huber": mrob.HUBER,
313310
"quadratic": mrob.QUADRATIC,

slam/pipeline/pipeline.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,8 @@ class Pipeline(ABC):
5151
Filter conditions to filter voxels in grid
5252
backend: Backend
5353
Backend of SLAM algorithm that optimises given poses
54+
debug: bool
55+
Represents debug parameter. If it is specified, the pipeline will save the visualization files.
5456
"""
5557

5658
def __init__(
@@ -61,6 +63,7 @@ def __init__(
6163
segmenters: List[Segmenter],
6264
filters: List[Filter],
6365
backend: Backend,
66+
debug: bool,
6467
) -> None:
6568
if len(point_clouds) != len(poses):
6669
raise ValueError("Sizes of point_cloud and poses arrays must be equal")
@@ -71,6 +74,7 @@ def __init__(
7174
self._segmenters: List[Segmenter] = segmenters
7275
self._filters: List[Filter] = filters
7376
self._backend: Backend = backend
77+
self._debug: bool = debug
7478

7579
@abstractmethod
7680
def run(self, parameters: PipelineRuntimeParameters) -> BackendOutput:

slam/pipeline/sequential_pipeline.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,10 @@ def run(self, parameters: SequentialPipelineRuntimeParameters) -> BackendOutput:
6666

6767
backend_output = self._backend.process(grid)
6868

69-
parameters.visualization_config.unused_voxels = backend_output.unused_features
70-
grid.visualize(parameters.visualization_config)
69+
if self._debug:
70+
parameters.visualization_config.unused_voxels = (
71+
backend_output.unused_features
72+
)
73+
grid.visualize(parameters.visualization_config)
7174

7275
return backend_output

0 commit comments

Comments (0)