Commit

reimplement voxel filter as static function (fixed crashing)

S1ink committed Feb 21, 2024
1 parent aeda6a9 commit 7720e34
Showing 3 changed files with 179 additions and 24 deletions.
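The change replaces the per-loop pcl::VoxelGrid<> object with a free templated voxel_filter() function in src/shared/filtering.h, called with an explicit selection and output cloud. Below is a minimal usage sketch; the function name, parameter order, and empty-selection convention come from this commit, while the wrapper function, include path, and leaf size are illustrative assumptions only.

#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include "filtering.h" // assumed include path for the new header

// Hypothetical caller, for illustration only
void downsample_example(const pcl::PointCloud<pcl::PointXYZ>& input)
{
    pcl::PointCloud<pcl::PointXYZ> output;
    const pcl::Indices no_selection{};  // empty selection => filter the entire cloud
    const float leaf = 0.05f;           // illustrative leaf size (same units as the cloud)
    voxel_filter(input, no_selection, output, leaf, leaf, leaf);
}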
2 changes: 1 addition & 1 deletion src/shared/CMakeLists.txt
@@ -8,7 +8,7 @@ endif()
set(SICK_SCAN_DIR "${PROJECT_SOURCE_DIR}/submodule-deps/sick_scan_xd")
set(WPILIB_DIR "${PROJECT_SOURCE_DIR}/submodule-deps/allwpilib")

if(${USE_WPILIB})
if(${USE_WPILIB}) # submodule manually checked out to the same commit ref as is included with wpilib
set(EIGEN_INCLUDE_DIR "${PROJECT_SOURCE_DIR}/submodule-deps/eigen" CACHE STRING "" FORCE)
endif()
find_package(PCL REQUIRED COMPONENTS common io filters features kdtree octree segmentation sample_consensus)
154 changes: 154 additions & 0 deletions src/shared/filtering.h
@@ -1,14 +1,168 @@
#pragma once

#include <vector>
#include <limits>

#include <pcl/types.h>
#include <pcl/common/io.h>
#include <pcl/point_types.h>
#include <pcl/point_cloud.h>
#include <pcl/filters/impl/voxel_grid.hpp> // includes <pcl/common/centroid.h> and <boost/sort/spreadsort/integer_sort.hpp> which we use
#include <pcl/filters/morphological_filter.h>


/** Copied from VoxelGrid<>::applyFilter() and removed unnecessary additions */
template<
typename PointT = pcl::PointXYZ,
typename IntT = pcl::index_t,
typename FloatT = float>
void voxel_filter(
const pcl::PointCloud<PointT>& cloud,
const std::vector<IntT>& selection,
pcl::PointCloud<PointT>& voxelized,
FloatT leaf_x, FloatT leaf_y, FloatT leaf_z,
unsigned int min_points_per_voxel_ = 0,
bool downsample_all_data_ = false
) {
const bool use_selection = !selection.empty();

const Eigen::Vector4f
leaf_size_{ leaf_x, leaf_y, leaf_z, 1.f };
const Eigen::Array4f
inverse_leaf_size_{ Eigen::Array4f::Ones() / leaf_size_.array() };
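// note: multiplying a point's coordinates by inverse_leaf_size_ and flooring yields its integer voxel coordinates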

// Copy the header (and thus the frame_id) + allocate enough space for points
voxelized.height = 1; // downsampling breaks the organized structure
voxelized.is_dense = true; // we filter out invalid points

Eigen::Vector4f min_p, max_p;
// Get the minimum and maximum dimensions
if(use_selection) {
pcl::getMinMax3D<PointT>(cloud, selection, min_p, max_p);
} else {
pcl::getMinMax3D<PointT>(cloud, min_p, max_p);
}

// Check that the leaf size is not too small, given the size of the data
std::int64_t dx = static_cast<std::int64_t>((max_p[0] - min_p[0]) * inverse_leaf_size_[0]) + 1;
std::int64_t dy = static_cast<std::int64_t>((max_p[1] - min_p[1]) * inverse_leaf_size_[1]) + 1;
std::int64_t dz = static_cast<std::int64_t>((max_p[2] - min_p[2]) * inverse_leaf_size_[2]) + 1;

if( (dx * dy * dz) > static_cast<std::int64_t>(std::numeric_limits<std::int32_t>::max()) ) {
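// the grid would require more cells than a 32-bit index can address, so return an empty output rather than overflow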
voxelized.clear();
return;
}

Eigen::Vector4i min_b_, max_b_, div_b_, divb_mul_;

// Compute the minimum and maximum bounding box values
min_b_[0] = static_cast<int> ( std::floor(min_p[0] * inverse_leaf_size_[0]) );
max_b_[0] = static_cast<int> ( std::floor(max_p[0] * inverse_leaf_size_[0]) );
min_b_[1] = static_cast<int> ( std::floor(min_p[1] * inverse_leaf_size_[1]) );
max_b_[1] = static_cast<int> ( std::floor(max_p[1] * inverse_leaf_size_[1]) );
min_b_[2] = static_cast<int> ( std::floor(min_p[2] * inverse_leaf_size_[2]) );
max_b_[2] = static_cast<int> ( std::floor(max_p[2] * inverse_leaf_size_[2]) );

// Compute the number of divisions needed along all axes
div_b_ = max_b_ - min_b_ + Eigen::Vector4i::Ones ();
div_b_[3] = 0;

// Set up the division multiplier
divb_mul_ = Eigen::Vector4i{ 1, div_b_[0], div_b_[0] * div_b_[1], 0 };
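// divb_mul_ holds the strides used to flatten an (i, j, k) voxel coordinate into a single linear index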

// Storage for mapping leaf and pointcloud indexes
std::vector<cloud_point_index_idx> index_vector;

// First pass: go over all points and insert them into the index_vector vector
// with calculated idx. Points with the same idx value will contribute to the
// same point of resulting CloudPoint
if(use_selection) {
index_vector.reserve(selection.size());
for(const auto& index : selection) {
if(!cloud.is_dense && !pcl::isXYZFinite(cloud[index])) continue;

int ijk0 = static_cast<int>( std::floor(cloud[index].x * inverse_leaf_size_[0]) - static_cast<float>(min_b_[0]) );
int ijk1 = static_cast<int>( std::floor(cloud[index].y * inverse_leaf_size_[1]) - static_cast<float>(min_b_[1]) );
int ijk2 = static_cast<int>( std::floor(cloud[index].z * inverse_leaf_size_[2]) - static_cast<float>(min_b_[2]) );

// Compute the centroid leaf index
int idx = ijk0 * divb_mul_[0] + ijk1 * divb_mul_[1] + ijk2 * divb_mul_[2];
index_vector.emplace_back( static_cast<unsigned int>(idx), index );
}
} else {
index_vector.reserve(cloud.size());
for(IntT index = 0; index < cloud.size(); index++) {
if(!cloud.is_dense && !pcl::isXYZFinite(cloud[index])) continue;

int ijk0 = static_cast<int>( std::floor(cloud[index].x * inverse_leaf_size_[0]) - static_cast<float>(min_b_[0]) );
int ijk1 = static_cast<int>( std::floor(cloud[index].y * inverse_leaf_size_[1]) - static_cast<float>(min_b_[1]) );
int ijk2 = static_cast<int>( std::floor(cloud[index].z * inverse_leaf_size_[2]) - static_cast<float>(min_b_[2]) );

// Compute the centroid leaf index
int idx = ijk0 * divb_mul_[0] + ijk1 * divb_mul_[1] + ijk2 * divb_mul_[2];
index_vector.emplace_back( static_cast<unsigned int>(idx), index );
}
}

// Second pass: sort the index_vector vector using value representing target cell as index
// in effect all points belonging to the same output cell will be next to each other
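// boost's integer_sort only needs a right-shift functor to extract key bits from the voxel index (radix-style sort)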
auto rightshift_func = [](const cloud_point_index_idx &x, const unsigned offset) { return x.idx >> offset; };
boost::sort::spreadsort::integer_sort(index_vector.begin(), index_vector.end(), rightshift_func);

// Third pass: count output cells
// we need to skip all the same, adjacent idx values
unsigned int total = 0;
unsigned int index = 0;
// first_and_last_indices_vector[i] represents the index in index_vector of the first point in
// index_vector belonging to the voxel which corresponds to the i-th output point,
// and the index of the first point not belonging to it.
std::vector<std::pair<unsigned int, unsigned int> > first_and_last_indices_vector;
// Worst case size
first_and_last_indices_vector.reserve (index_vector.size());
while(index < index_vector.size()) {
unsigned int i = index + 1;
for(; i < index_vector.size() && index_vector[i].idx == index_vector[index].idx; ++i);
if (i - index >= min_points_per_voxel_) {
++total;
first_and_last_indices_vector.emplace_back(index, i);
}
index = i;
}

// Fourth pass: compute centroids, insert them into their final position
voxelized.resize(total);

index = 0;
for (const auto &cp : first_and_last_indices_vector) {
// calculate centroid - sum values from all input points that have the same idx value in the index_vector array
unsigned int first_index = cp.first;
unsigned int last_index = cp.second;

// Limit downsampling to the xyz coordinates
if (!downsample_all_data_) {
Eigen::Vector4f centroid{ Eigen::Vector4f::Zero() };

for (unsigned int li = first_index; li < last_index; ++li) {
centroid += cloud[index_vector[li].cloud_point_index].getVector4fMap();
}
centroid /= static_cast<float> (last_index - first_index);
voxelized[index].getVector4fMap() = centroid;
}
else {
pcl::CentroidPoint<PointT> centroid;

// fill in the accumulator with leaf points
for (unsigned int li = first_index; li < last_index; ++li) {
centroid.add( cloud[index_vector[li].cloud_point_index] );
}
centroid.get(voxelized[index]);
}
++index;
}
voxelized.width = voxelized.size ();

}

/** PMF Filter Reimpl -- See <pcl/filters/progressive_morphological_filter.h> */
template<
typename PointT = pcl::PointXYZ,
47 changes: 24 additions & 23 deletions src/shared/lidar_api.cpp
@@ -25,7 +25,6 @@
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/io/pcd_io.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/crop_box.h>
#include <pcl/filters/morphological_filter.h>
#include <pcl/segmentation/progressive_morphological_filter.h>
@@ -328,10 +327,16 @@ namespace ldrp {
* ::countBits(this->_config.enabled_segments)
* this->_config.buffered_frames
};
const Eigen::Isometry3f default_pose = Eigen::Isometry3f::Identity();
pcl::PointCloud<pcl::PointXYZ>::Ptr point_cloud{ new pcl::PointCloud<pcl::PointXYZ> }; // reuse the buffer
static const Eigen::Isometry3f default_pose = Eigen::Isometry3f::Identity();
static const pcl::Indices default_no_selection = pcl::Indices{};
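// an empty selection tells voxel_filter() to operate on the whole cloud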

// buffers are reused between loops
pcl::PointCloud<pcl::PointXYZ>
point_cloud,
voxelized_points;
std::vector<float> point_ranges;
point_cloud->points.reserve( max_points );

point_cloud.points.reserve( max_points );
point_ranges.reserve( max_points );

LDRP_LOG( LOG_STANDARD, "LDRP Filter Instance {} [Init]: Resources initialized - running filter loop...", f_inst->index )
@@ -341,7 +346,7 @@ namespace ldrp {
f_inst->nt.is_active.Set(true);

// 1. transform points based on timestamp
point_cloud->clear(); // clear the vector and set w,h to 0
point_cloud.clear(); // clear the vector and set w,h to 0

for(size_t i = 0; i < f_inst->samples.size(); i++) { // we could theoretically multithread this part -- just use a mutex for inserting points into the master collection
for(size_t j = 0; j < f_inst->samples[i].size(); j++) {
@@ -375,8 +380,8 @@
#else
{
#endif
point_cloud->points.emplace_back();
reinterpret_cast<Eigen::Vector4f&>(point_cloud->points.back()) = transform * Eigen::Vector4f{lidar_point.x, lidar_point.y, lidar_point.z, 1.f};
point_cloud.points.emplace_back();
reinterpret_cast<Eigen::Vector4f&>(point_cloud.points.back()) = transform * Eigen::Vector4f{lidar_point.x, lidar_point.y, lidar_point.z, 1.f};
point_ranges.push_back(lidar_point.range);
}
}
@@ -389,20 +394,20 @@

} // loop segments

LDRP_LOG( LOG_DEBUG && LOG_VERBOSE, "LDRP Filter Instance {} [Filter Loop]: Collected {} points", f_inst->index, point_cloud->size() )
LDRP_LOG( LOG_DEBUG && LOG_VERBOSE, "LDRP Filter Instance {} [Filter Loop]: Collected {} points", f_inst->index, point_cloud.size() )

point_cloud->width = point_cloud->points.size();
point_cloud->height = 1;
point_cloud->is_dense = true;
point_cloud.width = point_cloud.points.size();
point_cloud.height = 1;
point_cloud.is_dense = true;

if(this->_config.pcd_logging_mode & PCD_LOGGING_TAR) {
this->pcd_writer.addCloud(*point_cloud);
this->pcd_writer.addCloud(point_cloud);
}
if(this->_config.pcd_logging_mode & PCD_LOGGING_NT) {
this->_nt.raw_scan_points.Set(
std::span<const uint8_t>{
reinterpret_cast<uint8_t*>( point_cloud->points.data() ),
reinterpret_cast<uint8_t*>( point_cloud->points.data() + point_cloud->points.size() )
reinterpret_cast<uint8_t*>( point_cloud.points.data() ),
reinterpret_cast<uint8_t*>( point_cloud.points.data() + point_cloud.points.size() )
}
);
}
@@ -417,8 +422,6 @@
0.f
};

pcl::PointCloud<pcl::PointXYZ>::Ptr
voxelized_points{ new pcl::PointCloud<pcl::PointXYZ> };
pcl::PointIndices::Ptr
z_high_filtered{ new pcl::PointIndices },
z_low_subset_filtered{ new pcl::PointIndices },
@@ -427,26 +430,24 @@
pmf_filtered_ground{ new pcl::PointIndices },
pmf_filtered_obstacles{ new pcl::PointIndices };

pcl::VoxelGrid<pcl::PointXYZ> voxel_filter{};
pcl::CropBox<pcl::PointXYZ> cartesian_filter{};

LDRP_LOG( LOG_DEBUG, "Exhibit A" )
voxelized_points.clear();

// voxelize points
voxel_filter.setInputCloud(point_cloud);
voxel_filter.setLeafSize(
voxel_filter(
point_cloud, default_no_selection, voxelized_points,
this->_config.fpipeline.voxel_size_cm,
this->_config.fpipeline.voxel_size_cm,
this->_config.fpipeline.voxel_size_cm
);
voxel_filter.filter(*voxelized_points);

// TEST
if(this->_config.pcd_logging_mode & PCD_LOGGING_NT) {
this->_nt.test_filtered_points.Set(
std::span<const uint8_t>{
reinterpret_cast<uint8_t*>( voxelized_points->points.data() ),
reinterpret_cast<uint8_t*>( voxelized_points->points.data() + voxelized_points->points.size() )
reinterpret_cast<uint8_t*>( voxelized_points.points.data() ),
reinterpret_cast<uint8_t*>( voxelized_points.points.data() + voxelized_points.points.size() )
}
);
}
