diff --git a/.github/workflows/build-python.yml b/.github/workflows/build-python.yml index b108d46..893212d 100644 --- a/.github/workflows/build-python.yml +++ b/.github/workflows/build-python.yml @@ -1,4 +1,4 @@ -name: PyPI +name: Build Py env: BLENDER_VERSION: "5.0" diff --git a/CMakeLists.txt b/CMakeLists.txt index 2514656..bc79210 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -19,7 +19,7 @@ endif() # ============================================================================== # Project Configuration # ============================================================================== -project(trueform VERSION 0.4.0 LANGUAGES CXX) +project(trueform VERSION 0.5.0 LANGUAGES CXX) configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/cmake/version.hpp.in diff --git a/README.md b/README.md index 0c3290c..7f6967e 100644 --- a/README.md +++ b/README.md @@ -186,6 +186,7 @@ Sample comparisons against VTK, CGAL, libigl, Coal, FCL, and nanoflann: |-----------|-------|------|---------|----------|----------| | Boolean Union | 2 × 1M | 28 ms | **84×** | CGAL `Simple_cartesian` | reduction diagrams, double | | Mesh–Mesh Curves | 2 × 1M | 7 ms | **233×** | CGAL `Simple_cartesian` | reduction diagrams, double | +| ICP Registration | 1M | 7.7 ms | **93×** | libigl | AABB tree, random subsampling | | Self-Intersection | 1M | 78 ms | **37×** | libigl EPECK (GMP/MPFR) | reduction diagrams, double | | Isocontours | 1M, 16 cuts | 3.8 ms | **38×** | VTK `vtkContourFilter` | reduction diagrams, float | | Connected Components | 1M | 15 ms | **10×** | CGAL | parallel union-find | diff --git a/README_python.md b/README_python.md index 03e79cd..8511f0d 100644 --- a/README_python.md +++ b/README_python.md @@ -84,6 +84,7 @@ Cached meshes with automatic dirty-tracking for live preview add-ons. 
See [Blend |-----------|-------|------|---------|----------| | Boolean Union | 2 × 1M | 28 ms | **84×** | CGAL `Simple_cartesian` | | Mesh–Mesh Curves | 2 × 1M | 7 ms | **233×** | CGAL `Simple_cartesian` | +| ICP Registration | 1M | 7.7 ms | **93×** | libigl | | Self-Intersection | 1M | 78 ms | **37×** | libigl EPECK (GMP/MPFR) | | Isocontours | 1M, 16 cuts | 3.8 ms | **38×** | VTK `vtkContourFilter` | | Connected Components | 1M | 15 ms | **10×** | CGAL | diff --git a/benchmarks/CMakeLists.txt b/benchmarks/CMakeLists.txt index 9015d78..a5fceb4 100644 --- a/benchmarks/CMakeLists.txt +++ b/benchmarks/CMakeLists.txt @@ -441,6 +441,8 @@ set(BENCHMARK_TARGETS geometry-principal_curvatures-igl geometry-point_normals-tf geometry-point_normals-igl + geometry-icp_registration-tf + geometry-icp_registration-igl io-read_stl-tf io-read_stl-igl run-benchmarks @@ -454,6 +456,7 @@ if(HAVE_VTK) intersect-isocontours-vtk cut-embedded_isocurves-vtk geometry-point_normals-vtk + geometry-icp_registration-vtk io-read_stl-vtk ) endif() @@ -476,6 +479,7 @@ if(OpenMP_CXX_FOUND) cut-embedded_self_intersection_curves-igl geometry-principal_curvatures-igl geometry-point_normals-igl + geometry-icp_registration-igl io-read_stl-igl run-benchmarks ) diff --git a/benchmarks/geometry/CMakeLists.txt b/benchmarks/geometry/CMakeLists.txt index 538125c..d0916a7 100644 --- a/benchmarks/geometry/CMakeLists.txt +++ b/benchmarks/geometry/CMakeLists.txt @@ -34,3 +34,24 @@ target_link_libraries(geometry-point_normals-vtk-impl benchmark_vtk_common) add_executable(geometry-point_normals-vtk point_normals-vtk.cpp) target_link_libraries(geometry-point_normals-vtk geometry-point_normals-vtk-impl) + +# TrueForm ICP registration +add_library(geometry-icp_registration-tf-impl OBJECT icp_registration-tf-impl.cpp) +target_link_libraries(geometry-icp_registration-tf-impl benchmark_common) + +add_executable(geometry-icp_registration-tf icp_registration-tf.cpp) +target_link_libraries(geometry-icp_registration-tf geometry-icp_registration-tf-impl) + +# VTK ICP registration +add_library(geometry-icp_registration-vtk-impl OBJECT icp_registration-vtk-impl.cpp) +target_link_libraries(geometry-icp_registration-vtk-impl benchmark_vtk_common) + +add_executable(geometry-icp_registration-vtk icp_registration-vtk.cpp) +target_link_libraries(geometry-icp_registration-vtk geometry-icp_registration-vtk-impl) + +# libigl ICP registration +add_library(geometry-icp_registration-igl-impl OBJECT icp_registration-igl-impl.cpp) +target_link_libraries(geometry-icp_registration-igl-impl benchmark_igl_common) + +add_executable(geometry-icp_registration-igl icp_registration-igl.cpp) +target_link_libraries(geometry-icp_registration-igl geometry-icp_registration-igl-impl) diff --git a/benchmarks/geometry/icp_registration-igl-impl.cpp b/benchmarks/geometry/icp_registration-igl-impl.cpp new file mode 100644 index 0000000..251fa9b --- /dev/null +++ b/benchmarks/geometry/icp_registration-igl-impl.cpp @@ -0,0 +1,151 @@ +/** + * ICP registration benchmark with libigl - Implementation + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#include "icp_registration-igl.hpp" +#include "../common/timing.hpp" +#include "../igl-common/conversions.hpp" +#include + +#include + +namespace benchmark { + +int run_icp_registration_igl_benchmark(const std::vector &mesh_paths, + int n_samples, std::ostream &out) { + out << "polygons,time_ms,chamfer_error\n"; + + for (const auto &path : mesh_paths) { + auto mesh = tf::read_stl(path); + auto polygons = mesh.polygons(); + const auto n_polys = 
polygons.size(); + + // Build vertex link for Taubin smoothing + auto vlink = tf::make_vertex_link(polygons); + + // Create target: Taubin smoothed version (200 iterations) + auto target_points_buf = + tf::taubin_smoothed(polygons.points() | tf::tag(vlink), 200, 0.5f, 0.1f); + + // Build target polygons with smoothed points + tf::polygons_buffer target_mesh; + target_mesh.faces_buffer() = mesh.faces_buffer(); + target_mesh.points_buffer() = std::move(target_points_buf); + auto target_polygons = target_mesh.polygons(); + + // Build tree on target for chamfer computation + tf::aabb_tree tree; + tree.build(target_polygons.points(), tf::config_tree(4, 4)); + auto target_tf = target_polygons.points() | tf::tag(tree); + + // Convert target to libigl format + Eigen::MatrixXd VY = benchmark::igl::to_igl_vertices(target_polygons.points()); + Eigen::MatrixXi FY = benchmark::igl::to_igl_faces(target_polygons.faces()); + + // Pre-convert original mesh faces (don't change per iteration) + Eigen::MatrixXi FX = benchmark::igl::to_igl_faces(polygons.faces()); + + // Compute target centroid for center alignment + Eigen::RowVector3d target_centroid = VY.colwise().mean(); + + // Pre-generate all random angles + std::vector> angles(n_samples); + for (int i = 0; i < n_samples; ++i) { + angles[i] = {tf::random(-10.0f, 10.0f), + tf::random(-10.0f, 10.0f), + tf::random(-10.0f, 10.0f)}; + } + + // Store ICP results for chamfer computation + std::vector> icp_results( + n_samples); + // Store source centroids for chamfer computation + std::vector source_centroids(n_samples); + int iter = 0; + + // Current source vertices (set in prepare) + Eigen::MatrixXd current_VX; + auto center = tf::centroid(polygons.points()); + + auto time = benchmark::mean_time_of( + [&]() { + // Prepare: build source for this iteration (not timed) + auto [ax, ay, az] = angles[iter]; + auto rx = tf::make_rotation(tf::deg(ax), tf::axis<0>, center); + auto ry = tf::make_rotation(tf::deg(ay), tf::axis<1>, center); + auto rz = tf::make_rotation(tf::deg(az), tf::axis<2>, center); + auto rotation = tf::transformed(tf::transformed(rx, ry), rz); + + current_VX.resize(polygons.points().size(), 3); + for (std::size_t i = 0; i < polygons.points().size(); ++i) { + auto pt = tf::transformed(polygons.points()[i], rotation); + current_VX(i, 0) = pt[0]; + current_VX(i, 1) = pt[1]; + current_VX(i, 2) = pt[2]; + } + + // Center-align source to target + source_centroids[iter] = current_VX.colwise().mean(); + current_VX.rowwise() -= source_centroids[iter]; + current_VX.rowwise() += target_centroid; + }, + [&]() { + // Timed: run libigl ICP + Eigen::Matrix3d R; + Eigen::RowVector3d t; + ::igl::iterative_closest_point(current_VX, FX, VY, FY, 1000, 30, R, + t); + icp_results[iter] = {R, t}; + ++iter; + }, + n_samples); + + // Compute mean chamfer error (relative to mesh diagonal) + auto aabb = tf::aabb_from(polygons.points()); + float diagonal = aabb.diagonal().length(); + + float total_chamfer = 0.0f; + for (int i = 0; i < n_samples; ++i) { + auto [ax, ay, az] = angles[i]; + auto rx = tf::make_rotation(tf::deg(ax), tf::axis<0>, center); + auto ry = tf::make_rotation(tf::deg(ay), tf::axis<1>, center); + auto rz = tf::make_rotation(tf::deg(az), tf::axis<2>, center); + auto rotation = tf::transformed(tf::transformed(rx, ry), rz); + + // Rebuild centered source + tf::points_buffer source_pts; + source_pts.allocate(polygons.points().size()); + for (std::size_t j = 0; j < polygons.points().size(); ++j) { + auto pt = tf::transformed(polygons.points()[j], rotation); + // 
Apply center alignment + source_pts[j] = tf::make_point( + float(pt[0] - source_centroids[i](0) + target_centroid(0)), + float(pt[1] - source_centroids[i](1) + target_centroid(1)), + float(pt[2] - source_centroids[i](2) + target_centroid(2))); + } + + // Apply ICP result (R, t): p' = p * R + t + auto [R, t] = icp_results[i]; + tf::points_buffer aligned_pts; + aligned_pts.allocate(source_pts.size()); + for (std::size_t j = 0; j < source_pts.size(); ++j) { + auto p = source_pts[j]; + aligned_pts[j] = tf::make_point( + float(p[0] * R(0, 0) + p[1] * R(1, 0) + p[2] * R(2, 0) + t(0)), + float(p[0] * R(0, 1) + p[1] * R(1, 1) + p[2] * R(2, 1) + t(1)), + float(p[0] * R(0, 2) + p[1] * R(1, 2) + p[2] * R(2, 2) + t(2))); + } + + total_chamfer += tf::chamfer_error(aligned_pts.points(), target_tf) / diagonal; + } + float mean_chamfer = total_chamfer / n_samples; + + out << n_polys << "," << time << "," << mean_chamfer << "\n"; + } + + return 0; +} + +} // namespace benchmark diff --git a/benchmarks/geometry/icp_registration-igl.cpp b/benchmarks/geometry/icp_registration-igl.cpp new file mode 100644 index 0000000..c5052ec --- /dev/null +++ b/benchmarks/geometry/icp_registration-igl.cpp @@ -0,0 +1,14 @@ +/** + * ICP registration benchmark with libigl - Entry point + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#include "icp_registration-igl.hpp" +#include "../common/test_meshes.hpp" +#include + +int main() { + return benchmark::run_icp_registration_igl_benchmark(benchmark::BENCHMARK_MESHES, + 30, std::cout); +} diff --git a/benchmarks/geometry/icp_registration-igl.hpp b/benchmarks/geometry/icp_registration-igl.hpp new file mode 100644 index 0000000..f7c6055 --- /dev/null +++ b/benchmarks/geometry/icp_registration-igl.hpp @@ -0,0 +1,32 @@ +/** + * ICP registration benchmark with libigl + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#pragma once + +#include +#include +#include + +namespace benchmark { + +/** + * Run ICP registration benchmark using libigl. 
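+ * Only the igl::iterative_closest_point call itself is timed; building the
+ * rotated, center-aligned source and evaluating the chamfer error happen
+ * outside the timed lambda. The call uses 1000 random samples and 30
+ * iterations, matching the TrueForm and VTK configurations.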
+ * + * For each mesh: + * - Creates target by Taubin smoothing (200 iterations) + * - Creates source by applying random rotation to original + * - Center-aligns source to target (libigl starts from identity) + * - Runs igl::iterative_closest_point + * + * @param mesh_paths Paths to test meshes + * @param n_samples Number of timing samples per mesh + * @param out Output stream for CSV results + * @return Exit code (0 for success) + */ +int run_icp_registration_igl_benchmark(const std::vector &mesh_paths, + int n_samples, std::ostream &out); + +} // namespace benchmark diff --git a/benchmarks/geometry/icp_registration-tf-impl.cpp b/benchmarks/geometry/icp_registration-tf-impl.cpp new file mode 100644 index 0000000..7452b0c --- /dev/null +++ b/benchmarks/geometry/icp_registration-tf-impl.cpp @@ -0,0 +1,129 @@ +/** + * ICP registration benchmark with TrueForm - Implementation + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#include "icp_registration-tf.hpp" +#include "../common/timing.hpp" +#include + +namespace benchmark { + +int run_icp_registration_tf_benchmark(const std::vector &mesh_paths, + int n_samples, std::ostream &out) { + out << "polygons,time_ms,chamfer_error\n"; + + for (const auto &path : mesh_paths) { + auto mesh = tf::read_stl(path); + auto polygons = mesh.polygons(); + const auto n_polys = polygons.size(); + + // Build vertex link for Taubin smoothing + auto vlink = tf::make_vertex_link(polygons); + + // Create target: Taubin smoothed version (200 iterations) + auto target_points_buf = + tf::taubin_smoothed(polygons.points() | tf::tag(vlink), 200, 0.5f, 0.1f); + + // Build target polygons with smoothed points + tf::polygons_buffer target_mesh; + target_mesh.faces_buffer() = mesh.faces_buffer(); + target_mesh.points_buffer() = std::move(target_points_buf); + auto target_polygons = target_mesh.polygons(); + + // Build tree on target points (point-to-point ICP, no normals) + tf::aabb_tree tree; + tree.build(target_polygons.points(), tf::config_tree(4, 4)); + + // Prepare target with tree only + auto target = target_polygons.points() | tf::tag(tree); + + // Compute target centroid for center alignment + auto target_center = tf::centroid(target_polygons.points()); + + // ICP config - fixed 30 iterations, no early termination + tf::icp_config config; + config.max_iterations = 30; + config.min_relative_improvement = 0; + config.n_samples = 1000; + config.k = 1; + + // Pre-generate all random angles + std::vector> angles(n_samples); + for (int i = 0; i < n_samples; ++i) { + angles[i] = {tf::random(-10.0f, 10.0f), + tf::random(-10.0f, 10.0f), + tf::random(-10.0f, 10.0f)}; + } + + // Store ICP results for chamfer computation + std::vector> icp_results(n_samples); + int iter = 0; + + // Current source points (set in prepare) - fresh copy like VTK/libigl + auto center = tf::centroid(polygons.points()); + tf::points_buffer current_source; + current_source.allocate(polygons.points().size()); + + auto time = benchmark::mean_time_of( + [&]() { + // Prepare: build source points for this iteration (not timed) + auto [ax, ay, az] = angles[iter]; + auto rx = tf::make_rotation(tf::deg(ax), tf::axis<0>, center); + auto ry = tf::make_rotation(tf::deg(ay), tf::axis<1>, center); + auto rz = tf::make_rotation(tf::deg(az), tf::axis<2>, center); + auto rotation = tf::transformed(tf::transformed(rx, ry), rz); + + // Apply rotation and center alignment + for (std::size_t i = 0; i < polygons.points().size(); ++i) { + auto pt = tf::transformed(polygons.points()[i], rotation); + current_source[i] = 
tf::make_point( + pt[0] - center[0] + target_center[0], + pt[1] - center[1] + target_center[1], + pt[2] - center[2] + target_center[2]); + } + }, + [&]() { + // Timed: run ICP + auto T = tf::fit_icp_alignment(current_source.points(), target, config); + icp_results[iter] = T; + ++iter; + }, + n_samples); + + // Compute mean chamfer error (relative to mesh diagonal) + auto aabb = tf::aabb_from(polygons.points()); + float diagonal = aabb.diagonal().length(); + + float total_chamfer = 0.0f; + for (int i = 0; i < n_samples; ++i) { + auto [ax, ay, az] = angles[i]; + auto rx = tf::make_rotation(tf::deg(ax), tf::axis<0>, center); + auto ry = tf::make_rotation(tf::deg(ay), tf::axis<1>, center); + auto rz = tf::make_rotation(tf::deg(az), tf::axis<2>, center); + auto rotation = tf::transformed(tf::transformed(rx, ry), rz); + + // Rebuild source points + tf::points_buffer source_pts; + source_pts.allocate(polygons.points().size()); + for (std::size_t j = 0; j < polygons.points().size(); ++j) { + auto pt = tf::transformed(polygons.points()[j], rotation); + source_pts[j] = tf::make_point( + pt[0] - center[0] + target_center[0], + pt[1] - center[1] + target_center[1], + pt[2] - center[2] + target_center[2]); + } + + auto aligned = source_pts.points() | tf::tag(icp_results[i]); + total_chamfer += tf::chamfer_error(aligned, target) / diagonal; + } + float mean_chamfer = total_chamfer / n_samples; + + out << n_polys << "," << time << "," << mean_chamfer << "\n"; + } + + return 0; +} + +} // namespace benchmark diff --git a/benchmarks/geometry/icp_registration-tf.cpp b/benchmarks/geometry/icp_registration-tf.cpp new file mode 100644 index 0000000..4219cf6 --- /dev/null +++ b/benchmarks/geometry/icp_registration-tf.cpp @@ -0,0 +1,14 @@ +/** + * ICP registration benchmark with TrueForm - Entry point + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#include "icp_registration-tf.hpp" +#include "../common/test_meshes.hpp" +#include + +int main() { + return benchmark::run_icp_registration_tf_benchmark(benchmark::BENCHMARK_MESHES, + 30, std::cout); +} diff --git a/benchmarks/geometry/icp_registration-tf.hpp b/benchmarks/geometry/icp_registration-tf.hpp new file mode 100644 index 0000000..5b617de --- /dev/null +++ b/benchmarks/geometry/icp_registration-tf.hpp @@ -0,0 +1,31 @@ +/** + * ICP registration benchmark with TrueForm + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#pragma once + +#include +#include +#include + +namespace benchmark { + +/** + * Run ICP registration benchmark using TrueForm. 
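+ * Only the tf::fit_icp_alignment call is timed; source construction and
+ * chamfer-error evaluation are untimed. A rough sketch of the benchmarked
+ * call, mirroring the implementation file (the full target/tree setup lives
+ * there):
+ *
+ *   tf::icp_config config;
+ *   config.max_iterations = 30;            // fixed iteration count
+ *   config.min_relative_improvement = 0;   // disable early termination
+ *   config.n_samples = 1000;               // randomly subsampled points
+ *   auto T = tf::fit_icp_alignment(source.points(), target, config);
+ *   auto aligned = source.points() | tf::tag(T);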
+ * + * For each mesh: + * - Creates target by Taubin smoothing (200 iterations) + * - Creates source by applying random rotation to original + * - Runs ICP with point-to-plane using normals + * + * @param mesh_paths Paths to test meshes + * @param n_samples Number of timing samples per mesh + * @param out Output stream for CSV results + * @return Exit code (0 for success) + */ +int run_icp_registration_tf_benchmark(const std::vector &mesh_paths, + int n_samples, std::ostream &out); + +} // namespace benchmark diff --git a/benchmarks/geometry/icp_registration-vtk-impl.cpp b/benchmarks/geometry/icp_registration-vtk-impl.cpp new file mode 100644 index 0000000..84ec288 --- /dev/null +++ b/benchmarks/geometry/icp_registration-vtk-impl.cpp @@ -0,0 +1,152 @@ +/** + * ICP registration benchmark with VTK - Implementation + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#include "icp_registration-vtk.hpp" +#include "../common/timing.hpp" +#include "../vtk-common/conversions.hpp" +#include + +#include +#include +#include +#include + +namespace benchmark { + +int run_icp_registration_vtk_benchmark(const std::vector &mesh_paths, + int n_samples, std::ostream &out) { + out << "polygons,time_ms,chamfer_error\n"; + + for (const auto &path : mesh_paths) { + auto mesh = tf::read_stl(path); + auto polygons = mesh.polygons(); + const auto n_polys = polygons.size(); + + // Build vertex link for Taubin smoothing + auto vlink = tf::make_vertex_link(polygons); + + // Create target: Taubin smoothed version (200 iterations) + auto target_points_buf = + tf::taubin_smoothed(polygons.points() | tf::tag(vlink), 200, 0.5f, 0.1f); + + // Build target polygons with smoothed points + tf::polygons_buffer target_mesh; + target_mesh.faces_buffer() = mesh.faces_buffer(); + target_mesh.points_buffer() = std::move(target_points_buf); + auto target_polygons = target_mesh.polygons(); + + // Convert target to VTK + auto target_vtk = benchmark::vtk::to_vtk_polydata(target_mesh); + + // Build tree on target points for chamfer computation + tf::aabb_tree tree; + tree.build(target_polygons.points(), tf::config_tree(4, 4)); + auto target = target_polygons.points() | tf::tag(tree); + + // Pre-generate all random angles + std::vector> angles(n_samples); + for (int i = 0; i < n_samples; ++i) { + angles[i] = {tf::random(-10.0f, 10.0f), + tf::random(-10.0f, 10.0f), + tf::random(-10.0f, 10.0f)}; + } + + // Store ICP results as 4x4 matrices for chamfer computation + std::vector> icp_results(n_samples); + int iter = 0; + + // Current source (set in prepare) + vtkSmartPointer current_source_vtk; + auto center = tf::centroid(polygons.points()); + auto target_center = tf::centroid(target_polygons.points()); + + auto time = benchmark::mean_time_of( + [&]() { + // Prepare: build source mesh for this iteration (not timed) + auto [ax, ay, az] = angles[iter]; + auto rx = tf::make_rotation(tf::deg(ax), tf::axis<0>, center); + auto ry = tf::make_rotation(tf::deg(ay), tf::axis<1>, center); + auto rz = tf::make_rotation(tf::deg(az), tf::axis<2>, center); + auto rotation = tf::transformed(tf::transformed(rx, ry), rz); + + tf::polygons_buffer source_mesh; + source_mesh.faces_buffer() = mesh.faces_buffer(); + source_mesh.points_buffer().allocate(polygons.points().size()); + for (std::size_t i = 0; i < polygons.points().size(); ++i) { + auto pt = tf::transformed(polygons.points()[i], rotation); + source_mesh.points_buffer()[i] = pt; + } + current_source_vtk = benchmark::vtk::to_vtk_polydata(source_mesh); + }, + [&]() { + // Timed: run VTK ICP + auto icp 
= vtkSmartPointer::New(); + icp->SetSource(current_source_vtk); + icp->SetTarget(target_vtk); + icp->GetLandmarkTransform()->SetModeToRigidBody(); + icp->StartByMatchingCentroidsOn(); + icp->SetMaximumNumberOfIterations(30); + icp->SetMaximumNumberOfLandmarks(1000); + icp->CheckMeanDistanceOff(); + icp->Update(); + + // Store 4x4 matrix elements row-major + auto *m = icp->GetMatrix(); + for (int r = 0; r < 4; ++r) { + for (int c = 0; c < 4; ++c) { + icp_results[iter][r * 4 + c] = m->GetElement(r, c); + } + } + ++iter; + }, + n_samples); + + // Compute mean chamfer error (relative to mesh diagonal) + auto aabb = tf::aabb_from(polygons.points()); + float diagonal = aabb.diagonal().length(); + + float total_chamfer = 0.0f; + for (int i = 0; i < n_samples; ++i) { + auto [ax, ay, az] = angles[i]; + auto rx = tf::make_rotation(tf::deg(ax), tf::axis<0>, center); + auto ry = tf::make_rotation(tf::deg(ay), tf::axis<1>, center); + auto rz = tf::make_rotation(tf::deg(az), tf::axis<2>, center); + auto rotation = tf::transformed(tf::transformed(rx, ry), rz); + + // VTK does center alignment, so rebuild centered source + tf::points_buffer rotated_pts; + rotated_pts.allocate(polygons.points().size()); + for (std::size_t j = 0; j < polygons.points().size(); ++j) { + rotated_pts[j] = tf::transformed(polygons.points()[j], rotation); + } + auto source_center = tf::centroid(rotated_pts.points()); + + // Apply center alignment + ICP transform: p' = M * (p + center_offset) + const auto &M = icp_results[i]; + tf::points_buffer aligned_pts; + aligned_pts.allocate(rotated_pts.size()); + for (std::size_t j = 0; j < rotated_pts.size(); ++j) { + // Center-aligned point + float px = rotated_pts[j][0] - source_center[0] + target_center[0]; + float py = rotated_pts[j][1] - source_center[1] + target_center[1]; + float pz = rotated_pts[j][2] - source_center[2] + target_center[2]; + // Apply 4x4 transform + aligned_pts[j] = tf::make_point( + float(M[0] * px + M[1] * py + M[2] * pz + M[3]), + float(M[4] * px + M[5] * py + M[6] * pz + M[7]), + float(M[8] * px + M[9] * py + M[10] * pz + M[11])); + } + total_chamfer += tf::chamfer_error(aligned_pts.points(), target) / diagonal; + } + float mean_chamfer = total_chamfer / n_samples; + + out << n_polys << "," << time << "," << mean_chamfer << "\n"; + } + + return 0; +} + +} // namespace benchmark diff --git a/benchmarks/geometry/icp_registration-vtk.cpp b/benchmarks/geometry/icp_registration-vtk.cpp new file mode 100644 index 0000000..a09b5d8 --- /dev/null +++ b/benchmarks/geometry/icp_registration-vtk.cpp @@ -0,0 +1,14 @@ +/** + * ICP registration benchmark with VTK - Entry point + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#include "icp_registration-vtk.hpp" +#include "../common/test_meshes.hpp" +#include + +int main() { + return benchmark::run_icp_registration_vtk_benchmark(benchmark::BENCHMARK_MESHES, + 30, std::cout); +} diff --git a/benchmarks/geometry/icp_registration-vtk.hpp b/benchmarks/geometry/icp_registration-vtk.hpp new file mode 100644 index 0000000..e0e9c3d --- /dev/null +++ b/benchmarks/geometry/icp_registration-vtk.hpp @@ -0,0 +1,31 @@ +/** + * ICP registration benchmark with VTK + * + * Copyright (c) 2025 Ziga Sajovic, XLAB + */ + +#pragma once + +#include +#include +#include + +namespace benchmark { + +/** + * Run ICP registration benchmark using VTK. 
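+ * Only constructing and updating the vtkIterativeClosestPointTransform is
+ * timed; source construction and chamfer-error evaluation are untimed. The
+ * resulting 4x4 matrix is stored and later applied by hand (together with
+ * the centroid alignment VTK performs via StartByMatchingCentroids) when
+ * computing the chamfer error against the TrueForm AABB tree.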
+ * + * For each mesh: + * - Creates target by Taubin smoothing (200 iterations) + * - Creates source by applying random rotation to original + * - Runs vtkIterativeClosestPointTransform with StartByMatchingCentroids + * + * @param mesh_paths Paths to test meshes + * @param n_samples Number of timing samples per mesh + * @param out Output stream for CSV results + * @return Exit code (0 for success) + */ +int run_icp_registration_vtk_benchmark(const std::vector &mesh_paths, + int n_samples, std::ostream &out); + +} // namespace benchmark diff --git a/docs/app/components/TryItBanner.vue b/docs/app/components/TryItBanner.vue index fab72f4..6a749dc 100644 --- a/docs/app/components/TryItBanner.vue +++ b/docs/app/components/TryItBanner.vue @@ -8,7 +8,7 @@ const props = withDefaults( { to: "/live-examples/boolean", title: "Try it in your browser", - description: "Interactive mesh booleans, collisions, isobands and more. No install needed.", + description: "Interactive mesh booleans, registration, isobands, and more. No install needed.", }, ); diff --git a/docs/app/components/charts/GeometryIcpRegistration.vue b/docs/app/components/charts/GeometryIcpRegistration.vue new file mode 100644 index 0000000..1c5161a --- /dev/null +++ b/docs/app/components/charts/GeometryIcpRegistration.vue @@ -0,0 +1,80 @@ + + diff --git a/docs/app/components/charts/GeometryIcpRegistrationSpeedup.vue b/docs/app/components/charts/GeometryIcpRegistrationSpeedup.vue new file mode 100644 index 0000000..4b77d7a --- /dev/null +++ b/docs/app/components/charts/GeometryIcpRegistrationSpeedup.vue @@ -0,0 +1,50 @@ + + diff --git a/docs/app/examples/AlignmentExample.ts b/docs/app/examples/AlignmentExample.ts new file mode 100644 index 0000000..cc372af --- /dev/null +++ b/docs/app/examples/AlignmentExample.ts @@ -0,0 +1,269 @@ +import type { MainModule } from "@/examples/native"; +import { ThreejsBase } from "@/examples/ThreejsBase"; +import { fitCameraToAllMeshesFromZPlane } from "@/utils/sceneUtils"; +import * as THREE from "three"; + +export type InteractionMode = "move" | "rotate"; + +export class AlignmentExample extends ThreejsBase { + private alignmentTime = 0; + private interactionMode: InteractionMode = "move"; + + // Rotation state + private isRotating = false; + private lastMouseX = 0; + private lastMouseY = 0; + + constructor( + wasmInstance: MainModule, + paths: string[], + container: HTMLElement, + isDarkMode = true, + ) { + // skipUpdate = true so we can position meshes before fitting camera + super(wasmInstance, paths, container, undefined, true, false, isDarkMode); + this.updateMeshes(); + this.positionMeshesForScreen(container); + this.setupOrthographicCamera(container); + + // Warmup alignment (run once, then reposition) + this.wasmInstance.alignment_run_align(); + this.positionMeshesForScreen(container); + this.updateMeshes(); + } + + private setupOrthographicCamera(container: HTMLElement): void { + const orthoCamera = new THREE.OrthographicCamera( + -1, 1, 1, -1, 0.1, 1000 + ); + + // Replace camera in scene bundle + (this.sceneBundle1 as any).camera = orthoCamera; + this.sceneBundle1.controls.setCamera(orthoCamera); + + // Now fit the orthographic camera to all meshes + this.fitOrthographicCamera(container); + } + + private fitOrthographicCamera(container: HTMLElement): void { + const rect = container.getBoundingClientRect(); + const aspect = rect.width / rect.height; + const isLandscape = rect.width > rect.height; + const camera = this.sceneBundle1.camera as unknown as THREE.OrthographicCamera; + + // Get 
positions from both instances to compute bounding box + const positions: THREE.Vector3[] = []; + const diag = this.wasmInstance.alignment_get_aabb_diagonal() ?? 1; + + for (let i = 0; i < 2; i++) { + const inst = this.wasmInstance.get_instance_on_idx(i); + if (!inst) continue; + const matrix = new Float32Array(inst.get_matrix()); + const m = new THREE.Matrix4().fromArray(matrix).transpose(); + const pos = new THREE.Vector3(); + pos.setFromMatrixPosition(m); + positions.push(pos); + } + + // Compute center and extent + const center = new THREE.Vector3(); + if (positions.length >= 2) { + center.addVectors(positions[0]!, positions[1]!).multiplyScalar(0.5); + } + + const separation = positions.length >= 2 ? positions[0]!.distanceTo(positions[1]!) : 0; + // Different zoom for landscape vs portrait + const zoomFactor = isLandscape ? 0.5 : 0.7; + const extent = (separation + diag) * zoomFactor; + + // Set orthographic frustum + camera.left = -extent * aspect; + camera.right = extent * aspect; + camera.top = extent; + camera.bottom = -extent; + camera.updateProjectionMatrix(); + + // Position camera + camera.position.set(center.x, center.y, center.z + diag * 3); + camera.lookAt(center); + + this.sceneBundle1.controls.target.copy(center); + this.sceneBundle1.controls.update(); + } + + private positionMeshesForScreen(container: HTMLElement): void { + const rect = container.getBoundingClientRect(); + const isLandscape = rect.width > rect.height; + const diag = this.wasmInstance.alignment_get_aabb_diagonal() ?? 1; + + // More spacing for the axis we're spreading along + // Landscape: spread in X, Portrait: spread in Z + const spacing = isLandscape ? diag * 1.2 : diag * 1.0; + + // Target stays at origin, source gets offset + // Camera on Z axis looking at XY plane: X = screen horizontal, Y = screen vertical + // Landscape: target left, source right (positive X) + // Portrait: target below, source above (positive Y) + const offset = isLandscape + ? 
[spacing, 0, 0] + : [0, spacing, 0]; + + // Build translation matrix for source + const m = new THREE.Matrix4().makeTranslation(offset[0], offset[1], offset[2]); + m.transpose(); + + const arr = m.toArray() as [ + number, number, number, number, + number, number, number, number, + number, number, number, number, + number, number, number, number + ]; + this.wasmInstance.alignment_set_source_matrix(arr); + this.updateMeshes(); + } + + public setMode(mode: InteractionMode): void { + this.interactionMode = mode; + } + + public getMode(): InteractionMode { + return this.interactionMode; + } + + // Override pointer handlers to support rotate mode + public override onPointerDown(event: PointerEvent): void { + if (this.interactionMode === "rotate" && event.buttons === 1) { + // First do a mouse move to update selection state in WASM + const rect = this.renderer.domElement.getBoundingClientRect(); + const ndc = new THREE.Vector2( + ((event.clientX - rect.left) / rect.width) * 2 - 1, + -((event.clientY - rect.top) / rect.height) * 2 + 1 + ); + + const raycaster = new THREE.Raycaster(); + raycaster.setFromCamera(ndc, this.sceneBundle1.camera); + const ray = raycaster.ray; + const cameraPos = this.sceneBundle1.camera.position; + const dir = new THREE.Vector3(); + this.sceneBundle1.camera.getWorldDirection(dir); + const focalPoint = cameraPos.clone().add(dir.multiplyScalar(100)); + + this.wasmInstance.OnMouseMove( + [ray.origin.x, ray.origin.y, ray.origin.z], + [ray.direction.x, ray.direction.y, ray.direction.z], + [cameraPos.x, cameraPos.y, cameraPos.z], + [focalPoint.x, focalPoint.y, focalPoint.z] + ); + + // Check if we hit a selectable mesh + const hitMesh = this.wasmInstance.OnLeftButtonDown(); + if (hitMesh) { + // Cancel WASM's drag mode, we handle rotation ourselves + this.wasmInstance.OnLeftButtonUp(); + this.isRotating = true; + this.lastMouseX = event.clientX; + this.lastMouseY = event.clientY; + this.sceneBundle1.controls.enabled = false; + event.stopPropagation(); + } + } else { + super.onPointerDown(event); + } + } + + public override onPointerMove(event: PointerEvent, touchHover = false): boolean { + if (this.interactionMode === "rotate" && this.isRotating) { + const dx = event.clientX - this.lastMouseX; + const dy = event.clientY - this.lastMouseY; + this.lastMouseX = event.clientX; + this.lastMouseY = event.clientY; + + // Convert mouse movement to rotation (similar to C++ example) + const angleX = dy * 0.5; // degrees + const angleY = dx * 0.5; + + // Get current source matrix (source is instance 1) + const sourceInst = this.wasmInstance.get_instance_on_idx(1); + if (!sourceInst) return true; + + const matrix = new Float32Array(sourceInst.get_matrix()); + const m = new THREE.Matrix4().fromArray(matrix).transpose(); + + // Get rotation center (translation part of current matrix) + const center = new THREE.Vector3(); + center.setFromMatrixPosition(m); + + // Create rotation matrices around world X and Y axes + const rotX = new THREE.Matrix4().makeRotationAxis( + new THREE.Vector3(1, 0, 0), + THREE.MathUtils.degToRad(angleX) + ); + const rotY = new THREE.Matrix4().makeRotationAxis( + new THREE.Vector3(0, 1, 0), + THREE.MathUtils.degToRad(angleY) + ); + + // Apply rotations centered at the mesh position + const toOrigin = new THREE.Matrix4().makeTranslation(-center.x, -center.y, -center.z); + const fromOrigin = new THREE.Matrix4().makeTranslation(center.x, center.y, center.z); + + const newMatrix = new THREE.Matrix4() + .multiply(fromOrigin) + .multiply(rotY) + .multiply(rotX) + 
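+        // Matrix composition is right-to-left: the existing transform m is
+        // applied first, then the translation to the origin, the world-axis
+        // rotations, and the translation back, so the mesh rotates about its
+        // current position.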
.multiply(toOrigin) + .multiply(m); + + // Send to WASM + newMatrix.transpose(); + const arr = newMatrix.toArray() as [ + number, number, number, number, + number, number, number, number, + number, number, number, number, + number, number, number, number + ]; + this.wasmInstance.alignment_set_source_matrix(arr); + this.updateMeshes(); + + event.stopPropagation(); + return true; + } else { + return super.onPointerMove(event, touchHover); + } + } + + public override onPointerUp(event: PointerEvent): void { + if (this.isRotating) { + this.isRotating = false; + this.sceneBundle1.controls.enabled = true; + event.stopPropagation(); + } else { + super.onPointerUp(event); + } + } + + public runMain() { + const v = new this.wasmInstance.VectorString(); + for (const path of this.paths) { + v.push_back(path); + } + this.wasmInstance.run_main_alignment(v); + for (const path of this.paths) { + this.wasmInstance.FS.unlink(path); + } + } + + public align(): number { + this.alignmentTime = this.wasmInstance.alignment_run_align(); + this.updateMeshes(); + return this.alignmentTime; + } + + public isAligned(): boolean { + return this.wasmInstance.alignment_is_aligned(); + } + + public getAlignmentTime(): number { + return this.alignmentTime; + } +} diff --git a/docs/app/examples/PositioningExample.ts b/docs/app/examples/PositioningExample.ts index 21030af..87331cb 100644 --- a/docs/app/examples/PositioningExample.ts +++ b/docs/app/examples/PositioningExample.ts @@ -1,17 +1,25 @@ import type { MainModule } from "@/examples/native"; -import { fitCameraToAllMeshesFromZPlane } from "@/utils/sceneUtils"; import { ThreejsBase } from "@/examples/ThreejsBase"; import * as THREE from "three"; export class PositioningExample extends ThreejsBase { private keyPressed = false; + + // Closest points visualization + private closestPointsGroup!: THREE.Group; + private sphere1!: THREE.Mesh; + private sphere2!: THREE.Mesh; + private connector!: THREE.Mesh; + constructor( wasmInstance: MainModule, paths: string[], container: HTMLElement, isDarkMode = true, ) { - super(wasmInstance, paths, container, undefined, false, false, isDarkMode); + // skipUpdate = true so we can set up camera before fitting + super(wasmInstance, paths, container, undefined, true, false, isDarkMode); + this.updateMeshes(); const interceptKeyDownEvent = (event: KeyboardEvent) => { if (this.keyPressed) return; @@ -25,7 +33,193 @@ export class PositioningExample extends ThreejsBase { window.addEventListener("keydown", interceptKeyDownEvent); window.addEventListener("keyup", interceptKeyUpEvent); - fitCameraToAllMeshesFromZPlane(this.sceneBundle1, 1.5); + this.setupClosestPointsVisuals(); + this.positionMeshesForScreen(container); + this.setupOrthographicCamera(container); + + // Update to show initial visualization + this.updateMeshes(); + } + + private setupOrthographicCamera(container: HTMLElement): void { + const orthoCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0.1, 1000); + + // Replace camera in scene bundle + (this.sceneBundle1 as unknown as { camera: THREE.Camera }).camera = orthoCamera; + this.sceneBundle1.controls.setCamera(orthoCamera); + + this.fitOrthographicCamera(container); + } + + private positionMeshesForScreen(container: HTMLElement): void { + const rect = container.getBoundingClientRect(); + const isLandscape = rect.width > rect.height; + const aabbDiag = this.wasmInstance.positioning_get_aabb_diagonal?.() ?? 10; + + // Spacing between meshes + const spacing = isLandscape ? 
aabbDiag * 1.2 : aabbDiag * 1.0; + + // Camera on Z axis looking at XY plane: X = screen horizontal, Y = screen vertical + // Landscape: side by side (X axis) + // Portrait: stacked (Y axis) + const offsets: [number, number, number][] = isLandscape + ? [[-spacing / 2, 0, 0], [spacing / 2, 0, 0]] + : [[0, -spacing / 2, 0], [0, spacing / 2, 0]]; + + for (let i = 0; i < 2; i++) { + const [ox, oy, oz] = offsets[i]!; + const m = new THREE.Matrix4().makeTranslation(ox, oy, oz); + m.transpose(); + + const arr = m.toArray() as [ + number, number, number, number, + number, number, number, number, + number, number, number, number, + number, number, number, number + ]; + this.wasmInstance.positioning_set_instance_matrix?.(i, arr); + } + this.updateMeshes(); + } + + private fitOrthographicCamera(container: HTMLElement): void { + const rect = container.getBoundingClientRect(); + const aspect = rect.width / rect.height; + const isLandscape = rect.width > rect.height; + const camera = this.sceneBundle1.camera as unknown as THREE.OrthographicCamera; + + // Get bounding box of all instances + const positions: THREE.Vector3[] = []; + const aabbDiag = this.wasmInstance.positioning_get_aabb_diagonal?.() ?? 10; + + for (let i = 0; i < this.wasmInstance.get_number_of_instances(); i++) { + const inst = this.wasmInstance.get_instance_on_idx(i); + if (!inst) continue; + const matrix = new Float32Array(inst.get_matrix()); + const m = new THREE.Matrix4().fromArray(matrix).transpose(); + const pos = new THREE.Vector3(); + pos.setFromMatrixPosition(m); + positions.push(pos); + } + + // Compute center and extent + const center = new THREE.Vector3(); + if (positions.length >= 2) { + center.addVectors(positions[0]!, positions[1]!).multiplyScalar(0.5); + } + + const separation = positions.length >= 2 ? positions[0]!.distanceTo(positions[1]!) : 0; + // Different zoom for landscape vs portrait + const zoomFactor = isLandscape ? 
0.5 : 0.7; + const extent = (separation + aabbDiag) * zoomFactor; + + // Set orthographic frustum + camera.left = -extent * aspect; + camera.right = extent * aspect; + camera.top = extent; + camera.bottom = -extent; + camera.updateProjectionMatrix(); + + // Position camera + camera.position.set(center.x, center.y, center.z + aabbDiag * 3); + camera.lookAt(center); + + this.sceneBundle1.controls.target.copy(center); + this.sceneBundle1.controls.update(); + } + + private setupClosestPointsVisuals(): void { + this.closestPointsGroup = new THREE.Group(); + this.closestPointsGroup.name = "closest_points"; + + // Sphere geometry and materials + const sphereGeom = new THREE.SphereGeometry(1, 16, 12); + const sphereMat = new THREE.MeshStandardMaterial({ + color: 0x00d5be, // Bright teal (like cross-section boundary) + roughness: 0.3, + metalness: 0.1, + }); + + // Cylinder geometry and material + const cylGeom = new THREE.CylinderGeometry(1, 1, 1, 8); + const cylMat = new THREE.MeshStandardMaterial({ + color: 0x00a89a, // Darker teal (like cross-section fill) + roughness: 0.3, + metalness: 0.1, + }); + + this.sphere1 = new THREE.Mesh(sphereGeom, sphereMat); + this.sphere2 = new THREE.Mesh(sphereGeom.clone(), sphereMat.clone()); + this.connector = new THREE.Mesh(cylGeom, cylMat); + + this.closestPointsGroup.add(this.sphere1); + this.closestPointsGroup.add(this.sphere2); + this.closestPointsGroup.add(this.connector); + + this.closestPointsGroup.visible = false; + this.sceneBundle1.scene.add(this.closestPointsGroup); + } + + private updateClosestPointsVisuals(): void { + if (!this.closestPointsGroup) return; + + const hasPoints = this.wasmInstance.positioning_has_closest_points?.(); + if (!hasPoints) { + this.closestPointsGroup.visible = false; + return; + } + + const pts = this.wasmInstance.positioning_get_closest_points?.() as number[] | undefined; + if (!pts || pts.length < 6) { + this.closestPointsGroup.visible = false; + return; + } + + const p0 = new THREE.Vector3(pts[0], pts[1], pts[2]); + const p1 = new THREE.Vector3(pts[3], pts[4], pts[5]); + const dist = p0.distanceTo(p1); + + // Fixed size based on AABB diagonal (1.5% for spheres, 0.75% for cylinder) + const aabbDiag = this.wasmInstance.positioning_get_aabb_diagonal?.() ?? 
10; + const sphereRadius = aabbDiag * 0.015; + const cylRadius = aabbDiag * 0.0075; + + // Position and scale spheres + this.sphere1.position.copy(p0); + this.sphere1.scale.setScalar(sphereRadius); + + this.sphere2.position.copy(p1); + this.sphere2.scale.setScalar(sphereRadius); + + // Position and orient cylinder + if (dist > 0.001) { + const mid = new THREE.Vector3().addVectors(p0, p1).multiplyScalar(0.5); + this.connector.position.copy(mid); + + // Orient cylinder to point from p0 to p1 + const direction = new THREE.Vector3().subVectors(p1, p0).normalize(); + const quaternion = new THREE.Quaternion(); + quaternion.setFromUnitVectors(new THREE.Vector3(0, 1, 0), direction); + this.connector.quaternion.copy(quaternion); + + // Scale: radius for X/Z, length for Y + this.connector.scale.set(cylRadius, dist, cylRadius); + this.connector.visible = true; + } else { + this.connector.visible = false; + } + + this.closestPointsGroup.visible = true; + } + + public override updateMeshes(): void { + super.updateMeshes(); + this.updateClosestPointsVisuals(); + this.refreshTimeValue?.(); + } + + public override getAverageTime(): number { + return this.wasmInstance.get_average_time(); } public randomize() { diff --git a/docs/app/examples/ThreejsBase.ts b/docs/app/examples/ThreejsBase.ts index 22996a8..ced7475 100644 --- a/docs/app/examples/ThreejsBase.ts +++ b/docs/app/examples/ThreejsBase.ts @@ -401,7 +401,7 @@ export abstract class ThreejsBase implements IThreejsBase { indexCounters.set(meshDataId, 0); } - // Build instanceIndices map + // Build instanceIndices map and handle per-mesh opacity for (let i = 0; i < numInstances; i++) { const inst = this.wasmInstance.get_instance_on_idx(i); if (!inst) continue; @@ -409,6 +409,15 @@ export abstract class ThreejsBase implements IThreejsBase { const indexInBatch = indexCounters.get(meshDataId) ?? 
0; this.instanceIndices.set(i, { meshDataId, indexInBatch }); indexCounters.set(meshDataId, indexInBatch + 1); + + // Handle opacity - if single instance per mesh, set material opacity + const instancedMesh = this.instancedMeshes.get(meshDataId); + if (instancedMesh && countPerMeshData.get(meshDataId) === 1 && inst.opacity < 1.0) { + const material = instancedMesh.material as THREE.MeshMatcapMaterial; + material.transparent = true; + material.opacity = inst.opacity; + material.needsUpdate = true; + } } } diff --git a/docs/app/pages/live-examples/alignment.vue b/docs/app/pages/live-examples/alignment.vue new file mode 100644 index 0000000..7fd2206 --- /dev/null +++ b/docs/app/pages/live-examples/alignment.vue @@ -0,0 +1,173 @@ + + + diff --git a/docs/app/pages/live-examples/closest-points.vue b/docs/app/pages/live-examples/closest-points.vue index 37c54bd..101468d 100644 --- a/docs/app/pages/live-examples/closest-points.vue +++ b/docs/app/pages/live-examples/closest-points.vue @@ -31,7 +31,17 @@ const meshCount = 2; const meshes = computed(() => buildMeshes(meshCount)); const polygonLabel = computed(() => formatPolygonLabel(meshCount)); +const avgTime = ref("0"); +const updateAvgTime = () => { + if (exampleClass) { + avgTime.value = exampleClass.getAverageTime().toFixed(2); + } +}; + const badge = computed(() => ({ + icon: "i-lucide-gauge", + label: "Query:", + value: `${avgTime.value} ms`, polygons: polygonLabel.value, })); @@ -63,12 +73,16 @@ const loadThreejs = async () => { return null; } - return new PositioningExample( + const instance = new PositioningExample( wasmInstance, meshFilenames, el, isDark.value, ); + instance.refreshTimeValue = updateAvgTime; + // Update time after initial load + nextTick(() => updateAvgTime()); + return instance; }, }); }; @@ -101,7 +115,7 @@ watch(isDark, (dark) => {