From 16aaf4cda5fd97da12a7f1da8b0a5168fac2e289 Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 11 Oct 2016 13:57:03 +0000 Subject: Problem of merge with tangentialcomplex branch. Redo in an integration branch git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/branches/tangential_integration@1701 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: fa029e8e90b3e203ea675f02098ec6fe95596f9f --- src/Tangential_complex/benchmark/CMakeLists.txt | 40 + src/Tangential_complex/benchmark/RIB_exporter.h | 269 +++ src/Tangential_complex/benchmark/XML_exporter.h | 207 ++ .../benchmark/benchmark_script.txt | 221 ++ src/Tangential_complex/benchmark/benchmark_tc.cpp | 785 +++++++ .../doc/Intro_tangential_complex.h | 119 + src/Tangential_complex/doc/tc_example_01.png | Bin 0 -> 20323 bytes src/Tangential_complex/doc/tc_example_02.png | Bin 0 -> 36017 bytes src/Tangential_complex/doc/tc_example_03.png | Bin 0 -> 62990 bytes src/Tangential_complex/doc/tc_example_05.png | Bin 0 -> 36032 bytes src/Tangential_complex/doc/tc_example_06.png | Bin 0 -> 37195 bytes src/Tangential_complex/doc/tc_example_07.png | Bin 0 -> 49399 bytes src/Tangential_complex/doc/tc_example_07_after.png | Bin 0 -> 50132 bytes .../doc/tc_example_07_before.png | Bin 0 -> 48898 bytes src/Tangential_complex/doc/tc_example_08.png | Bin 0 -> 63636 bytes src/Tangential_complex/doc/tc_example_09.png | Bin 0 -> 35453 bytes src/Tangential_complex/doc/tc_examples.png | Bin 0 -> 150776 bytes src/Tangential_complex/example/CMakeLists.txt | 30 + src/Tangential_complex/example/example_basic.cpp | 46 + .../example/example_with_perturb.cpp | 53 + .../include/gudhi/Tangential_complex.h | 2277 ++++++++++++++++++++ .../gudhi/Tangential_complex/Simplicial_complex.h | 539 +++++ .../include/gudhi/Tangential_complex/config.h | 44 + .../include/gudhi/Tangential_complex/utilities.h | 195 ++ src/Tangential_complex/test/CMakeLists.txt | 31 + .../test/test_tangential_complex.cpp | 70 + 26 files changed, 4926 insertions(+) create mode 100644 src/Tangential_complex/benchmark/CMakeLists.txt create mode 100644 src/Tangential_complex/benchmark/RIB_exporter.h create mode 100644 src/Tangential_complex/benchmark/XML_exporter.h create mode 100644 src/Tangential_complex/benchmark/benchmark_script.txt create mode 100644 src/Tangential_complex/benchmark/benchmark_tc.cpp create mode 100644 src/Tangential_complex/doc/Intro_tangential_complex.h create mode 100644 src/Tangential_complex/doc/tc_example_01.png create mode 100644 src/Tangential_complex/doc/tc_example_02.png create mode 100644 src/Tangential_complex/doc/tc_example_03.png create mode 100644 src/Tangential_complex/doc/tc_example_05.png create mode 100644 src/Tangential_complex/doc/tc_example_06.png create mode 100644 src/Tangential_complex/doc/tc_example_07.png create mode 100644 src/Tangential_complex/doc/tc_example_07_after.png create mode 100644 src/Tangential_complex/doc/tc_example_07_before.png create mode 100644 src/Tangential_complex/doc/tc_example_08.png create mode 100644 src/Tangential_complex/doc/tc_example_09.png create mode 100644 src/Tangential_complex/doc/tc_examples.png create mode 100644 src/Tangential_complex/example/CMakeLists.txt create mode 100644 src/Tangential_complex/example/example_basic.cpp create mode 100644 src/Tangential_complex/example/example_with_perturb.cpp create mode 100644 src/Tangential_complex/include/gudhi/Tangential_complex.h create mode 100644 src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h create mode 100644 
src/Tangential_complex/include/gudhi/Tangential_complex/config.h create mode 100644 src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h create mode 100644 src/Tangential_complex/test/CMakeLists.txt create mode 100644 src/Tangential_complex/test/test_tangential_complex.cpp (limited to 'src/Tangential_complex') diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt new file mode 100644 index 00000000..12488201 --- /dev/null +++ b/src/Tangential_complex/benchmark/CMakeLists.txt @@ -0,0 +1,40 @@ +cmake_minimum_required(VERSION 2.6) +project(Tangential_complex_benchmark) + +if (GCOVR_PATH) + # for gcovr to make coverage reports - Corbera Jenkins plugin + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage") +endif() +if (GPROF_PATH) + # for gprof to make coverage reports - Jenkins + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") +endif() + +# need CGAL 4.8 +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + + add_executable(Tangential_complex_benchmark benchmark_tc.cpp) + target_link_libraries(Tangential_complex_benchmark + ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES}) + endif() + + # Do not forget to copy test files in current binary dir + #file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) + + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for Tangential complex feature.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential complex feature. Version 4.8.0 is required.") + endif () +endif() + diff --git a/src/Tangential_complex/benchmark/RIB_exporter.h b/src/Tangential_complex/benchmark/RIB_exporter.h new file mode 100644 index 00000000..73c14041 --- /dev/null +++ b/src/Tangential_complex/benchmark/RIB_exporter.h @@ -0,0 +1,269 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef GUDHI_TC_RIB_EXPORTER_H +#define GUDHI_TC_RIB_EXPORTER_H + +#include + +#include +#include + +template +class RIB_exporter { + typedef typename PointRandomAccessRange::value_type Point; + typedef typename SimplexRange::value_type Simplex; + public: + + typedef std::tuple Color; // RGBA + typedef std::tuple Coords_choice; + + // Constructor + RIB_exporter( + PointRandomAccessRange const& points, + SimplexRange const& simplices, + std::ofstream &out, + std::string const& rendered_image_filename = "export.tif", + bool is_preview = false, // low-quality + Coords_choice coords_choice = std::make_tuple(0, 1, 2), + int image_width = 1920, + int image_height = 1080, + Color const& triangle_color = std::make_tuple(1., 1., 1., 1.), + bool ambient_light = true, + double ambient_intensity = 0.3, + bool shadow = true, + double shadow_intensity = 0.85, + double point_sphere_radius = 0.003) + : m_points(points), + m_simplices(simplices), + m_out(out), + m_rendered_image_filename(rendered_image_filename), + m_is_preview(is_preview), + m_coords_choice(coords_choice), + m_image_width(image_width), + m_image_height(image_height), + m_current_color(0., 0., 0., 0.), + m_current_alpha(1), + m_triangle_color(triangle_color), + m_ambient_light(ambient_light), + m_ambient_intensity(ambient_intensity), + m_shadow(shadow), + m_shadow_intensity(shadow_intensity), + m_point_sphere_radius(point_sphere_radius) { + m_out.precision(8); + } + + void write_file() { + write_header(); + write_lights(); + /*if (m_point_sphere_radius != 0.) + write_point_spheres();*/ + write_triangles(); + + m_out << "WorldEnd\n"; + } + + private: + + void write_header() { + m_out << "Option \"searchpath\" \"shader\" " + "\".:./shaders:%PIXIE_SHADERS%:%PIXIEHOME%/shaders\"\n"; + + if (m_is_preview) { + m_out << "Attribute \"visibility\" \"specular\" 1\n" + << "Attribute \"visibility\" \"transmission\" 1\n\n"; + } + + m_out << "Display \"" << m_rendered_image_filename << "\" \"file\" \"rgb\"\n"; + + if (!m_is_preview) { + m_out << "Format " << m_image_width << " " << m_image_height << " 1\n"; + } else { + double ratio = double(m_image_height) / double(m_image_width); + + int width = (ratio < 1.) ? 300 : int(300. / ratio); + int height = (ratio < 1.) ? int(ratio * 300.) 
: 300; + + m_out << "Format " << width << " " << height << " 1\n"; + } + + + if (m_image_width > m_image_height) { + double ratio = double(m_image_height) / double(m_image_width); + m_out << "ScreenWindow -1 1 " << -ratio << " " << ratio << "\n"; + } else if (m_image_height > m_image_width) { + double ratio = double(m_image_width) / double(m_image_height); + m_out << "ScreenWindow " << -ratio << " " << ratio << " -1 1\n"; + } + + m_out << "Projection \"perspective\" \"fov\" 45\n" + << "Translate 0 0 3\n" + << "PixelSamples 4 4\n" + << "PixelFilter \"catmull-rom\" 3 3\n" + << "ShadingInterpolation \"smooth\"\n" + << "Rotate -10 20 0 1\n" + << "WorldBegin\n"; + } + + void write_lights() { + if (!m_is_preview) { + // ShadowLight + m_out << "LightSource \"shadowdistant\" 1 \"from\" [0 0 0] \"to\" [0 0 1]" + << " \"shadowname\" \"raytrace\" \"intensity\" " + << m_shadow_intensity << "\n"; + + // Ambient light + m_out << "LightSource \"ambientlight\" 2 \"intensity\" " + << m_ambient_intensity << "\n"; + } else { + m_out << "LightSource \"distantLight\" 1 \"from\" [0 0 0] \"to\" [0 0 1]" + << " \"intensity\" " << m_shadow_intensity << "\n"; + + // Ambient light + m_out << "LightSource \"ambientlight\" 2 \"intensity\" " + << m_ambient_intensity << "\n"; + } + + // Background light + m_out << "LightSource \"ambientlight\" 99 \"intensity\" 1\n"; + + // Turn background light OFF + turn_background_light(false); + } + + void turn_background_light(bool turn_on) { + if (!turn_on) { + m_out << "Illuminate 1 1" << std::endl; + if (!m_is_preview) + m_out << "Illuminate 2 1" << std::endl; + m_out << "Illuminate 99 0" << std::endl; + } else { + m_out << "Illuminate 1 0" << std::endl; + if (!m_is_preview) + m_out << "Illuminate 2 0" << std::endl; + m_out << "Illuminate 99 1" << std::endl; + } + } + + void write_color(Color const& color, bool use_transparency) { + if (m_current_color == color) + return; + + m_current_color = color; + + // Write opacity data + if (use_transparency) + write_opacity(std::get<3>(color)); + + // Write color data + m_out << "Color [ " << std::get<0>(color) << " " << std::get<1>(color) + << " " << std::get<2>(color) << " ]\n"; + } + + void write_opacity(const double alpha) { + if (m_current_alpha == alpha) + return; + + m_current_alpha = alpha; + + // Write opacity data + m_out << "Opacity " << alpha << " " << alpha << " " << alpha << std::endl; + } + + void write_point(Point const& p) { + m_out << " " << p[std::get<0>(m_coords_choice)] + << " " << p[std::get<1>(m_coords_choice)] + << " " << p[std::get<2>(m_coords_choice)] << " "; + } + + void write_triangles() { + m_out << "Surface \"plastic\" \"Ka\" 0.65 \"Kd\" 0.85 \"Ks\" 0.25 \"roughness\" 0.1" << std::endl; + + for (auto const& simplex : m_simplices) { + std::vector triangles; + // Get the triangles composing the simplex + combinations(simplex, 3, std::back_inserter(triangles)); + for (auto const& t : triangles) + write_triangle(t); + } + } + + template + void write_triangle(PointIndexRange const& t) { + // Color + write_color(m_triangle_color, true); + + // Triangle + m_out << "Polygon \"P\" ["; + for (auto idx : t) + write_point(m_points[idx]); + m_out << "]" << std::endl; + + // Edges (will be drawn later on) + /*add_edge(p, q, edge_color); + add_edge(p, r, edge_color); + add_edge(q, r, edge_color); + + // Vertices (will be drawn later on) + add_vertex(p, edge_color); + add_vertex(q, edge_color); + add_vertex(r, edge_color);*/ + } + + void write_point_sphere(Point const& p) { + if (m_point_sphere_radius == 0.) 
+ return; + + m_out << "Translate " << p[0] << " " << p[1] << " " << p[2] << std::endl; + // Sphere radius zmin zmax thetamax + m_out << "Sphere " << m_point_sphere_radius << " " << -m_point_sphere_radius + << " " << m_point_sphere_radius << " 360" << std::endl; + m_out << "Identity" << std::endl; + } + + void write_point_spheres() { + write_color(std::make_tuple(0.7, 0.7, 0.7, 0.5), true); + for (auto const& p : m_points) + write_point_sphere(p); + } + + //=========================================================================== + + PointRandomAccessRange const& m_points; + SimplexRange const& m_simplices; + std::ofstream &m_out; + std::string m_rendered_image_filename; + bool m_is_preview; + Coords_choice m_coords_choice; + int m_image_width; + int m_image_height; + Color m_current_color; + Color m_triangle_color; + double m_current_alpha; + bool m_ambient_light; + double m_ambient_intensity; + bool m_shadow; + double m_shadow_intensity; + double m_point_sphere_radius; +}; + +#endif // GUDHI_TC_RIB_EXPORTER_H diff --git a/src/Tangential_complex/benchmark/XML_exporter.h b/src/Tangential_complex/benchmark/XML_exporter.h new file mode 100644 index 00000000..ed44f90a --- /dev/null +++ b/src/Tangential_complex/benchmark/XML_exporter.h @@ -0,0 +1,207 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#include +#include +#include +#include +#include + +template +class Simple_XML_exporter { + public: + typedef value_type Value_type; + typedef std::vector Element; + typedef std::map Element_with_map; + typedef std::vector List_of_elements; + + Simple_XML_exporter( + const std::string &list_name, + const std::string &element_name, + const std::vector &subelement_names, + bool add_timestamp = true) + : m_list_name(list_name), + m_element_name(element_name), + m_subelement_names(subelement_names), + m_add_timestamp(add_timestamp) { } + + bool add_element(const Element &element) { + if (element.size() == m_subelement_names.size()) { + m_list_of_elements.push_back(element); + return true; + } else { + std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl; + return false; + } + } + + bool add_element(Element_with_map &element) { + Element elt; + + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) { + elt.push_back(element[*it_subelement_name]); + } + + return add_element(elt); + } + + bool export_to_xml(const std::string &filename) const { + std::ofstream xmlfile; + xmlfile.open(filename.c_str()); + xmlfile << "" << std::endl; + xmlfile << "<" << m_list_name << ">" << std::endl; + + typename List_of_elements::const_iterator it_element = m_list_of_elements.begin(); + typename List_of_elements::const_iterator it_element_end = m_list_of_elements.end(); + for (int id = 1; it_element != it_element_end; ++it_element, ++id) { + xmlfile << " <" << m_element_name << ">" << std::endl; + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + + if (m_add_timestamp) + xmlfile << " " << time(NULL) << " " << std::endl; + + for (int i = 0; + it_subelement_name != it_subelement_name_end; + ++it_subelement_name, ++i) { + xmlfile + << " <" << *it_subelement_name << "> " + << (*it_element)[i] + << " " << std::endl; + } + xmlfile << " " << std::endl; + } + + xmlfile << "" << std::endl; + xmlfile.close(); + return 0; + + } + + protected: + std::string m_list_name; + std::string m_element_name; + std::vector m_subelement_names; + List_of_elements m_list_of_elements; + bool m_add_timestamp; +}; + +template +class Streaming_XML_exporter { + public: + typedef value_type Value_type; + typedef std::vector Element; + typedef std::map Element_with_map; + typedef std::vector List_of_elements; + + Streaming_XML_exporter( + const std::string &filename, + const std::string &list_name, + const std::string &element_name, + const std::vector &subelement_names, + bool add_timestamp = true) + : m_list_name(list_name), + m_element_name(element_name), + m_subelement_names(subelement_names), + m_add_timestamp(add_timestamp) { + m_xml_fstream.open(filename.c_str()); + if (m_xml_fstream.good()) { + m_xml_fstream << "" << std::endl; + m_xml_fstream << "<" << m_list_name << ">" << std::endl; + } else { + std::cerr << "Could not open file '" << filename << "'." 
<< std::endl; + } + } + + virtual ~Streaming_XML_exporter() { + close_file(); + } + + void close_file() { + m_xml_fstream.close(); + } + + bool add_element(const Element &element) { + if (element.size() == m_subelement_names.size()) { + m_xml_fstream << " <" << m_element_name << ">" << std::endl; + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + + if (m_add_timestamp) { + m_xml_fstream << " " << time(NULL) << " " << std::endl; + } + + for (int i = 0; + it_subelement_name != it_subelement_name_end; + ++it_subelement_name, ++i) { + m_xml_fstream + << " <" << *it_subelement_name << "> " + << element[i] + << " " << std::endl; + } + m_xml_fstream << " " << std::endl; + + // Save current pointer position + std::ofstream::streampos pos = m_xml_fstream.tellp(); + // Close the XML file (temporarily) so that the XML file is always correct + m_xml_fstream << "" << std::endl; + // Restore the pointer position so that the next "add_element" will overwrite + // the end of the file + m_xml_fstream.seekp(pos); + + m_xml_fstream.flush(); + return true; + } else { + std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl; + return false; + } + } + + bool add_element(Element_with_map &element) { + Element elt; + + std::vector::const_iterator + it_subelement_name = m_subelement_names.begin(); + std::vector::const_iterator + it_subelement_name_end = m_subelement_names.end(); + for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) { + elt.push_back(element[*it_subelement_name]); + } + + return add_element(elt); + } + + protected: + std::ofstream m_xml_fstream; + std::string m_list_name; + std::string m_element_name; + std::vector m_subelement_names; + bool m_add_timestamp; +}; diff --git a/src/Tangential_complex/benchmark/benchmark_script.txt b/src/Tangential_complex/benchmark/benchmark_script.txt new file mode 100644 index 00000000..f4ddaac3 --- /dev/null +++ b/src/Tangential_complex/benchmark/benchmark_script.txt @@ -0,0 +1,221 @@ +#--------------------------------------------------------------------------------------------------------------------------------------------------------- +# Input PARAM1 PARAM2 PARAM3 NUM_P AMB INTR SPARSITY MAX_PERTURB PERTURB ADD_HDIM COLLAPSE FIX_TIME_LIMIT NUM_ITERATIONS +#--------------------------------------------------------------------------------------------------------------------------------------------------------- + +#---------------------------------------------------------------- Alpha TC tests ------------------------------------------------------------------------ +#generate_sphere_d 1 0 - 8 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 6 2d with a perturb sometimes +#generate_sphere_d 1 0 - 50 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 49 1d +#generate_sphere_d 1 1 - 50 2 1 0.01 0.005 N Y N 3 1 #Noise => OK: 45 2d + 3 3d +#generate_torus_d N - - 15 2 1 0.01 0.05 N Y N 10 1 +#generate_sphere_d 0.302 0 - 8 3 2 0.01 0.005 N Y N 60 1 #No noise => OK: 7 3d with a perturb sometimes +#generate_sphere_d 0.302 0 - 50 3 2 0.01 0.005 N Y N 60 1 #No noise => no inconsitencies +#generate_sphere_d 0.302 3 - 50 3 2 0.01 0.005 N Y N 60 1 #Noise => OK: 90 2d + 3 3d +#generate_sphere_d 1 1 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 1% => OK: 3113 3d + 35 4d +#generate_sphere_d 1 2 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 2969 3d + 91 4d +#generate_sphere_d 1 2 - 5000 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 27905 3d + 
2485 4d +#generate_sphere_d 0.302 2 - 300 2 1 0.01 0.005 N Y N 60 1 +#generate_torus_3D 2 1 N 200 3 2 0.01 0.05 N Y N 600 1 #OK: 1048 3d ~170s +#generate_torus_3D 2 1 N 2000 3 2 0.01 0.05 N Y N 600 1 #OK: 3545 2d + 27 3d ~35s +#generate_torus_d N 1 - 50 4 2 0.01 0.05 N Y N 3 1 #OK: 431 4d +#generate_torus_d N 1 - 500 4 2 0.01 0.05 N Y N 3 1 #OK: 881 2d + 37 3d +#generate_torus_d Y 1 - 250 4 2 0.01 0.05 N Y N 3 1 #OK: 80 d2 + 185 d3 +#generate_torus_d N - - 50 6 3 0.01 0.05 Y Y N 10 1 # +#generate_torus_d Y - - 700 6 3 0.01 0.05 Y Y N 100 1 #Grid +#generate_torus_d N - - 10000 6 3 0.01 0.05 Y Y N 30000 1 +#generate_moment_curve 0 1 - 10 3 1 0.01 0.005 N Y N 60 1 +#generate_two_spheres_d 3 4 - 500 3 2 0.01 0.05 N Y N 10 1 #OK: 320 2d + 1167 3d +#generate_klein_bottle_4D 40 15 - 500 4 2 0.01 0.2 N Y N 60 1 #OK: 901 d2 + 50 d3 + 1 d4 +#data/SO3_10000.xyz - - - 0 9 3 0.01 0.05 Y Y N 300 1 #Too long. Be careful with the memory consumption! +#data/buddha_100kv.xyz - - - 0 3 2 0.01 0.005 Y Y N 120 1 #Too long... +#data/fandisk.xyz - - - 0 3 2 0.01 0.005 Y Y N 5 1 #NOT OK: Tq & V do not intersect + +#---------------------------------------------------------- Spatial search benchmarking -------------------------------------------------------------- +#generate_torus_3D 2 1 Y 10000 3 2 0 0 Y N N 600 1 +#data/buddha_100kv.xyz - - - 0 3 2 0 0 N Y N 120 1 +#generate_torus_d N - - 10000 30 15 0 0 Y N N 3600 1 +#generate_torus_d N - - 100000 12 6 0 0 Y N N 3600 1 +#data/SO3_50000.xyz - - - 0 9 3 0 0 Y N N 60 1 +#data/Cy8.xyz - - - 0 24 2 0 0 N Y N 60 1 +#generate_sphere_d 0.5 - - 10000 2 1 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 3 2 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 4 3 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 5 4 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 6 5 0 0 N N Y 60 1 +#generate_sphere_d 0.5 - - 10000 7 6 0 0 N N Y 60 1 + +#---------------------------------------------------------- Very small cases for Debug mode -------------------------------------------------------------- +#generate_sphere_d 4 - - 20 3 2 0.05 0.025 Y N N 60 1 +generate_sphere_d 3 10 - 70 3 2 0.05 0.025 Y N N 60 1 +#generate_sphere_d 3 - - 1000 3 2 0.05 0.025 Y N N 60 1 +#generate_sphere_d 3 - - 10 4 3 0.05 0.025 Y N N 60 1 +#generate_sphere_d 3 - - 70 5 4 0.05 0.025 Y N N 60 1 +#generate_klein_bottle_4D 4 3 - 70 4 2 0.05 0.025 Y N N 3 1 +#generate_klein_bottle_variant_5D 4 3 - 70 5 2 0.05 0.025 Y N N 3 1 +#data/SO3_10000.xyz - - - 0 9 3 0.7 0.35 Y N N 60 1 +#generate_moment_curve 0 1 - 30 3 1 0.005 0.0025 Y N N 60 1 + +#------------------------------------------------------------------ From files -------------------------------------------------------------------------- +#data/SO3_50000.xyz - - - 0 9 3 0.05 0.05 Y N N 6000 1 +#data/SO3_10000.xyz - - - 0 9 3 0.1 0.1 Y N N 60000 1 +#data/cube3D_eps_0.1.xyz - - - 0 3 2 0.05 0.05 Y N N 3000 1 +#data/cube4D_eps_0.1.xyz - - - 0 4 3 0.05 0.05 N Y N 3000 1 +#data/cube5D_eps_0.1.xyz - - - 0 5 4 0.05 0.05 N Y N 3000 1 +#data/Cy8.xyz - - - 0 24 2 0.1 0.1 N Y N 60 1 +#data/Kl.xyz - - - 0 5 2 0.05 0.05 N Y N 60 1 +#data/S3.xyz - - - 0 4 3 0.05 0.05 N Y N 60 1 + +#data/Alvarez_variete_k2_D4_29700p.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1 # points on a "grid" +#data/Alvarez_variete_k2_D4_10k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 200 1 +#data/Alvarez_variete_k2_D4_30k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 6000 1 +#data/Alvarez_variete_k2_D4_120k_2x2_denser_in_1x1.xyz - - - 0 4 2 0.002 0.002 Y N N 60000 1 +#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 
2 0.005 0.005 Y N N 100000 1 +#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 2 0.05 0.05 Y N N 50000 1 # heavy sparsification (e.g. 0.05 => 33k points) +#data/Alvarez_variete_k2_D4_90k_2x2.xyz - - - 0 4 2 0.003 0.003 Y N N 6000 1 +#data/Alvarez_variete_k2_D4_30k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_variete_k2_D4_60k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 1800 1 + +#data/Alvarez_variete_k2_D8_9003p.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1 +#data/Alvarez_variete_k2_D8_90K.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1 +#data/Alvarez_variete_k2_D8_300k_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification +#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification +#data/Alvarez_variete_k2_D8_900k_10x10.xyz - - - 0 8 2 0.02 0.02 Y N N 60 1 # heavy sparsification + +#data/Alvarez_courbeElliptique_k2_D8_200K_2x2.xyz - - - 0 8 2 0.006 0.006 Y N N 60 1 + +#data/Alvarez_surface_deg2_k2_D8_6000K_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.003 Y N N 3600 1 +#data/Alvarez_surface_deg4_k2_D8_382K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg5_k2_D8_112K.xyz - - - 0 8 2 0.001 0.001 Y N N 240 1 +#data/Alvarez_surface_deg6_k2_D8_67K.xyz - - - 0 8 2 0.015 0.015 Y N N 60 1 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.025 Y N N 60 1 +#data/Alvarez_surface_deg9_k2_D8_42K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 +#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 + +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1 +#data/sparsified/Alvarez_deg8_k2_D8_32K_sparsified_from_41K_0.01.xyz - - - 0 8 2 0.05 0.05 Y N N 600 1 + +# PAS VRAIMENT DE DIFFERENCE +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.007 Y N N 3600 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.014 0.007 Y N N 3600 1 + +# PAS VRAIMENT DE DIFFERENCE +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.005 Y N N 120 1 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.005 Y N N 120 1 + +# PAS VRAIMENT DE DIFFERENCE +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.001 0.01 Y N N 3600 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.02 0.01 Y N N 3600 1 +#data/sparsified/Alvarez_deg3_k2_D8_534k_sparsified_from_902K_0.001.xyz - - - 0 8 2 0.01 0.01 Y N N 3600 1 + +# PAS TRES CLAIR, MAIS DIFFERENCE EN NOMBRE D'ETAPES (>100 vs 15-20) : +#data/sparsified/Alvarez_deg8_k2_D8_38K_sparsified_from_41K_0.005.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.001 0.02 Y N N 60 1 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.02 Y N N 60 1 + +# With pre-computed tangent spaces +#data/test.pwt - - - 0 4 2 0.01 0.01 N N N 500000 1 +#data/Alvarez_variete_k2_D4_30000p.xyz - - - 0 4 2 0.01 0.01 Y N N 500000 1 +#data/Alvarez_variete_k2_D4_30000p_with_TSB.pwt - - - 0 4 2 0.01 0.01 Y N N 500000 1 + +#---------------------------------------------------------------------- 3D meshes ----------------------------------------------------------------------- +#data/buddha_100kv.xyz - - - 0 3 2 0.005 0.005 Y N N 3 1 +#data/fandisk.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/fertility.xyz - - - 0 3 2 0.4 0.4 Y N N 3 1 +#data/bunny.xyz - - - 0 3 2 0.0006 0.0003 Y N N 3000 1 +#data/blob.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/3holes.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 
+#data/785_hand_2500v.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/785_hand_50kv.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#data/bumpy_sphere.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#D:\INRIA\Data\_Models\Pointclouds\ajax_jotero.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#D:\INRIA\Data\_Models\Pointclouds\house.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1 +#D:\INRIA\Data\_Models\Pointclouds\lucy_14M.xyz - - - 0 3 2 0.6 0.3 Y N N 3 1 + +#----------------------------------------------------------- Generated point sets ----------------------------------------------------------------------- +#generate_sphere_d 3 - - 4 3 2 0.05 0.05 Y N N 3000 1 +#generate_sphere_d 3 - - 30000 2 1 0.005 0.005 Y N N 3000 1 +#generate_sphere_d 1 - - 500000 3 2 0.005 0.005 Y N N 3000 1 +#generate_sphere_d 3 - - 30000 4 3 0.05 0.05 Y N N 3000 1 +#generate_sphere_d 3 0 - 300 3 2 0.005 0.005 Y N N 60 1 +#generate_sphere_d 3 4 - 3000 3 2 0.005 0.005 Y N N 60 1 +#generate_sphere_d 3 7 - 3000 3 2 0.005 0.005 Y N N 60 1 +#generate_torus_3D 2 1 N 300 3 2 0.05 0.05 Y N N 600 1 +#generate_torus_d N - - 200 4 2 0.05 0.05 Y N N 600 1 + +#generate_torus_d Y - - 100 6 3 0.1 0.19 Y N N 600 1 +#generate_torus_d Y - - 1000 6 3 0. 0.19 Y N N 600 1 +#generate_torus_d Y - - 10000 6 3 0. 0.19 Y N N 600 1 +#generate_torus_d Y - - 100000 6 3 0. 0.19 Y N N 600 1 +#generate_plane - - - 30000 3 2 0.005 0.005 Y N N 3000 1 +#generate_moment_curve 0 1 - 30000 6 1 0.005 0.005 Y N N 60 1 +#generate_klein_bottle_4D 4 3 - 700 4 2 0.05 0.05 Y N N 500 20 +#generate_klein_bottle_variant_5D 4 3 - 30000 5 2 0.05 0.05 Y N N 600 1 +#generate_klein_bottle_4D 8 5 - 5000 4 2 0.2 0.2 Y N N 60 1 #Takes forever +#data/sparsified/Flat_torus_195p_sparsified_0.05_from_200p.xyz N - - 0 4 2 -1 0.2 Y N N 600 1 + +#----------------------------------------------------------- Performance testing ------------------------------------------------------------------------ +# TC: 5.55 / 1st fix step : 0.2 +#data/fertility.xyz - - - 0 3 2 0.1 0.1 Y N N 10 1 + +#---------------------------------------------------------- 04/04/2016 - for stats ---------------------------------------------------------- + +#generate_torus_3D 2 1 N 5000 3 2 0.05 0.05 Y N N 120 1 +#generate_torus_d N - - 500 4 2 0.05 0.05 Y N N 120 1 +#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.005 0.005 Y N N 120 1 +#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.01 0.01 Y N N 120 1 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 10 +#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1 +#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1 +#generate_torus_d N - - 200000 6 3 0.05 0.05 Y N N 1200 1 + +#---------------------------------------------------------- 14/04/2016 - stats about noise ---------------------------------------------------------- + +#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 1 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 2 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 3 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 4 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 5 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 6 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 7 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 8 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 9 - 1000 4 2 0.05 0.19 Y N N 120 4 +#generate_torus_d Y 10 - 1000 4 2 0.05 0.19 Y N N 120 4 + +#generate_sphere_d 3 0 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 1 - 1000 4 3 
0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 2 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 3 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 4 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 5 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 6 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 7 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 8 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 9 - 1000 4 3 0.05 0.05 Y N N 120 4 +#generate_sphere_d 3 10 - 1000 4 3 0.05 0.05 Y N N 120 4 + +#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.01 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.02 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.03 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.04 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.05 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.06 5000 4 2 0.05 0.05 Y N N 120 4 +#generate_klein_bottle_4D 4 3 0.07 5000 4 2 0.05 0.05 Y N N 120 4 + +#---------------------------------------------------------- 04/2016 - stats with different perturb techniques ---------------------------------------------------------- + +# Tangential translation +#data/SO3_50000.xyz - - - 0 9 3 0 0.05 Y N N 500 10 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10 +#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 10 +#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 10 +#generate_sphere_d 3 1 - 1000 4 3 0.05 0.05 Y N N 120 10 + +# Weight +#data/SO3_50000.xyz - - - 0 9 3 0.1 0.05 Y N N 500 10 +#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10 +#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.025 Y N N 20000 10 +#generate_torus_d Y 0 - 1000 4 2 0.05 0.025 Y N N 120 10 +#generate_sphere_d 3 1 - 1000 4 3 0.05 0.025 Y N N 12000 10 \ No newline at end of file diff --git a/src/Tangential_complex/benchmark/benchmark_tc.cpp b/src/Tangential_complex/benchmark/benchmark_tc.cpp new file mode 100644 index 00000000..943fcb54 --- /dev/null +++ b/src/Tangential_complex/benchmark/benchmark_tc.cpp @@ -0,0 +1,785 @@ +/****************************************************************************** +This benchmark allows to compute the Tangential Complex from input files or +generated point sets. + +It reads the benchmark_script.txt file (located in the same folder as this +file) and compute one or several complexes for each line. Unless TC_NO_EXPORT +is defined, each complex is exported as an OFF file and/or as a RIB file +(RenderMan). In addition an XML file is created at each run of the benchmark. +It contains statistics about the complexes that were created. This XML file +can be processed in Excel, for example. 
+ ******************************************************************************/ + +// Without TBB_USE_THREADING_TOOL Intel Inspector XE will report false positives in Intel TBB +// (http://software.intel.com/en-us/articles/compiler-settings-for-threading-error-analysis-in-intel-inspector-xe/) +#ifdef _DEBUG +#define TBB_USE_THREADING_TOOL +#endif + +#include + +//#define GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM +//#define TC_INPUT_STRIDES 3 // only take one point every TC_INPUT_STRIDES points +#define TC_NO_EXPORT // do not output OFF files +//#define TC_EXPORT_TO_RIB // +//#define GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET +//#define GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF + +const std::size_t ONLY_LOAD_THE_FIRST_N_POINTS = 20000000; + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include // for std::sqrt + +#ifdef GUDHI_USE_TBB +#include +#endif +#include "XML_exporter.h" +#include "RIB_exporter.h" +#define GUDHI_TC_EXPORT_PERFORMANCE_DATA +#define GUDHI_TC_SET_PERFORMANCE_DATA(value_name, value) \ + XML_perf_data::set(value_name, value); + + +namespace subsampl = Gudhi::subsampling; +namespace tc = Gudhi::tangential_complex; + +const char * const BENCHMARK_SCRIPT_FILENAME = "benchmark_script.txt"; + +typedef CGAL::Epick_d Kernel; +typedef Kernel::FT FT; +typedef Kernel::Point_d Point; +typedef Kernel::Vector_d Vector; +typedef tc::Tangential_complex< +Kernel, CGAL::Dynamic_dimension_tag, +CGAL::Parallel_tag> TC; +typedef TC::Simplex Simplex; +typedef TC::Simplex_set Simplex_set; + +class XML_perf_data { + public: + typedef Streaming_XML_exporter XML_exporter; + + XML_perf_data(const std::string &filename) + : m_xml(filename, "ContainerPerformance", "Perf", + construct_subelements_names()) { } + + virtual ~XML_perf_data() { } + + static XML_perf_data &get() { + static XML_perf_data singleton(build_filename()); + return singleton; + } + + template + static void set(const std::string &name, Value_type value) { + get().set_data(name, value); + } + + static void commit() { + get().commit_current_element(); + } + + protected: + + static std::string build_filename() { + std::stringstream sstr; + sstr << "perf_logs/Performance_log_" << time(0) << ".xml"; + return sstr.str(); + } + + static std::vector construct_subelements_names() { + std::vector subelements; + subelements.push_back("Input"); + subelements.push_back("Param1"); + subelements.push_back("Param2"); + subelements.push_back("Param3"); + subelements.push_back("Intrinsic_dim"); + subelements.push_back("Ambient_dim"); + subelements.push_back("Num_threads"); + subelements.push_back("Sparsity"); + subelements.push_back("Max_perturb"); + subelements.push_back("Num_points_in_input"); + subelements.push_back("Num_points"); + subelements.push_back("Perturb_technique"); + subelements.push_back("Perturb_which_points"); + subelements.push_back("Initial_num_inconsistent_local_tr"); + subelements.push_back("Best_num_inconsistent_local_tr"); + subelements.push_back("Final_num_inconsistent_local_tr"); + subelements.push_back("Init_time"); + subelements.push_back("Comput_time"); + subelements.push_back("Perturb_successful"); + subelements.push_back("Perturb_time"); + subelements.push_back("Perturb_steps"); + subelements.push_back("Result_pure_pseudomanifold"); + subelements.push_back("Result_num_wrong_dim_simplices"); + subelements.push_back("Result_num_wrong_number_of_cofaces"); + subelements.push_back("Result_num_unconnected_stars"); 
+ subelements.push_back("Info"); + + return subelements; + } + + void set_data(const std::string &name, const std::string &value) { + m_current_element[name] = value; + } + + template + void set_data(const std::string &name, Value_type value) { + std::stringstream sstr; + sstr << value; + set_data(name, sstr.str()); + } + + void commit_current_element() { + m_xml.add_element(m_current_element); + m_current_element.clear(); + } + + XML_exporter m_xml; + XML_exporter::Element_with_map m_current_element; +}; + +template< +typename Kernel, typename OutputIteratorPoints> +bool load_points_from_file( + const std::string &filename, + OutputIteratorPoints points, + std::size_t only_first_n_points = std::numeric_limits::max()) { + typedef typename Kernel::Point_d Point; + + std::ifstream in(filename); + if (!in.is_open()) { + std::cerr << "Could not open '" << filename << "'" << std::endl; + return false; + } + + Kernel k; + Point p; + int num_ppints; + in >> num_ppints; + + std::size_t i = 0; + while (i < only_first_n_points && in >> p) { + *points++ = p; + ++i; + } + +#ifdef DEBUG_TRACES + std::cerr << "'" << filename << "' loaded." << std::endl; +#endif + + return true; +} + +template< +typename Kernel, typename Tangent_space_basis, +typename OutputIteratorPoints, typename OutputIteratorTS> +bool load_points_and_tangent_space_basis_from_file( + const std::string &filename, + OutputIteratorPoints points, + OutputIteratorTS tangent_spaces, + int intrinsic_dim, + std::size_t only_first_n_points = std::numeric_limits::max()) { + typedef typename Kernel::Point_d Point; + typedef typename Kernel::Vector_d Vector; + + std::ifstream in(filename); + if (!in.is_open()) { + std::cerr << "Could not open '" << filename << "'" << std::endl; + return false; + } + + Kernel k; + Point p; + int num_ppints; + in >> num_ppints; + + std::size_t i = 0; + while (i < only_first_n_points && in >> p) { + *points++ = p; + + Tangent_space_basis tsb(i); + for (int d = 0; d < intrinsic_dim; ++d) { + Vector v; + in >> v; + tsb.push_back(tc::internal::normalize_vector(v, k)); + } + *tangent_spaces++ = tsb; + ++i; + } + +#ifdef DEBUG_TRACES + std::cerr << "'" << filename << "' loaded." 
<< std::endl; +#endif + + return true; +} + +// color_inconsistencies: only works if p_complex = NULL +template +bool export_to_off( + TC const& tc, + std::string const& input_name_stripped, + std::string const& suffix, + bool color_inconsistencies = false, + typename TC::Simplicial_complex const* p_complex = NULL, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL) { +#ifdef TC_NO_EXPORT + return true; +#endif + + CGAL::Identity proj_functor; + + if (tc.intrinsic_dimension() <= 3) { + std::stringstream output_filename; + output_filename << "output/" << input_name_stripped << "_" + << tc.intrinsic_dimension() << "_in_R" + << tc.ambient_dimension() << "_" + << tc.number_of_vertices() << "v" + << suffix << ".off"; + std::ofstream off_stream(output_filename.str().c_str()); + + if (p_complex) { +#ifndef TC_NO_EXPORT + tc.export_to_off( + *p_complex, off_stream, + p_simpl_to_color_in_red, + p_simpl_to_color_in_green, + p_simpl_to_color_in_blue, + proj_functor); +#endif + } else { + tc.export_to_off( + off_stream, color_inconsistencies, + p_simpl_to_color_in_red, + p_simpl_to_color_in_green, + p_simpl_to_color_in_blue, + NULL, + proj_functor); + } + return true; + } + return false; +} + +void make_tc(std::vector &points, + TC::TS_container const& tangent_spaces, // can be empty + int intrinsic_dim, + double sparsity = 0.01, + double max_perturb = 0.005, + bool perturb = true, + bool add_high_dim_simpl = false, + bool collapse = false, + double time_limit_for_perturb = 0., + const char *input_name = "tc") { + Kernel k; + + if (sparsity > 0. && !tangent_spaces.empty()) { + std::cerr << "Error: cannot sparsify point set with pre-computed normals.\n"; + return; + } + + //=========================================================================== + // Init + //=========================================================================== + Gudhi::Clock t; + + // Get input_name_stripped + std::string input_name_stripped(input_name); + size_t slash_index = input_name_stripped.find_last_of('/'); + if (slash_index == std::string::npos) + slash_index = input_name_stripped.find_last_of('\\'); + if (slash_index == std::string::npos) + slash_index = 0; + else + ++slash_index; + input_name_stripped = input_name_stripped.substr( + slash_index, input_name_stripped.find_last_of('.') - slash_index); + + GUDHI_TC_SET_PERFORMANCE_DATA("Num_points_in_input", points.size()); + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + std::vector points_not_sparse = points; +#endif + + //=========================================================================== + // Sparsify point set if requested + //=========================================================================== + if (sparsity > 0.) 
{ + std::size_t num_points_before = points.size(); + std::vector sparsified_points; + subsampl::sparsify_point_set(k, points, sparsity*sparsity, + std::back_inserter(sparsified_points)); + sparsified_points.swap(points); + std::cerr << "Number of points before/after sparsification: " + << num_points_before << " / " << points.size() << "\n"; + +#ifdef GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET + std::ofstream ps_stream("output/sparsified_point_set.txt"); + tc::internal::export_point_set(k, points, ps_stream); +#endif + } + + GUDHI_TC_SET_PERFORMANCE_DATA("Sparsity", sparsity); + GUDHI_TC_SET_PERFORMANCE_DATA("Max_perturb", max_perturb); + GUDHI_TC_SET_PERFORMANCE_DATA("Num_points", points.size()); + + //=========================================================================== + // Compute Tangential Complex + //=========================================================================== + + TC tc( + points, + intrinsic_dim, +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + points_not_sparse.begin(), points_not_sparse.end(), +#endif + k); + + if (!tangent_spaces.empty()) { + tc.set_tangent_planes(tangent_spaces); + } + + t.end(); + double init_time = t.num_seconds(); + + t.begin(); + tc.compute_tangential_complex(); + t.end(); + double computation_time = t.num_seconds(); + + //=========================================================================== + // Export to OFF + //=========================================================================== + + // Create complex + int max_dim = -1; + TC::Simplicial_complex complex; + Simplex_set inconsistent_simplices; + max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices); + + // TODO(CJ): TEST + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree, true, false); + // std::cerr << stree; + + t.begin(); + bool ret = export_to_off( + tc, input_name_stripped, "_INITIAL_TC", true, + &complex, &inconsistent_simplices); + t.end(); + double export_before_time = (ret ? t.num_seconds() : -1); + + unsigned int num_perturb_steps = 0; + double perturb_time = -1; + double export_after_perturb_time = -1.; + bool perturb_success = false; + if (perturb) { + //========================================================================= + // Try to fix inconsistencies by perturbing points + //========================================================================= + t.begin(); + auto fix_result = + tc.fix_inconsistencies_using_perturbation(max_perturb, time_limit_for_perturb); + t.end(); + perturb_time = t.num_seconds(); + + perturb_success = fix_result.success; + GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr", + fix_result.initial_num_inconsistent_stars); + GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr", + fix_result.best_num_inconsistent_stars); + GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr", + fix_result.final_num_inconsistent_stars); + + //========================================================================= + // Export to OFF + //========================================================================= + + // Re-build the complex + Simplex_set inconsistent_simplices; + max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices); + + t.begin(); + bool exported = export_to_off( + tc, input_name_stripped, "_AFTER_FIX", true, &complex, + &inconsistent_simplices); + t.end(); + export_after_perturb_time = (exported ? 
t.num_seconds() : -1); + + //std::string fn = "output/inc_stars/"; + //fn += input_name_stripped; + //tc.export_inconsistent_stars_to_OFF_files(fn); + +#if !defined(TC_NO_EXPORT) && defined(TC_EXPORT_TO_RIB) + std::ofstream rib(std::string("output/") + input_name_stripped + ".rib"); + RIB_exporter rib_exporter( + tc.points(), + complex.simplex_range(), + rib, + input_name_stripped + ".tif", + false, // is_preview + std::make_tuple(2, 4, 6), + 1600, 503 // resolution + ); + rib_exporter.write_file(); + + std::ofstream rib_LQ(std::string("output/") + input_name_stripped + "_LQ.rib"); + RIB_exporter rib_exporter_LQ( + tc.points(), + complex.simplex_range(), + rib_LQ, + input_name_stripped + "_LQ.tif", + true, // is_preview + std::make_tuple(0, 4, 5) + ); + rib_exporter_LQ.write_file(); +#endif + } else { + GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr", "N/A"); + GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr", "N/A"); + GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr", "N/A"); + } + + max_dim = tc.create_complex(complex, true, false, 2); + + complex.display_stats(); + + if (intrinsic_dim == 2) + complex.euler_characteristic(true); + + //=========================================================================== + // Collapse + //=========================================================================== + if (collapse) { + complex.collapse(max_dim); + complex.display_stats(); + } + + //=========================================================================== + // Is the result a pure pseudomanifold? + //=========================================================================== + std::size_t num_wrong_dim_simplices, + num_wrong_number_of_cofaces, + num_unconnected_stars; + Simplex_set wrong_dim_simplices; + Simplex_set wrong_number_of_cofaces_simplices; + Simplex_set unconnected_stars_simplices; + bool is_pure_pseudomanifold = complex.is_pure_pseudomanifold( + intrinsic_dim, tc.number_of_vertices(), + false, // do NOT allow borders + false, 1, + &num_wrong_dim_simplices, &num_wrong_number_of_cofaces, + &num_unconnected_stars, + &wrong_dim_simplices, &wrong_number_of_cofaces_simplices, + &unconnected_stars_simplices); + + //=========================================================================== + // Export to OFF + //=========================================================================== + + double export_after_collapse_time = -1.; + if (collapse) { + t.begin(); + bool exported = export_to_off( + tc, input_name_stripped, "_AFTER_COLLAPSE", false, &complex, + &wrong_dim_simplices, &wrong_number_of_cofaces_simplices, + &unconnected_stars_simplices); + t.end(); + std::cerr + << " OFF colors:\n" + << " * Red: wrong dim simplices\n" + << " * Green: wrong number of cofaces simplices\n" + << " * Blue: not-connected stars\n"; + export_after_collapse_time = (exported ? 
t.num_seconds() : -1.); + } + + //=========================================================================== + // Display info + //=========================================================================== + + std::cerr + << "\n================================================\n" + << "Number of vertices: " << tc.number_of_vertices() << "\n" + << "Computation times (seconds): \n" + << " * Tangential complex: " << init_time + computation_time << "\n" + << " - Init + kd-tree = " << init_time << "\n" + << " - TC computation = " << computation_time << "\n" + << " * Export to OFF (before perturb): " << export_before_time << "\n" + << " * Fix inconsistencies 1: " << perturb_time + << " (" << num_perturb_steps << " steps) ==> " + << (perturb_success ? "FIXED" : "NOT fixed") << "\n" + << " * Export to OFF (after perturb): " << export_after_perturb_time << "\n" + << " * Export to OFF (after collapse): " + << export_after_collapse_time << "\n" + << "================================================\n"; + + //=========================================================================== + // Export info + //=========================================================================== + GUDHI_TC_SET_PERFORMANCE_DATA("Init_time", init_time); + GUDHI_TC_SET_PERFORMANCE_DATA("Comput_time", computation_time); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_successful", + (perturb_success ? 1 : 0)); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_time", perturb_time); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_steps", num_perturb_steps); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_pure_pseudomanifold", + (is_pure_pseudomanifold ? 1 : 0)); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_dim_simplices", + num_wrong_dim_simplices); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_number_of_cofaces", + num_wrong_number_of_cofaces); + GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_unconnected_stars", + num_unconnected_stars); + GUDHI_TC_SET_PERFORMANCE_DATA("Info", ""); +} + +int main() { + CGAL::set_error_behaviour(CGAL::ABORT); + +#ifdef GUDHI_USE_TBB +#ifdef _DEBUG + int num_threads = 1; +#else + int num_threads = tbb::task_scheduler_init::default_num_threads() - 4; +#endif +#endif + + unsigned int seed = static_cast (time(NULL)); + CGAL::default_random = CGAL::Random(seed); // TODO(CJ): use set_default_random + std::cerr << "Random seed = " << seed << "\n"; + + std::ifstream script_file; + script_file.open(BENCHMARK_SCRIPT_FILENAME); + // Script? + // Script file format: each line gives + // - Filename (point set) or "generate_XXX" (point set generation) + // - Ambient dim + // - Intrinsic dim + // - Number of iterations with these parameters + if (script_file.is_open()) { + int i = 1; +#ifdef GUDHI_USE_TBB +#ifdef BENCHMARK_WITH_1_TO_MAX_THREADS + for (num_threads = 1; + num_threads <= tbb::task_scheduler_init::default_num_threads(); + ++num_threads) +#endif +#endif + /*for (Concurrent_mesher_config::get().num_work_items_per_batch = 5 ; + Concurrent_mesher_config::get().num_work_items_per_batch < 100 ; + Concurrent_mesher_config::get().num_work_items_per_batch += 5)*/ { +#ifdef GUDHI_USE_TBB + tbb::task_scheduler_init init( + num_threads > 0 ? 
num_threads : tbb::task_scheduler_init::automatic); +#endif + + std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' found.\n"; + script_file.seekg(0); + while (script_file.good()) { + std::string line; + std::getline(script_file, line); + if (line.size() > 1 && line[0] != '#') { + boost::replace_all(line, "\t", " "); + boost::trim_all(line); + std::cerr << "\n\n"; + std::cerr << "*****************************************\n"; + std::cerr << "******* " << line << "\n"; + std::cerr << "*****************************************\n"; + std::stringstream sstr(line); + + std::string input; + std::string param1; + std::string param2; + std::string param3; + std::size_t num_points; + int ambient_dim; + int intrinsic_dim; + double sparsity; + double max_perturb; + char perturb, add_high_dim_simpl, collapse; + double time_limit_for_perturb; + int num_iteration; + sstr >> input; + sstr >> param1; + sstr >> param2; + sstr >> param3; + sstr >> num_points; + sstr >> ambient_dim; + sstr >> intrinsic_dim; + sstr >> sparsity; + sstr >> max_perturb; + sstr >> perturb; + sstr >> add_high_dim_simpl; + sstr >> collapse; + sstr >> time_limit_for_perturb; + sstr >> num_iteration; + + for (int j = 0; j < num_iteration; ++j) { + std::string input_stripped = input; + size_t slash_index = input_stripped.find_last_of('/'); + if (slash_index == std::string::npos) + slash_index = input_stripped.find_last_of('\\'); + if (slash_index == std::string::npos) + slash_index = 0; + else + ++slash_index; + input_stripped = input_stripped.substr( + slash_index, input_stripped.find_last_of('.') - slash_index); + + GUDHI_TC_SET_PERFORMANCE_DATA("Input", input_stripped); + GUDHI_TC_SET_PERFORMANCE_DATA("Param1", param1); + GUDHI_TC_SET_PERFORMANCE_DATA("Param2", param2); + GUDHI_TC_SET_PERFORMANCE_DATA("Param3", param3); + GUDHI_TC_SET_PERFORMANCE_DATA("Ambient_dim", ambient_dim); + GUDHI_TC_SET_PERFORMANCE_DATA("Intrinsic_dim", intrinsic_dim); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_technique", "Tangential_translation"); + GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_which_points", "Center_vertex"); + +#ifdef GUDHI_USE_TBB + GUDHI_TC_SET_PERFORMANCE_DATA( + "Num_threads", + (num_threads == -1 ? 
tbb::task_scheduler_init::default_num_threads() : num_threads)); +#else + GUDHI_TC_SET_PERFORMANCE_DATA("Num_threads", "N/A"); +#endif + + std::cerr << "\nTC #" << i << "...\n"; + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t_gen; +#endif + + std::vector points; + TC::TS_container tangent_spaces; + + if (input == "generate_moment_curve") { + points = Gudhi::generate_points_on_moment_curve( + num_points, ambient_dim, + std::atof(param1.c_str()), std::atof(param2.c_str())); + } else if (input == "generate_plane") { + points = Gudhi::generate_points_on_plane( + num_points, intrinsic_dim, ambient_dim); + } else if (input == "generate_sphere_d") { + points = Gudhi::generate_points_on_sphere_d( + num_points, ambient_dim, + std::atof(param1.c_str()), // radius + std::atof(param2.c_str())); // radius_noise_percentage + } else if (input == "generate_two_spheres_d") { + points = Gudhi::generate_points_on_two_spheres_d( + num_points, ambient_dim, + std::atof(param1.c_str()), + std::atof(param2.c_str()), + std::atof(param3.c_str())); + } else if (input == "generate_3sphere_and_circle_d") { + GUDHI_CHECK(intrinsic_dim == 3, + std::logic_error("Intrinsic dim should be 3")); + GUDHI_CHECK(ambient_dim == 5, + std::logic_error("Ambient dim should be 5")); + points = Gudhi::generate_points_on_3sphere_and_circle( + num_points, + std::atof(param1.c_str())); + } else if (input == "generate_torus_3D") { + points = Gudhi::generate_points_on_torus_3D( + num_points, + std::atof(param1.c_str()), + std::atof(param2.c_str()), + param3 == "Y"); + } else if (input == "generate_torus_d") { + points = Gudhi::generate_points_on_torus_d( + num_points, + intrinsic_dim, + param1 == "Y", // uniform + std::atof(param2.c_str())); // radius_noise_percentage + } else if (input == "generate_klein_bottle_3D") { + points = Gudhi::generate_points_on_klein_bottle_3D( + num_points, + std::atof(param1.c_str()), std::atof(param2.c_str())); + } else if (input == "generate_klein_bottle_4D") { + points = Gudhi::generate_points_on_klein_bottle_4D( + num_points, + std::atof(param1.c_str()), std::atof(param2.c_str()), + std::atof(param3.c_str())); // noise + } else if (input == "generate_klein_bottle_variant_5D") { + points = Gudhi::generate_points_on_klein_bottle_variant_5D( + num_points, + std::atof(param1.c_str()), std::atof(param2.c_str())); + } else { + // Contains tangent space basis + if (input.substr(input.size() - 3) == "pwt") { + load_points_and_tangent_space_basis_from_file + ( + input, std::back_inserter(points), + std::back_inserter(tangent_spaces), + intrinsic_dim, + ONLY_LOAD_THE_FIRST_N_POINTS); + } else { + load_points_from_file( + input, std::back_inserter(points), + ONLY_LOAD_THE_FIRST_N_POINTS); + } + } + +#ifdef GUDHI_TC_PROFILING + t_gen.end(); + std::cerr << "Point set generated/loaded in " << t_gen.num_seconds() + << " seconds.\n"; +#endif + + if (!points.empty()) { +#if defined(TC_INPUT_STRIDES) && TC_INPUT_STRIDES > 1 + auto p = points | boost::adaptors::strided(TC_INPUT_STRIDES); + std::vector points(p.begin(), p.end()); + std::cerr << "****************************************\n" + << "WARNING: taking 1 point every " << TC_INPUT_STRIDES + << " points.\n" + << "****************************************\n"; +#endif + + make_tc(points, tangent_spaces, intrinsic_dim, + sparsity, max_perturb, + perturb == 'Y', add_high_dim_simpl == 'Y', collapse == 'Y', + time_limit_for_perturb, input.c_str()); + + std::cerr << "TC #" << i++ << " done.\n"; + std::cerr << "\n---------------------------------\n"; + } else { + std::cerr << "TC 
#" << i++ << ": no points loaded.\n"; + } + + XML_perf_data::commit(); + } + } + } + script_file.seekg(0); + script_file.clear(); + } + + script_file.close(); + } // Or not script? + else { + std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' NOT found.\n"; + } + + // system("pause"); + return 0; +} diff --git a/src/Tangential_complex/doc/Intro_tangential_complex.h b/src/Tangential_complex/doc/Intro_tangential_complex.h new file mode 100644 index 00000000..3d687c1d --- /dev/null +++ b/src/Tangential_complex/doc/Intro_tangential_complex.h @@ -0,0 +1,119 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_ +#define DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_ + +// needs namespaces for Doxygen to link on classes +namespace Gudhi { +namespace tangential_complex { + +/** \defgroup tangential_complex Tangential complex + +\author Clément Jamin + +@{ + +\section tangentialdefinition Definition + +A Tangential Delaunay complex is a simplicial complex +designed to reconstruct a \f$k\f$-dimensional smooth manifold embedded in \f$d\f$-dimensional Euclidean space. +The input is a point sample coming from an unknown manifold, which means that the points lie close to a structure of "small" intrinsic dimension. +The running time depends only linearly on the extrinsic dimension \f$ d \f$ +and exponentially on the intrinsic dimension \f$ k \f$. + +An extensive description of the Tangential complex can be found in \cite tangentialcomplex2014. + +\subsection whatisthetc What is a Tangential Complex? + +Let us start with the description of the Tangential complex of a simple example, with \f$ k=1 \f$ and \f$ d=2 \f$. +The input data is 4 points \f$ P \f$ located on a curve embedded in 2D. +\image html "tc_example_01.png" "The input" +For each point \f$ p \f$, estimate its tangent subspace \f$ T_p \f$ (e.g. using PCA). +\image html "tc_example_02.png" "The estimated normals" +Let us add the Voronoi diagram of the points in orange. For each point \f$ p \f$, construct its star in the Delaunay triangulation of \f$ P \f$ restricted to \f$ T_p \f$. +\image html "tc_example_03.png" "The Voronoi diagram" +The Tangential Delaunay complex is the union of those stars. + +In practice, neither the ambient Voronoi diagram nor the ambient Delaunay triangulation is computed. +Instead, local \f$ k \f$-dimensional regular triangulations are computed with a limited number of points as we only need the star of each point. +More details can be found in \cite tangentialcomplex2014. + +\subsection inconsistencies Inconsistencies + +Inconsistencies between the stars can occur. 
+An inconsistency occurs when a simplex is not in the star of all its vertices.
+
+Let us take the same example.
+\image html "tc_example_07_before.png" "Before"
+Let us slightly move the tangent subspace \f$ T_q \f$.
+\image html "tc_example_07_after.png" "After"
+Now, the star of \f$ Q \f$ contains \f$ QP \f$, but the star of \f$ P \f$ does not contain \f$ QP \f$. We have an inconsistency.
+\image html "tc_example_08.png" "After"
+
+One way to solve inconsistencies is to randomly perturb the positions of the points involved in an inconsistency.
+In the current implementation, this perturbation is done in the tangent subspace of each point.
+The maximum perturbation radius is given as a parameter to the constructor.
+
+In most cases, we recommend providing a point set where the minimum distance between any two points
+is not too small. This can be achieved using the functions provided by the Subsampling module. Then, a good value to start with for
+the maximum perturbation radius would be around half the minimum distance between any two points.
+The \ref example_with_perturb below shows an example of such a process.
+
+In most cases, this process is able to dramatically reduce the number of inconsistencies, but is not guaranteed to succeed.
+
+\subsection output Output
+
+The result of the computation is exported as a `Simplex_tree`. It is the union of the stars of all the input points.
+A vertex in the Simplex Tree is the index of the point in the range provided by the user.
+The point corresponding to a vertex can also be obtained through the `Tangential_complex::get_point` function.
+Note that even if the positions of the points are perturbed, their original positions are kept (e.g. `Tangential_complex::get_point` returns the original position of the point).
+
+The result can be obtained after the computation of the Tangential complex itself and/or after the perturbation process.
+
+\section simple_example Simple example
+
+This example builds the Tangential complex of a point set.
+Note that the dimension of the kernel here is dynamic, which is slower, but more flexible:
+the intrinsic and ambient dimensions do not have to be known at compile-time.
+
+\include Tangential_complex/example_basic.cpp
+
+\section example_with_perturb Example with perturbation
+
+This example builds the Tangential complex of a point set, then tries to solve inconsistencies
+by perturbing the positions of points involved in inconsistent simplices.
+Note that the dimension of the kernel here is static, which is the best choice when the
+dimensions are known at compile-time.
+
+\include Tangential_complex/example_with_perturb.cpp
+
+\copyright GNU General Public License v3.
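To make the workflow described above concrete, here is a minimal end-to-end sketch assembled from the API introduced in this patch (constructor, `compute_tangential_complex`, `fix_inconsistencies_using_perturbation`, `create_complex`, `get_point`). The include paths, the point generator and the numeric parameters follow the bundled examples and are illustrative assumptions, not prescriptions:

#include <gudhi/Tangential_complex.h>
#include <gudhi/Simplex_tree.h>

#include <CGAL/Epick_d.h>
#include <CGAL/point_generators_d.h>

#include <vector>

namespace tc = Gudhi::tangential_complex;

typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
typedef Kernel::Point_d Point;
typedef tc::Tangential_complex<Kernel, CGAL::Dynamic_dimension_tag,
                               CGAL::Parallel_tag> TC;

int main() {
  const int intrinsic_dim = 2, ambient_dim = 3, num_points = 500;

  // Sample points on a 2-sphere in R^3 (same generator as the bundled examples).
  std::vector<Point> points;
  points.reserve(num_points);
  CGAL::Random_points_on_sphere_d<Point> generator(ambient_dim, 3.);
  for (int i = 0; i < num_points; ++i)
    points.push_back(*generator++);

  // 1) Compute the tangential complex.
  TC tc(points, intrinsic_dim, Kernel());
  tc.compute_tangential_complex();

  // 2) Optionally try to remove inconsistencies by perturbation
  //    (max perturbation radius 0.05, time limit 10 s -- illustrative values).
  tc.fix_inconsistencies_using_perturbation(0.05, 10.);

  // 3) Export the union of the stars into a Simplex_tree.
  //    Vertex i of the tree is the index of the i-th input point;
  //    its original (unperturbed) position is tc.get_point(i).
  Gudhi::Simplex_tree<> stree;
  tc.create_complex(stree);

  return 0;
}

The call to `fix_inconsistencies_using_perturbation` returns a `Fix_inconsistencies_info` structure whose `success` field tells whether all inconsistencies could be removed within the time limit.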
+\verbatim Contact: gudhi-users@lists.gforge.inria.fr \endverbatim + */ +/** @} */ // end defgroup tangential_complex + +} // namespace tangential_complex + +} // namespace Gudhi + +#endif // DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/doc/tc_example_01.png b/src/Tangential_complex/doc/tc_example_01.png new file mode 100644 index 00000000..8afe6198 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_01.png differ diff --git a/src/Tangential_complex/doc/tc_example_02.png b/src/Tangential_complex/doc/tc_example_02.png new file mode 100644 index 00000000..01591c1d Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_02.png differ diff --git a/src/Tangential_complex/doc/tc_example_03.png b/src/Tangential_complex/doc/tc_example_03.png new file mode 100644 index 00000000..5de04e01 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_03.png differ diff --git a/src/Tangential_complex/doc/tc_example_05.png b/src/Tangential_complex/doc/tc_example_05.png new file mode 100644 index 00000000..fdd5e5fa Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_05.png differ diff --git a/src/Tangential_complex/doc/tc_example_06.png b/src/Tangential_complex/doc/tc_example_06.png new file mode 100644 index 00000000..31ad3c43 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_06.png differ diff --git a/src/Tangential_complex/doc/tc_example_07.png b/src/Tangential_complex/doc/tc_example_07.png new file mode 100644 index 00000000..47e34de7 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_07.png differ diff --git a/src/Tangential_complex/doc/tc_example_07_after.png b/src/Tangential_complex/doc/tc_example_07_after.png new file mode 100644 index 00000000..981350d2 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_07_after.png differ diff --git a/src/Tangential_complex/doc/tc_example_07_before.png b/src/Tangential_complex/doc/tc_example_07_before.png new file mode 100644 index 00000000..ddc6bc7b Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_07_before.png differ diff --git a/src/Tangential_complex/doc/tc_example_08.png b/src/Tangential_complex/doc/tc_example_08.png new file mode 100644 index 00000000..119a87de Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_08.png differ diff --git a/src/Tangential_complex/doc/tc_example_09.png b/src/Tangential_complex/doc/tc_example_09.png new file mode 100644 index 00000000..31bac1e0 Binary files /dev/null and b/src/Tangential_complex/doc/tc_example_09.png differ diff --git a/src/Tangential_complex/doc/tc_examples.png b/src/Tangential_complex/doc/tc_examples.png new file mode 100644 index 00000000..b6544afe Binary files /dev/null and b/src/Tangential_complex/doc/tc_examples.png differ diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt new file mode 100644 index 00000000..7ba043f0 --- /dev/null +++ b/src/Tangential_complex/example/CMakeLists.txt @@ -0,0 +1,30 @@ +cmake_minimum_required(VERSION 2.6) +project(Tangential_complex_examples) + +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + message(STATUS "CGAL version: ${CGAL_VERSION}.") + + find_package(Eigen3 3.1.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + include_directories (BEFORE "../../include") + + add_executable( Tangential_complex_example_basic example_basic.cpp ) + 
target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY}) + add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp ) + target_link_libraries(Tangential_complex_example_with_perturb ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(Tangential_complex_example_basic ${TBB_LIBRARIES}) + target_link_libraries(Tangential_complex_example_with_perturb ${TBB_LIBRARIES}) + endif() + else() + message(WARNING "Eigen3 not found. Version 3.1.0 is required for the Tangential_complex examples.") + endif() + else() + message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential_complex examples. Version 4.8.0 is required.") + endif () +else() + message(WARNING "CGAL not found. It is required for the Tangential_complex examples.") +endif() diff --git a/src/Tangential_complex/example/example_basic.cpp b/src/Tangential_complex/example/example_basic.cpp new file mode 100644 index 00000000..4f2b859e --- /dev/null +++ b/src/Tangential_complex/example/example_basic.cpp @@ -0,0 +1,46 @@ +#include +#include + +#include +#include + +#include +#include + +namespace tc = Gudhi::tangential_complex; + +typedef CGAL::Epick_d Kernel; +typedef Kernel::FT FT; +typedef Kernel::Point_d Point; +typedef Kernel::Vector_d Vector; +typedef tc::Tangential_complex< +Kernel, CGAL::Dynamic_dimension_tag, +CGAL::Parallel_tag> TC; + +int main(void) { + const int INTRINSIC_DIM = 2; + const int AMBIENT_DIM = 3; + const int NUM_POINTS = 1000; + + Kernel k; + + // Generate points on a 2-sphere + CGAL::Random_points_on_sphere_d generator(AMBIENT_DIM, 3.); + std::vector points; + points.reserve(NUM_POINTS); + for (int i = 0; i < NUM_POINTS; ++i) + points.push_back(*generator++); + + // Compute the TC + TC tc(points, INTRINSIC_DIM, k); + tc.compute_tangential_complex(); + + // Export the TC into a Simplex_tree + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree); + + // Display stats about inconsistencies + tc.number_of_inconsistent_simplices(true); // verbose + + return 0; +} diff --git a/src/Tangential_complex/example/example_with_perturb.cpp b/src/Tangential_complex/example/example_with_perturb.cpp new file mode 100644 index 00000000..d0d877ea --- /dev/null +++ b/src/Tangential_complex/example/example_with_perturb.cpp @@ -0,0 +1,53 @@ +#include +#include + +#include +#include + +#include +#include + +namespace subsampl = Gudhi::subsampling; +namespace tc = Gudhi::tangential_complex; + +typedef CGAL::Epick_d> Kernel; +typedef Kernel::FT FT; +typedef Kernel::Point_d Point; +typedef Kernel::Vector_d Vector; +typedef tc::Tangential_complex< +Kernel, CGAL::Dimension_tag<2>, +CGAL::Parallel_tag> TC; + +int main(void) { + const int INTRINSIC_DIM = 2; + const int AMBIENT_DIM = 3; + const int NUM_POINTS = 50; + + Kernel k; + + // Generate points on a 2-sphere + CGAL::Random_points_on_sphere_d generator(AMBIENT_DIM, 3.); + std::vector points; + points.reserve(NUM_POINTS); + for (int i = 0; i < NUM_POINTS; ++i) + points.push_back(*generator++); + + // Sparsify the point set + std::vector sparsified_points; + subsampl::sparsify_point_set(k, points, 0.1 * 0.1, + std::back_inserter(sparsified_points)); + sparsified_points.swap(points); + + // Compute the TC + TC tc(points, INTRINSIC_DIM, k); + tc.compute_tangential_complex(); + + // Try to fix inconsistencies. 
Give it 10 seconds to succeed + tc.fix_inconsistencies_using_perturbation(0.05, 10); + + // Export the TC into a Simplex_tree + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree); + + return 0; +} diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h new file mode 100644 index 00000000..7cf5c498 --- /dev/null +++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h @@ -0,0 +1,2277 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + */ + +#ifndef TANGENTIAL_COMPLEX_H_ +#define TANGENTIAL_COMPLEX_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include // for CGAL::Identity +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include // for std::sqrt +#include + +#ifdef GUDHI_USE_TBB +#include +#include +#include +#endif + +// #define GUDHI_TC_EXPORT_NORMALS // Only for 3D surfaces (k=2, d=3) + +namespace sps = Gudhi::spatial_searching; + +namespace Gudhi { + +namespace tangential_complex { + +using namespace internal; + +class Vertex_data { + public: + Vertex_data(std::size_t data = std::numeric_limits::max()) + : m_data(data) { } + + operator std::size_t() { + return m_data; + } + + operator std::size_t() const { + return m_data; + } + + private: + std::size_t m_data; +}; + +/** + * \class Tangential_complex Tangential_complex.h gudhi/Tangential_complex.h + * \brief Tangential complex data structure. + * + * \ingroup tangential_complex + * + * \details + * The class Tangential_complex represents a tangential complex. + * After the computation of the complex, an optional post-processing called perturbation can + * be run to attempt to remove inconsistencies. + * + * \tparam Kernel_ requires a CGAL::Epick_d class, which + * can be static if you know the ambiant dimension at compile-time, or dynamic if you don't. + * \tparam DimensionTag can be either Dimension_tag + * if you know the intrinsic dimension at compile-time, + * or CGAL::Dynamic_dimension_tag + * if you don't. + * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the default) and `CGAL::Sequential_tag`. + * \tparam Triangulation_ is the type used for storing the local regular triangulations. We highly recommend to use the default value (`CGAL::Regular_triangulation`). 
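For orientation, the two bundled examples exercise the two typical combinations of these template parameters. The following sketch contrasts them; the typedef names are illustrative and the kernel template arguments are reconstructed from those examples:

#include <gudhi/Tangential_complex.h>
#include <CGAL/Epick_d.h>

// Everything dynamic: ambient and intrinsic dimensions chosen at run-time
// (more flexible, but slower), as in example_basic.cpp.
typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Dyn_kernel;
typedef Gudhi::tangential_complex::Tangential_complex<
    Dyn_kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag> Dynamic_TC;

// Everything static: ambient dimension 3 and intrinsic dimension 2 fixed at
// compile-time (fastest), as in example_with_perturb.cpp.
typedef CGAL::Epick_d<CGAL::Dimension_tag<3> > Static_kernel;
typedef Gudhi::tangential_complex::Tangential_complex<
    Static_kernel, CGAL::Dimension_tag<2>, CGAL::Parallel_tag> Static_TC;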
+ * + */ +template < +typename Kernel_, // ambiant kernel +typename DimensionTag, // intrinsic dimension +typename Concurrency_tag = CGAL::Parallel_tag, +typename Triangulation_ = CGAL::Default +> +class Tangential_complex { + typedef Kernel_ K; + typedef typename K::FT FT; + typedef typename K::Point_d Point; + typedef typename K::Weighted_point_d Weighted_point; + typedef typename K::Vector_d Vector; + + typedef typename CGAL::Default::Get + < + Triangulation_, + CGAL::Regular_triangulation + < + CGAL::Epick_d, + CGAL::Triangulation_data_structure + < + typename CGAL::Epick_d::Dimension, + CGAL::Triangulation_vertex >, Vertex_data >, + CGAL::Triangulation_full_cell > > + > + > + >::type Triangulation; + typedef typename Triangulation::Geom_traits Tr_traits; + typedef typename Triangulation::Weighted_point Tr_point; + typedef typename Triangulation::Bare_point Tr_bare_point; + typedef typename Triangulation::Vertex_handle Tr_vertex_handle; + typedef typename Triangulation::Full_cell_handle Tr_full_cell_handle; + typedef typename Tr_traits::Vector_d Tr_vector; + +#if defined(GUDHI_USE_TBB) + typedef tbb::mutex Mutex_for_perturb; + typedef Vector Translation_for_perturb; + typedef std::vector > Weights; +#else + typedef Vector Translation_for_perturb; + typedef std::vector Weights; +#endif + typedef std::vector Translations_for_perturb; + + // Store a local triangulation and a handle to its center vertex + + struct Tr_and_VH { + public: + Tr_and_VH() + : m_tr(NULL) { } + + Tr_and_VH(int dim) + : m_tr(new Triangulation(dim)) { } + + ~Tr_and_VH() { + destroy_triangulation(); + } + + Triangulation & construct_triangulation(int dim) { + delete m_tr; + m_tr = new Triangulation(dim); + return tr(); + } + + void destroy_triangulation() { + delete m_tr; + m_tr = NULL; + } + + Triangulation & tr() { + return *m_tr; + } + + Triangulation const& tr() const { + return *m_tr; + } + + Tr_vertex_handle const& center_vertex() const { + return m_center_vertex; + } + + Tr_vertex_handle & center_vertex() { + return m_center_vertex; + } + + private: + Triangulation* m_tr; + Tr_vertex_handle m_center_vertex; + }; + + public: + typedef Basis Tangent_space_basis; + typedef Basis Orthogonal_space_basis; + typedef std::vector TS_container; + typedef std::vector OS_container; + + typedef std::vector Points; + + typedef boost::container::flat_set Simplex; + typedef std::set Simplex_set; + + private: + typedef sps::Kd_tree_search Points_ds; + typedef typename Points_ds::KNS_range KNS_range; + typedef typename Points_ds::INS_range INS_range; + + typedef std::vector Tr_container; + typedef std::vector Vectors; + + // An Incident_simplex is the list of the vertex indices + // except the center vertex + typedef boost::container::flat_set Incident_simplex; + typedef std::vector Star; + typedef std::vector Stars_container; + + // For transform_iterator + + static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) { + return vh->point(); + } + + template + static const P &vertex_handle_to_point(VH vh) { + return vh->point(); + } + + public: + typedef internal::Simplicial_complex Simplicial_complex; + + /** \brief Constructor from a range of points. + * Points are copied into the instance, and a search data structure is initialized. + * Note the complex is not computed: `compute_tangential_complex` must be called after the creation + * of the object. + * + * @param[in] points Range of points (`Point_range::value_type` must be the same as `Kernel_::Point_d`). 
+ * @param[in] intrinsic_dimension Intrinsic dimension of the manifold. + * @param[in] k Kernel instance. + */ + template + Tangential_complex(Point_range points, + int intrinsic_dimension, +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + InputIterator first_for_tse, InputIterator last_for_tse, +#endif + const K &k = K() + ) + : m_k(k), + m_intrinsic_dim(intrinsic_dimension), + m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())), + m_points(points.begin(), points.end()), + m_weights(m_points.size(), FT(0)) +#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION) + , m_p_perturb_mutexes(NULL) +#endif + , m_points_ds(m_points) + , m_last_max_perturb(0.) + , m_are_tangent_spaces_computed(m_points.size(), false) + , m_tangent_spaces(m_points.size(), Tangent_space_basis()) +#ifdef GUDHI_TC_EXPORT_NORMALS + , m_orth_spaces(m_points.size(), Orthogonal_space_basis()) +#endif +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + , m_points_for_tse(first_for_tse, last_for_tse) + , m_points_ds_for_tse(m_points_for_tse) +#endif + { } + + /// Destructor + ~Tangential_complex() { +#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION) + delete [] m_p_perturb_mutexes; +#endif + } + + /// Returns the intrinsic dimension of the manifold. + int intrinsic_dimension() const { + return m_intrinsic_dim; + } + + /// Returns the ambient dimension. + int ambient_dimension() const { + return m_ambient_dim; + } + + Points const& points() const { + return m_points; + } + + /** \brief Returns the point corresponding to the vertex given as parameter. + * + * @param[in] vertex Vertex handle of the point to retrieve. + * @return The point found. + */ + Point get_point(std::size_t vertex) const { + return m_points[vertex]; + } + + /** \brief Returns the perturbed position of the point corresponding to the vertex given as parameter. + * + * @param[in] vertex Vertex handle of the point to retrieve. + * @return The perturbed position of the point found. + */ + Point get_perturbed_point(std::size_t vertex) const { + return compute_perturbed_point(vertex); + } + + /// Returns the number of vertices. + + std::size_t number_of_vertices() const { + return m_points.size(); + } + + void set_weights(const Weights& weights) { + m_weights = weights; + } + + void set_tangent_planes(const TS_container& tangent_spaces +#ifdef GUDHI_TC_EXPORT_NORMALS + , const OS_container& orthogonal_spaces +#endif + ) { +#ifdef GUDHI_TC_EXPORT_NORMALS + GUDHI_CHECK( + m_points.size() == tangent_spaces.size() + && m_points.size() == orthogonal_spaces.size(), + std::logic_error("Wrong sizes")); +#else + GUDHI_CHECK( + m_points.size() == tangent_spaces.size(), + std::logic_error("Wrong sizes")); +#endif + m_tangent_spaces = tangent_spaces; +#ifdef GUDHI_TC_EXPORT_NORMALS + m_orth_spaces = orthogonal_spaces; +#endif + for (std::size_t i = 0; i < m_points.size(); ++i) + m_are_tangent_spaces_computed[i] = true; + } + + /// Computes the tangential complex. + void compute_tangential_complex() { +#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS + std::cerr << red << "WARNING: GUDHI_TC_PERFORM_EXTRA_CHECKS is defined. 
" + << "Computation might be slower than usual.\n" << white; +#endif + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB) + Gudhi::Clock t; +#endif + + // We need to do that because we don't want the container to copy the + // already-computed triangulations (while resizing) since it would + // invalidate the vertex handles stored beside the triangulations + m_triangulations.resize(m_points.size()); + m_stars.resize(m_points.size()); + m_squared_star_spheres_radii_incl_margin.resize(m_points.size(), FT(-1)); +#ifdef GUDHI_TC_PERTURB_POSITION + if (m_points.empty()) + m_translations.clear(); + else + m_translations.resize(m_points.size(), + m_k.construct_vector_d_object()(m_ambient_dim)); +#if defined(GUDHI_USE_TBB) + delete [] m_p_perturb_mutexes; + m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()]; +#endif +#endif + +#ifdef GUDHI_USE_TBB + // Parallel + if (boost::is_convertible::value) { + tbb::parallel_for(tbb::blocked_range(0, m_points.size()), + Compute_tangent_triangulation(*this)); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_points.size(); ++i) + compute_tangent_triangulation(i); +#ifdef GUDHI_USE_TBB + } +#endif // GUDHI_USE_TBB + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB) + t.end(); + std::cerr << "Tangential complex computed in " << t.num_seconds() + << " seconds.\n"; +#endif + } + + /// \brief Type returned by `Tangential_complex::fix_inconsistencies_using_perturbation`. + struct Fix_inconsistencies_info { + /// `true` if all inconsistencies could be removed, `false` if the time limit has been reached before + bool success = false; + /// number of steps performed + unsigned int num_steps = 0; + /// initial number of inconsistent stars + std::size_t initial_num_inconsistent_stars = 0; + /// best number of inconsistent stars during the process + std::size_t best_num_inconsistent_stars = 0; + /// final number of inconsistent stars + std::size_t final_num_inconsistent_stars = 0; + }; + + /** \brief Attempts to fix inconsistencies by perturbing the point positions. + * + * @param[in] max_perturb Maximum length of the translations used by the perturbation. + * @param[in] time_limit Time limit in seconds. If -1, no time limit is set. + */ + Fix_inconsistencies_info fix_inconsistencies_using_perturbation(double max_perturb, double time_limit = -1.) { + Fix_inconsistencies_info info; + + if (time_limit == 0.) + return info; + + Gudhi::Clock t; + +#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + std::tuple stats_before = + number_of_inconsistent_simplices(false); + + if (std::get<1>(stats_before) == 0) { +#ifdef DEBUG_TRACES + std::cerr << "Nothing to fix.\n"; +#endif + info.success = false; + return info; + } +#endif // GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + + m_last_max_perturb = max_perturb; + + bool done = false; + info.best_num_inconsistent_stars = m_triangulations.size(); + info.num_steps = 0; + while (!done) { +#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + std::cerr + << "\nBefore fix step:\n" + << " * Total number of simplices in stars (incl. duplicates): " + << std::get<0>(stats_before) << "\n" + << " * Num inconsistent simplices in stars (incl. duplicates): " + << red << std::get<1>(stats_before) << white << " (" + << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n" + << " * Number of stars containing inconsistent simplices: " + << red << std::get<2>(stats_before) << white << " (" + << 100. 
* std::get<2>(stats_before) / m_points.size() << "%)\n"; +#endif + +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow + << "\nAttempt to fix inconsistencies using perturbations - step #" + << info.num_steps + 1 << "... " << white; +#endif + + std::size_t num_inconsistent_stars = 0; + std::vector updated_points; + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t_fix_step; +#endif + + // Parallel +#if defined(GUDHI_USE_TBB) + if (boost::is_convertible::value) { + tbb::combinable num_inconsistencies; + tbb::combinable > tls_updated_points; + tbb::parallel_for( + tbb::blocked_range(0, m_triangulations.size()), + Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb, + num_inconsistencies, + tls_updated_points)); + num_inconsistent_stars = + num_inconsistencies.combine(std::plus()); + updated_points = tls_updated_points.combine( + [](std::vector const& x, + std::vector const& y) { + std::vector res; + res.reserve(x.size() + y.size()); + res.insert(res.end(), x.begin(), x.end()); + res.insert(res.end(), y.begin(), y.end()); + return res; + }); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_triangulations.size(); ++i) { + num_inconsistent_stars += + try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb, + std::back_inserter(updated_points)); + } +#if defined(GUDHI_USE_TBB) + } +#endif // GUDHI_USE_TBB + +#ifdef GUDHI_TC_PROFILING + t_fix_step.end(); +#endif + +#if defined(GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES) || defined(DEBUG_TRACES) + std::cerr + << "\nEncountered during fix:\n" + << " * Num stars containing inconsistent simplices: " + << red << num_inconsistent_stars << white + << " (" << 100. * num_inconsistent_stars / m_points.size() << "%)\n"; +#endif + +#ifdef GUDHI_TC_PROFILING + std::cerr << yellow << "done in " << t_fix_step.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + + if (num_inconsistent_stars > 0) + refresh_tangential_complex(updated_points); + +#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS + // Confirm that all stars were actually refreshed + std::size_t num_inc_1 = + std::get<1>(number_of_inconsistent_simplices(false)); + refresh_tangential_complex(); + std::size_t num_inc_2 = + std::get<1>(number_of_inconsistent_simplices(false)); + if (num_inc_1 != num_inc_2) + std::cerr << red << "REFRESHMENT CHECK: FAILED. (" + << num_inc_1 << " vs " << num_inc_2 << ")\n" << white; + else + std::cerr << green << "REFRESHMENT CHECK: PASSED.\n" << white; +#endif + +#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES + std::tuple stats_after = + number_of_inconsistent_simplices(false); + + std::cerr + << "\nAfter fix:\n" + << " * Total number of simplices in stars (incl. duplicates): " + << std::get<0>(stats_after) << "\n" + << " * Num inconsistent simplices in stars (incl. duplicates): " + << red << std::get<1>(stats_after) << white << " (" + << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n" + << " * Number of stars containing inconsistent simplices: " + << red << std::get<2>(stats_after) << white << " (" + << 100. 
* std::get<2>(stats_after) / m_points.size() << "%)\n"; + + stats_before = stats_after; +#endif + + if (info.num_steps == 0) + info.initial_num_inconsistent_stars = num_inconsistent_stars; + + if (num_inconsistent_stars < info.best_num_inconsistent_stars) + info.best_num_inconsistent_stars = num_inconsistent_stars; + + info.final_num_inconsistent_stars = num_inconsistent_stars; + + done = (num_inconsistent_stars == 0); + if (!done) { + ++info.num_steps; + if (time_limit > 0. && t.num_seconds() > time_limit) { +#ifdef DEBUG_TRACES + std::cerr << red << "Time limit reached.\n" << white; +#endif + info.success = false; + return info; + } + } + } + +#ifdef DEBUG_TRACES + std::cerr << green << "Fixed!\n" << white; +#endif + info.success = true; + return info; + } + + /// \brief Type returned by `Tangential_complex::number_of_inconsistent_simplices`. + struct Num_inconsistencies { + /// Total number of simplices in stars (including duplicates that appear in several stars) + std::size_t num_simplices = 0; + /// Number of inconsistent simplices + std::size_t num_inconsistent_simplices = 0; + /// Number of stars containing at least one inconsistent simplex + std::size_t num_inconsistent_stars = 0; + }; + + /// Returns the number of inconsistencies + /// @param[in] verbose If true, outputs a message into `std::cerr`. + + Num_inconsistencies + number_of_inconsistent_simplices( +#ifdef DEBUG_TRACES + bool verbose = true +#else + bool verbose = false +#endif + ) const { + Num_inconsistencies stats; + + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + bool is_star_inconsistent = false; + + // For each cell + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + // Don't check infinite cells + if (is_infinite(*it_inc_simplex)) + continue; + + Simplex c = *it_inc_simplex; + c.insert(idx); // Add the missing index + + if (!is_simplex_consistent(c)) { + ++stats.num_inconsistent_simplices; + is_star_inconsistent = true; + } + + ++stats.num_simplices; + } + stats.num_inconsistent_stars += is_star_inconsistent; + } + + if (verbose) { + std::cerr + << "\n==========================================================\n" + << "Inconsistencies:\n" + << " * Total number of simplices in stars (incl. duplicates): " + << stats.num_simplices << "\n" + << " * Number of inconsistent simplices in stars (incl. duplicates): " + << stats.num_inconsistent_simplices << " (" + << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n" + << " * Number of stars containing inconsistent simplices: " + << stats.num_inconsistent_stars << " (" + << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n" + << "==========================================================\n"; + } + + return stats; + } + + /** \brief Exports the complex into a Simplex_tree. + * + * \tparam Simplex_tree_ must be a `Simplex_tree`. + * + * @param[out] tree The result, where each `Vertex_handle` is the index of the + * corresponding point in the range provided to the constructor (it can also be + * retrieved through the `Tangential_complex::get_point` function. + * @param[in] export_inconsistent_simplices Also export inconsistent simplices or not? + * @return The maximal dimension of the simplices. 
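As an illustration of the `export_inconsistent_simplices` flag documented above, a small hypothetical helper (not part of the patch) could be written as follows:

#include <gudhi/Simplex_tree.h>

// Illustrative helper: export only the simplices that are consistent,
// i.e. present in the star of every one of their vertices.
// TC is any instantiation of Gudhi::tangential_complex::Tangential_complex.
template <typename TC>
int export_consistent_part(TC const& tc, Gudhi::Simplex_tree<>& stree) {
  // Second argument: export_inconsistent_simplices = false.
  // Per the documentation above, the return value is the maximal
  // dimension of the exported simplices.
  return tc.create_complex(stree, false);
}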
+ */ + template + int create_complex(Simplex_tree_ &tree + , bool export_inconsistent_simplices = true + /// \cond ADVANCED_PARAMETERS + , bool export_infinite_simplices = false + , Simplex_set *p_inconsistent_simplices = NULL + /// \endcond + ) const { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow + << "\nExporting the TC as a Simplex_tree... " << white; +#endif +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif + + int max_dim = -1; + + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + // For each cell of the star + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + Simplex c = *it_inc_simplex; + + // Don't export infinite cells + if (!export_infinite_simplices && is_infinite(c)) + continue; + + if (!export_inconsistent_simplices && !is_simplex_consistent(c)) + continue; + + if (static_cast (c.size()) > max_dim) + max_dim = static_cast (c.size()); + // Add the missing center vertex + c.insert(idx); + + // Try to insert the simplex + bool inserted = tree.insert_simplex_and_subfaces(c).second; + + // Inconsistent? + if (p_inconsistent_simplices && inserted && !is_simplex_consistent(c)) { + p_inconsistent_simplices->insert(c); + } + } + } + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + + return max_dim; + } + + // First clears the complex then exports the TC into it + // Returns the max dimension of the simplices + // check_lower_and_higher_dim_simplices : 0 (false), 1 (true), 2 (auto) + // If the check is enabled, the function: + // - won't insert the simplex if it is already in a higher dim simplex + // - will erase any lower-dim simplices that are faces of the new simplex + // "auto" (= 2) will enable the check as a soon as it encounters a + // simplex whose dimension is different from the previous ones. + // N.B.: The check is quite expensive. + + int create_complex(Simplicial_complex &complex, + bool export_inconsistent_simplices = true, + bool export_infinite_simplices = false, + int check_lower_and_higher_dim_simplices = 2, + Simplex_set *p_inconsistent_simplices = NULL) const { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow + << "\nExporting the TC as a Simplicial_complex... " << white; +#endif +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif + + int max_dim = -1; + complex.clear(); + + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + // For each cell of the star + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + Simplex c = *it_inc_simplex; + + // Don't export infinite cells + if (!export_infinite_simplices && is_infinite(c)) + continue; + + if (!export_inconsistent_simplices && !is_simplex_consistent(c)) + continue; + + // Unusual simplex dim? + if (check_lower_and_higher_dim_simplices == 2 + && max_dim != -1 + && static_cast (c.size()) != max_dim) { + // Let's activate the check + std::cerr << red << + "Info: check_lower_and_higher_dim_simplices ACTIVATED. 
" + "Export might be take some time...\n" << white; + check_lower_and_higher_dim_simplices = 1; + } + + if (static_cast (c.size()) > max_dim) + max_dim = static_cast (c.size()); + // Add the missing center vertex + c.insert(idx); + + // Try to insert the simplex + bool added = + complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1); + + // Inconsistent? + if (p_inconsistent_simplices && added && !is_simplex_consistent(c)) { + p_inconsistent_simplices->insert(c); + } + } + } + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + + return max_dim; + } + + template > + std::ostream &export_to_off( + const Simplicial_complex &complex, std::ostream & os, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL, + ProjectionFunctor const& point_projection = ProjectionFunctor()) + const { + return export_to_off( + os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green, + p_simpl_to_color_in_blue, &complex, point_projection); + } + + template > + std::ostream &export_to_off( + std::ostream & os, bool color_inconsistencies = false, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL, + const Simplicial_complex *p_complex = NULL, + ProjectionFunctor const& point_projection = ProjectionFunctor()) const { + if (m_points.empty()) + return os; + + if (m_ambient_dim < 2) { + std::cerr << "Error: export_to_off => ambient dimension should be >= 2.\n"; + os << "Error: export_to_off => ambient dimension should be >= 2.\n"; + return os; + } + if (m_ambient_dim > 3) { + std::cerr << "Warning: export_to_off => ambient dimension should be " + "<= 3. Only the first 3 coordinates will be exported.\n"; + } + + if (m_intrinsic_dim < 1 || m_intrinsic_dim > 3) { + std::cerr << "Error: export_to_off => intrinsic dimension should be " + "between 1 and 3.\n"; + os << "Error: export_to_off => intrinsic dimension should be " + "between 1 and 3.\n"; + return os; + } + + std::stringstream output; + std::size_t num_simplices, num_vertices; + export_vertices_to_off(output, num_vertices, false, point_projection); + if (p_complex) { + export_simplices_to_off( + *p_complex, output, num_simplices, p_simpl_to_color_in_red, + p_simpl_to_color_in_green, p_simpl_to_color_in_blue); + } else { + export_simplices_to_off( + output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red, + p_simpl_to_color_in_green, p_simpl_to_color_in_blue); + } + +#ifdef GUDHI_TC_EXPORT_NORMALS + os << "N"; +#endif + + os << "OFF \n" + << num_vertices << " " + << num_simplices << " " + << "0 \n" + << output.str(); + + return os; + } + + private: + void refresh_tangential_complex() { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow << "\nRefreshing TC... 
" << white; +#endif + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif +#ifdef GUDHI_USE_TBB + // Parallel + if (boost::is_convertible::value) { + tbb::parallel_for(tbb::blocked_range(0, m_points.size()), + Compute_tangent_triangulation(*this)); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_points.size(); ++i) + compute_tangent_triangulation(i); +#ifdef GUDHI_USE_TBB + } +#endif // GUDHI_USE_TBB + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + } + + // If the list of perturbed points is provided, it is much faster + template + void refresh_tangential_complex( + Point_indices_range const& perturbed_points_indices) { +#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING) + std::cerr << yellow << "\nRefreshing TC... " << white; +#endif + +#ifdef GUDHI_TC_PROFILING + Gudhi::Clock t; +#endif + + // ANN tree containing only the perturbed points + Points_ds updated_pts_ds(m_points, perturbed_points_indices); + +#ifdef GUDHI_USE_TBB + // Parallel + if (boost::is_convertible::value) { + tbb::parallel_for(tbb::blocked_range(0, m_points.size()), + Refresh_tangent_triangulation(*this, updated_pts_ds)); + } else { +#endif // GUDHI_USE_TBB + // Sequential + for (std::size_t i = 0; i < m_points.size(); ++i) + refresh_tangent_triangulation(i, updated_pts_ds); +#ifdef GUDHI_USE_TBB + } +#endif // GUDHI_USE_TBB + +#ifdef GUDHI_TC_PROFILING + t.end(); + std::cerr << yellow << "done in " << t.num_seconds() + << " seconds.\n" << white; +#elif defined(DEBUG_TRACES) + std::cerr << yellow << "done.\n" << white; +#endif + } + + void export_inconsistent_stars_to_OFF_files(std::string const& filename_base) const { + // For each triangulation + for (std::size_t idx = 0; idx < m_points.size(); ++idx) { + // We build a SC along the way in case it's inconsistent + Simplicial_complex sc; + // For each cell + bool is_inconsistent = false; + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; + ++it_inc_simplex) { + // Skip infinite cells + if (is_infinite(*it_inc_simplex)) + continue; + + Simplex c = *it_inc_simplex; + c.insert(idx); // Add the missing index + + sc.add_simplex(c); + + // If we do not already know this star is inconsistent, test it + if (!is_inconsistent && !is_simplex_consistent(c)) + is_inconsistent = true; + } + + if (is_inconsistent) { + // Export star to OFF file + std::stringstream output_filename; + output_filename << filename_base << "_" << idx << ".off"; + std::ofstream off_stream(output_filename.str().c_str()); + export_to_off(sc, off_stream); + } + } + } + + class Compare_distance_to_ref_point { + public: + Compare_distance_to_ref_point(Point const& ref, K const& k) + : m_ref(ref), m_k(k) { } + + bool operator()(Point const& p1, Point const& p2) { + typename K::Squared_distance_d sqdist = + m_k.squared_distance_d_object(); + return sqdist(p1, m_ref) < sqdist(p2, m_ref); + } + + private: + Point const& m_ref; + K const& m_k; + }; + +#ifdef GUDHI_USE_TBB + // Functor for compute_tangential_complex function + class Compute_tangent_triangulation { + Tangential_complex & m_tc; + + public: + // Constructor + Compute_tangent_triangulation(Tangential_complex &tc) + : m_tc(tc) { } + + // Constructor + Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt) + : 
m_tc(ctt.m_tc) { } + + // operator() + void operator()(const tbb::blocked_range& r) const { + for (size_t i = r.begin(); i != r.end(); ++i) + m_tc.compute_tangent_triangulation(i); + } + }; + + // Functor for refresh_tangential_complex function + class Refresh_tangent_triangulation { + Tangential_complex & m_tc; + Points_ds const& m_updated_pts_ds; + + public: + // Constructor + Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const& updated_pts_ds) + : m_tc(tc), m_updated_pts_ds(updated_pts_ds) { } + + // Constructor + Refresh_tangent_triangulation(const Refresh_tangent_triangulation &ctt) + : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) { } + + // operator() + void operator()(const tbb::blocked_range& r) const { + for (size_t i = r.begin(); i != r.end(); ++i) + m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds); + } + }; +#endif // GUDHI_USE_TBB + + bool is_infinite(Simplex const& s) const { + return *s.rbegin() == std::numeric_limits::max(); + } + + // Output: "triangulation" is a Regular Triangulation containing at least the + // star of "center_pt" + // Returns the handle of the center vertex + Tr_vertex_handle compute_star(std::size_t i, const Point ¢er_pt, const Tangent_space_basis &tsb, + Triangulation &triangulation, bool verbose = false) { + int tangent_space_dim = tsb.dimension(); + const Tr_traits &local_tr_traits = triangulation.geom_traits(); + Tr_vertex_handle center_vertex; + + // Kernel functor & objects + typename K::Squared_distance_d k_sqdist = m_k.squared_distance_d_object(); + + // Triangulation's traits functor & objects + typename Tr_traits::Compute_weight_d point_weight = local_tr_traits.compute_weight_d_object(); + typename Tr_traits::Power_center_d power_center = local_tr_traits.power_center_d_object(); + + //*************************************************** + // Build a minimal triangulation in the tangent space + // (we only need the star of p) + //*************************************************** + + // Insert p + Tr_point proj_wp; + if (i == tsb.origin()) { + // Insert {(0, 0, 0...), m_weights[i]} + proj_wp = local_tr_traits.construct_weighted_point_d_object()(local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN), + m_weights[i]); + } else { + const Weighted_point& wp = compute_perturbed_weighted_point(i); + proj_wp = project_point_and_compute_weight(wp, tsb, local_tr_traits); + } + + center_vertex = triangulation.insert(proj_wp); + center_vertex->data() = i; + if (verbose) + std::cerr << "* Inserted point #" << i << "\n"; + +#ifdef GUDHI_TC_VERY_VERBOSE + std::size_t num_attempts_to_insert_points = 1; + std::size_t num_inserted_points = 1; +#endif + // const int NUM_NEIGHBORS = 150; + // KNS_range ins_range = m_points_ds.query_k_nearest_neighbors(center_pt, NUM_NEIGHBORS); + INS_range ins_range = m_points_ds.query_incremental_nearest_neighbors(center_pt); + + // While building the local triangulation, we keep the radius + // of the sphere "star sphere" centered at "center_vertex" + // and which contains all the + // circumspheres of the star of "center_vertex" + boost::optional squared_star_sphere_radius_plus_margin; + + // Insert points until we find a point which is outside "star sphere" + for (auto nn_it = ins_range.begin(); + nn_it != ins_range.end(); + ++nn_it) { + std::size_t neighbor_point_idx = nn_it->first; + + // ith point = p, which is already inserted + if (neighbor_point_idx != i) { + // No need to lock the Mutex_for_perturb here since this will not be + // called while other threads are 
perturbing the positions + Point neighbor_pt; + FT neighbor_weight; + compute_perturbed_weighted_point(neighbor_point_idx, neighbor_pt, neighbor_weight); + + if (squared_star_sphere_radius_plus_margin && + k_sqdist(center_pt, neighbor_pt) > *squared_star_sphere_radius_plus_margin) + break; + + Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb, + local_tr_traits); + +#ifdef GUDHI_TC_VERY_VERBOSE + ++num_attempts_to_insert_points; +#endif + + + Tr_vertex_handle vh = triangulation.insert_if_in_star(proj_pt, center_vertex); + // Tr_vertex_handle vh = triangulation.insert(proj_pt); + if (vh != Tr_vertex_handle()) { +#ifdef GUDHI_TC_VERY_VERBOSE + ++num_inserted_points; +#endif + if (verbose) + std::cerr << "* Inserted point #" << neighbor_point_idx << "\n"; + + vh->data() = neighbor_point_idx; + + // Let's recompute squared_star_sphere_radius_plus_margin + if (triangulation.current_dimension() >= tangent_space_dim) { + squared_star_sphere_radius_plus_margin = boost::none; + // Get the incident cells and look for the biggest circumsphere + std::vector incident_cells; + triangulation.incident_full_cells( + center_vertex, + std::back_inserter(incident_cells)); + for (typename std::vector::iterator cit = + incident_cells.begin(); cit != incident_cells.end(); ++cit) { + Tr_full_cell_handle cell = *cit; + if (triangulation.is_infinite(cell)) { + squared_star_sphere_radius_plus_margin = boost::none; + break; + } else { + // Note that this uses the perturbed point since it uses + // the points of the local triangulation + Tr_point c = power_center(boost::make_transform_iterator(cell->vertices_begin(), + vertex_handle_to_point), + boost::make_transform_iterator(cell->vertices_end(), + vertex_handle_to_point)); + + FT sq_power_sphere_diam = 4 * point_weight(c); + + if (!squared_star_sphere_radius_plus_margin || + sq_power_sphere_diam > *squared_star_sphere_radius_plus_margin) { + squared_star_sphere_radius_plus_margin = sq_power_sphere_diam; + } + } + } + + // Let's add the margin, now + // The value depends on whether we perturb weight or position + if (squared_star_sphere_radius_plus_margin) { + // "2*m_last_max_perturb" because both points can be perturbed + squared_star_sphere_radius_plus_margin = CGAL::square(std::sqrt(*squared_star_sphere_radius_plus_margin) + + 2 * m_last_max_perturb); + + // Save it in `m_squared_star_spheres_radii_incl_margin` + m_squared_star_spheres_radii_incl_margin[i] = + *squared_star_sphere_radius_plus_margin; + } else { + m_squared_star_spheres_radii_incl_margin[i] = FT(-1); + } + } + } + } + } + + return center_vertex; + } + + void refresh_tangent_triangulation(std::size_t i, Points_ds const& updated_pts_ds, bool verbose = false) { + if (verbose) + std::cerr << "** Refreshing tangent tri #" << i << " **\n"; + + if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1)) + return compute_tangent_triangulation(i, verbose); + + Point center_point = compute_perturbed_point(i); + // Among updated point, what is the closer from our center point? 
+ std::size_t closest_pt_index = + updated_pts_ds.query_k_nearest_neighbors(center_point, 1, false).begin()->first; + + typename K::Construct_weighted_point_d k_constr_wp = + m_k.construct_weighted_point_d_object(); + typename K::Power_distance_d k_power_dist = m_k.power_distance_d_object(); + + // Construct a weighted point equivalent to the star sphere + Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i), + m_squared_star_spheres_radii_incl_margin[i]); + Weighted_point closest_updated_point = + compute_perturbed_weighted_point(closest_pt_index); + + // Is the "closest point" inside our star sphere? + if (k_power_dist(star_sphere, closest_updated_point) <= FT(0)) + compute_tangent_triangulation(i, verbose); + } + + void compute_tangent_triangulation(std::size_t i, bool verbose = false) { + if (verbose) + std::cerr << "** Computing tangent tri #" << i << " **\n"; + // std::cerr << "***********************************************\n"; + + // No need to lock the mutex here since this will not be called while + // other threads are perturbing the positions + const Point center_pt = compute_perturbed_point(i); + Tangent_space_basis &tsb = m_tangent_spaces[i]; + + // Estimate the tangent space + if (!m_are_tangent_spaces_computed[i]) { +#ifdef GUDHI_TC_EXPORT_NORMALS + tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]); +#else + tsb = compute_tangent_space(center_pt, i); +#endif + } + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE) + Gudhi::Clock t; +#endif + int tangent_space_dim = tangent_basis_dim(i); + Triangulation &local_tr = + m_triangulations[i].construct_triangulation(tangent_space_dim); + + m_triangulations[i].center_vertex() = + compute_star(i, center_pt, tsb, local_tr, verbose); + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE) + t.end(); + std::cerr << " - triangulation construction: " << t.num_seconds() << " s.\n"; + t.reset(); +#endif + +#ifdef GUDHI_TC_VERY_VERBOSE + std::cerr << "Inserted " << num_inserted_points << " points / " + << num_attempts_to_insert_points << " attemps to compute the star\n"; +#endif + + update_star(i); + +#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE) + t.end(); + std::cerr << " - update_star: " << t.num_seconds() << " s.\n"; +#endif + } + + // Updates m_stars[i] directly from m_triangulations[i] + + void update_star(std::size_t i) { + Star &star = m_stars[i]; + star.clear(); + Triangulation &local_tr = m_triangulations[i].tr(); + Tr_vertex_handle center_vertex = m_triangulations[i].center_vertex(); + int cur_dim_plus_1 = local_tr.current_dimension() + 1; + + std::vector incident_cells; + local_tr.incident_full_cells( + center_vertex, std::back_inserter(incident_cells)); + + typename std::vector::const_iterator it_c = incident_cells.begin(); + typename std::vector::const_iterator it_c_end = incident_cells.end(); + // For each cell + for (; it_c != it_c_end; ++it_c) { + // Will contain all indices except center_vertex + Incident_simplex incident_simplex; + for (int j = 0; j < cur_dim_plus_1; ++j) { + std::size_t index = (*it_c)->vertex(j)->data(); + if (index != i) + incident_simplex.insert(index); + } + star.push_back(incident_simplex); + } + } + + // Estimates tangent subspaces using PCA + + Tangent_space_basis compute_tangent_space(const Point &p + , const std::size_t i + , bool normalize_basis = true + , Orthogonal_space_basis *p_orth_space_basis = NULL + ) { + unsigned int num_pts_for_pca = static_cast (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, 
m_intrinsic_dim)); + + // Kernel functors + typename K::Construct_vector_d constr_vec = + m_k.construct_vector_d_object(); + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + typename K::Squared_length_d sqlen = + m_k.squared_length_d_object(); + typename K::Scaled_vector_d scaled_vec = + m_k.scaled_vector_d_object(); + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_vectors_d diff_vec = + m_k.difference_of_vectors_d_object(); + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors( + p, num_pts_for_pca, false); + const Points &points_for_pca = m_points_for_tse; +#else + KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false); + const Points &points_for_pca = m_points; +#endif + + // One row = one point + Eigen::MatrixXd mat_points(num_pts_for_pca, m_ambient_dim); + auto nn_it = kns_range.begin(); + for (unsigned int j = 0; + j < num_pts_for_pca && nn_it != kns_range.end(); + ++j, ++nn_it) { + for (int i = 0; i < m_ambient_dim; ++i) { + mat_points(j, i) = CGAL::to_double(coord(points_for_pca[nn_it->first], i)); + } + } + Eigen::MatrixXd centered = mat_points.rowwise() - mat_points.colwise().mean(); + Eigen::MatrixXd cov = centered.adjoint() * centered; + Eigen::SelfAdjointEigenSolver eig(cov); + + Tangent_space_basis tsb(i); // p = compute_perturbed_point(i) here + + // The eigenvectors are sorted in increasing order of their corresponding + // eigenvalues + for (int j = m_ambient_dim - 1; + j >= m_ambient_dim - m_intrinsic_dim; + --j) { + if (normalize_basis) { + Vector v = constr_vec(m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim); + tsb.push_back(normalize_vector(v, m_k)); + } else { + tsb.push_back(constr_vec( + m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim)); + } + } + + if (p_orth_space_basis) { + p_orth_space_basis->set_origin(i); + for (int j = m_ambient_dim - m_intrinsic_dim - 1; + j >= 0; + --j) { + if (normalize_basis) { + Vector v = constr_vec(m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim); + p_orth_space_basis->push_back(normalize_vector(v, m_k)); + } else { + p_orth_space_basis->push_back(constr_vec( + m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim)); + } + } + } + + m_are_tangent_spaces_computed[i] = true; + + return tsb; + } + + // Compute the space tangent to a simplex (p1, p2, ... pn) + // TODO(CJ): Improve this? + // Basically, it takes all the neighbor points to p1, p2... pn and runs PCA + // on it. Note that most points are duplicated. 
+ + Tangent_space_basis compute_tangent_space(const Simplex &s, bool normalize_basis = true) { + unsigned int num_pts_for_pca = static_cast (std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)); + + // Kernel functors + typename K::Construct_vector_d constr_vec = + m_k.construct_vector_d_object(); + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + typename K::Squared_length_d sqlen = + m_k.squared_length_d_object(); + typename K::Scaled_vector_d scaled_vec = + m_k.scaled_vector_d_object(); + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_vectors_d diff_vec = + m_k.difference_of_vectors_d_object(); + + // One row = one point + Eigen::MatrixXd mat_points(s.size() * num_pts_for_pca, m_ambient_dim); + unsigned int current_row = 0; + + for (Simplex::const_iterator it_index = s.begin(); + it_index != s.end(); ++it_index) { + const Point &p = m_points[*it_index]; + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + KNS_range kns_range = m_points_ds_for_tse.query_k_nearest_neighbors( + p, num_pts_for_pca, false); + const Points &points_for_pca = m_points_for_tse; +#else + KNS_range kns_range = m_points_ds.query_k_nearest_neighbors(p, num_pts_for_pca, false); + const Points &points_for_pca = m_points; +#endif + + auto nn_it = kns_range.begin(); + for (; + current_row < num_pts_for_pca && nn_it != kns_range.end(); + ++current_row, ++nn_it) { + for (int i = 0; i < m_ambient_dim; ++i) { + mat_points(current_row, i) = + CGAL::to_double(coord(points_for_pca[nn_it->first], i)); + } + } + } + Eigen::MatrixXd centered = mat_points.rowwise() - mat_points.colwise().mean(); + Eigen::MatrixXd cov = centered.adjoint() * centered; + Eigen::SelfAdjointEigenSolver eig(cov); + + Tangent_space_basis tsb; + + // The eigenvectors are sorted in increasing order of their corresponding + // eigenvalues + for (int j = m_ambient_dim - 1; + j >= m_ambient_dim - m_intrinsic_dim; + --j) { + if (normalize_basis) { + Vector v = constr_vec(m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim); + tsb.push_back(normalize_vector(v, m_k)); + } else { + tsb.push_back(constr_vec( + m_ambient_dim, + eig.eigenvectors().col(j).data(), + eig.eigenvectors().col(j).data() + m_ambient_dim)); + } + } + + return tsb; + } + + // Returns the dimension of the ith local triangulation + + int tangent_basis_dim(std::size_t i) const { + return m_tangent_spaces[i].dimension(); + } + + Point compute_perturbed_point(std::size_t pt_idx) const { +#ifdef GUDHI_TC_PERTURB_POSITION + return m_k.translated_point_d_object()( + m_points[pt_idx], m_translations[pt_idx]); +#else + return m_points[pt_idx]; +#endif + } + + void compute_perturbed_weighted_point(std::size_t pt_idx, Point &p, FT &w) const { +#ifdef GUDHI_TC_PERTURB_POSITION + p = m_k.translated_point_d_object()( + m_points[pt_idx], m_translations[pt_idx]); +#else + p = m_points[pt_idx]; +#endif + w = m_weights[pt_idx]; + } + + Weighted_point compute_perturbed_weighted_point(std::size_t pt_idx) const { + typename K::Construct_weighted_point_d k_constr_wp = + m_k.construct_weighted_point_d_object(); + + Weighted_point wp = k_constr_wp( +#ifdef GUDHI_TC_PERTURB_POSITION + m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]), +#else + m_points[pt_idx], +#endif + m_weights[pt_idx]); + + return wp; + } + + Point unproject_point(const Tr_point &p, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + typename 
K::Translated_point_d k_transl = + m_k.translated_point_d_object(); + typename K::Scaled_vector_d k_scaled_vec = + m_k.scaled_vector_d_object(); + typename Tr_traits::Compute_coordinate_d coord = + tr_traits.compute_coordinate_d_object(); + + Point global_point = compute_perturbed_point(tsb.origin()); + for (int i = 0; i < m_intrinsic_dim; ++i) + global_point = k_transl(global_point, + k_scaled_vec(tsb[i], coord(p, i))); + + return global_point; + } + + // Project the point in the tangent space + // Resulting point coords are expressed in tsb's space + Tr_bare_point project_point(const Point &p, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_points_d diff_points = + m_k.difference_of_points_d_object(); + + Vector v = diff_points(p, compute_perturbed_point(tsb.origin())); + + std::vector coords; + // Ambiant-space coords of the projected point + coords.reserve(tsb.dimension()); + for (std::size_t i = 0; i < m_intrinsic_dim; ++i) { + // Local coords are given by the scalar product with the vectors of tsb + FT coord = scalar_pdct(v, tsb[i]); + coords.push_back(coord); + } + + return tr_traits.construct_point_d_object()( + static_cast (coords.size()), coords.begin(), coords.end()); + } + + // Project the point in the tangent space + // The weight will be the squared distance between p and the projection of p + // Resulting point coords are expressed in tsb's space + + Tr_point project_point_and_compute_weight(const Weighted_point &wp, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + typename K::Point_drop_weight_d k_drop_w = + m_k.point_drop_weight_d_object(); + typename K::Compute_weight_d k_point_weight = + m_k.compute_weight_d_object(); + return project_point_and_compute_weight( + k_drop_w(wp), k_point_weight(wp), tsb, tr_traits); + } + + // Same as above, with slightly different parameters + Tr_point project_point_and_compute_weight(const Point &p, const FT w, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + const int point_dim = m_k.point_dimension_d_object()(p); + + typename K::Construct_point_d constr_pt = + m_k.construct_point_d_object(); + typename K::Scalar_product_d scalar_pdct = + m_k.scalar_product_d_object(); + typename K::Difference_of_points_d diff_points = + m_k.difference_of_points_d_object(); + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + typename K::Construct_cartesian_const_iterator_d ccci = + m_k.construct_cartesian_const_iterator_d_object(); + + Point origin = compute_perturbed_point(tsb.origin()); + Vector v = diff_points(p, origin); + + // Same dimension? Then the weight is 0 + bool same_dim = (point_dim == tsb.dimension()); + + std::vector coords; + // Ambiant-space coords of the projected point + std::vector p_proj(ccci(origin), ccci(origin, 0)); + coords.reserve(tsb.dimension()); + for (int i = 0; i < tsb.dimension(); ++i) { + // Local coords are given by the scalar product with the vectors of tsb + FT c = scalar_pdct(v, tsb[i]); + coords.push_back(c); + + // p_proj += c * tsb[i] + if (!same_dim) { + for (int j = 0; j < point_dim; ++j) + p_proj[j] += c * coord(tsb[i], j); + } + } + + // Same dimension? 
Then the weight is 0 + FT sq_dist_to_proj_pt = 0; + if (!same_dim) { + Point projected_pt = constr_pt(point_dim, p_proj.begin(), p_proj.end()); + sq_dist_to_proj_pt = m_k.squared_distance_d_object()(p, projected_pt); + } + + return tr_traits.construct_weighted_point_d_object() + (tr_traits.construct_point_d_object()(static_cast (coords.size()), coords.begin(), coords.end()), + w - sq_dist_to_proj_pt); + } + + // Project all the points in the tangent space + + template + std::vector project_points_and_compute_weights( + const Indexed_point_range &point_indices, + const Tangent_space_basis &tsb, + const Tr_traits &tr_traits) const { + std::vector ret; + for (typename Indexed_point_range::const_iterator + it = point_indices.begin(), it_end = point_indices.end(); + it != it_end; ++it) { + ret.push_back(project_point_and_compute_weight( + compute_perturbed_weighted_point(*it), tsb, tr_traits)); + } + return ret; + } + + // A simplex here is a local tri's full cell handle + + bool is_simplex_consistent(Tr_full_cell_handle fch, int cur_dim) const { + Simplex c; + for (int i = 0; i < cur_dim + 1; ++i) { + std::size_t data = fch->vertex(i)->data(); + c.insert(data); + } + return is_simplex_consistent(c); + } + + // A simplex here is a list of point indices + // TODO(CJ): improve it like the other "is_simplex_consistent" below + + bool is_simplex_consistent(Simplex const& simplex) const { + // Check if the simplex is in the stars of all its vertices + Simplex::const_iterator it_point_idx = simplex.begin(); + // For each point p of the simplex, we parse the incidents cells of p + // and we check if "simplex" is among them + for (; it_point_idx != simplex.end(); ++it_point_idx) { + std::size_t point_idx = *it_point_idx; + // Don't check infinite simplices + if (point_idx == std::numeric_limits::max()) + continue; + + Star const& star = m_stars[point_idx]; + + // What we're looking for is "simplex" \ point_idx + Incident_simplex is_to_find = simplex; + is_to_find.erase(point_idx); + + // For each cell + if (std::find(star.begin(), star.end(), is_to_find) == star.end()) + return false; + } + + return true; + } + + // A simplex here is a list of point indices + // "s" contains all the points of the simplex except "center_point" + // This function returns the points whose star doesn't contain the simplex + // N.B.: the function assumes that the simplex is contained in + // star(center_point) + + template // value_type = std::size_t + bool is_simplex_consistent( + std::size_t center_point, + Incident_simplex const& s, // without "center_point" + OutputIterator points_whose_star_does_not_contain_s, + bool check_also_in_non_maximal_faces = false) const { + Simplex full_simplex = s; + full_simplex.insert(center_point); + + // Check if the simplex is in the stars of all its vertices + Incident_simplex::const_iterator it_point_idx = s.begin(); + // For each point p of the simplex, we parse the incidents cells of p + // and we check if "simplex" is among them + for (; it_point_idx != s.end(); ++it_point_idx) { + std::size_t point_idx = *it_point_idx; + // Don't check infinite simplices + if (point_idx == std::numeric_limits::max()) + continue; + + Star const& star = m_stars[point_idx]; + + // What we're looking for is full_simplex \ point_idx + Incident_simplex is_to_find = full_simplex; + is_to_find.erase(point_idx); + + if (check_also_in_non_maximal_faces) { + // For each simplex "is" of the star, check if ic_to_simplex is + // included in "is" + bool found = false; + for (Star::const_iterator is = 
star.begin(), is_end = star.end(); + !found && is != is_end; ++is) { + if (std::includes(is->begin(), is->end(), + is_to_find.begin(), is_to_find.end())) + found = true; + } + + if (!found) + *points_whose_star_does_not_contain_s++ = point_idx; + } else { + // Does the star contain is_to_find? + if (std::find(star.begin(), star.end(), is_to_find) == star.end()) + *points_whose_star_does_not_contain_s++ = point_idx; + } + } + + return true; + } + + // A simplex here is a list of point indices + // It looks for s in star(p). + // "s" contains all the points of the simplex except p. + bool is_simplex_in_star(std::size_t p, + Incident_simplex const& s, + bool check_also_in_non_maximal_faces = true) const { + Star const& star = m_stars[p]; + + if (check_also_in_non_maximal_faces) { + // For each simplex "is" of the star, check if ic_to_simplex is + // included in "is" + bool found = false; + for (Star::const_iterator is = star.begin(), is_end = star.end(); + !found && is != is_end; ++is) { + if (std::includes(is->begin(), is->end(), s.begin(), s.end())) + found = true; + } + + return found; + } else { + return !(std::find(star.begin(), star.end(), s) == star.end()); + } + } + +#ifdef GUDHI_USE_TBB + // Functor for try_to_solve_inconsistencies_in_a_local_triangulation function + class Try_to_solve_inconsistencies_in_a_local_triangulation { + Tangential_complex & m_tc; + double m_max_perturb; + tbb::combinable &m_num_inconsistencies; + tbb::combinable > &m_updated_points; + + public: + // Constructor + Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc, + double max_perturb, + tbb::combinable &num_inconsistencies, + tbb::combinable > &updated_points) + : m_tc(tc), + m_max_perturb(max_perturb), + m_num_inconsistencies(num_inconsistencies), + m_updated_points(updated_points) { } + + // Constructor + Try_to_solve_inconsistencies_in_a_local_triangulation(const Try_to_solve_inconsistencies_in_a_local_triangulation& + tsilt) + : m_tc(tsilt.m_tc), + m_max_perturb(tsilt.m_max_perturb), + m_num_inconsistencies(tsilt.m_num_inconsistencies), + m_updated_points(tsilt.m_updated_points) { } + + // operator() + void operator()(const tbb::blocked_range& r) const { + for (size_t i = r.begin(); i != r.end(); ++i) { + m_num_inconsistencies.local() += + m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(i, m_max_perturb, + std::back_inserter(m_updated_points.local())); + } + } + }; +#endif // GUDHI_USE_TBB + + void perturb(std::size_t point_idx, double max_perturb) { + const Tr_traits &local_tr_traits = + m_triangulations[point_idx].tr().geom_traits(); + typename Tr_traits::Compute_coordinate_d coord = + local_tr_traits.compute_coordinate_d_object(); + typename K::Translated_point_d k_transl = + m_k.translated_point_d_object(); + typename K::Construct_vector_d k_constr_vec = + m_k.construct_vector_d_object(); + typename K::Scaled_vector_d k_scaled_vec = + m_k.scaled_vector_d_object(); + + CGAL::Random_points_in_ball_d + tr_point_in_ball_generator(m_intrinsic_dim, + m_random_generator.get_double(0., max_perturb)); + + Tr_point local_random_transl = + local_tr_traits.construct_weighted_point_d_object()(*tr_point_in_ball_generator++, 0); + Translation_for_perturb global_transl = k_constr_vec(m_ambient_dim); + const Tangent_space_basis &tsb = m_tangent_spaces[point_idx]; + for (int i = 0; i < m_intrinsic_dim; ++i) { + global_transl = k_transl(global_transl, + k_scaled_vec(tsb[i], coord(local_random_transl, i))); + } + // Parallel +#if defined(GUDHI_USE_TBB) + 
m_p_perturb_mutexes[point_idx].lock(); + m_translations[point_idx] = global_transl; + m_p_perturb_mutexes[point_idx].unlock(); + // Sequential +#else + m_translations[point_idx] = global_transl; +#endif + } + + // Return true if inconsistencies were found + template + bool try_to_solve_inconsistencies_in_a_local_triangulation(std::size_t tr_index, + double max_perturb, + OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) { + bool is_inconsistent = false; + + Star const& star = m_stars[tr_index]; + Tr_vertex_handle center_vh = m_triangulations[tr_index].center_vertex(); + + // For each incident simplex + Star::const_iterator it_inc_simplex = star.begin(); + Star::const_iterator it_inc_simplex_end = star.end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + const Incident_simplex &incident_simplex = *it_inc_simplex; + + // Don't check infinite cells + if (is_infinite(incident_simplex)) + continue; + + Simplex c = incident_simplex; + c.insert(tr_index); // Add the missing index + + // Perturb the center point + if (!is_simplex_consistent(c)) { + is_inconsistent = true; + + std::size_t idx = tr_index; + + perturb(tr_index, max_perturb); + *perturbed_pts_indices++ = idx; + + // We will try the other cells next time + break; + } + } + + return is_inconsistent; + } + + + // 1st line: number of points + // Then one point per line + std::ostream &export_point_set(std::ostream & os, + bool use_perturbed_points = false, + const char *coord_separator = " ") const { + if (use_perturbed_points) { + std::vector perturbed_points; + perturbed_points.reserve(m_points.size()); + for (std::size_t i = 0; i < m_points.size(); ++i) + perturbed_points.push_back(compute_perturbed_point(i)); + + return export_point_set( + m_k, perturbed_points, os, coord_separator); + } else { + return export_point_set( + m_k, m_points, os, coord_separator); + } + } + + template > + std::ostream &export_vertices_to_off( + std::ostream & os, std::size_t &num_vertices, + bool use_perturbed_points = false, + ProjectionFunctor const& point_projection = ProjectionFunctor()) const { + if (m_points.empty()) { + num_vertices = 0; + return os; + } + + // If m_intrinsic_dim = 1, we output each point two times + // to be able to export each segment as a flat triangle with 3 different + // indices (otherwise, Meshlab detects degenerated simplices) + const int N = (m_intrinsic_dim == 1 ? 2 : 1); + + // Kernel functors + typename K::Compute_coordinate_d coord = + m_k.compute_coordinate_d_object(); + +#ifdef GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF + int num_coords = m_ambient_dim; +#else + int num_coords = std::min(m_ambient_dim, 3); +#endif + +#ifdef GUDHI_TC_EXPORT_NORMALS + OS_container::const_iterator it_os = m_orth_spaces.begin(); +#endif + typename Points::const_iterator it_p = m_points.begin(); + typename Points::const_iterator it_p_end = m_points.end(); + // For each point p + for (std::size_t i = 0; it_p != it_p_end; ++it_p, ++i) { + Point p = point_projection( + use_perturbed_points ? 
compute_perturbed_point(i) : *it_p); + for (int ii = 0; ii < N; ++ii) { + int j = 0; + for (; j < num_coords; ++j) + os << CGAL::to_double(coord(p, j)) << " "; + if (j == 2) + os << "0"; + +#ifdef GUDHI_TC_EXPORT_NORMALS + for (j = 0; j < num_coords; ++j) + os << " " << CGAL::to_double(coord(*it_os->begin(), j)); +#endif + os << "\n"; + } +#ifdef GUDHI_TC_EXPORT_NORMALS + ++it_os; +#endif + } + + num_vertices = N * m_points.size(); + return os; + } + + std::ostream &export_simplices_to_off(std::ostream & os, std::size_t &num_OFF_simplices, + bool color_inconsistencies = false, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL) + const { + // If m_intrinsic_dim = 1, each point is output two times + // (see export_vertices_to_off) + num_OFF_simplices = 0; + std::size_t num_maximal_simplices = 0; + std::size_t num_inconsistent_maximal_simplices = 0; + std::size_t num_inconsistent_stars = 0; + typename Tr_container::const_iterator it_tr = m_triangulations.begin(); + typename Tr_container::const_iterator it_tr_end = m_triangulations.end(); + // For each triangulation + for (std::size_t idx = 0; it_tr != it_tr_end; ++it_tr, ++idx) { + bool is_star_inconsistent = false; + + Triangulation const& tr = it_tr->tr(); + Tr_vertex_handle center_vh = it_tr->center_vertex(); + + if (&tr == NULL || tr.current_dimension() < m_intrinsic_dim) + continue; + + // Color for this star + std::stringstream color; + // color << rand()%256 << " " << 100+rand()%156 << " " << 100+rand()%156; + color << 128 << " " << 128 << " " << 128; + + // Gather the triangles here, with an int telling its color + typedef std::vector > Star_using_triangles; + Star_using_triangles star_using_triangles; + + // For each cell of the star + Star::const_iterator it_inc_simplex = m_stars[idx].begin(); + Star::const_iterator it_inc_simplex_end = m_stars[idx].end(); + for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) { + Simplex c = *it_inc_simplex; + c.insert(idx); + std::size_t num_vertices = c.size(); + ++num_maximal_simplices; + + int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue + if (color_inconsistencies && !is_simplex_consistent(c)) { + ++num_inconsistent_maximal_simplices; + color_simplex = 0; + is_star_inconsistent = true; + } else { + if (p_simpl_to_color_in_red && + std::find( + p_simpl_to_color_in_red->begin(), + p_simpl_to_color_in_red->end(), + c) != p_simpl_to_color_in_red->end()) { + color_simplex = 1; + } else if (p_simpl_to_color_in_green && + std::find( + p_simpl_to_color_in_green->begin(), + p_simpl_to_color_in_green->end(), + c) != p_simpl_to_color_in_green->end()) { + color_simplex = 2; + } else if (p_simpl_to_color_in_blue && + std::find( + p_simpl_to_color_in_blue->begin(), + p_simpl_to_color_in_blue->end(), + c) != p_simpl_to_color_in_blue->end()) { + color_simplex = 3; + } + } + + // If m_intrinsic_dim = 1, each point is output two times, + // so we need to multiply each index by 2 + // And if only 2 vertices, add a third one (each vertex is duplicated in + // the file when m_intrinsic dim = 2) + if (m_intrinsic_dim == 1) { + Simplex tmp_c; + Simplex::iterator it = c.begin(); + for (; it != c.end(); ++it) + tmp_c.insert(*it * 2); + if (num_vertices == 2) + tmp_c.insert(*tmp_c.rbegin() + 1); + + c = tmp_c; + } + + if (num_vertices <= 3) { + star_using_triangles.push_back(std::make_pair(c, color_simplex)); + } else { + // num_vertices >= 4: decompose the simplex in 
triangles + std::vector booleans(num_vertices, false); + std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true); + do { + Simplex triangle; + Simplex::iterator it = c.begin(); + for (int i = 0; it != c.end(); ++i, ++it) { + if (booleans[i]) + triangle.insert(*it); + } + star_using_triangles.push_back( + std::make_pair(triangle, color_simplex)); + } while (std::next_permutation(booleans.begin(), booleans.end())); + } + } + + // For each cell + Star_using_triangles::const_iterator it_simplex = + star_using_triangles.begin(); + Star_using_triangles::const_iterator it_simplex_end = + star_using_triangles.end(); + for (; it_simplex != it_simplex_end; ++it_simplex) { + const Simplex &c = it_simplex->first; + + // Don't export infinite cells + if (is_infinite(c)) + continue; + + int color_simplex = it_simplex->second; + + std::stringstream sstr_c; + + Simplex::const_iterator it_point_idx = c.begin(); + for (; it_point_idx != c.end(); ++it_point_idx) { + sstr_c << *it_point_idx << " "; + } + + os << 3 << " " << sstr_c.str(); + if (color_inconsistencies || p_simpl_to_color_in_red + || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) { + switch (color_simplex) { + case 0: os << " 255 255 0"; + break; + case 1: os << " 255 0 0"; + break; + case 2: os << " 0 255 0"; + break; + case 3: os << " 0 0 255"; + break; + default: os << " " << color.str(); + break; + } + } + ++num_OFF_simplices; + os << "\n"; + } + if (is_star_inconsistent) + ++num_inconsistent_stars; + } + +#ifdef DEBUG_TRACES + std::cerr + << "\n==========================================================\n" + << "Export from list of stars to OFF:\n" + << " * Number of vertices: " << m_points.size() << "\n" + << " * Total number of maximal simplices: " << num_maximal_simplices + << "\n"; + if (color_inconsistencies) { + std::cerr + << " * Number of inconsistent stars: " + << num_inconsistent_stars << " (" + << (m_points.size() > 0 ? + 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n" + << " * Number of inconsistent maximal simplices: " + << num_inconsistent_maximal_simplices << " (" + << (num_maximal_simplices > 0 ? + 100. * num_inconsistent_maximal_simplices / num_maximal_simplices + : 0.) 
<< "%)\n"; + } + std::cerr << "==========================================================\n"; +#endif + + return os; + } + + public: + std::ostream &export_simplices_to_off( + const Simplicial_complex &complex, + std::ostream & os, std::size_t &num_OFF_simplices, + Simplex_set const *p_simpl_to_color_in_red = NULL, + Simplex_set const *p_simpl_to_color_in_green = NULL, + Simplex_set const *p_simpl_to_color_in_blue = NULL) + const { + typedef Simplicial_complex::Simplex Simplex; + typedef Simplicial_complex::Simplex_set Simplex_set; + + // If m_intrinsic_dim = 1, each point is output two times + // (see export_vertices_to_off) + num_OFF_simplices = 0; + std::size_t num_maximal_simplices = 0; + + typename Simplex_set::const_iterator it_s = + complex.simplex_range().begin(); + typename Simplex_set::const_iterator it_s_end = + complex.simplex_range().end(); + // For each simplex + for (; it_s != it_s_end; ++it_s) { + Simplex c = *it_s; + ++num_maximal_simplices; + + int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue + if (p_simpl_to_color_in_red && + std::find( + p_simpl_to_color_in_red->begin(), + p_simpl_to_color_in_red->end(), + c) != p_simpl_to_color_in_red->end()) { + color_simplex = 1; + } else if (p_simpl_to_color_in_green && + std::find(p_simpl_to_color_in_green->begin(), + p_simpl_to_color_in_green->end(), + c) != p_simpl_to_color_in_green->end()) { + color_simplex = 2; + } else if (p_simpl_to_color_in_blue && + std::find(p_simpl_to_color_in_blue->begin(), + p_simpl_to_color_in_blue->end(), + c) != p_simpl_to_color_in_blue->end()) { + color_simplex = 3; + } + + // Gather the triangles here + typedef std::vector Triangles; + Triangles triangles; + + std::size_t num_vertices = c.size(); + // Do not export smaller dimension simplices + if (num_vertices < m_intrinsic_dim + 1) + continue; + + // If m_intrinsic_dim = 1, each point is output two times, + // so we need to multiply each index by 2 + // And if only 2 vertices, add a third one (each vertex is duplicated in + // the file when m_intrinsic dim = 2) + if (m_intrinsic_dim == 1) { + Simplex tmp_c; + Simplex::iterator it = c.begin(); + for (; it != c.end(); ++it) + tmp_c.insert(*it * 2); + if (num_vertices == 2) + tmp_c.insert(*tmp_c.rbegin() + 1); + + c = tmp_c; + } + + if (num_vertices <= 3) { + triangles.push_back(c); + } else { + // num_vertices >= 4: decompose the simplex in triangles + std::vector booleans(num_vertices, false); + std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true); + do { + Simplex triangle; + Simplex::iterator it = c.begin(); + for (int i = 0; it != c.end(); ++i, ++it) { + if (booleans[i]) + triangle.insert(*it); + } + triangles.push_back(triangle); + } while (std::next_permutation(booleans.begin(), booleans.end())); + } + + // For each cell + Triangles::const_iterator it_tri = triangles.begin(); + Triangles::const_iterator it_tri_end = triangles.end(); + for (; it_tri != it_tri_end; ++it_tri) { + // Don't export infinite cells + if (is_infinite(*it_tri)) + continue; + + os << 3 << " "; + Simplex::const_iterator it_point_idx = it_tri->begin(); + for (; it_point_idx != it_tri->end(); ++it_point_idx) { + os << *it_point_idx << " "; + } + + if (p_simpl_to_color_in_red || p_simpl_to_color_in_green + || p_simpl_to_color_in_blue) { + switch (color_simplex) { + case 0: os << " 255 255 0"; + break; + case 1: os << " 255 0 0"; + break; + case 2: os << " 0 255 0"; + break; + case 3: os << " 0 0 255"; + break; + default: os << " 128 128 128"; + break; + } + } + + ++num_OFF_simplices; 
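Aside (not part of the patch): the decomposition loop just above, and the combinations() helper in Tangential_complex/utilities.h further down, enumerate the k-element subsets of a simplex with the same idiom: permute a 0/1 mask that starts in its lowest ordering and keep the positions flagged with 1. A tiny standalone illustration with hypothetical names:

#include <algorithm>
#include <iostream>
#include <vector>

int main() {
  const int n = 5, k = 3;                  // enumerate all 3-subsets of {0,...,4}
  std::vector<int> mask(n, 0);
  std::fill(mask.begin() + (n - k), mask.end(), 1);  // lowest permutation: 0 0 1 1 1
  do {
    for (int i = 0; i < n; ++i)
      if (mask[i]) std::cout << i << ' ';  // indices selected by this mask
    std::cout << '\n';
  } while (std::next_permutation(mask.begin(), mask.end()));
  return 0;
}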
+ os << "\n"; + } + } + +#ifdef DEBUG_TRACES + std::cerr + << "\n==========================================================\n" + << "Export from complex to OFF:\n" + << " * Number of vertices: " << m_points.size() << "\n" + << " * Total number of maximal simplices: " << num_maximal_simplices + << "\n" + << "==========================================================\n"; +#endif + + return os; + } + + private: + const K m_k; + const int m_intrinsic_dim; + const int m_ambient_dim; + + Points m_points; + Weights m_weights; +#ifdef GUDHI_TC_PERTURB_POSITION + Translations_for_perturb m_translations; +#if defined(GUDHI_USE_TBB) + Mutex_for_perturb *m_p_perturb_mutexes; +#endif +#endif + + Points_ds m_points_ds; + double m_last_max_perturb; + std::vector m_are_tangent_spaces_computed; + TS_container m_tangent_spaces; +#ifdef GUDHI_TC_EXPORT_NORMALS + OS_container m_orth_spaces; +#endif + Tr_container m_triangulations; // Contains the triangulations + // and their center vertex + Stars_container m_stars; + std::vector m_squared_star_spheres_radii_incl_margin; + +#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM + Points m_points_for_tse; + Points_ds m_points_ds_for_tse; +#endif + + mutable CGAL::Random m_random_generator; +}; // /class Tangential_complex + +} // end namespace tangential_complex +} // end namespace Gudhi + +#endif // TANGENTIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h new file mode 100644 index 00000000..65c74ca5 --- /dev/null +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h @@ -0,0 +1,539 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#ifndef TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_ +#define TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_ + +#include +#include +#include +#include + +#include + +// For is_pure_pseudomanifold +#include +#include +#include +#include + +#include +#include +#include +#include // for map<> +#include // for vector<> +#include // for set<> + +namespace Gudhi { +namespace tangential_complex { +namespace internal { + +class Simplicial_complex { + public: + typedef boost::container::flat_set Simplex; + typedef std::set Simplex_set; + + // If perform_checks = true, the function: + // - won't insert the simplex if it is already in a higher dim simplex + // - will erase any lower-dim simplices that are faces of the new simplex + // Returns true if the simplex was added + bool add_simplex( + const Simplex &s, bool perform_checks = true) { + if (perform_checks) { + unsigned int num_pts = static_cast (s.size()); + std::vector to_erase; + bool check_higher_dim_simpl = true; + for (Complex::iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + // Check if the simplex is not already in a higher dim simplex + if (check_higher_dim_simpl + && it_simplex->size() > num_pts + && std::includes(it_simplex->begin(), it_simplex->end(), + s.begin(), s.end())) { + // No need to insert it, then + return false; + } + // Check if the simplex includes some lower-dim simplices + if (it_simplex->size() < num_pts + && std::includes(s.begin(), s.end(), + it_simplex->begin(), it_simplex->end())) { + to_erase.push_back(it_simplex); + // We don't need to check higher-sim simplices any more + check_higher_dim_simpl = false; + } + } + for (std::vector::const_iterator it = to_erase.begin(); + it != to_erase.end(); ++it) { + m_complex.erase(*it); + } + } + return m_complex.insert(s).second; + } + + const Simplex_set &simplex_range() const { + return m_complex; + } + + bool empty() { + return m_complex.empty(); + } + + void clear() { + m_complex.clear(); + } + + template + void get_simplices_matching_test(Test test, Output_it out) { + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (test(*it_simplex)) + *out++ = *it_simplex; + } + } + + // When a simplex S has only one co-face C, we can remove S and C + // without changing the topology + + void collapse(int max_simplex_dim, bool quiet = false) { +#ifdef DEBUG_TRACES + if (!quiet) + std::cerr << "Collapsing... "; +#endif + // We note k = max_simplex_dim - 1 + int k = max_simplex_dim - 1; + + typedef Complex::iterator Simplex_iterator; + typedef std::vector Simplex_iterator_list; + typedef std::map Cofaces_map; + + std::size_t num_collapsed_maximal_simplices = 0; + do { + num_collapsed_maximal_simplices = 0; + // Create a map associating each non-maximal k-faces to the list of its + // maximal cofaces + Cofaces_map cofaces_map; + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (static_cast (it_simplex->size()) > k + 1) { + std::vector k_faces; + // Get the k-faces composing the simplex + combinations(*it_simplex, k + 1, std::back_inserter(k_faces)); + for (const auto &comb : k_faces) + cofaces_map[comb].push_back(it_simplex); + } + } + + // For each non-maximal k-face F, if F has only one maximal coface Cf: + // - Look for the other k-faces F2, F3... 
of Cf in the map and: + // * if the list contains only Cf, clear the list (we don't remove the + // list since it creates troubles with the iterators) and add the F2, + // F3... to the complex + // * otherwise, remove Cf from the associated list + // - Remove Cf from the complex + for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(), + it_map_end = cofaces_map.end(); + it_map_elt != it_map_end; + ++it_map_elt) { + if (it_map_elt->second.size() == 1) { + std::vector k_faces; + const Simplex_iterator_list::value_type &it_Cf = + *it_map_elt->second.begin(); + GUDHI_CHECK(it_Cf->size() == max_simplex_dim + 1, + std::logic_error("Wrong dimension")); + // Get the k-faces composing the simplex + combinations(*it_Cf, k + 1, std::back_inserter(k_faces)); + for (const auto &f2 : k_faces) { + // Skip F + if (f2 != it_map_elt->first) { + Cofaces_map::iterator it_comb_in_map = cofaces_map.find(f2); + if (it_comb_in_map->second.size() == 1) { + it_comb_in_map->second.clear(); + m_complex.insert(f2); + } else { // it_comb_in_map->second.size() > 1 + Simplex_iterator_list::iterator it = std::find(it_comb_in_map->second.begin(), + it_comb_in_map->second.end(), + it_Cf); + GUDHI_CHECK(it != it_comb_in_map->second.end(), + std::logic_error("Error: it == it_comb_in_map->second.end()")); + it_comb_in_map->second.erase(it); + } + } + } + m_complex.erase(it_Cf); + ++num_collapsed_maximal_simplices; + } + } + // Repeat until no maximal simplex got removed + } while (num_collapsed_maximal_simplices > 0); + + // Collapse the lower dimension simplices + if (k > 0) + collapse(max_simplex_dim - 1, true); + +#ifdef DEBUG_TRACES + if (!quiet) + std::cerr << "done.\n"; +#endif + } + + void display_stats() const { + std::cerr << yellow << "Complex stats:\n" << white; + + if (m_complex.empty()) { + std::cerr << " * No simplices.\n"; + } else { + // Number of simplex for each dimension + std::map simplex_stats; + + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + ++simplex_stats[static_cast (it_simplex->size()) - 1]; + } + + for (std::map::const_iterator it_map = simplex_stats.begin(); + it_map != simplex_stats.end(); ++it_map) { + std::cerr << " * " << it_map->first << "-simplices: " + << it_map->second << "\n"; + } + } + } + + // verbose_level = 0, 1 or 2 + bool is_pure_pseudomanifold__do_not_check_if_stars_are_connected(int simplex_dim, + bool allow_borders = false, + bool exit_at_the_first_problem = false, + int verbose_level = 0, + std::size_t *p_num_wrong_dim_simplices = NULL, + std::size_t *p_num_wrong_number_of_cofaces = NULL) const { + typedef Simplex K_1_face; + typedef std::map Cofaces_map; + + std::size_t num_wrong_dim_simplices = 0; + std::size_t num_wrong_number_of_cofaces = 0; + + // Counts the number of cofaces of each K_1_face + + // Create a map associating each non-maximal k-faces to the list of its + // maximal cofaces + Cofaces_map cofaces_map; + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (static_cast (it_simplex->size()) != simplex_dim + 1) { + if (verbose_level >= 2) + std::cerr << "Found a simplex with dim = " + << it_simplex->size() - 1 << "\n"; + ++num_wrong_dim_simplices; + } else { + std::vector k_1_faces; + // Get the facets composing the simplex + combinations( + *it_simplex, simplex_dim, std::back_inserter(k_1_faces)); + for (const auto &k_1_face : k_1_faces) { + 
++cofaces_map[k_1_face]; + } + } + } + + for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(), + it_map_end = cofaces_map.end(); + it_map_elt != it_map_end; + ++it_map_elt) { + if (it_map_elt->second != 2 + && (!allow_borders || it_map_elt->second != 1)) { + if (verbose_level >= 2) + std::cerr << "Found a k-1-face with " + << it_map_elt->second << " cofaces\n"; + + if (exit_at_the_first_problem) + return false; + else + ++num_wrong_number_of_cofaces; + } + } + + bool ret = num_wrong_dim_simplices == 0 && num_wrong_number_of_cofaces == 0; + + if (verbose_level >= 1) { + std::cerr << "Pure pseudo-manifold: "; + if (ret) { + std::cerr << green << "YES" << white << "\n"; + } else { + std::cerr << red << "NO" << white << "\n" + << " * Number of wrong dimension simplices: " + << num_wrong_dim_simplices << "\n" + << " * Number of wrong number of cofaces: " + << num_wrong_number_of_cofaces << "\n"; + } + } + + if (p_num_wrong_dim_simplices) + *p_num_wrong_dim_simplices = num_wrong_dim_simplices; + if (p_num_wrong_number_of_cofaces) + *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces; + + return ret; + } + + template + std::size_t num_K_simplices() const { + Simplex_set k_simplices; + + for (Complex::const_iterator it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (it_simplex->size() == K + 1) { + k_simplices.insert(*it_simplex); + } else if (it_simplex->size() > K + 1) { + // Get the k-faces composing the simplex + combinations( + *it_simplex, K + 1, std::inserter(k_simplices, k_simplices.begin())); + } + } + + return k_simplices.size(); + } + + std::ptrdiff_t euler_characteristic(bool verbose = false) const { + if (verbose) + std::cerr << "\nComputing Euler characteristic of the complex...\n"; + + std::size_t num_vertices = num_K_simplices<0>(); + std::size_t num_edges = num_K_simplices<1>(); + std::size_t num_triangles = num_K_simplices<2>(); + + std::ptrdiff_t ec = + (std::ptrdiff_t) num_vertices + - (std::ptrdiff_t) num_edges + + (std::ptrdiff_t) num_triangles; + + if (verbose) + std::cerr << "Euler characteristic: V - E + F = " + << num_vertices << " - " << num_edges << " + " << num_triangles << " = " + << blue + << ec + << white << "\n"; + + return ec; + } + + // TODO(CJ): ADD COMMENTS + + bool is_pure_pseudomanifold( + int simplex_dim, + std::size_t num_vertices, + bool allow_borders = false, + bool exit_at_the_first_problem = false, + int verbose_level = 0, + std::size_t *p_num_wrong_dim_simplices = NULL, + std::size_t *p_num_wrong_number_of_cofaces = NULL, + std::size_t *p_num_unconnected_stars = NULL, + Simplex_set *p_wrong_dim_simplices = NULL, + Simplex_set *p_wrong_number_of_cofaces_simplices = NULL, + Simplex_set *p_unconnected_stars_simplices = NULL) const { + // If simplex_dim == 1, we do not need to check if stars are connected + if (simplex_dim == 1) { + if (p_num_unconnected_stars) + *p_num_unconnected_stars = 0; + return is_pure_pseudomanifold__do_not_check_if_stars_are_connected(simplex_dim, + allow_borders, + exit_at_the_first_problem, + verbose_level, + p_num_wrong_dim_simplices, + p_num_wrong_number_of_cofaces); + } + // Associates each vertex (= the index in the vector) + // to its star (list of simplices) + typedef std::vector > Stars; + std::size_t num_wrong_dim_simplices = 0; + std::size_t num_wrong_number_of_cofaces = 0; + std::size_t num_unconnected_stars = 0; + + // Fills a Stars data structure + Stars stars; + stars.resize(num_vertices); + for (Complex::const_iterator 
it_simplex = m_complex.begin(), + it_simplex_end = m_complex.end(); + it_simplex != it_simplex_end; + ++it_simplex) { + if (static_cast (it_simplex->size()) != simplex_dim + 1) { + if (verbose_level >= 2) + std::cerr << "Found a simplex with dim = " + << it_simplex->size() - 1 << "\n"; + ++num_wrong_dim_simplices; + if (p_wrong_dim_simplices) + p_wrong_dim_simplices->insert(*it_simplex); + } else { + for (Simplex::const_iterator it_point_idx = it_simplex->begin(); + it_point_idx != it_simplex->end(); + ++it_point_idx) { + stars[*it_point_idx].push_back(it_simplex); + } + } + } + + // Now, for each star, we have a vector of its d-simplices + // i.e. one index for each d-simplex + // Boost Graph only deals with indexes, so we also need indexes for the + // (d-1)-simplices + std::size_t center_vertex_index = 0; + for (Stars::const_iterator it_star = stars.begin(); + it_star != stars.end(); + ++it_star, ++center_vertex_index) { + typedef std::map > + Dm1_faces_to_adj_D_faces; + Dm1_faces_to_adj_D_faces dm1_faces_to_adj_d_faces; + + for (std::size_t i_dsimpl = 0; i_dsimpl < it_star->size(); ++i_dsimpl) { + Simplex dm1_simpl_of_link = *((*it_star)[i_dsimpl]); + dm1_simpl_of_link.erase(center_vertex_index); + // Copy it to a vector so that we can use operator[] on it + std::vector dm1_simpl_of_link_vec( + dm1_simpl_of_link.begin(), dm1_simpl_of_link.end()); + + CGAL::Combination_enumerator dm2_simplices( + simplex_dim - 1, 0, simplex_dim); + for (; !dm2_simplices.finished(); ++dm2_simplices) { + Simplex dm2_simpl; + for (int j = 0; j < simplex_dim - 1; ++j) + dm2_simpl.insert(dm1_simpl_of_link_vec[dm2_simplices[j]]); + dm1_faces_to_adj_d_faces[dm2_simpl].push_back(i_dsimpl); + } + } + + Adj_graph adj_graph; + std::vector d_faces_descriptors; + d_faces_descriptors.resize(it_star->size()); + for (std::size_t j = 0; j < it_star->size(); ++j) + d_faces_descriptors[j] = boost::add_vertex(adj_graph); + + Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it = + dm1_faces_to_adj_d_faces.begin(); + Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it_end = + dm1_faces_to_adj_d_faces.end(); + for (std::size_t i_km1_face = 0; + dm1_to_d_it != dm1_to_d_it_end; + ++dm1_to_d_it, ++i_km1_face) { + Graph_vertex km1_gv = boost::add_vertex(adj_graph); + + for (std::vector::const_iterator kface_it = + dm1_to_d_it->second.begin(); + kface_it != dm1_to_d_it->second.end(); + ++kface_it) { + boost::add_edge(km1_gv, *kface_it, adj_graph); + } + + if (dm1_to_d_it->second.size() != 2 + && (!allow_borders || dm1_to_d_it->second.size() != 1)) { + ++num_wrong_number_of_cofaces; + if (p_wrong_number_of_cofaces_simplices) { + for (auto idx : dm1_to_d_it->second) + p_wrong_number_of_cofaces_simplices->insert(*((*it_star)[idx])); + } + } + } + + // What is left is to check the connexity + bool is_connected = true; + if (boost::num_vertices(adj_graph) > 0) { + std::vector components(boost::num_vertices(adj_graph)); + is_connected = + (boost::connected_components(adj_graph, &components[0]) == 1); + } + + if (!is_connected) { + if (verbose_level >= 2) + std::cerr << "Error: star #" << center_vertex_index + << " is not connected\n"; + ++num_unconnected_stars; + if (p_unconnected_stars_simplices) { + for (std::vector::const_iterator + it_simpl = it_star->begin(), + it_simpl_end = it_star->end(); + it_simpl != it_simpl_end; + ++it_simpl) { + p_unconnected_stars_simplices->insert(**it_simpl); + } + } + } + } + + // Each one has been counted several times ("simplex_dim" times) + num_wrong_number_of_cofaces /= simplex_dim; + + bool ret = 
+ num_wrong_dim_simplices == 0 + && num_wrong_number_of_cofaces == 0 + && num_unconnected_stars == 0; + + if (verbose_level >= 1) { + std::cerr << "Pure pseudo-manifold: "; + if (ret) { + std::cerr << green << "YES" << white << "\n"; + } else { + std::cerr << red << "NO" << white << "\n" + << " * Number of wrong dimension simplices: " + << num_wrong_dim_simplices << "\n" + << " * Number of wrong number of cofaces: " + << num_wrong_number_of_cofaces << "\n" + << " * Number of not-connected stars: " + << num_unconnected_stars << "\n"; + } + } + + if (p_num_wrong_dim_simplices) + *p_num_wrong_dim_simplices = num_wrong_dim_simplices; + if (p_num_wrong_number_of_cofaces) + *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces; + if (p_num_unconnected_stars) + *p_num_unconnected_stars = num_unconnected_stars; + + return ret; + } + + private: + typedef Simplex_set Complex; + + // graph is an adjacency list + typedef boost::adjacency_list Adj_graph; + // map that gives to a certain simplex its node in graph and its dimension + typedef boost::graph_traits::vertex_descriptor Graph_vertex; + typedef boost::graph_traits::edge_descriptor Graph_edge; + + Complex m_complex; +}; // class Simplicial_complex + +} // namespace internal +} // namespace tangential_complex +} // namespace Gudhi + +#endif // TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_ diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h new file mode 100644 index 00000000..98a1b14f --- /dev/null +++ b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h @@ -0,0 +1,44 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */
+
+#ifndef TANGENTIAL_COMPLEX_CONFIG_H_
+#define TANGENTIAL_COMPLEX_CONFIG_H_
+
+#include <cstddef>
+
+// ========================= Debugging & profiling =============================
+#define GUDHI_TC_PROFILING
+#define DEBUG_TRACES
+// #define GUDHI_TC_VERY_VERBOSE
+// #define GUDHI_TC_PERFORM_EXTRA_CHECKS
+// #define GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+
+// ========================= Strategy ==========================================
+#define GUDHI_TC_PERTURB_POSITION
+// #define GUDHI_TC_PERTURB_WEIGHT
+
+// ========================= Parameters ========================================
+
+// PCA will use GUDHI_TC_BASE_VALUE_FOR_PCA^intrinsic_dim points
+const std::size_t GUDHI_TC_BASE_VALUE_FOR_PCA = 5;
+
+#endif // TANGENTIAL_COMPLEX_CONFIG_H_
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h
new file mode 100644
index 00000000..b2d6d674
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h
@@ -0,0 +1,195 @@
+/* This file is part of the Gudhi Library. The Gudhi library
+ * (Geometric Understanding in Higher Dimensions) is a generic C++
+ * library for computational topology.
+ *
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 INRIA
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ */ + +#ifndef TANGENTIAL_COMPLEX_UTILITIES_H_ +#define TANGENTIAL_COMPLEX_UTILITIES_H_ + +#include +#include +#include + +#include + +#include +#include + +#include +#include +#include +#include +#include +#include // for std::sqrt + +namespace Gudhi { +namespace tangential_complex { +namespace internal { + +// Provides copy constructors to std::atomic so that +// it can be used in a vector +template +struct Atomic_wrapper +: public std::atomic { + typedef std::atomic Base; + + Atomic_wrapper() { } + + Atomic_wrapper(const T &t) : Base(t) { } + + Atomic_wrapper(const std::atomic &a) : Base(a.load()) { } + + Atomic_wrapper(const Atomic_wrapper &other) : Base(other.load()) { } + + Atomic_wrapper &operator=(const T &other) { + Base::store(other); + return *this; + } + + Atomic_wrapper &operator=(const std::atomic &other) { + Base::store(other.load()); + return *this; + } + + Atomic_wrapper &operator=(const Atomic_wrapper &other) { + Base::store(other.load()); + return *this; + } +}; + +// Modifies v in-place +template +typename K::Vector_d& normalize_vector(typename K::Vector_d& v, + K const& k) { + v = k.scaled_vector_d_object()( + v, typename K::FT(1) / std::sqrt(k.squared_length_d_object()(v))); + return v; +} + +template +struct Basis { + typedef typename Kernel::FT FT; + typedef typename Kernel::Point_d Point; + typedef typename Kernel::Vector_d Vector; + typedef typename std::vector::const_iterator const_iterator; + + std::size_t m_origin; + std::vector m_vectors; + + std::size_t origin() const { + return m_origin; + } + + void set_origin(std::size_t o) { + m_origin = o; + } + + const_iterator begin() const { + return m_vectors.begin(); + } + + const_iterator end() const { + return m_vectors.end(); + } + + std::size_t size() const { + return m_vectors.size(); + } + + Vector& operator[](const std::size_t i) { + return m_vectors[i]; + } + + const Vector& operator[](const std::size_t i) const { + return m_vectors[i]; + } + + void push_back(const Vector& v) { + m_vectors.push_back(v); + } + + void reserve(const std::size_t s) { + m_vectors.reserve(s); + } + + Basis() { } + + Basis(std::size_t origin) : m_origin(origin) { } + + Basis(std::size_t origin, const std::vector& vectors) + : m_origin(origin), m_vectors(vectors) { } + + int dimension() const { + return static_cast (m_vectors.size()); + } +}; + +// 1st line: number of points +// Then one point per line +template +std::ostream &export_point_set( + Kernel const& k, + Point_range const& points, + std::ostream & os, + const char *coord_separator = " ") { + // Kernel functors + typename Kernel::Construct_cartesian_const_iterator_d ccci = + k.construct_cartesian_const_iterator_d_object(); + + os << points.size() << "\n"; + + typename Point_range::const_iterator it_p = points.begin(); + typename Point_range::const_iterator it_p_end = points.end(); + // For each point p + for (; it_p != it_p_end; ++it_p) { + for (auto it = ccci(*it_p); it != ccci(*it_p, 0); ++it) + os << CGAL::to_double(*it) << coord_separator; + + os << "\n"; + } + + return os; +} + +// Compute all the k-combinations of elements +// Output_iterator::value_type must be +// boost::container::flat_set +template +void combinations(const Elements_container elements, int k, + Output_iterator combinations) { + std::size_t n = elements.size(); + std::vector booleans(n, false); + std::fill(booleans.begin() + n - k, booleans.end(), true); + do { + boost::container::flat_set combination; + typename Elements_container::const_iterator it_elt = elements.begin(); + for (std::size_t i = 
0; i < n; ++i, ++it_elt) { + if (booleans[i]) + combination.insert(*it_elt); + } + *combinations++ = combination; + } while (std::next_permutation(booleans.begin(), booleans.end())); +} + +} // namespace internal +} // namespace tangential_complex +} // namespace Gudhi + +#endif // TANGENTIAL_COMPLEX_UTILITIES_H_ diff --git a/src/Tangential_complex/test/CMakeLists.txt b/src/Tangential_complex/test/CMakeLists.txt new file mode 100644 index 00000000..075028c8 --- /dev/null +++ b/src/Tangential_complex/test/CMakeLists.txt @@ -0,0 +1,31 @@ +cmake_minimum_required(VERSION 2.6) +project(Tangential_complex_tests) + +if (GCOVR_PATH) + # for gcovr to make coverage reports - Corbera Jenkins plugin + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-arcs -ftest-coverage") +endif() +if (GPROF_PATH) + # for gprof to make coverage reports - Jenkins + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pg") +endif() + +if(CGAL_FOUND) + if (NOT CGAL_VERSION VERSION_LESS 4.8.0) + if (EIGEN3_FOUND) + message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") + include( ${EIGEN3_USE_FILE} ) + include_directories (BEFORE "../../include") + + add_executable( Tangential_complex_test_TC test_tangential_complex.cpp ) + target_link_libraries(Tangential_complex_test_TC ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY}) + if (TBB_FOUND) + target_link_libraries(Tangential_complex_test_TC ${TBB_LIBRARIES}) + endif() + add_test(Tangential_complex_test_TC ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_test_TC + # XML format for Jenkins xUnit plugin + --log_format=XML --log_sink=${CMAKE_SOURCE_DIR}/Tangential_complex_UT.xml --log_level=test_suite --report_level=no) + + endif() + endif () +endif() \ No newline at end of file diff --git a/src/Tangential_complex/test/test_tangential_complex.cpp b/src/Tangential_complex/test/test_tangential_complex.cpp new file mode 100644 index 00000000..f8b0d2fb --- /dev/null +++ b/src/Tangential_complex/test/test_tangential_complex.cpp @@ -0,0 +1,70 @@ +/* This file is part of the Gudhi Library. The Gudhi library + * (Geometric Understanding in Higher Dimensions) is a generic C++ + * library for computational topology. + * + * Author(s): Clement Jamin + * + * Copyright (C) 2016 INRIA + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . 
+ */ + +#define BOOST_TEST_DYN_LINK +#define BOOST_TEST_MODULE Tangential_complex - test tangential complex +#include + +#include +#include + +#include +#include + +#include +#include + +namespace tc = Gudhi::tangential_complex; + +BOOST_AUTO_TEST_CASE(test_Spatial_tree_data_structure) { + typedef CGAL::Epick_d Kernel; + typedef Kernel::Point_d Point; + typedef tc::Tangential_complex< + Kernel, CGAL::Dynamic_dimension_tag, + CGAL::Parallel_tag> TC; + + const int INTRINSIC_DIM = 2; + const int AMBIENT_DIM = 3; + const int NUM_POINTS = 50; + + Kernel k; + + // Generate points on a 2-sphere + CGAL::Random_points_on_sphere_d generator(AMBIENT_DIM, 3.); + std::vector points; + points.reserve(NUM_POINTS); + for (int i = 0; i < NUM_POINTS; ++i) + points.push_back(*generator++); + + // Compute the TC + TC tc(points, INTRINSIC_DIM, k); + tc.compute_tangential_complex(); + + // Try to fix inconsistencies. Give it 60 seconds to succeed + auto perturb_ret = tc.fix_inconsistencies_using_perturbation(0.01, 60); + + BOOST_CHECK(perturb_ret.success); + + // Export the TC into a Simplex_tree + Gudhi::Simplex_tree<> stree; + tc.create_complex(stree); +} -- cgit v1.2.3 From 92479a53aa228c4e212067b375ab1b665116834e Mon Sep 17 00:00:00 2001 From: vrouvrea Date: Tue, 11 Oct 2016 15:24:42 +0000 Subject: Fix doxygen warning CMake message fix CMake test tangential examples git-svn-id: svn+ssh://scm.gforge.inria.fr/svnroot/gudhi/trunk@1704 636b058d-ea47-450e-bf9e-a15bfbe3eedb Former-commit-id: efa3c76e767d6ba55367b57c10ab87844c968457 --- src/Simplex_tree/doc/Intro_simplex_tree.h | 4 ++-- src/Tangential_complex/benchmark/CMakeLists.txt | 22 ++++---------------- src/Tangential_complex/example/CMakeLists.txt | 27 +++++++++---------------- src/common/doc/main_page.h | 10 ++++----- 4 files changed, 21 insertions(+), 42 deletions(-) (limited to 'src/Tangential_complex') diff --git a/src/Simplex_tree/doc/Intro_simplex_tree.h b/src/Simplex_tree/doc/Intro_simplex_tree.h index be061785..940dd694 100644 --- a/src/Simplex_tree/doc/Intro_simplex_tree.h +++ b/src/Simplex_tree/doc/Intro_simplex_tree.h @@ -66,8 +66,8 @@ Expand the simplex tree in 3.8e-05 s. 
Information of the Simplex Tree: Number of vertices = 10 Number of simplices = 98 \endcode * - * \li - * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp - Simplex tree is computed and displayed from a 3D alpha + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp - Simplex tree is computed and displayed from a 3D alpha * complex (Requires CGAL, GMP and GMPXX to be installed) * * diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt index 12488201..a217d6e6 100644 --- a/src/Tangential_complex/benchmark/CMakeLists.txt +++ b/src/Tangential_complex/benchmark/CMakeLists.txt @@ -13,28 +13,14 @@ endif() # need CGAL 4.8 if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.8.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - add_executable(Tangential_complex_benchmark benchmark_tc.cpp) target_link_libraries(Tangential_complex_benchmark ${Boost_DATE_TIME_LIBRARY} ${Boost_SYSTEM_LIBRARY} ${CGAL_LIBRARY}) if (TBB_FOUND) target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES}) - endif() - - # Do not forget to copy test files in current binary dir - #file(COPY "${CMAKE_SOURCE_DIR}/data/points/alphacomplexdoc.off" DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/) - - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for Tangential complex feature.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential complex feature. Version 4.8.0 is required.") - endif () -endif() + endif(TBB_FOUND) + endif(EIGEN3_FOUND) + endif (NOT CGAL_VERSION VERSION_LESS 4.8.0) +endif(CGAL_FOUND) diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt index 7ba043f0..a75ccd5b 100644 --- a/src/Tangential_complex/example/CMakeLists.txt +++ b/src/Tangential_complex/example/CMakeLists.txt @@ -3,14 +3,7 @@ project(Tangential_complex_examples) if(CGAL_FOUND) if (NOT CGAL_VERSION VERSION_LESS 4.8.0) - message(STATUS "CGAL version: ${CGAL_VERSION}.") - - find_package(Eigen3 3.1.0) if (EIGEN3_FOUND) - message(STATUS "Eigen3 version: ${EIGEN3_VERSION}.") - include( ${EIGEN3_USE_FILE} ) - include_directories (BEFORE "../../include") - add_executable( Tangential_complex_example_basic example_basic.cpp ) target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY} ${Boost_DATE_TIME_LIBRARY}) add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp ) @@ -18,13 +11,13 @@ if(CGAL_FOUND) if (TBB_FOUND) target_link_libraries(Tangential_complex_example_basic ${TBB_LIBRARIES}) target_link_libraries(Tangential_complex_example_with_perturb ${TBB_LIBRARIES}) - endif() - else() - message(WARNING "Eigen3 not found. Version 3.1.0 is required for the Tangential_complex examples.") - endif() - else() - message(WARNING "CGAL version: ${CGAL_VERSION} is too old to compile Tangential_complex examples. Version 4.8.0 is required.") - endif () -else() - message(WARNING "CGAL not found. 
It is required for the Tangential_complex examples.") -endif() + endif(TBB_FOUND) + + add_test(Tangential_complex_example_basic + ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_example_basic) + + add_test(Tangential_complex_example_with_perturb + ${CMAKE_CURRENT_BINARY_DIR}/Tangential_complex_example_with_perturb) + endif(EIGEN3_FOUND) + endif(NOT CGAL_VERSION VERSION_LESS 4.8.0) +endif(CGAL_FOUND) diff --git a/src/common/doc/main_page.h b/src/common/doc/main_page.h index fe23c4e7..1a2cb6ba 100644 --- a/src/common/doc/main_page.h +++ b/src/common/doc/main_page.h @@ -220,8 +220,8 @@ make \endverbatim * Library (CGAL \cite cgal:eb-15b) and will not be built if CGAL is not installed: * \li * Persistent_cohomology/alpha_complex_3d_persistence.cpp - * \li - * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp * * The following example requires CGAL version ≥ 4.6: * \li @@ -281,8 +281,8 @@ make \endverbatim * Persistent_cohomology/alpha_complex_persistence.cpp * \li * Simplex_tree/simple_simplex_tree.cpp - * \li - * Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * \li + * Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp * \li * Simplex_tree/simplex_tree_from_cliques_of_graph.cpp * \li @@ -351,7 +351,7 @@ make \endverbatim * @example Persistent_cohomology/custom_persistence_sort.cpp * @example Simplex_tree/mini_simplex_tree.cpp * @example Simplex_tree/simple_simplex_tree.cpp - * @example Simplex_tree/simplex_tree_from_alpha_shapes_3.cpp + * @example Simplex_tree/example_alpha_shapes_3_simplex_tree_from_off_file.cpp * @example Simplex_tree/simplex_tree_from_cliques_of_graph.cpp * @example Skeleton_blocker/Skeleton_blocker_from_simplices.cpp * @example Skeleton_blocker/Skeleton_blocker_iteration.cpp -- cgit v1.2.3
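End-to-end usage sketch (not part of the patch), following the test and basic example added above; the include paths and template arguments below are reconstructed guesses rather than verbatim lines from the repository, so treat them as assumptions:

#include <gudhi/Tangential_complex.h>
#include <gudhi/Simplex_tree.h>

#include <CGAL/Epick_d.h>
#include <CGAL/point_generators_d.h>

#include <vector>

int main() {
  typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
  typedef Kernel::Point_d Point;
  typedef Gudhi::tangential_complex::Tangential_complex<
      Kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag> TC;

  const int intrinsic_dim = 2, ambient_dim = 3, num_points = 50;

  // Sample points on a 2-sphere embedded in R^3.
  Kernel k;
  CGAL::Random_points_on_sphere_d<Point> generator(ambient_dim, 3.);
  std::vector<Point> points;
  for (int i = 0; i < num_points; ++i)
    points.push_back(*generator++);

  // Build the tangential complex, then perturb points to resolve inconsistencies
  // (max perturbation 0.01, time limit 60 s), as in the shipped test.
  TC tc(points, intrinsic_dim, k);
  tc.compute_tangential_complex();
  tc.fix_inconsistencies_using_perturbation(0.01, 60.);

  // Export the result into a Simplex_tree for downstream processing.
  Gudhi::Simplex_tree<> stree;
  tc.create_complex(stree);
  return 0;
}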