Diffstat (limited to 'src/Tangential_complex')
-rw-r--r--  src/Tangential_complex/benchmark/CMakeLists.txt | 9
-rw-r--r--  src/Tangential_complex/benchmark/RIB_exporter.h | 257
-rw-r--r--  src/Tangential_complex/benchmark/XML_exporter.h | 195
-rw-r--r--  src/Tangential_complex/benchmark/benchmark_script.txt | 221
-rw-r--r--  src/Tangential_complex/benchmark/benchmark_tc.cpp | 781
-rw-r--r--  src/Tangential_complex/doc/COPYRIGHT | 12
-rw-r--r--  src/Tangential_complex/doc/Intro_tangential_complex.h | 109
-rw-r--r--  src/Tangential_complex/doc/tc_example_01.png | bin 0 -> 20323 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_02.png | bin 0 -> 36017 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_03.png | bin 0 -> 62990 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_05.png | bin 0 -> 36032 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_06.png | bin 0 -> 37195 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_07.png | bin 0 -> 49399 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_07_after.png | bin 0 -> 50132 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_07_before.png | bin 0 -> 48898 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_08.png | bin 0 -> 63636 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_example_09.png | bin 0 -> 35453 bytes
-rw-r--r--  src/Tangential_complex/doc/tc_examples.png | bin 0 -> 150776 bytes
-rw-r--r--  src/Tangential_complex/example/CMakeLists.txt | 20
-rw-r--r--  src/Tangential_complex/example/example_basic.cpp | 49
-rw-r--r--  src/Tangential_complex/example/example_with_perturb.cpp | 53
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex.h | 2038
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h | 527
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex/config.h | 31
-rw-r--r--  src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h | 183
-rw-r--r--  src/Tangential_complex/test/CMakeLists.txt | 13
-rw-r--r--  src/Tangential_complex/test/test_tangential_complex.cpp | 146
27 files changed, 4644 insertions, 0 deletions
diff --git a/src/Tangential_complex/benchmark/CMakeLists.txt b/src/Tangential_complex/benchmark/CMakeLists.txt
new file mode 100644
index 00000000..621b0fd7
--- /dev/null
+++ b/src/Tangential_complex/benchmark/CMakeLists.txt
@@ -0,0 +1,9 @@
+project(Tangential_complex_benchmark)
+
+if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ add_executable(Tangential_complex_benchmark benchmark_tc.cpp)
+ target_link_libraries(Tangential_complex_benchmark ${CGAL_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Tangential_complex_benchmark ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
diff --git a/src/Tangential_complex/benchmark/RIB_exporter.h b/src/Tangential_complex/benchmark/RIB_exporter.h
new file mode 100644
index 00000000..4cec0603
--- /dev/null
+++ b/src/Tangential_complex/benchmark/RIB_exporter.h
@@ -0,0 +1,257 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef GUDHI_TC_RIB_EXPORTER_H
+#define GUDHI_TC_RIB_EXPORTER_H
+
+#include <gudhi/Tangential_complex/utilities.h>
+
+#include <tuple>
+#include <string>
+
+template <typename PointRandomAccessRange, typename SimplexRange>
+class RIB_exporter {
+ typedef typename PointRandomAccessRange::value_type Point;
+ typedef typename SimplexRange::value_type Simplex;
+ public:
+
+ typedef std::tuple<double, double, double, double> Color; // RGBA
+ typedef std::tuple<int, int, int> Coords_choice;
+
+ // Constructor
+ RIB_exporter(
+ PointRandomAccessRange const& points,
+ SimplexRange const& simplices,
+ std::ofstream &out,
+ std::string const& rendered_image_filename = "export.tif",
+ bool is_preview = false, // low-quality
+ Coords_choice coords_choice = std::make_tuple(0, 1, 2),
+ int image_width = 1920,
+ int image_height = 1080,
+ Color const& triangle_color = std::make_tuple(1., 1., 1., 1.),
+ bool ambient_light = true,
+ double ambient_intensity = 0.3,
+ bool shadow = true,
+ double shadow_intensity = 0.85,
+ double point_sphere_radius = 0.003)
+ : m_points(points),
+ m_simplices(simplices),
+ m_out(out),
+ m_rendered_image_filename(rendered_image_filename),
+ m_is_preview(is_preview),
+ m_coords_choice(coords_choice),
+ m_image_width(image_width),
+ m_image_height(image_height),
+ m_current_color(0., 0., 0., 0.),
+ m_current_alpha(1),
+ m_triangle_color(triangle_color),
+ m_ambient_light(ambient_light),
+ m_ambient_intensity(ambient_intensity),
+ m_shadow(shadow),
+ m_shadow_intensity(shadow_intensity),
+ m_point_sphere_radius(point_sphere_radius) {
+ m_out.precision(8);
+ }
+
+ void write_file() {
+ write_header();
+ write_lights();
+ /*if (m_point_sphere_radius != 0.)
+ write_point_spheres();*/
+ write_triangles();
+
+ m_out << "WorldEnd\n";
+ }
+
+ private:
+
+ void write_header() {
+ m_out << "Option \"searchpath\" \"shader\" "
+ "\".:./shaders:%PIXIE_SHADERS%:%PIXIEHOME%/shaders\"\n";
+
+ if (m_is_preview) {
+ m_out << "Attribute \"visibility\" \"specular\" 1\n"
+ << "Attribute \"visibility\" \"transmission\" 1\n\n";
+ }
+
+ m_out << "Display \"" << m_rendered_image_filename << "\" \"file\" \"rgb\"\n";
+
+ if (!m_is_preview) {
+ m_out << "Format " << m_image_width << " " << m_image_height << " 1\n";
+ } else {
+ double ratio = double(m_image_height) / double(m_image_width);
+
+ int width = (ratio < 1.) ? 300 : int(300. / ratio);
+ int height = (ratio < 1.) ? int(ratio * 300.) : 300;
+
+ m_out << "Format " << width << " " << height << " 1\n";
+ }
+
+
+ if (m_image_width > m_image_height) {
+ double ratio = double(m_image_height) / double(m_image_width);
+ m_out << "ScreenWindow -1 1 " << -ratio << " " << ratio << "\n";
+ } else if (m_image_height > m_image_width) {
+ double ratio = double(m_image_width) / double(m_image_height);
+ m_out << "ScreenWindow " << -ratio << " " << ratio << " -1 1\n";
+ }
+
+ m_out << "Projection \"perspective\" \"fov\" 45\n"
+ << "Translate 0 0 3\n"
+ << "PixelSamples 4 4\n"
+ << "PixelFilter \"catmull-rom\" 3 3\n"
+ << "ShadingInterpolation \"smooth\"\n"
+ << "Rotate -10 20 0 1\n"
+ << "WorldBegin\n";
+ }
+
+ void write_lights() {
+ if (!m_is_preview) {
+ // ShadowLight
+ m_out << "LightSource \"shadowdistant\" 1 \"from\" [0 0 0] \"to\" [0 0 1]"
+ << " \"shadowname\" \"raytrace\" \"intensity\" "
+ << m_shadow_intensity << "\n";
+
+ // Ambient light
+ m_out << "LightSource \"ambientlight\" 2 \"intensity\" "
+ << m_ambient_intensity << "\n";
+ } else {
+ m_out << "LightSource \"distantLight\" 1 \"from\" [0 0 0] \"to\" [0 0 1]"
+ << " \"intensity\" " << m_shadow_intensity << "\n";
+
+ // Ambient light
+ m_out << "LightSource \"ambientlight\" 2 \"intensity\" "
+ << m_ambient_intensity << "\n";
+ }
+
+ // Background light
+ m_out << "LightSource \"ambientlight\" 99 \"intensity\" 1\n";
+
+ // Turn background light OFF
+ turn_background_light(false);
+ }
+
+ void turn_background_light(bool turn_on) {
+ if (!turn_on) {
+ m_out << "Illuminate 1 1" << std::endl;
+ if (!m_is_preview)
+ m_out << "Illuminate 2 1" << std::endl;
+ m_out << "Illuminate 99 0" << std::endl;
+ } else {
+ m_out << "Illuminate 1 0" << std::endl;
+ if (!m_is_preview)
+ m_out << "Illuminate 2 0" << std::endl;
+ m_out << "Illuminate 99 1" << std::endl;
+ }
+ }
+
+ void write_color(Color const& color, bool use_transparency) {
+ if (m_current_color == color)
+ return;
+
+ m_current_color = color;
+
+ // Write opacity data
+ if (use_transparency)
+ write_opacity(std::get<3>(color));
+
+ // Write color data
+ m_out << "Color [ " << std::get<0>(color) << " " << std::get<1>(color)
+ << " " << std::get<2>(color) << " ]\n";
+ }
+
+ void write_opacity(const double alpha) {
+ if (m_current_alpha == alpha)
+ return;
+
+ m_current_alpha = alpha;
+
+ // Write opacity data
+ m_out << "Opacity " << alpha << " " << alpha << " " << alpha << std::endl;
+ }
+
+ void write_point(Point const& p) {
+ m_out << " " << p[std::get<0>(m_coords_choice)]
+ << " " << p[std::get<1>(m_coords_choice)]
+ << " " << p[std::get<2>(m_coords_choice)] << " ";
+ }
+
+ void write_triangles() {
+ m_out << "Surface \"plastic\" \"Ka\" 0.65 \"Kd\" 0.85 \"Ks\" 0.25 \"roughness\" 0.1" << std::endl;
+
+ for (auto const& simplex : m_simplices) {
+ std::vector<Simplex> triangles;
+ // Get the triangles composing the simplex
+ combinations(simplex, 3, std::back_inserter(triangles));
+ for (auto const& t : triangles)
+ write_triangle(t);
+ }
+ }
+
+ template <typename PointIndexRange>
+ void write_triangle(PointIndexRange const& t) {
+ // Color
+ write_color(m_triangle_color, true);
+
+ // Triangle
+ m_out << "Polygon \"P\" [";
+ for (auto idx : t)
+ write_point(m_points[idx]);
+ m_out << "]" << std::endl;
+
+ // Edges (will be drawn later on)
+ /*add_edge(p, q, edge_color);
+ add_edge(p, r, edge_color);
+ add_edge(q, r, edge_color);
+
+ // Vertices (will be drawn later on)
+ add_vertex(p, edge_color);
+ add_vertex(q, edge_color);
+ add_vertex(r, edge_color);*/
+ }
+
+ void write_point_sphere(Point const& p) {
+ if (m_point_sphere_radius == 0.)
+ return;
+
+ m_out << "Translate " << p[0] << " " << p[1] << " " << p[2] << std::endl;
+ // Sphere radius zmin zmax thetamax
+ m_out << "Sphere " << m_point_sphere_radius << " " << -m_point_sphere_radius
+ << " " << m_point_sphere_radius << " 360" << std::endl;
+ m_out << "Identity" << std::endl;
+ }
+
+ void write_point_spheres() {
+ write_color(std::make_tuple(0.7, 0.7, 0.7, 0.5), true);
+ for (auto const& p : m_points)
+ write_point_sphere(p);
+ }
+
+ //===========================================================================
+
+ PointRandomAccessRange const& m_points;
+ SimplexRange const& m_simplices;
+ std::ofstream &m_out;
+ std::string m_rendered_image_filename;
+ bool m_is_preview;
+ Coords_choice m_coords_choice;
+ int m_image_width;
+ int m_image_height;
+ Color m_current_color;
+ Color m_triangle_color;
+ double m_current_alpha;
+ bool m_ambient_light;
+ double m_ambient_intensity;
+ bool m_shadow;
+ double m_shadow_intensity;
+ double m_point_sphere_radius;
+};
+
+#endif // GUDHI_TC_RIB_EXPORTER_H
diff --git a/src/Tangential_complex/benchmark/XML_exporter.h b/src/Tangential_complex/benchmark/XML_exporter.h
new file mode 100644
index 00000000..16b62eb6
--- /dev/null
+++ b/src/Tangential_complex/benchmark/XML_exporter.h
@@ -0,0 +1,195 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#include <string>
+#include <vector>
+#include <map>
+#include <iostream>
+#include <fstream>
+#include <ctime>
+
+template<typename value_type = std::string>
+class Simple_XML_exporter {
+ public:
+ typedef value_type Value_type;
+ typedef std::vector<value_type> Element;
+ typedef std::map<std::string, value_type> Element_with_map;
+ typedef std::vector<Element> List_of_elements;
+
+ Simple_XML_exporter(
+ const std::string &list_name,
+ const std::string &element_name,
+ const std::vector<std::string> &subelement_names,
+ bool add_timestamp = true)
+ : m_list_name(list_name),
+ m_element_name(element_name),
+ m_subelement_names(subelement_names),
+ m_add_timestamp(add_timestamp) { }
+
+ bool add_element(const Element &element) {
+ if (element.size() == m_subelement_names.size()) {
+ m_list_of_elements.push_back(element);
+ return true;
+ } else {
+ std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl;
+ return false;
+ }
+ }
+
+ bool add_element(Element_with_map &element) {
+ Element elt;
+
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+ for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) {
+ elt.push_back(element[*it_subelement_name]);
+ }
+
+ return add_element(elt);
+ }
+
+ bool export_to_xml(const std::string &filename) const {
+ std::ofstream xmlfile;
+ xmlfile.open(filename.c_str());
+ xmlfile << "<?xml version='1.0'?>" << std::endl;
+ xmlfile << "<" << m_list_name << ">" << std::endl;
+
+ typename List_of_elements::const_iterator it_element = m_list_of_elements.begin();
+ typename List_of_elements::const_iterator it_element_end = m_list_of_elements.end();
+ for (int id = 1; it_element != it_element_end; ++it_element, ++id) {
+ xmlfile << " <" << m_element_name << ">" << std::endl;
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+
+ if (m_add_timestamp)
+ xmlfile << " <id> " << time(NULL) << " </id>" << std::endl;
+
+ for (int i = 0;
+ it_subelement_name != it_subelement_name_end;
+ ++it_subelement_name, ++i) {
+ xmlfile
+ << " <" << *it_subelement_name << "> "
+ << (*it_element)[i]
+ << " </" << *it_subelement_name << ">" << std::endl;
+ }
+ xmlfile << " </" << m_element_name << ">" << std::endl;
+ }
+
+ xmlfile << "</" << m_list_name << ">" << std::endl;
+ xmlfile.close();
+ return true;
+
+ }
+
+ protected:
+ std::string m_list_name;
+ std::string m_element_name;
+ std::vector<std::string> m_subelement_names;
+ List_of_elements m_list_of_elements;
+ bool m_add_timestamp;
+};
+
+template<typename value_type = std::string>
+class Streaming_XML_exporter {
+ public:
+ typedef value_type Value_type;
+ typedef std::vector<value_type> Element;
+ typedef std::map<std::string, value_type> Element_with_map;
+ typedef std::vector<Element> List_of_elements;
+
+ Streaming_XML_exporter(
+ const std::string &filename,
+ const std::string &list_name,
+ const std::string &element_name,
+ const std::vector<std::string> &subelement_names,
+ bool add_timestamp = true)
+ : m_list_name(list_name),
+ m_element_name(element_name),
+ m_subelement_names(subelement_names),
+ m_add_timestamp(add_timestamp) {
+ m_xml_fstream.open(filename.c_str());
+ if (m_xml_fstream.good()) {
+ m_xml_fstream << "<?xml version='1.0'?>" << std::endl;
+ m_xml_fstream << "<" << m_list_name << ">" << std::endl;
+ } else {
+ std::cerr << "Could not open file '" << filename << "'." << std::endl;
+ }
+ }
+
+ virtual ~Streaming_XML_exporter() {
+ close_file();
+ }
+
+ void close_file() {
+ m_xml_fstream.close();
+ }
+
+ bool add_element(const Element &element) {
+ if (element.size() == m_subelement_names.size()) {
+ m_xml_fstream << " <" << m_element_name << ">" << std::endl;
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+
+ if (m_add_timestamp) {
+ m_xml_fstream << " <id> " << time(NULL) << " </id>" << std::endl;
+ }
+
+ for (int i = 0;
+ it_subelement_name != it_subelement_name_end;
+ ++it_subelement_name, ++i) {
+ m_xml_fstream
+ << " <" << *it_subelement_name << "> "
+ << element[i]
+ << " </" << *it_subelement_name << ">" << std::endl;
+ }
+ m_xml_fstream << " </" << m_element_name << ">" << std::endl;
+
+ // Save current pointer position
+ std::ofstream::streampos pos = m_xml_fstream.tellp();
+ // Close the XML file (temporarily) so that the XML file is always correct
+ m_xml_fstream << "</" << m_list_name << ">" << std::endl;
+ // Restore the pointer position so that the next "add_element" will overwrite
+ // the end of the file
+ m_xml_fstream.seekp(pos);
+
+ m_xml_fstream.flush();
+ return true;
+ } else {
+ std::cerr << "ERROR: element.size() == m_subelement_names.size()" << std::endl;
+ return false;
+ }
+ }
+
+ bool add_element(Element_with_map &element) {
+ Element elt;
+
+ std::vector<std::string>::const_iterator
+ it_subelement_name = m_subelement_names.begin();
+ std::vector<std::string>::const_iterator
+ it_subelement_name_end = m_subelement_names.end();
+ for (; it_subelement_name != it_subelement_name_end; ++it_subelement_name) {
+ elt.push_back(element[*it_subelement_name]);
+ }
+
+ return add_element(elt);
+ }
+
+ protected:
+ std::ofstream m_xml_fstream;
+ std::string m_list_name;
+ std::string m_element_name;
+ std::vector<std::string> m_subelement_names;
+ bool m_add_timestamp;
+};
diff --git a/src/Tangential_complex/benchmark/benchmark_script.txt b/src/Tangential_complex/benchmark/benchmark_script.txt
new file mode 100644
index 00000000..f4ddaac3
--- /dev/null
+++ b/src/Tangential_complex/benchmark/benchmark_script.txt
@@ -0,0 +1,221 @@
+#---------------------------------------------------------------------------------------------------------------------------------------------------------
+# Input PARAM1 PARAM2 PARAM3 NUM_P AMB INTR SPARSITY MAX_PERTURB PERTURB ADD_HDIM COLLAPSE FIX_TIME_LIMIT NUM_ITERATIONS
+#---------------------------------------------------------------------------------------------------------------------------------------------------------
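+# Example reading of one line (the meaning of PARAM1/PARAM2 for generate_sphere_d
+# -- radius and radius noise percentage -- follows the parsing in benchmark_tc.cpp):
+#   generate_sphere_d 3 10 - 70 3 2 0.05 0.025 Y N N 60 1
+#   => 70 points on a sphere of radius 3 (10% radius noise) in R^3, intrinsic dim 2,
+#      sparsity 0.05, max perturbation 0.025, perturb=Y, add high-dim simplices=N,
+#      collapse=N, time limit 60 for the perturbation, 1 iteration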
+
+#---------------------------------------------------------------- Alpha TC tests ------------------------------------------------------------------------
+#generate_sphere_d 1 0 - 8 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 6 2d with a perturb sometimes
+#generate_sphere_d 1 0 - 50 2 1 0.01 0.005 N Y N 3 1 #No noise => OK: 49 1d
+#generate_sphere_d 1 1 - 50 2 1 0.01 0.005 N Y N 3 1 #Noise => OK: 45 2d + 3 3d
+#generate_torus_d N - - 15 2 1 0.01 0.05 N Y N 10 1
+#generate_sphere_d 0.302 0 - 8 3 2 0.01 0.005 N Y N 60 1 #No noise => OK: 7 3d with a perturb sometimes
+#generate_sphere_d 0.302 0 - 50 3 2 0.01 0.005 N Y N 60 1 #No noise => no inconsistencies
+#generate_sphere_d 0.302 3 - 50 3 2 0.01 0.005 N Y N 60 1 #Noise => OK: 90 2d + 3 3d
+#generate_sphere_d 1 1 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 1% => OK: 3113 3d + 35 4d
+#generate_sphere_d 1 2 - 500 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 2969 3d + 91 4d
+#generate_sphere_d 1 2 - 5000 4 3 0.01 0.005 N Y N 60 1 #Noise 2% => OK: 27905 3d + 2485 4d
+#generate_sphere_d 0.302 2 - 300 2 1 0.01 0.005 N Y N 60 1
+#generate_torus_3D 2 1 N 200 3 2 0.01 0.05 N Y N 600 1 #OK: 1048 3d ~170s
+#generate_torus_3D 2 1 N 2000 3 2 0.01 0.05 N Y N 600 1 #OK: 3545 2d + 27 3d ~35s
+#generate_torus_d N 1 - 50 4 2 0.01 0.05 N Y N 3 1 #OK: 431 4d
+#generate_torus_d N 1 - 500 4 2 0.01 0.05 N Y N 3 1 #OK: 881 2d + 37 3d
+#generate_torus_d Y 1 - 250 4 2 0.01 0.05 N Y N 3 1 #OK: 80 d2 + 185 d3
+#generate_torus_d N - - 50 6 3 0.01 0.05 Y Y N 10 1 #
+#generate_torus_d Y - - 700 6 3 0.01 0.05 Y Y N 100 1 #Grid
+#generate_torus_d N - - 10000 6 3 0.01 0.05 Y Y N 30000 1
+#generate_moment_curve 0 1 - 10 3 1 0.01 0.005 N Y N 60 1
+#generate_two_spheres_d 3 4 - 500 3 2 0.01 0.05 N Y N 10 1 #OK: 320 2d + 1167 3d
+#generate_klein_bottle_4D 40 15 - 500 4 2 0.01 0.2 N Y N 60 1 #OK: 901 d2 + 50 d3 + 1 d4
+#data/SO3_10000.xyz - - - 0 9 3 0.01 0.05 Y Y N 300 1 #Too long. Be careful with the memory consumption!
+#data/buddha_100kv.xyz - - - 0 3 2 0.01 0.005 Y Y N 120 1 #Too long...
+#data/fandisk.xyz - - - 0 3 2 0.01 0.005 Y Y N 5 1 #NOT OK: Tq & V do not intersect
+
+#---------------------------------------------------------- Spatial search benchmarking --------------------------------------------------------------
+#generate_torus_3D 2 1 Y 10000 3 2 0 0 Y N N 600 1
+#data/buddha_100kv.xyz - - - 0 3 2 0 0 N Y N 120 1
+#generate_torus_d N - - 10000 30 15 0 0 Y N N 3600 1
+#generate_torus_d N - - 100000 12 6 0 0 Y N N 3600 1
+#data/SO3_50000.xyz - - - 0 9 3 0 0 Y N N 60 1
+#data/Cy8.xyz - - - 0 24 2 0 0 N Y N 60 1
+#generate_sphere_d 0.5 - - 10000 2 1 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 3 2 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 4 3 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 5 4 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 6 5 0 0 N N Y 60 1
+#generate_sphere_d 0.5 - - 10000 7 6 0 0 N N Y 60 1
+
+#---------------------------------------------------------- Very small cases for Debug mode --------------------------------------------------------------
+#generate_sphere_d 4 - - 20 3 2 0.05 0.025 Y N N 60 1
+generate_sphere_d 3 10 - 70 3 2 0.05 0.025 Y N N 60 1
+#generate_sphere_d 3 - - 1000 3 2 0.05 0.025 Y N N 60 1
+#generate_sphere_d 3 - - 10 4 3 0.05 0.025 Y N N 60 1
+#generate_sphere_d 3 - - 70 5 4 0.05 0.025 Y N N 60 1
+#generate_klein_bottle_4D 4 3 - 70 4 2 0.05 0.025 Y N N 3 1
+#generate_klein_bottle_variant_5D 4 3 - 70 5 2 0.05 0.025 Y N N 3 1
+#data/SO3_10000.xyz - - - 0 9 3 0.7 0.35 Y N N 60 1
+#generate_moment_curve 0 1 - 30 3 1 0.005 0.0025 Y N N 60 1
+
+#------------------------------------------------------------------ From files --------------------------------------------------------------------------
+#data/SO3_50000.xyz - - - 0 9 3 0.05 0.05 Y N N 6000 1
+#data/SO3_10000.xyz - - - 0 9 3 0.1 0.1 Y N N 60000 1
+#data/cube3D_eps_0.1.xyz - - - 0 3 2 0.05 0.05 Y N N 3000 1
+#data/cube4D_eps_0.1.xyz - - - 0 4 3 0.05 0.05 N Y N 3000 1
+#data/cube5D_eps_0.1.xyz - - - 0 5 4 0.05 0.05 N Y N 3000 1
+#data/Cy8.xyz - - - 0 24 2 0.1 0.1 N Y N 60 1
+#data/Kl.xyz - - - 0 5 2 0.05 0.05 N Y N 60 1
+#data/S3.xyz - - - 0 4 3 0.05 0.05 N Y N 60 1
+
+#data/Alvarez_variete_k2_D4_29700p.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1 # points on a "grid"
+#data/Alvarez_variete_k2_D4_10k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 200 1
+#data/Alvarez_variete_k2_D4_30k_1x1_v2.xyz - - - 0 4 2 0.001 0.001 Y N N 6000 1
+#data/Alvarez_variete_k2_D4_120k_2x2_denser_in_1x1.xyz - - - 0 4 2 0.002 0.002 Y N N 60000 1
+#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 2 0.005 0.005 Y N N 100000 1
+#data/Alvarez_variete_k2_D4_300k_2x2.xyz - - - 0 4 2 0.05 0.05 Y N N 50000 1 # heavy sparsification (e.g. 0.05 => 33k points)
+#data/Alvarez_variete_k2_D4_90k_2x2.xyz - - - 0 4 2 0.003 0.003 Y N N 6000 1
+#data/Alvarez_variete_k2_D4_30k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_variete_k2_D4_60k_10x10.xyz - - - 0 4 2 0.01 0.01 Y N N 1800 1
+
+#data/Alvarez_variete_k2_D8_9003p.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1
+#data/Alvarez_variete_k2_D8_90K.xyz - - - 0 8 2 0.001 0.001 Y N N 60 1
+#data/Alvarez_variete_k2_D8_300k_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification
+#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1 # heavy sparsification
+#data/Alvarez_variete_k2_D8_900k_10x10.xyz - - - 0 8 2 0.02 0.02 Y N N 60 1 # heavy sparsification
+
+#data/Alvarez_courbeElliptique_k2_D8_200K_2x2.xyz - - - 0 8 2 0.006 0.006 Y N N 60 1
+
+#data/Alvarez_surface_deg2_k2_D8_6000K_10x10.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.003 Y N N 3600 1
+#data/Alvarez_surface_deg4_k2_D8_382K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg5_k2_D8_112K.xyz - - - 0 8 2 0.001 0.001 Y N N 240 1
+#data/Alvarez_surface_deg6_k2_D8_67K.xyz - - - 0 8 2 0.015 0.015 Y N N 60 1
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.025 Y N N 60 1
+#data/Alvarez_surface_deg9_k2_D8_42K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.01 0.01 Y N N 60 1
+
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1
+#data/sparsified/Alvarez_deg8_k2_D8_32K_sparsified_from_41K_0.01.xyz - - - 0 8 2 0.05 0.05 Y N N 600 1
+
+# NO REAL DIFFERENCE
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.003 0.007 Y N N 3600 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.014 0.007 Y N N 3600 1
+
+# NO REAL DIFFERENCE
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.01 0.005 Y N N 120 1
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.005 Y N N 120 1
+
+# NO REAL DIFFERENCE
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.001 0.01 Y N N 3600 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.02 0.01 Y N N 3600 1
+#data/sparsified/Alvarez_deg3_k2_D8_534k_sparsified_from_902K_0.001.xyz - - - 0 8 2 0.01 0.01 Y N N 3600 1
+
+# NOT VERY CLEAR, BUT THE NUMBER OF STEPS DIFFERS (>100 vs 15-20):
+#data/sparsified/Alvarez_deg8_k2_D8_38K_sparsified_from_41K_0.005.xyz - - - 0 8 2 0.02 0.02 Y N N 600 1
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.001 0.02 Y N N 60 1
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.025 0.02 Y N N 60 1
+
+# With pre-computed tangent spaces
+#data/test.pwt - - - 0 4 2 0.01 0.01 N N N 500000 1
+#data/Alvarez_variete_k2_D4_30000p.xyz - - - 0 4 2 0.01 0.01 Y N N 500000 1
+#data/Alvarez_variete_k2_D4_30000p_with_TSB.pwt - - - 0 4 2 0.01 0.01 Y N N 500000 1
+
+#---------------------------------------------------------------------- 3D meshes -----------------------------------------------------------------------
+#data/buddha_100kv.xyz - - - 0 3 2 0.005 0.005 Y N N 3 1
+#data/fandisk.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/fertility.xyz - - - 0 3 2 0.4 0.4 Y N N 3 1
+#data/bunny.xyz - - - 0 3 2 0.0006 0.0003 Y N N 3000 1
+#data/blob.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/3holes.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/785_hand_2500v.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/785_hand_50kv.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#data/bumpy_sphere.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#D:\INRIA\Data\_Models\Pointclouds\ajax_jotero.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#D:\INRIA\Data\_Models\Pointclouds\house.xyz - - - 0 3 2 0.01 0.01 Y N N 3 1
+#D:\INRIA\Data\_Models\Pointclouds\lucy_14M.xyz - - - 0 3 2 0.6 0.3 Y N N 3 1
+
+#----------------------------------------------------------- Generated point sets -----------------------------------------------------------------------
+#generate_sphere_d 3 - - 4 3 2 0.05 0.05 Y N N 3000 1
+#generate_sphere_d 3 - - 30000 2 1 0.005 0.005 Y N N 3000 1
+#generate_sphere_d 1 - - 500000 3 2 0.005 0.005 Y N N 3000 1
+#generate_sphere_d 3 - - 30000 4 3 0.05 0.05 Y N N 3000 1
+#generate_sphere_d 3 0 - 300 3 2 0.005 0.005 Y N N 60 1
+#generate_sphere_d 3 4 - 3000 3 2 0.005 0.005 Y N N 60 1
+#generate_sphere_d 3 7 - 3000 3 2 0.005 0.005 Y N N 60 1
+#generate_torus_3D 2 1 N 300 3 2 0.05 0.05 Y N N 600 1
+#generate_torus_d N - - 200 4 2 0.05 0.05 Y N N 600 1
+
+#generate_torus_d Y - - 100 6 3 0.1 0.19 Y N N 600 1
+#generate_torus_d Y - - 1000 6 3 0. 0.19 Y N N 600 1
+#generate_torus_d Y - - 10000 6 3 0. 0.19 Y N N 600 1
+#generate_torus_d Y - - 100000 6 3 0. 0.19 Y N N 600 1
+#generate_plane - - - 30000 3 2 0.005 0.005 Y N N 3000 1
+#generate_moment_curve 0 1 - 30000 6 1 0.005 0.005 Y N N 60 1
+#generate_klein_bottle_4D 4 3 - 700 4 2 0.05 0.05 Y N N 500 20
+#generate_klein_bottle_variant_5D 4 3 - 30000 5 2 0.05 0.05 Y N N 600 1
+#generate_klein_bottle_4D 8 5 - 5000 4 2 0.2 0.2 Y N N 60 1 #Takes forever
+#data/sparsified/Flat_torus_195p_sparsified_0.05_from_200p.xyz N - - 0 4 2 -1 0.2 Y N N 600 1
+
+#----------------------------------------------------------- Performance testing ------------------------------------------------------------------------
+# TC: 5.55 / 1st fix step : 0.2
+#data/fertility.xyz - - - 0 3 2 0.1 0.1 Y N N 10 1
+
+#---------------------------------------------------------- 04/04/2016 - for stats ----------------------------------------------------------
+
+#generate_torus_3D 2 1 N 5000 3 2 0.05 0.05 Y N N 120 1
+#generate_torus_d N - - 500 4 2 0.05 0.05 Y N N 120 1
+#data/Alvarez_variete_k2_D8_900k_2x2.xyz - - - 0 8 2 0.005 0.005 Y N N 120 1
+#data/Alvarez_surface_deg3_k2_D8_902K.xyz - - - 0 8 2 0.01 0.01 Y N N 120 1
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.02 Y N N 600 10
+#data/Alvarez_surface_deg8_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1
+#data/Alvarez_surface_deg10_k2_D8_41K.xyz - - - 0 8 2 0.02 0.02 Y N N 120 1
+#generate_torus_d N - - 200000 6 3 0.05 0.05 Y N N 1200 1
+
+#---------------------------------------------------------- 14/04/2016 - stats about noise ----------------------------------------------------------
+
+#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 1 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 2 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 3 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 4 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 5 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 6 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 7 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 8 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 9 - 1000 4 2 0.05 0.19 Y N N 120 4
+#generate_torus_d Y 10 - 1000 4 2 0.05 0.19 Y N N 120 4
+
+#generate_sphere_d 3 0 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 1 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 2 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 3 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 4 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 5 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 6 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 7 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 8 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 9 - 1000 4 3 0.05 0.05 Y N N 120 4
+#generate_sphere_d 3 10 - 1000 4 3 0.05 0.05 Y N N 120 4
+
+#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.01 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.02 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.03 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.04 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.05 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.06 5000 4 2 0.05 0.05 Y N N 120 4
+#generate_klein_bottle_4D 4 3 0.07 5000 4 2 0.05 0.05 Y N N 120 4
+
+#---------------------------------------------------------- 04/2016 - stats with different perturb techniques ----------------------------------------------------------
+
+# Tangential translation
+#data/SO3_50000.xyz - - - 0 9 3 0 0.05 Y N N 500 10
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10
+#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.05 Y N N 120 10
+#generate_torus_d Y 0 - 1000 4 2 0.05 0.19 Y N N 120 10
+#generate_sphere_d 3 1 - 1000 4 3 0.05 0.05 Y N N 120 10
+
+# Weight
+#data/SO3_50000.xyz - - - 0 9 3 0.1 0.05 Y N N 500 10
+#data/Alvarez_surface_deg7_k2_D8_48K.xyz - - - 0 8 2 0.02 0.01 Y N N 120 10
+#generate_klein_bottle_4D 4 3 0 5000 4 2 0.05 0.025 Y N N 20000 10
+#generate_torus_d Y 0 - 1000 4 2 0.05 0.025 Y N N 120 10
+#generate_sphere_d 3 1 - 1000 4 3 0.05 0.025 Y N N 12000 10
\ No newline at end of file
diff --git a/src/Tangential_complex/benchmark/benchmark_tc.cpp b/src/Tangential_complex/benchmark/benchmark_tc.cpp
new file mode 100644
index 00000000..e3b2a04f
--- /dev/null
+++ b/src/Tangential_complex/benchmark/benchmark_tc.cpp
@@ -0,0 +1,781 @@
+/******************************************************************************
+This benchmark computes the Tangential Complex from input files or generated
+point sets.
+
+It reads the benchmark_script.txt file (located in the same folder as this
+file) and computes one or several complexes for each line. Unless TC_NO_EXPORT
+is defined, each complex is exported as an OFF file and/or as a RIB file
+(RenderMan). In addition, an XML file is created at each run of the benchmark.
+It contains statistics about the complexes that were created. This XML file
+can be processed in Excel, for example.
+ ******************************************************************************/
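+
+// For reference, the performance log written by XML_perf_data (defined below,
+// on top of Streaming_XML_exporter) has roughly this shape -- the values are
+// placeholders and only a few of the per-run tags are shown:
+//
+//   <?xml version='1.0'?>
+//   <ContainerPerformance>
+//     <Perf>
+//       <id> 1460000000 </id>
+//       <Input> generate_sphere_d </Input>
+//       <Param1> 3 </Param1>
+//       ...
+//       <Comput_time> 0.42 </Comput_time>
+//       ...
+//     </Perf>
+//     ...
+//   </ContainerPerformance>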
+
+// Without TBB_USE_THREADING_TOOL Intel Inspector XE will report false positives in Intel TBB
+// (http://software.intel.com/en-us/articles/compiler-settings-for-threading-error-analysis-in-intel-inspector-xe/)
+#ifdef _DEBUG
+#define TBB_USE_THREADING_TOOL
+#endif
+
+#include <cstddef>
+
+//#define GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+//#define TC_INPUT_STRIDES 3 // only take one point every TC_INPUT_STRIDES points
+#define TC_NO_EXPORT // do not output OFF files
+//#define TC_EXPORT_TO_RIB //
+//#define GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET
+//#define GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF
+
+const std::size_t ONLY_LOAD_THE_FIRST_N_POINTS = 20000000;
+
+#include <gudhi/Debug_utils.h>
+#include <gudhi/Clock.h>
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+#include <gudhi/random_point_generators.h>
+#include <gudhi/Tangential_complex/utilities.h>
+
+#include <CGAL/assertions_behaviour.h>
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <boost/algorithm/string/replace.hpp>
+#include <boost/algorithm/string/trim_all.hpp>
+#include <boost/range/adaptor/strided.hpp>
+
+#include <cstdlib>
+#include <ctime>
+#include <fstream>
+#include <cmath> // for std::sqrt
+
+#ifdef GUDHI_USE_TBB
+#include <tbb/task_scheduler_init.h>
+#endif
+#include "XML_exporter.h"
+#include "RIB_exporter.h"
+#define GUDHI_TC_EXPORT_PERFORMANCE_DATA
+#define GUDHI_TC_SET_PERFORMANCE_DATA(value_name, value) \
+ XML_perf_data::set(value_name, value);
+
+
+namespace subsampl = Gudhi::subsampling;
+namespace tc = Gudhi::tangential_complex;
+
+const char * const BENCHMARK_SCRIPT_FILENAME = "benchmark_script.txt";
+
+typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+typedef Kernel::FT FT;
+typedef Kernel::Point_d Point;
+typedef Kernel::Vector_d Vector;
+typedef tc::Tangential_complex<
+Kernel, CGAL::Dynamic_dimension_tag,
+CGAL::Parallel_tag> TC;
+typedef TC::Simplex Simplex;
+typedef TC::Simplex_set Simplex_set;
+
+class XML_perf_data {
+ public:
+ typedef Streaming_XML_exporter<std::string> XML_exporter;
+
+ XML_perf_data(const std::string &filename)
+ : m_xml(filename, "ContainerPerformance", "Perf",
+ construct_subelements_names()) { }
+
+ virtual ~XML_perf_data() { }
+
+ static XML_perf_data &get() {
+ static XML_perf_data singleton(build_filename());
+ return singleton;
+ }
+
+ template <typename Value_type>
+ static void set(const std::string &name, Value_type value) {
+ get().set_data(name, value);
+ }
+
+ static void commit() {
+ get().commit_current_element();
+ }
+
+ protected:
+
+ static std::string build_filename() {
+ std::stringstream sstr;
+ sstr << "perf_logs/Performance_log_" << time(0) << ".xml";
+ return sstr.str();
+ }
+
+ static std::vector<std::string> construct_subelements_names() {
+ std::vector<std::string> subelements;
+ subelements.push_back("Input");
+ subelements.push_back("Param1");
+ subelements.push_back("Param2");
+ subelements.push_back("Param3");
+ subelements.push_back("Intrinsic_dim");
+ subelements.push_back("Ambient_dim");
+ subelements.push_back("Num_threads");
+ subelements.push_back("Sparsity");
+ subelements.push_back("Max_perturb");
+ subelements.push_back("Num_points_in_input");
+ subelements.push_back("Num_points");
+ subelements.push_back("Perturb_technique");
+ subelements.push_back("Perturb_which_points");
+ subelements.push_back("Initial_num_inconsistent_local_tr");
+ subelements.push_back("Best_num_inconsistent_local_tr");
+ subelements.push_back("Final_num_inconsistent_local_tr");
+ subelements.push_back("Init_time");
+ subelements.push_back("Comput_time");
+ subelements.push_back("Perturb_successful");
+ subelements.push_back("Perturb_time");
+ subelements.push_back("Perturb_steps");
+ subelements.push_back("Result_pure_pseudomanifold");
+ subelements.push_back("Result_num_wrong_dim_simplices");
+ subelements.push_back("Result_num_wrong_number_of_cofaces");
+ subelements.push_back("Result_num_unconnected_stars");
+ subelements.push_back("Info");
+
+ return subelements;
+ }
+
+ void set_data(const std::string &name, const std::string &value) {
+ m_current_element[name] = value;
+ }
+
+ template <typename Value_type>
+ void set_data(const std::string &name, Value_type value) {
+ std::stringstream sstr;
+ sstr << value;
+ set_data(name, sstr.str());
+ }
+
+ void commit_current_element() {
+ m_xml.add_element(m_current_element);
+ m_current_element.clear();
+ }
+
+ XML_exporter m_xml;
+ XML_exporter::Element_with_map m_current_element;
+};
+
+template<
+typename Kernel, typename OutputIteratorPoints>
+bool load_points_from_file(
+ const std::string &filename,
+ OutputIteratorPoints points,
+ std::size_t only_first_n_points = (std::numeric_limits<std::size_t>::max)()) {
+ typedef typename Kernel::Point_d Point;
+
+ std::ifstream in(filename);
+ if (!in.is_open()) {
+ std::cerr << "Could not open '" << filename << "'" << std::endl;
+ return false;
+ }
+
+ Kernel k;
+ Point p;
+ int num_points;  // first value in the file: the number of points (not used afterwards)
+ in >> num_points;
+
+ std::size_t i = 0;
+ while (i < only_first_n_points && in >> p) {
+ *points++ = p;
+ ++i;
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << "'" << filename << "' loaded." << std::endl;
+#endif
+
+ return true;
+}
+
+template<
+typename Kernel, typename Tangent_space_basis,
+typename OutputIteratorPoints, typename OutputIteratorTS>
+bool load_points_and_tangent_space_basis_from_file(
+ const std::string &filename,
+ OutputIteratorPoints points,
+ OutputIteratorTS tangent_spaces,
+ int intrinsic_dim,
+ std::size_t only_first_n_points = (std::numeric_limits<std::size_t>::max)()) {
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::Vector_d Vector;
+
+ std::ifstream in(filename);
+ if (!in.is_open()) {
+ std::cerr << "Could not open '" << filename << "'" << std::endl;
+ return false;
+ }
+
+ Kernel k;
+ Point p;
+ int num_points;  // first value in the file: the number of points (not used afterwards)
+ in >> num_points;
+
+ std::size_t i = 0;
+ while (i < only_first_n_points && in >> p) {
+ *points++ = p;
+
+ Tangent_space_basis tsb(i);
+ for (int d = 0; d < intrinsic_dim; ++d) {
+ Vector v;
+ in >> v;
+ tsb.push_back(tc::internal::normalize_vector(v, k));
+ }
+ *tangent_spaces++ = tsb;
+ ++i;
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << "'" << filename << "' loaded." << std::endl;
+#endif
+
+ return true;
+}
+
+// color_inconsistencies: only used when p_complex == NULL
+template <typename TC>
+bool export_to_off(
+ TC const& tc,
+ std::string const& input_name_stripped,
+ std::string const& suffix,
+ bool color_inconsistencies = false,
+ typename TC::Simplicial_complex const* p_complex = NULL,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) {
+#ifdef TC_NO_EXPORT
+ return true;
+#endif
+
+ CGAL::Identity<Point> proj_functor;
+
+ if (tc.intrinsic_dimension() <= 3) {
+ std::stringstream output_filename;
+ output_filename << "output/" << input_name_stripped << "_"
+ << tc.intrinsic_dimension() << "_in_R"
+ << tc.ambient_dimension() << "_"
+ << tc.number_of_vertices() << "v"
+ << suffix << ".off";
+ std::ofstream off_stream(output_filename.str().c_str());
+
+ if (p_complex) {
+#ifndef TC_NO_EXPORT
+ tc.export_to_off(
+ *p_complex, off_stream,
+ p_simpl_to_color_in_red,
+ p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue,
+ proj_functor);
+#endif
+ } else {
+ tc.export_to_off(
+ off_stream, color_inconsistencies,
+ p_simpl_to_color_in_red,
+ p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue,
+ NULL,
+ proj_functor);
+ }
+ return true;
+ }
+ return false;
+}
+
+void make_tc(std::vector<Point> &points,
+ TC::TS_container const& tangent_spaces, // can be empty
+ int intrinsic_dim,
+ double sparsity = 0.01,
+ double max_perturb = 0.005,
+ bool perturb = true,
+ bool add_high_dim_simpl = false,
+ bool collapse = false,
+ double time_limit_for_perturb = 0.,
+ const char *input_name = "tc") {
+ Kernel k;
+
+ if (sparsity > 0. && !tangent_spaces.empty()) {
+ std::cerr << "Error: cannot sparsify point set with pre-computed normals.\n";
+ return;
+ }
+
+ //===========================================================================
+ // Init
+ //===========================================================================
+ Gudhi::Clock t;
+
+ // Get input_name_stripped
+ std::string input_name_stripped(input_name);
+ size_t slash_index = input_name_stripped.find_last_of('/');
+ if (slash_index == std::string::npos)
+ slash_index = input_name_stripped.find_last_of('\\');
+ if (slash_index == std::string::npos)
+ slash_index = 0;
+ else
+ ++slash_index;
+ input_name_stripped = input_name_stripped.substr(
+ slash_index, input_name_stripped.find_last_of('.') - slash_index);
+
+ GUDHI_TC_SET_PERFORMANCE_DATA("Num_points_in_input", points.size());
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ std::vector<Point> points_not_sparse = points;
+#endif
+
+ //===========================================================================
+ // Sparsify point set if requested
+ //===========================================================================
+ if (sparsity > 0.) {
+ std::size_t num_points_before = points.size();
+ std::vector<Point> sparsified_points;
+ subsampl::sparsify_point_set(k, points, sparsity*sparsity,
+ std::back_inserter(sparsified_points));
+ sparsified_points.swap(points);
+ std::cerr << "Number of points before/after sparsification: "
+ << num_points_before << " / " << points.size() << "\n";
+
+#ifdef GUDHI_TC_EXPORT_SPARSIFIED_POINT_SET
+ std::ofstream ps_stream("output/sparsified_point_set.txt");
+ tc::internal::export_point_set(k, points, ps_stream);
+#endif
+ }
+
+ GUDHI_TC_SET_PERFORMANCE_DATA("Sparsity", sparsity);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Max_perturb", max_perturb);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Num_points", points.size());
+
+ //===========================================================================
+ // Compute Tangential Complex
+ //===========================================================================
+
+ TC tc(
+ points,
+ intrinsic_dim,
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ points_not_sparse.begin(), points_not_sparse.end(),
+#endif
+ k);
+
+ if (!tangent_spaces.empty()) {
+ tc.set_tangent_planes(tangent_spaces);
+ }
+
+ t.end();
+ double init_time = t.num_seconds();
+
+ t.begin();
+ tc.compute_tangential_complex();
+ t.end();
+ double computation_time = t.num_seconds();
+
+ //===========================================================================
+ // Export to OFF
+ //===========================================================================
+
+ // Create complex
+ int max_dim = -1;
+ TC::Simplicial_complex complex;
+ Simplex_set inconsistent_simplices;
+ max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices);
+
+ // TODO(CJ): TEST
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree, true, false);
+ // std::cerr << stree;
+
+ t.begin();
+ bool ret = export_to_off(
+ tc, input_name_stripped, "_INITIAL_TC", true,
+ &complex, &inconsistent_simplices);
+ t.end();
+ double export_before_time = (ret ? t.num_seconds() : -1);
+
+ unsigned int num_perturb_steps = 0;
+ double perturb_time = -1;
+ double export_after_perturb_time = -1.;
+ bool perturb_success = false;
+ if (perturb) {
+ //=========================================================================
+ // Try to fix inconsistencies by perturbing points
+ //=========================================================================
+ t.begin();
+ auto fix_result =
+ tc.fix_inconsistencies_using_perturbation(max_perturb, time_limit_for_perturb);
+ t.end();
+ perturb_time = t.num_seconds();
+
+ perturb_success = fix_result.success;
+ GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr",
+ fix_result.initial_num_inconsistent_stars);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr",
+ fix_result.best_num_inconsistent_stars);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr",
+ fix_result.final_num_inconsistent_stars);
+
+ //=========================================================================
+ // Export to OFF
+ //=========================================================================
+
+ // Re-build the complex
+ Simplex_set inconsistent_simplices;
+ max_dim = tc.create_complex(complex, true, false, 2, &inconsistent_simplices);
+
+ t.begin();
+ bool exported = export_to_off(
+ tc, input_name_stripped, "_AFTER_FIX", true, &complex,
+ &inconsistent_simplices);
+ t.end();
+ export_after_perturb_time = (exported ? t.num_seconds() : -1);
+
+ //std::string fn = "output/inc_stars/";
+ //fn += input_name_stripped;
+ //tc.export_inconsistent_stars_to_OFF_files(fn);
+
+#if !defined(TC_NO_EXPORT) && defined(TC_EXPORT_TO_RIB)
+ std::ofstream rib(std::string("output/") + input_name_stripped + ".rib");
+ RIB_exporter<TC::Points, TC::Simplicial_complex::Simplex_set> rib_exporter(
+ tc.points(),
+ complex.simplex_range(),
+ rib,
+ input_name_stripped + ".tif",
+ false, // is_preview
+ std::make_tuple(2, 4, 6),
+ 1600, 503 // resolution
+ );
+ rib_exporter.write_file();
+
+ std::ofstream rib_LQ(std::string("output/") + input_name_stripped + "_LQ.rib");
+ RIB_exporter<TC::Points, TC::Simplicial_complex::Simplex_set> rib_exporter_LQ(
+ tc.points(),
+ complex.simplex_range(),
+ rib_LQ,
+ input_name_stripped + "_LQ.tif",
+ true, // is_preview
+ std::make_tuple(0, 4, 5)
+ );
+ rib_exporter_LQ.write_file();
+#endif
+ } else {
+ GUDHI_TC_SET_PERFORMANCE_DATA("Initial_num_inconsistent_local_tr", "N/A");
+ GUDHI_TC_SET_PERFORMANCE_DATA("Best_num_inconsistent_local_tr", "N/A");
+ GUDHI_TC_SET_PERFORMANCE_DATA("Final_num_inconsistent_local_tr", "N/A");
+ }
+
+ max_dim = tc.create_complex(complex, true, false, 2);
+
+ complex.display_stats();
+
+ if (intrinsic_dim == 2)
+ complex.euler_characteristic(true);
+
+ //===========================================================================
+ // Collapse
+ //===========================================================================
+ if (collapse) {
+ complex.collapse(max_dim);
+ complex.display_stats();
+ }
+
+ //===========================================================================
+ // Is the result a pure pseudomanifold?
+ //===========================================================================
+ std::size_t num_wrong_dim_simplices,
+ num_wrong_number_of_cofaces,
+ num_unconnected_stars;
+ Simplex_set wrong_dim_simplices;
+ Simplex_set wrong_number_of_cofaces_simplices;
+ Simplex_set unconnected_stars_simplices;
+ bool is_pure_pseudomanifold = complex.is_pure_pseudomanifold(
+ intrinsic_dim, tc.number_of_vertices(),
+ false, // do NOT allow borders
+ false, 1,
+ &num_wrong_dim_simplices, &num_wrong_number_of_cofaces,
+ &num_unconnected_stars,
+ &wrong_dim_simplices, &wrong_number_of_cofaces_simplices,
+ &unconnected_stars_simplices);
+
+ //===========================================================================
+ // Export to OFF
+ //===========================================================================
+
+ double export_after_collapse_time = -1.;
+ if (collapse) {
+ t.begin();
+ bool exported = export_to_off(
+ tc, input_name_stripped, "_AFTER_COLLAPSE", false, &complex,
+ &wrong_dim_simplices, &wrong_number_of_cofaces_simplices,
+ &unconnected_stars_simplices);
+ t.end();
+ std::cerr
+ << " OFF colors:\n"
+ << " * Red: wrong dim simplices\n"
+ << " * Green: wrong number of cofaces simplices\n"
+ << " * Blue: not-connected stars\n";
+ export_after_collapse_time = (exported ? t.num_seconds() : -1.);
+ }
+
+ //===========================================================================
+ // Display info
+ //===========================================================================
+
+ std::cerr
+ << "\n================================================\n"
+ << "Number of vertices: " << tc.number_of_vertices() << "\n"
+ << "Computation times (seconds): \n"
+ << " * Tangential complex: " << init_time + computation_time << "\n"
+ << " - Init + kd-tree = " << init_time << "\n"
+ << " - TC computation = " << computation_time << "\n"
+ << " * Export to OFF (before perturb): " << export_before_time << "\n"
+ << " * Fix inconsistencies 1: " << perturb_time
+ << " (" << num_perturb_steps << " steps) ==> "
+ << (perturb_success ? "FIXED" : "NOT fixed") << "\n"
+ << " * Export to OFF (after perturb): " << export_after_perturb_time << "\n"
+ << " * Export to OFF (after collapse): "
+ << export_after_collapse_time << "\n"
+ << "================================================\n";
+
+ //===========================================================================
+ // Export info
+ //===========================================================================
+ GUDHI_TC_SET_PERFORMANCE_DATA("Init_time", init_time);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Comput_time", computation_time);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_successful",
+ (perturb_success ? 1 : 0));
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_time", perturb_time);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_steps", num_perturb_steps);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_pure_pseudomanifold",
+ (is_pure_pseudomanifold ? 1 : 0));
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_dim_simplices",
+ num_wrong_dim_simplices);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_wrong_number_of_cofaces",
+ num_wrong_number_of_cofaces);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Result_num_unconnected_stars",
+ num_unconnected_stars);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Info", "");
+}
+
+int main() {
+ CGAL::set_error_behaviour(CGAL::ABORT);
+
+#ifdef GUDHI_USE_TBB
+#ifdef _DEBUG
+ int num_threads = 1;
+#else
+ int num_threads = tbb::task_scheduler_init::default_num_threads() - 4;
+#endif
+#endif
+
+ std::ifstream script_file;
+ script_file.open(BENCHMARK_SCRIPT_FILENAME);
+ // Script?
+ // Script file format: each line gives
+ // - Filename (point set) or "generate_XXX" (point set generation)
+ // - Param1, Param2, Param3 (generator-specific parameters, "-" if unused)
+ // - Number of points (0 when the points are read from a file)
+ // - Ambient dim
+ // - Intrinsic dim
+ // - Sparsity
+ // - Max perturbation magnitude
+ // - Perturb / add high-dim simplices / collapse flags (Y or N)
+ // - Time limit for the perturbation
+ // - Number of iterations with these parameters
+ if (script_file.is_open()) {
+ int i = 1;
+#ifdef GUDHI_USE_TBB
+#ifdef BENCHMARK_WITH_1_TO_MAX_THREADS
+ for (num_threads = 1;
+ num_threads <= tbb::task_scheduler_init::default_num_threads();
+ ++num_threads)
+#endif
+#endif
+ /*for (Concurrent_mesher_config::get().num_work_items_per_batch = 5 ;
+ Concurrent_mesher_config::get().num_work_items_per_batch < 100 ;
+ Concurrent_mesher_config::get().num_work_items_per_batch += 5)*/ {
+#ifdef GUDHI_USE_TBB
+ tbb::task_scheduler_init init(
+ num_threads > 0 ? num_threads : tbb::task_scheduler_init::automatic);
+#endif
+
+ std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' found.\n";
+ script_file.seekg(0);
+ while (script_file.good()) {
+ std::string line;
+ std::getline(script_file, line);
+ if (line.size() > 1 && line[0] != '#') {
+ boost::replace_all(line, "\t", " ");
+ boost::trim_all(line);
+ std::cerr << "\n\n";
+ std::cerr << "*****************************************\n";
+ std::cerr << "******* " << line << "\n";
+ std::cerr << "*****************************************\n";
+ std::stringstream sstr(line);
+
+ std::string input;
+ std::string param1;
+ std::string param2;
+ std::string param3;
+ std::size_t num_points;
+ int ambient_dim;
+ int intrinsic_dim;
+ double sparsity;
+ double max_perturb;
+ char perturb, add_high_dim_simpl, collapse;
+ double time_limit_for_perturb;
+ int num_iteration;
+ sstr >> input;
+ sstr >> param1;
+ sstr >> param2;
+ sstr >> param3;
+ sstr >> num_points;
+ sstr >> ambient_dim;
+ sstr >> intrinsic_dim;
+ sstr >> sparsity;
+ sstr >> max_perturb;
+ sstr >> perturb;
+ sstr >> add_high_dim_simpl;
+ sstr >> collapse;
+ sstr >> time_limit_for_perturb;
+ sstr >> num_iteration;
+
+ for (int j = 0; j < num_iteration; ++j) {
+ std::string input_stripped = input;
+ size_t slash_index = input_stripped.find_last_of('/');
+ if (slash_index == std::string::npos)
+ slash_index = input_stripped.find_last_of('\\');
+ if (slash_index == std::string::npos)
+ slash_index = 0;
+ else
+ ++slash_index;
+ input_stripped = input_stripped.substr(
+ slash_index, input_stripped.find_last_of('.') - slash_index);
+
+ GUDHI_TC_SET_PERFORMANCE_DATA("Input", input_stripped);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Param1", param1);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Param2", param2);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Param3", param3);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Ambient_dim", ambient_dim);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Intrinsic_dim", intrinsic_dim);
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_technique", "Tangential_translation");
+ GUDHI_TC_SET_PERFORMANCE_DATA("Perturb_which_points", "Center_vertex");
+
+#ifdef GUDHI_USE_TBB
+ GUDHI_TC_SET_PERFORMANCE_DATA(
+ "Num_threads",
+ (num_threads == -1 ? tbb::task_scheduler_init::default_num_threads() : num_threads));
+#else
+ GUDHI_TC_SET_PERFORMANCE_DATA("Num_threads", "N/A");
+#endif
+
+ std::cerr << "\nTC #" << i << "...\n";
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t_gen;
+#endif
+
+ std::vector<Point> points;
+ TC::TS_container tangent_spaces;
+
+ if (input == "generate_moment_curve") {
+ points = Gudhi::generate_points_on_moment_curve<Kernel>(
+ num_points, ambient_dim,
+ std::atof(param1.c_str()), std::atof(param2.c_str()));
+ } else if (input == "generate_plane") {
+ points = Gudhi::generate_points_on_plane<Kernel>(
+ num_points, intrinsic_dim, ambient_dim);
+ } else if (input == "generate_sphere_d") {
+ points = Gudhi::generate_points_on_sphere_d<Kernel>(
+ num_points, ambient_dim,
+ std::atof(param1.c_str()), // radius
+ std::atof(param2.c_str())); // radius_noise_percentage
+ } else if (input == "generate_two_spheres_d") {
+ points = Gudhi::generate_points_on_two_spheres_d<Kernel>(
+ num_points, ambient_dim,
+ std::atof(param1.c_str()),
+ std::atof(param2.c_str()),
+ std::atof(param3.c_str()));
+ } else if (input == "generate_3sphere_and_circle_d") {
+ GUDHI_CHECK(intrinsic_dim == 3,
+ std::logic_error("Intrinsic dim should be 3"));
+ GUDHI_CHECK(ambient_dim == 5,
+ std::logic_error("Ambient dim should be 5"));
+ points = Gudhi::generate_points_on_3sphere_and_circle<Kernel>(
+ num_points,
+ std::atof(param1.c_str()));
+ } else if (input == "generate_torus_3D") {
+ points = Gudhi::generate_points_on_torus_3D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()),
+ std::atof(param2.c_str()),
+ param3 == "Y");
+ } else if (input == "generate_torus_d") {
+ points = Gudhi::generate_points_on_torus_d<Kernel>(
+ num_points,
+ intrinsic_dim,
+ param1 == "Y", // uniform
+ std::atof(param2.c_str())); // radius_noise_percentage
+ } else if (input == "generate_klein_bottle_3D") {
+ points = Gudhi::generate_points_on_klein_bottle_3D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()), std::atof(param2.c_str()));
+ } else if (input == "generate_klein_bottle_4D") {
+ points = Gudhi::generate_points_on_klein_bottle_4D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()), std::atof(param2.c_str()),
+ std::atof(param3.c_str())); // noise
+ } else if (input == "generate_klein_bottle_variant_5D") {
+ points = Gudhi::generate_points_on_klein_bottle_variant_5D<Kernel>(
+ num_points,
+ std::atof(param1.c_str()), std::atof(param2.c_str()));
+ } else {
+ // Contains tangent space basis
+ if (input.substr(input.size() - 3) == "pwt") {
+ load_points_and_tangent_space_basis_from_file
+ <Kernel, typename TC::Tangent_space_basis > (
+ input, std::back_inserter(points),
+ std::back_inserter(tangent_spaces),
+ intrinsic_dim,
+ ONLY_LOAD_THE_FIRST_N_POINTS);
+ } else {
+ load_points_from_file<Kernel>(
+ input, std::back_inserter(points),
+ ONLY_LOAD_THE_FIRST_N_POINTS);
+ }
+ }
+
+#ifdef GUDHI_TC_PROFILING
+ t_gen.end();
+ std::cerr << "Point set generated/loaded in " << t_gen.num_seconds()
+ << " seconds.\n";
+#endif
+
+ if (!points.empty()) {
+#if defined(TC_INPUT_STRIDES) && TC_INPUT_STRIDES > 1
+ auto p = points | boost::adaptors::strided(TC_INPUT_STRIDES);
+ std::vector<Point> points(p.begin(), p.end());
+ std::cerr << "****************************************\n"
+ << "WARNING: taking 1 point every " << TC_INPUT_STRIDES
+ << " points.\n"
+ << "****************************************\n";
+#endif
+
+ make_tc(points, tangent_spaces, intrinsic_dim,
+ sparsity, max_perturb,
+ perturb == 'Y', add_high_dim_simpl == 'Y', collapse == 'Y',
+ time_limit_for_perturb, input.c_str());
+
+ std::cerr << "TC #" << i++ << " done.\n";
+ std::cerr << "\n---------------------------------\n";
+ } else {
+ std::cerr << "TC #" << i++ << ": no points loaded.\n";
+ }
+
+ XML_perf_data::commit();
+ }
+ }
+ }
+ script_file.seekg(0);
+ script_file.clear();
+ }
+
+ script_file.close();
+ } // Or not script?
+ else {
+ std::cerr << "Script file '" << BENCHMARK_SCRIPT_FILENAME << "' NOT found.\n";
+ }
+
+ // system("pause");
+ return 0;
+}
diff --git a/src/Tangential_complex/doc/COPYRIGHT b/src/Tangential_complex/doc/COPYRIGHT
new file mode 100644
index 00000000..61f17f6d
--- /dev/null
+++ b/src/Tangential_complex/doc/COPYRIGHT
@@ -0,0 +1,12 @@
+The files of this directory are part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+
+Author(s): Vincent Rouvreau
+
+Copyright (C) 2015 Inria
+
+This gives everyone the freedom to use the Gudhi Library in any context:
+commercial or non-commercial, public or private, open or closed source.
+
+You should have received a copy of the MIT License along with this program.
+If not, see https://opensource.org/licenses/MIT. \ No newline at end of file
diff --git a/src/Tangential_complex/doc/Intro_tangential_complex.h b/src/Tangential_complex/doc/Intro_tangential_complex.h
new file mode 100644
index 00000000..ce277185
--- /dev/null
+++ b/src/Tangential_complex/doc/Intro_tangential_complex.h
@@ -0,0 +1,109 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_
+#define DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_
+
+// needs namespaces for Doxygen to link on classes
+namespace Gudhi {
+namespace tangential_complex {
+
+/** \defgroup tangential_complex Tangential complex
+
+\author Cl&eacute;ment Jamin
+
+@{
+
+\section tangentialdefinition Definition
+
+A Tangential Delaunay complex is a
+<a target="_blank" href="https://en.wikipedia.org/wiki/Simplicial_complex">simplicial complex</a>
+designed to reconstruct a \f$k\f$-dimensional smooth manifold embedded in \f$d\f$-dimensional Euclidean space.
+The input is a point sample coming from an unknown manifold, which means that the points lie close to a structure of
+"small" intrinsic dimension.
+The running time depends only linearly on the extrinsic dimension \f$ d \f$,
+but exponentially on the intrinsic dimension \f$ k \f$.
+
+An extensive description of the Tangential complex can be found in \cite tangentialcomplex2014.
+
+\subsection whatisthetc What is a Tangential Complex?
+
+Let us start with the description of the Tangential complex of a simple example, with \f$ k=1 \f$ and \f$ d=2 \f$.
+The point set \f$ \mathscr P \f$ is located on a closed curve embedded in 2D.
+To keep the figures simple, only 4 points are displayed (in practice, more points are required for PCA).
+\image html "tc_example_01.png" "The input"
+For each point \f$ P \f$, estimate its tangent subspace \f$ T_P \f$ using PCA.
+\image html "tc_example_02.png" "The estimated normals"
+Let us add the Voronoi diagram of the points in orange. For each point \f$ P \f$, construct its star in the Delaunay
+triangulation of \f$ \mathscr P \f$ restricted to \f$ T_P \f$.
+\image html "tc_example_03.png" "The Voronoi diagram"
+The Tangential Delaunay complex is the union of those stars.
+
+In practice, neither the ambient Voronoi diagram nor the ambient Delaunay triangulation is computed.
+Instead, local \f$ k \f$-dimensional regular triangulations are computed with a limited number of points as we only
+need the star of each point. More details can be found in \cite tangentialcomplex2014.
+
+\subsection inconsistencies Inconsistencies
+
+Inconsistencies between the stars can occur.
+An inconsistency occurs when a simplex is not in the star of all its vertices.
+
+Let us take the same example.
+\image html "tc_example_07_before.png" "Before"
+Let us slightly move the tangent subspace \f$ T_Q \f$:
+\image html "tc_example_07_after.png" "After"
+Now, the star of \f$ Q \f$ contains \f$ QP \f$, but the star of \f$ P \f$ does not contain \f$ QP \f$. We have an inconsistency.
+\image html "tc_example_08.png" "After"
+
+One way to solve inconsistencies is to randomly perturb the positions of the points involved in an inconsistency.
+In the current implementation, this perturbation is done in the tangent subspace of each point.
+The maximum perturbation radius is given as a parameter to `Tangential_complex::fix_inconsistencies_using_perturbation`.
+
+In most cases, we recommend providing a point set where the minimum distance between any two points
+is not too small. This can be achieved using the functions provided by the Subsampling module.
+Then, a good value to start with for the maximum perturbation radius is around half the minimum distance between any two points.
+The \ref example_with_perturb below shows an example of such a process.
+
+In most cases, this process is able to dramatically reduce the number of inconsistencies, but is not guaranteed to succeed.
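+
+As a minimal sketch (assuming a `Tangential_complex` object `tc` built as in \ref example_with_perturb below), the
+outcome of this process can be checked through the `Fix_inconsistencies_info` structure returned by the fixing function:
+\code{.cpp}
+auto info = tc.fix_inconsistencies_using_perturbation(0.05 /*max_perturb*/, 10. /*time_limit in seconds*/);
+if (!info.success) {
+  // The time limit was reached before all inconsistencies could be removed
+}
+\endcode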
+
+\subsection output Output
+
+The result of the computation is exported as a `Simplex_tree`. It is the union of the stars of all the input points.
+A vertex in the Simplex Tree is the index of the point in the range provided by the user.
+The point corresponding to a vertex can also be obtained through the `Tangential_complex::get_point` function.
+Note that even if the positions of the points are perturbed, their original positions are kept (i.e. `Tangential_complex::get_point` returns the original position of the point).
+
+The result can be obtained after the computation of the Tangential complex itself and/or after the perturbation process.
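+
+As a minimal sketch (assuming a `Tangential_complex` object `tc` built as in \ref simple_example below), exporting
+the complex and retrieving the original position of a vertex could look like this:
+\code{.cpp}
+Gudhi::Simplex_tree<> stree;
+tc.create_complex(stree);
+// Vertex 0 corresponds to the first point of the input range;
+// get_point returns its original (unperturbed) position
+auto p0 = tc.get_point(0);
+\endcode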
+
+\section simple_example Simple example
+
+This example builds the Tangential complex of a point set.
+Note that the dimension of the kernel here is dynamic, which is slower, but more flexible:
+the intrinsic and ambient dimensions do not have to be known at compile-time.
+
+\include Tangential_complex/example_basic.cpp
+
+\section example_with_perturb Example with perturbation
+
+This example builds the Tangential complex of a point set, then tries to solve inconsistencies
+by perturbing the positions of points involved in inconsistent simplices.
+Note that the dimension of the kernel here is static, which is the best choice when the
+dimensions are known at compile-time.
+
+\include Tangential_complex/example_with_perturb.cpp
+
+ */
+/** @} */ // end defgroup tangential_complex
+
+} // namespace tangential_complex
+
+} // namespace Gudhi
+
+#endif // DOC_TANGENTIAL_COMPLEX_INTRO_TANGENTIAL_COMPLEX_H_
diff --git a/src/Tangential_complex/doc/tc_example_01.png b/src/Tangential_complex/doc/tc_example_01.png
new file mode 100644
index 00000000..8afe6198
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_01.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_02.png b/src/Tangential_complex/doc/tc_example_02.png
new file mode 100644
index 00000000..01591c1d
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_02.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_03.png b/src/Tangential_complex/doc/tc_example_03.png
new file mode 100644
index 00000000..5de04e01
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_03.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_05.png b/src/Tangential_complex/doc/tc_example_05.png
new file mode 100644
index 00000000..fdd5e5fa
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_05.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_06.png b/src/Tangential_complex/doc/tc_example_06.png
new file mode 100644
index 00000000..31ad3c43
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_06.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_07.png b/src/Tangential_complex/doc/tc_example_07.png
new file mode 100644
index 00000000..47e34de7
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_07.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_07_after.png b/src/Tangential_complex/doc/tc_example_07_after.png
new file mode 100644
index 00000000..981350d2
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_07_after.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_07_before.png b/src/Tangential_complex/doc/tc_example_07_before.png
new file mode 100644
index 00000000..ddc6bc7b
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_07_before.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_08.png b/src/Tangential_complex/doc/tc_example_08.png
new file mode 100644
index 00000000..119a87de
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_08.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_example_09.png b/src/Tangential_complex/doc/tc_example_09.png
new file mode 100644
index 00000000..31bac1e0
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_example_09.png
Binary files differ
diff --git a/src/Tangential_complex/doc/tc_examples.png b/src/Tangential_complex/doc/tc_examples.png
new file mode 100644
index 00000000..b6544afe
--- /dev/null
+++ b/src/Tangential_complex/doc/tc_examples.png
Binary files differ
diff --git a/src/Tangential_complex/example/CMakeLists.txt b/src/Tangential_complex/example/CMakeLists.txt
new file mode 100644
index 00000000..cb1486a4
--- /dev/null
+++ b/src/Tangential_complex/example/CMakeLists.txt
@@ -0,0 +1,20 @@
+project(Tangential_complex_examples)
+
+if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ add_executable( Tangential_complex_example_basic example_basic.cpp )
+ target_link_libraries(Tangential_complex_example_basic ${CGAL_LIBRARY})
+ add_executable( Tangential_complex_example_with_perturb example_with_perturb.cpp )
+ target_link_libraries(Tangential_complex_example_with_perturb ${CGAL_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Tangential_complex_example_basic ${TBB_LIBRARIES})
+ target_link_libraries(Tangential_complex_example_with_perturb ${TBB_LIBRARIES})
+ endif(TBB_FOUND)
+
+ add_test(NAME Tangential_complex_example_basic
+ COMMAND $<TARGET_FILE:Tangential_complex_example_basic>)
+ add_test(NAME Tangential_complex_example_with_perturb
+ COMMAND $<TARGET_FILE:Tangential_complex_example_with_perturb>)
+
+ install(TARGETS Tangential_complex_example_basic DESTINATION bin)
+ install(TARGETS Tangential_complex_example_with_perturb DESTINATION bin)
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
diff --git a/src/Tangential_complex/example/example_basic.cpp b/src/Tangential_complex/example/example_basic.cpp
new file mode 100644
index 00000000..ab35edf0
--- /dev/null
+++ b/src/Tangential_complex/example/example_basic.cpp
@@ -0,0 +1,49 @@
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+//#include <gudhi/Fake_simplex_tree.h>
+
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <array>
+#include <vector>
+
+namespace tc = Gudhi::tangential_complex;
+
+typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+typedef Kernel::FT FT;
+typedef Kernel::Point_d Point;
+typedef Kernel::Vector_d Vector;
+typedef tc::Tangential_complex<
+Kernel, CGAL::Dynamic_dimension_tag,
+CGAL::Parallel_tag> TC;
+
+int main(void) {
+ const int INTRINSIC_DIM = 2;
+ const int AMBIENT_DIM = 3;
+ const int NUM_POINTS = 100;
+
+ Kernel k;
+
+ // Generate points on a 2-sphere
+ CGAL::Random_points_on_sphere_d<Point> generator(AMBIENT_DIM, 3.);
+ std::vector<Point> points;
+ points.reserve(NUM_POINTS);
+ for (int i = 0; i < NUM_POINTS; ++i)
+ points.push_back(*generator++);
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ //Gudhi::Fake_simplex_tree stree;
+ tc.create_complex(stree);
+
+ // Display stats about inconsistencies
+ tc.number_of_inconsistent_simplices(true); // verbose
+
+ return 0;
+}
diff --git a/src/Tangential_complex/example/example_with_perturb.cpp b/src/Tangential_complex/example/example_with_perturb.cpp
new file mode 100644
index 00000000..d0d877ea
--- /dev/null
+++ b/src/Tangential_complex/example/example_with_perturb.cpp
@@ -0,0 +1,53 @@
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <array>
+#include <vector>
+
+namespace subsampl = Gudhi::subsampling;
+namespace tc = Gudhi::tangential_complex;
+
+typedef CGAL::Epick_d<CGAL::Dimension_tag < 3 >> Kernel;
+typedef Kernel::FT FT;
+typedef Kernel::Point_d Point;
+typedef Kernel::Vector_d Vector;
+typedef tc::Tangential_complex<
+Kernel, CGAL::Dimension_tag<2>,
+CGAL::Parallel_tag> TC;
+
+int main(void) {
+ const int INTRINSIC_DIM = 2;
+ const int AMBIENT_DIM = 3;
+ const int NUM_POINTS = 50;
+
+ Kernel k;
+
+ // Generate points on a 2-sphere
+ CGAL::Random_points_on_sphere_d<Point> generator(AMBIENT_DIM, 3.);
+ std::vector<Point> points;
+ points.reserve(NUM_POINTS);
+ for (int i = 0; i < NUM_POINTS; ++i)
+ points.push_back(*generator++);
+
+ // Sparsify the point set
+ std::vector<Point> sparsified_points;
+ subsampl::sparsify_point_set(k, points, 0.1 * 0.1,
+ std::back_inserter(sparsified_points));
+ sparsified_points.swap(points);
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+
+ // Try to fix inconsistencies. Give it 10 seconds to succeed
+ tc.fix_inconsistencies_using_perturbation(0.05, 10);
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree);
+
+ return 0;
+}
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex.h
new file mode 100644
index 00000000..f59476b1
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex.h
@@ -0,0 +1,2038 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - 2019/08 Vincent Rouvreau: Fix issue #10 for CGAL and Eigen3
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef TANGENTIAL_COMPLEX_H_
+#define TANGENTIAL_COMPLEX_H_
+
+#include <gudhi/Tangential_complex/config.h>
+#include <gudhi/Tangential_complex/Simplicial_complex.h>
+#include <gudhi/Tangential_complex/utilities.h>
+#include <gudhi/Kd_tree_search.h>
+#include <gudhi/console_color.h>
+#include <gudhi/Clock.h>
+#include <gudhi/Simplex_tree.h>
+#include <gudhi/Debug_utils.h>
+
+#include <CGAL/Default.h>
+#include <CGAL/Dimension.h>
+#include <CGAL/function_objects.h> // for CGAL::Identity
+#include <CGAL/Epick_d.h>
+#include <CGAL/Regular_triangulation_traits_adapter.h>
+#include <CGAL/Regular_triangulation.h>
+#include <CGAL/Delaunay_triangulation.h>
+#include <CGAL/Combination_enumerator.h>
+#include <CGAL/point_generators_d.h>
+#include <CGAL/version.h> // for CGAL_VERSION_NR
+
+#include <Eigen/Core>
+#include <Eigen/Eigen>
+#include <Eigen/src/Core/util/Macros.h> // for EIGEN_VERSION_AT_LEAST
+
+#include <boost/optional.hpp>
+#include <boost/iterator/transform_iterator.hpp>
+#include <boost/range/adaptor/transformed.hpp>
+#include <boost/range/counting_range.hpp>
+#include <boost/math/special_functions/factorials.hpp>
+#include <boost/container/flat_set.hpp>
+
+#include <tuple>
+#include <vector>
+#include <set>
+#include <utility>
+#include <sstream>
+#include <iostream>
+#include <limits>
+#include <algorithm>
+#include <functional>
+#include <iterator>
+#include <cmath> // for std::sqrt
+#include <string>
+#include <cstddef> // for std::size_t
+
+#ifdef GUDHI_USE_TBB
+#include <tbb/parallel_for.h>
+#include <tbb/combinable.h>
+#include <tbb/mutex.h>
+#endif
+
+// #define GUDHI_TC_EXPORT_NORMALS // Only for 3D surfaces (k=2, d=3)
+
+// Make compilation fail - required for external projects - https://github.com/GUDHI/gudhi-devel/issues/10
+#if CGAL_VERSION_NR < 1041101000
+# error Tangential_complex is only available for CGAL >= 4.11
+#endif
+
+#if !EIGEN_VERSION_AT_LEAST(3,1,0)
+# error Tangential_complex is only available for Eigen3 >= 3.1.0 installed with CGAL
+#endif
+
+namespace sps = Gudhi::spatial_searching;
+
+namespace Gudhi {
+
+namespace tangential_complex {
+
+using namespace internal;
+
+class Vertex_data {
+ public:
+ Vertex_data(std::size_t data = (std::numeric_limits<std::size_t>::max)()) : m_data(data) {}
+
+ operator std::size_t() { return m_data; }
+
+ operator std::size_t() const { return m_data; }
+
+ private:
+ std::size_t m_data;
+};
+
+/**
+ * \class Tangential_complex Tangential_complex.h gudhi/Tangential_complex.h
+ * \brief Tangential complex data structure.
+ *
+ * \ingroup tangential_complex
+ *
+ * \details
+ * The class Tangential_complex represents a tangential complex.
+ * After the computation of the complex, an optional post-processing called perturbation can
+ * be run to attempt to remove inconsistencies.
+ *
+ * \tparam Kernel_ requires a <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_d/classCGAL_1_1Epick__d.html">CGAL::Epick_d</a> class, which
+ * can be static if you know the ambient dimension at compile-time, or dynamic if you don't.
+ * \tparam DimensionTag can be either <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_23/classCGAL_1_1Dimension__tag.html">Dimension_tag<d></a>
+ * if you know the intrinsic dimension at compile-time,
+ * or <a target="_blank"
+ * href="http://doc.cgal.org/latest/Kernel_23/classCGAL_1_1Dynamic__dimension__tag.html">CGAL::Dynamic_dimension_tag</a>
+ * if you don't.
+ * \tparam Concurrency_tag enables sequential versus parallel computation. Possible values are `CGAL::Parallel_tag` (the
+ * default) and `CGAL::Sequential_tag`.
+ * \tparam Triangulation_ is the type used for storing the local regular triangulations. We highly recommend
+ * using the default value (`CGAL::Regular_triangulation`).
+ *
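+ * A minimal sketch of a typical instantiation (mirroring the basic example shipped with this module):
+ * \code{.cpp}
+ * typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+ * typedef Gudhi::tangential_complex::Tangential_complex<
+ *     Kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag> TC;
+ * \endcode
+ *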
+ */
+template <typename Kernel_, // ambient kernel
+ typename DimensionTag, // intrinsic dimension
+ typename Concurrency_tag = CGAL::Parallel_tag, typename Triangulation_ = CGAL::Default>
+class Tangential_complex {
+ typedef Kernel_ K;
+ typedef typename K::FT FT;
+ typedef typename K::Point_d Point;
+ typedef typename K::Weighted_point_d Weighted_point;
+ typedef typename K::Vector_d Vector;
+
+ typedef typename CGAL::Default::Get<
+ Triangulation_,
+ CGAL::Regular_triangulation<
+ CGAL::Epick_d<DimensionTag>,
+ CGAL::Triangulation_data_structure<
+ typename CGAL::Epick_d<DimensionTag>::Dimension,
+ CGAL::Triangulation_vertex<CGAL::Regular_triangulation_traits_adapter<CGAL::Epick_d<DimensionTag> >,
+ Vertex_data>,
+ CGAL::Triangulation_full_cell<
+ CGAL::Regular_triangulation_traits_adapter<CGAL::Epick_d<DimensionTag> > > > > >::type Triangulation;
+ typedef typename Triangulation::Geom_traits Tr_traits;
+ typedef typename Triangulation::Weighted_point Tr_point;
+ typedef typename Tr_traits::Base::Point_d Tr_bare_point;
+ typedef typename Triangulation::Vertex_handle Tr_vertex_handle;
+ typedef typename Triangulation::Full_cell_handle Tr_full_cell_handle;
+ typedef typename Tr_traits::Vector_d Tr_vector;
+
+#if defined(GUDHI_USE_TBB)
+ typedef tbb::mutex Mutex_for_perturb;
+ typedef Vector Translation_for_perturb;
+ typedef std::vector<Atomic_wrapper<FT> > Weights;
+#else
+ typedef Vector Translation_for_perturb;
+ typedef std::vector<FT> Weights;
+#endif
+ typedef std::vector<Translation_for_perturb> Translations_for_perturb;
+
+ // Store a local triangulation and a handle to its center vertex
+
+ struct Tr_and_VH {
+ public:
+ Tr_and_VH() : m_tr(NULL) {}
+
+ Tr_and_VH(int dim) : m_tr(new Triangulation(dim)) {}
+
+ ~Tr_and_VH() { destroy_triangulation(); }
+
+ Triangulation &construct_triangulation(int dim) {
+ delete m_tr;
+ m_tr = new Triangulation(dim);
+ return tr();
+ }
+
+ void destroy_triangulation() {
+ delete m_tr;
+ m_tr = NULL;
+ }
+
+ Triangulation &tr() { return *m_tr; }
+
+ Triangulation const &tr() const { return *m_tr; }
+
+ Tr_vertex_handle const &center_vertex() const { return m_center_vertex; }
+
+ Tr_vertex_handle &center_vertex() { return m_center_vertex; }
+
+ private:
+ Triangulation *m_tr;
+ Tr_vertex_handle m_center_vertex;
+ };
+
+ public:
+ typedef Basis<K> Tangent_space_basis;
+ typedef Basis<K> Orthogonal_space_basis;
+ typedef std::vector<Tangent_space_basis> TS_container;
+ typedef std::vector<Orthogonal_space_basis> OS_container;
+
+ typedef std::vector<Point> Points;
+
+ typedef boost::container::flat_set<std::size_t> Simplex;
+ typedef std::set<Simplex> Simplex_set;
+
+ private:
+ typedef sps::Kd_tree_search<K, Points> Points_ds;
+ typedef typename Points_ds::KNS_range KNS_range;
+ typedef typename Points_ds::INS_range INS_range;
+
+ typedef std::vector<Tr_and_VH> Tr_container;
+ typedef std::vector<Vector> Vectors;
+
+ // An Incident_simplex is the list of the vertex indices
+ // except the center vertex
+ typedef boost::container::flat_set<std::size_t> Incident_simplex;
+ typedef std::vector<Incident_simplex> Star;
+ typedef std::vector<Star> Stars_container;
+
+ // For transform_iterator
+
+ static const Tr_point &vertex_handle_to_point(Tr_vertex_handle vh) { return vh->point(); }
+
+ template <typename P, typename VH>
+ static const P &vertex_handle_to_point(VH vh) {
+ return vh->point();
+ }
+
+ public:
+ typedef internal::Simplicial_complex Simplicial_complex;
+
+ /** \brief Constructor from a range of points.
+ * Points are copied into the instance, and a search data structure is initialized.
+ * Note the complex is not computed: `compute_tangential_complex` must be called after the creation
+ * of the object.
+ *
+ * @param[in] points Range of points (`Point_range::value_type` must be the same as `Kernel_::Point_d`).
+ * @param[in] intrinsic_dimension Intrinsic dimension of the manifold.
+ * @param[in] k Kernel instance.
+ */
+ template <typename Point_range>
+ Tangential_complex(Point_range points, int intrinsic_dimension,
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ InputIterator first_for_tse, InputIterator last_for_tse,
+#endif
+ const K &k = K())
+ : m_k(k),
+ m_intrinsic_dim(intrinsic_dimension),
+ m_ambient_dim(points.empty() ? 0 : k.point_dimension_d_object()(*points.begin())),
+ m_points(points.begin(), points.end()),
+ m_weights(m_points.size(), FT(0))
+#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
+ ,
+ m_p_perturb_mutexes(NULL)
+#endif
+ ,
+ m_points_ds(m_points),
+ m_last_max_perturb(0.),
+ m_are_tangent_spaces_computed(m_points.size(), false),
+ m_tangent_spaces(m_points.size(), Tangent_space_basis())
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ ,
+ m_orth_spaces(m_points.size(), Orthogonal_space_basis())
+#endif
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ ,
+ m_points_for_tse(first_for_tse, last_for_tse),
+ m_points_ds_for_tse(m_points_for_tse)
+#endif
+ {
+ }
+
+ /// Destructor
+ ~Tangential_complex() {
+#if defined(GUDHI_USE_TBB) && defined(GUDHI_TC_PERTURB_POSITION)
+ delete[] m_p_perturb_mutexes;
+#endif
+ }
+
+ /// Returns the intrinsic dimension of the manifold.
+ int intrinsic_dimension() const { return m_intrinsic_dim; }
+
+ /// Returns the ambient dimension.
+ int ambient_dimension() const { return m_ambient_dim; }
+
+ Points const &points() const { return m_points; }
+
+ /** \brief Returns the point corresponding to the vertex given as parameter.
+ *
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ * @return The point found.
+ */
+ Point get_point(std::size_t vertex) const { return m_points[vertex]; }
+
+ /** \brief Returns the perturbed position of the point corresponding to the vertex given as parameter.
+ *
+ * @param[in] vertex Vertex handle of the point to retrieve.
+ * @return The perturbed position of the point found.
+ */
+ Point get_perturbed_point(std::size_t vertex) const { return compute_perturbed_point(vertex); }
+
+ /// Returns the number of vertices.
+
+ std::size_t number_of_vertices() const { return m_points.size(); }
+
+ void set_weights(const Weights &weights) { m_weights = weights; }
+
+ void set_tangent_planes(const TS_container &tangent_spaces
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ ,
+ const OS_container &orthogonal_spaces
+#endif
+ ) {
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ GUDHI_CHECK(m_points.size() == tangent_spaces.size() && m_points.size() == orthogonal_spaces.size(),
+ std::logic_error("Wrong sizes"));
+#else
+ GUDHI_CHECK(m_points.size() == tangent_spaces.size(), std::logic_error("Wrong sizes"));
+#endif
+ m_tangent_spaces = tangent_spaces;
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ m_orth_spaces = orthogonal_spaces;
+#endif
+ for (std::size_t i = 0; i < m_points.size(); ++i) m_are_tangent_spaces_computed[i] = true;
+ }
+
+ /** \brief Computes the tangential complex.
+ * \exception std::invalid_argument In debug mode, if the computed star dimension is too low. Try to set a bigger
+ * maximal edge length value with `Tangential_complex::set_max_squared_edge_length` if
+ * this happens.
+ */
+ void compute_tangential_complex() {
+#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
+ std::cerr << red << "WARNING: GUDHI_TC_PERFORM_EXTRA_CHECKS is defined. "
+ << "Computation might be slower than usual.\n"
+ << white;
+#endif
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
+ Gudhi::Clock t;
+#endif
+
+ // We need to do that because we don't want the container to copy the
+ // already-computed triangulations (while resizing) since it would
+ // invalidate the vertex handles stored beside the triangulations
+ m_triangulations.resize(m_points.size());
+ m_stars.resize(m_points.size());
+ m_squared_star_spheres_radii_incl_margin.resize(m_points.size(), FT(-1));
+#ifdef GUDHI_TC_PERTURB_POSITION
+ if (m_points.empty())
+ m_translations.clear();
+ else
+ m_translations.resize(m_points.size(), m_k.construct_vector_d_object()(m_ambient_dim));
+#if defined(GUDHI_USE_TBB)
+ delete[] m_p_perturb_mutexes;
+ m_p_perturb_mutexes = new Mutex_for_perturb[m_points.size()];
+#endif
+#endif
+
+#ifdef GUDHI_USE_TBB
+ // Parallel
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()), Compute_tangent_triangulation(*this));
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_points.size(); ++i) compute_tangent_triangulation(i);
+#ifdef GUDHI_USE_TBB
+ }
+#endif // GUDHI_USE_TBB
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_USE_TBB)
+ t.end();
+ std::cerr << "Tangential complex computed in " << t.num_seconds() << " seconds.\n";
+#endif
+ }
+
+ /// \brief Type returned by `Tangential_complex::fix_inconsistencies_using_perturbation`.
+ struct Fix_inconsistencies_info {
+    /// `true` if all inconsistencies could be removed, `false` if the time limit was reached before they were all removed
+ bool success = false;
+ /// number of steps performed
+ unsigned int num_steps = 0;
+ /// initial number of inconsistent stars
+ std::size_t initial_num_inconsistent_stars = 0;
+ /// best number of inconsistent stars during the process
+ std::size_t best_num_inconsistent_stars = 0;
+ /// final number of inconsistent stars
+ std::size_t final_num_inconsistent_stars = 0;
+ };
+
+ /** \brief Attempts to fix inconsistencies by perturbing the point positions.
+ *
+ * @param[in] max_perturb Maximum length of the translations used by the perturbation.
+ * @param[in] time_limit Time limit in seconds. If -1, no time limit is set.
+ */
+ Fix_inconsistencies_info fix_inconsistencies_using_perturbation(double max_perturb, double time_limit = -1.) {
+ Fix_inconsistencies_info info;
+
+ if (time_limit == 0.) return info;
+
+ Gudhi::Clock t;
+
+#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_before = number_of_inconsistent_simplices(false);
+
+ if (std::get<1>(stats_before) == 0) {
+#ifdef DEBUG_TRACES
+ std::cerr << "Nothing to fix.\n";
+#endif
+ info.success = false;
+ return info;
+ }
+#endif // GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+
+ m_last_max_perturb = max_perturb;
+
+ bool done = false;
+ info.best_num_inconsistent_stars = m_triangulations.size();
+ info.num_steps = 0;
+ while (!done) {
+#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+ std::cerr << "\nBefore fix step:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << std::get<0>(stats_before) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): " << red << std::get<1>(stats_before)
+ << white << " (" << 100. * std::get<1>(stats_before) / std::get<0>(stats_before) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << red << std::get<2>(stats_before)
+ << white << " (" << 100. * std::get<2>(stats_before) / m_points.size() << "%)\n";
+#endif
+
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow << "\nAttempt to fix inconsistencies using perturbations - step #" << info.num_steps + 1
+ << "... " << white;
+#endif
+
+ std::size_t num_inconsistent_stars = 0;
+ std::vector<std::size_t> updated_points;
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t_fix_step;
+#endif
+
+ // Parallel
+#if defined(GUDHI_USE_TBB)
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::combinable<std::size_t> num_inconsistencies;
+ tbb::combinable<std::vector<std::size_t> > tls_updated_points;
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_triangulations.size()),
+ Try_to_solve_inconsistencies_in_a_local_triangulation(*this, max_perturb, num_inconsistencies,
+ tls_updated_points));
+ num_inconsistent_stars = num_inconsistencies.combine(std::plus<std::size_t>());
+ updated_points =
+ tls_updated_points.combine([](std::vector<std::size_t> const &x, std::vector<std::size_t> const &y) {
+ std::vector<std::size_t> res;
+ res.reserve(x.size() + y.size());
+ res.insert(res.end(), x.begin(), x.end());
+ res.insert(res.end(), y.begin(), y.end());
+ return res;
+ });
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_triangulations.size(); ++i) {
+ num_inconsistent_stars +=
+ try_to_solve_inconsistencies_in_a_local_triangulation(i, max_perturb, std::back_inserter(updated_points));
+ }
+#if defined(GUDHI_USE_TBB)
+ }
+#endif // GUDHI_USE_TBB
+
+#ifdef GUDHI_TC_PROFILING
+ t_fix_step.end();
+#endif
+
+#if defined(GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES) || defined(DEBUG_TRACES)
+ std::cerr << "\nEncountered during fix:\n"
+ << " * Num stars containing inconsistent simplices: " << red << num_inconsistent_stars << white << " ("
+ << 100. * num_inconsistent_stars / m_points.size() << "%)\n";
+#endif
+
+#ifdef GUDHI_TC_PROFILING
+ std::cerr << yellow << "done in " << t_fix_step.num_seconds() << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+
+ if (num_inconsistent_stars > 0) refresh_tangential_complex(updated_points);
+
+#ifdef GUDHI_TC_PERFORM_EXTRA_CHECKS
+ // Confirm that all stars were actually refreshed
+ std::size_t num_inc_1 = std::get<1>(number_of_inconsistent_simplices(false));
+ refresh_tangential_complex();
+ std::size_t num_inc_2 = std::get<1>(number_of_inconsistent_simplices(false));
+ if (num_inc_1 != num_inc_2)
+ std::cerr << red << "REFRESHMENT CHECK: FAILED. (" << num_inc_1 << " vs " << num_inc_2 << ")\n" << white;
+ else
+ std::cerr << green << "REFRESHMENT CHECK: PASSED.\n" << white;
+#endif
+
+#ifdef GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+ std::tuple<std::size_t, std::size_t, std::size_t> stats_after = number_of_inconsistent_simplices(false);
+
+ std::cerr << "\nAfter fix:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << std::get<0>(stats_after) << "\n"
+ << " * Num inconsistent simplices in stars (incl. duplicates): " << red << std::get<1>(stats_after)
+ << white << " (" << 100. * std::get<1>(stats_after) / std::get<0>(stats_after) << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << red << std::get<2>(stats_after) << white
+ << " (" << 100. * std::get<2>(stats_after) / m_points.size() << "%)\n";
+
+ stats_before = stats_after;
+#endif
+
+ if (info.num_steps == 0) info.initial_num_inconsistent_stars = num_inconsistent_stars;
+
+ if (num_inconsistent_stars < info.best_num_inconsistent_stars)
+ info.best_num_inconsistent_stars = num_inconsistent_stars;
+
+ info.final_num_inconsistent_stars = num_inconsistent_stars;
+
+ done = (num_inconsistent_stars == 0);
+ if (!done) {
+ ++info.num_steps;
+ if (time_limit > 0. && t.num_seconds() > time_limit) {
+#ifdef DEBUG_TRACES
+ std::cerr << red << "Time limit reached.\n" << white;
+#endif
+ info.success = false;
+ return info;
+ }
+ }
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << green << "Fixed!\n" << white;
+#endif
+ info.success = true;
+ return info;
+ }
+
+ /// \brief Type returned by `Tangential_complex::number_of_inconsistent_simplices`.
+ struct Num_inconsistencies {
+ /// Total number of simplices in stars (including duplicates that appear in several stars)
+ std::size_t num_simplices = 0;
+ /// Number of inconsistent simplices
+ std::size_t num_inconsistent_simplices = 0;
+ /// Number of stars containing at least one inconsistent simplex
+ std::size_t num_inconsistent_stars = 0;
+ };
+
+ /// Returns the number of inconsistencies
+ /// @param[in] verbose If true, outputs a message into `std::cerr`.
+
+ Num_inconsistencies number_of_inconsistent_simplices(
+#ifdef DEBUG_TRACES
+ bool verbose = true
+#else
+ bool verbose = false
+#endif
+ ) const {
+ Num_inconsistencies stats;
+
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ bool is_star_inconsistent = false;
+
+ // For each cell
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ // Don't check infinite cells
+ if (is_infinite(*it_inc_simplex)) continue;
+
+ Simplex c = *it_inc_simplex;
+ c.insert(idx); // Add the missing index
+
+ if (!is_simplex_consistent(c)) {
+ ++stats.num_inconsistent_simplices;
+ is_star_inconsistent = true;
+ }
+
+ ++stats.num_simplices;
+ }
+ stats.num_inconsistent_stars += is_star_inconsistent;
+ }
+
+ if (verbose) {
+ std::cerr << "\n==========================================================\n"
+ << "Inconsistencies:\n"
+ << " * Total number of simplices in stars (incl. duplicates): " << stats.num_simplices << "\n"
+ << " * Number of inconsistent simplices in stars (incl. duplicates): "
+ << stats.num_inconsistent_simplices << " ("
+ << 100. * stats.num_inconsistent_simplices / stats.num_simplices << "%)\n"
+ << " * Number of stars containing inconsistent simplices: " << stats.num_inconsistent_stars << " ("
+ << 100. * stats.num_inconsistent_stars / m_points.size() << "%)\n"
+ << "==========================================================\n";
+ }
+
+ return stats;
+ }
+
+ /** \brief Exports the complex into a Simplex_tree.
+ *
+ * \tparam Simplex_tree_ must be a `Simplex_tree`.
+ *
+ * @param[out] tree The result, where each `Vertex_handle` is the index of the
+ * corresponding point in the range provided to the constructor (it can also be
+   * retrieved through the `Tangential_complex::get_point` function).
+   * @param[in] export_inconsistent_simplices Whether to also export inconsistent simplices.
+ * @return The maximal dimension of the simplices.
+ */
+ template <typename Simplex_tree_>
+ int create_complex(Simplex_tree_ &tree,
+ bool export_inconsistent_simplices = true
+ /// \cond ADVANCED_PARAMETERS
+ ,
+ bool export_infinite_simplices = false, Simplex_set *p_inconsistent_simplices = NULL
+ /// \endcond
+ ) const {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow << "\nExporting the TC as a Simplex_tree... " << white;
+#endif
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ int max_dim = -1;
+
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ // For each cell of the star
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ Simplex c = *it_inc_simplex;
+
+ // Don't export infinite cells
+ if (!export_infinite_simplices && is_infinite(c)) continue;
+
+ if (static_cast<int>(c.size()) > max_dim) max_dim = static_cast<int>(c.size());
+ // Add the missing center vertex
+ c.insert(idx);
+
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c)) continue;
+
+ // Try to insert the simplex
+ bool inserted = tree.insert_simplex_and_subfaces(c).second;
+
+ // Inconsistent?
+ if (p_inconsistent_simplices && inserted && !is_simplex_consistent(c)) {
+ p_inconsistent_simplices->insert(c);
+ }
+ }
+ }
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+
+ return max_dim;
+ }
+
+ // First clears the complex then exports the TC into it
+ // Returns the max dimension of the simplices
+ // check_lower_and_higher_dim_simplices : 0 (false), 1 (true), 2 (auto)
+ // If the check is enabled, the function:
+ // - won't insert the simplex if it is already in a higher dim simplex
+ // - will erase any lower-dim simplices that are faces of the new simplex
+  // "auto" (= 2) will enable the check as soon as it encounters a
+ // simplex whose dimension is different from the previous ones.
+ // N.B.: The check is quite expensive.
+
+ int create_complex(Simplicial_complex &complex, bool export_inconsistent_simplices = true,
+ bool export_infinite_simplices = false, int check_lower_and_higher_dim_simplices = 2,
+ Simplex_set *p_inconsistent_simplices = NULL) const {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow << "\nExporting the TC as a Simplicial_complex... " << white;
+#endif
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ int max_dim = -1;
+ complex.clear();
+
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ // For each cell of the star
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ Simplex c = *it_inc_simplex;
+
+ // Don't export infinite cells
+ if (!export_infinite_simplices && is_infinite(c)) continue;
+
+ if (static_cast<int>(c.size()) > max_dim) max_dim = static_cast<int>(c.size());
+ // Add the missing center vertex
+ c.insert(idx);
+
+ if (!export_inconsistent_simplices && !is_simplex_consistent(c)) continue;
+
+ // Unusual simplex dim?
+ if (check_lower_and_higher_dim_simplices == 2 && max_dim != -1 && static_cast<int>(c.size()) != max_dim) {
+ // Let's activate the check
+ std::cerr << red
+ << "Info: check_lower_and_higher_dim_simplices ACTIVATED. "
+                       "Export might take some time...\n"
+ << white;
+ check_lower_and_higher_dim_simplices = 1;
+ }
+
+ // Try to insert the simplex
+ bool added = complex.add_simplex(c, check_lower_and_higher_dim_simplices == 1);
+
+ // Inconsistent?
+ if (p_inconsistent_simplices && added && !is_simplex_consistent(c)) {
+ p_inconsistent_simplices->insert(c);
+ }
+ }
+ }
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+
+ return max_dim;
+ }
+
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(const Simplicial_complex &complex, std::ostream &os,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL,
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
+ return export_to_off(os, false, p_simpl_to_color_in_red, p_simpl_to_color_in_green, p_simpl_to_color_in_blue,
+ &complex, point_projection);
+ }
+
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_to_off(std::ostream &os, bool color_inconsistencies = false,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL,
+ const Simplicial_complex *p_complex = NULL,
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
+ if (m_points.empty()) return os;
+
+ if (m_ambient_dim < 2) {
+ std::cerr << "Error: export_to_off => ambient dimension should be >= 2.\n";
+ os << "Error: export_to_off => ambient dimension should be >= 2.\n";
+ return os;
+ }
+ if (m_ambient_dim > 3) {
+ std::cerr << "Warning: export_to_off => ambient dimension should be "
+ "<= 3. Only the first 3 coordinates will be exported.\n";
+ }
+
+ if (m_intrinsic_dim < 1 || m_intrinsic_dim > 3) {
+ std::cerr << "Error: export_to_off => intrinsic dimension should be "
+ "between 1 and 3.\n";
+ os << "Error: export_to_off => intrinsic dimension should be "
+ "between 1 and 3.\n";
+ return os;
+ }
+
+ std::stringstream output;
+ std::size_t num_simplices, num_vertices;
+ export_vertices_to_off(output, num_vertices, false, point_projection);
+ if (p_complex) {
+ export_simplices_to_off(*p_complex, output, num_simplices, p_simpl_to_color_in_red, p_simpl_to_color_in_green,
+ p_simpl_to_color_in_blue);
+ } else {
+ export_simplices_to_off(output, num_simplices, color_inconsistencies, p_simpl_to_color_in_red,
+ p_simpl_to_color_in_green, p_simpl_to_color_in_blue);
+ }
+
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ os << "N";
+#endif
+
+ os << "OFF \n"
+ << num_vertices << " " << num_simplices << " "
+ << "0 \n"
+ << output.str();
+
+ return os;
+ }
+
+ private:
+ void refresh_tangential_complex() {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow << "\nRefreshing TC... " << white;
+#endif
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+#ifdef GUDHI_USE_TBB
+ // Parallel
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()), Compute_tangent_triangulation(*this));
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_points.size(); ++i) compute_tangent_triangulation(i);
+#ifdef GUDHI_USE_TBB
+ }
+#endif // GUDHI_USE_TBB
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+ }
+
+ // If the list of perturbed points is provided, it is much faster
+ template <typename Point_indices_range>
+ void refresh_tangential_complex(Point_indices_range const &perturbed_points_indices) {
+#if defined(DEBUG_TRACES) || defined(GUDHI_TC_PROFILING)
+ std::cerr << yellow << "\nRefreshing TC... " << white;
+#endif
+
+#ifdef GUDHI_TC_PROFILING
+ Gudhi::Clock t;
+#endif
+
+ // ANN tree containing only the perturbed points
+ Points_ds updated_pts_ds(m_points, perturbed_points_indices);
+
+#ifdef GUDHI_USE_TBB
+ // Parallel
+ if (boost::is_convertible<Concurrency_tag, CGAL::Parallel_tag>::value) {
+ tbb::parallel_for(tbb::blocked_range<size_t>(0, m_points.size()),
+ Refresh_tangent_triangulation(*this, updated_pts_ds));
+ } else {
+#endif // GUDHI_USE_TBB
+ // Sequential
+ for (std::size_t i = 0; i < m_points.size(); ++i) refresh_tangent_triangulation(i, updated_pts_ds);
+#ifdef GUDHI_USE_TBB
+ }
+#endif // GUDHI_USE_TBB
+
+#ifdef GUDHI_TC_PROFILING
+ t.end();
+ std::cerr << yellow << "done in " << t.num_seconds() << " seconds.\n" << white;
+#elif defined(DEBUG_TRACES)
+ std::cerr << yellow << "done.\n" << white;
+#endif
+ }
+
+ void export_inconsistent_stars_to_OFF_files(std::string const &filename_base) const {
+ // For each triangulation
+ for (std::size_t idx = 0; idx < m_points.size(); ++idx) {
+ // We build a SC along the way in case it's inconsistent
+ Simplicial_complex sc;
+ // For each cell
+ bool is_inconsistent = false;
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ // Skip infinite cells
+ if (is_infinite(*it_inc_simplex)) continue;
+
+ Simplex c = *it_inc_simplex;
+ c.insert(idx); // Add the missing index
+
+ sc.add_simplex(c);
+
+ // If we do not already know this star is inconsistent, test it
+ if (!is_inconsistent && !is_simplex_consistent(c)) is_inconsistent = true;
+ }
+
+ if (is_inconsistent) {
+ // Export star to OFF file
+ std::stringstream output_filename;
+ output_filename << filename_base << "_" << idx << ".off";
+ std::ofstream off_stream(output_filename.str().c_str());
+ export_to_off(sc, off_stream);
+ }
+ }
+ }
+
+ class Compare_distance_to_ref_point {
+ public:
+ Compare_distance_to_ref_point(Point const &ref, K const &k) : m_ref(ref), m_k(k) {}
+
+ bool operator()(Point const &p1, Point const &p2) {
+ typename K::Squared_distance_d sqdist = m_k.squared_distance_d_object();
+ return sqdist(p1, m_ref) < sqdist(p2, m_ref);
+ }
+
+ private:
+ Point const &m_ref;
+ K const &m_k;
+ };
+
+#ifdef GUDHI_USE_TBB
+ // Functor for compute_tangential_complex function
+ class Compute_tangent_triangulation {
+ Tangential_complex &m_tc;
+
+ public:
+ // Constructor
+ Compute_tangent_triangulation(Tangential_complex &tc) : m_tc(tc) {}
+
+ // Constructor
+ Compute_tangent_triangulation(const Compute_tangent_triangulation &ctt) : m_tc(ctt.m_tc) {}
+
+ // operator()
+ void operator()(const tbb::blocked_range<size_t> &r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) m_tc.compute_tangent_triangulation(i);
+ }
+ };
+
+ // Functor for refresh_tangential_complex function
+ class Refresh_tangent_triangulation {
+ Tangential_complex &m_tc;
+ Points_ds const &m_updated_pts_ds;
+
+ public:
+ // Constructor
+ Refresh_tangent_triangulation(Tangential_complex &tc, Points_ds const &updated_pts_ds)
+ : m_tc(tc), m_updated_pts_ds(updated_pts_ds) {}
+
+ // Constructor
+ Refresh_tangent_triangulation(const Refresh_tangent_triangulation &ctt)
+ : m_tc(ctt.m_tc), m_updated_pts_ds(ctt.m_updated_pts_ds) {}
+
+ // operator()
+ void operator()(const tbb::blocked_range<size_t> &r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) m_tc.refresh_tangent_triangulation(i, m_updated_pts_ds);
+ }
+ };
+#endif // GUDHI_USE_TBB
+
+ bool is_infinite(Simplex const &s) const { return *s.rbegin() == (std::numeric_limits<std::size_t>::max)(); }
+
+ // Output: "triangulation" is a Regular Triangulation containing at least the
+ // star of "center_pt"
+ // Returns the handle of the center vertex
+ Tr_vertex_handle compute_star(std::size_t i, const Point &center_pt, const Tangent_space_basis &tsb,
+ Triangulation &triangulation, bool verbose = false) {
+ int tangent_space_dim = tsb.dimension();
+ const Tr_traits &local_tr_traits = triangulation.geom_traits();
+
+ // Kernel functor & objects
+ typename K::Squared_distance_d k_sqdist = m_k.squared_distance_d_object();
+
+ // Triangulation's traits functor & objects
+ typename Tr_traits::Compute_weight_d point_weight = local_tr_traits.compute_weight_d_object();
+ typename Tr_traits::Power_center_d power_center = local_tr_traits.power_center_d_object();
+
+ //***************************************************
+ // Build a minimal triangulation in the tangent space
+ // (we only need the star of p)
+ //***************************************************
+
+ // Insert p
+ Tr_point proj_wp;
+ if (i == tsb.origin()) {
+ // Insert {(0, 0, 0...), m_weights[i]}
+ proj_wp = local_tr_traits.construct_weighted_point_d_object()(
+ local_tr_traits.construct_point_d_object()(tangent_space_dim, CGAL::ORIGIN), m_weights[i]);
+ } else {
+ const Weighted_point &wp = compute_perturbed_weighted_point(i);
+ proj_wp = project_point_and_compute_weight(wp, tsb, local_tr_traits);
+ }
+
+ Tr_vertex_handle center_vertex = triangulation.insert(proj_wp);
+ center_vertex->data() = i;
+ if (verbose) std::cerr << "* Inserted point #" << i << "\n";
+
+#ifdef GUDHI_TC_VERY_VERBOSE
+ std::size_t num_attempts_to_insert_points = 1;
+ std::size_t num_inserted_points = 1;
+#endif
+ // const int NUM_NEIGHBORS = 150;
+ // KNS_range ins_range = m_points_ds.k_nearest_neighbors(center_pt, NUM_NEIGHBORS);
+ INS_range ins_range = m_points_ds.incremental_nearest_neighbors(center_pt);
+
+    // While building the local triangulation, we keep the radius
+    // of the "star sphere" centered at "center_vertex",
+    // which contains all the circumspheres of the star of "center_vertex".
+    // If m_max_squared_edge_length is set, the maximal radius of the "star sphere"
+    // is at most the square root of m_max_squared_edge_length.
+ boost::optional<FT> squared_star_sphere_radius_plus_margin = m_max_squared_edge_length;
+
+ // Insert points until we find a point which is outside "star sphere"
+ for (auto nn_it = ins_range.begin(); nn_it != ins_range.end(); ++nn_it) {
+ std::size_t neighbor_point_idx = nn_it->first;
+
+ // ith point = p, which is already inserted
+ if (neighbor_point_idx != i) {
+ // No need to lock the Mutex_for_perturb here since this will not be
+ // called while other threads are perturbing the positions
+ Point neighbor_pt;
+ FT neighbor_weight;
+ compute_perturbed_weighted_point(neighbor_point_idx, neighbor_pt, neighbor_weight);
+ GUDHI_CHECK(!m_max_squared_edge_length ||
+ squared_star_sphere_radius_plus_margin.value() <= m_max_squared_edge_length.value(),
+ std::invalid_argument("Tangential_complex::compute_star - set a bigger value with set_max_squared_edge_length."));
+ if (squared_star_sphere_radius_plus_margin &&
+ k_sqdist(center_pt, neighbor_pt) > squared_star_sphere_radius_plus_margin.value()) {
+ GUDHI_CHECK(triangulation.current_dimension() >= tangent_space_dim,
+ std::invalid_argument("Tangential_complex::compute_star - Dimension of the star is only " + \
+ std::to_string(triangulation.current_dimension())));
+ break;
+ }
+
+ Tr_point proj_pt = project_point_and_compute_weight(neighbor_pt, neighbor_weight, tsb, local_tr_traits);
+
+#ifdef GUDHI_TC_VERY_VERBOSE
+ ++num_attempts_to_insert_points;
+#endif
+
+ Tr_vertex_handle vh = triangulation.insert_if_in_star(proj_pt, center_vertex);
+ // Tr_vertex_handle vh = triangulation.insert(proj_pt);
+ if (vh != Tr_vertex_handle() && vh->data() == (std::numeric_limits<std::size_t>::max)()) {
+#ifdef GUDHI_TC_VERY_VERBOSE
+ ++num_inserted_points;
+#endif
+ if (verbose) std::cerr << "* Inserted point #" << neighbor_point_idx << "\n";
+
+ vh->data() = neighbor_point_idx;
+
+ // Let's recompute squared_star_sphere_radius_plus_margin
+ if (triangulation.current_dimension() >= tangent_space_dim) {
+ squared_star_sphere_radius_plus_margin = boost::none;
+ // Get the incident cells and look for the biggest circumsphere
+ std::vector<Tr_full_cell_handle> incident_cells;
+ triangulation.incident_full_cells(center_vertex, std::back_inserter(incident_cells));
+ for (typename std::vector<Tr_full_cell_handle>::iterator cit = incident_cells.begin();
+ cit != incident_cells.end(); ++cit) {
+ Tr_full_cell_handle cell = *cit;
+ if (triangulation.is_infinite(cell)) {
+ squared_star_sphere_radius_plus_margin = boost::none;
+ break;
+ } else {
+ // Note that this uses the perturbed point since it uses
+ // the points of the local triangulation
+ Tr_point c =
+ power_center(boost::make_transform_iterator(cell->vertices_begin(),
+ vertex_handle_to_point<Tr_point, Tr_vertex_handle>),
+ boost::make_transform_iterator(cell->vertices_end(),
+ vertex_handle_to_point<Tr_point, Tr_vertex_handle>));
+
+ FT sq_power_sphere_diam = 4 * point_weight(c);
+
+ if (!squared_star_sphere_radius_plus_margin ||
+ sq_power_sphere_diam > squared_star_sphere_radius_plus_margin.value()) {
+ squared_star_sphere_radius_plus_margin = sq_power_sphere_diam;
+ }
+ }
+ }
+
+ // Let's add the margin, now
+ // The value depends on whether we perturb weight or position
+ if (squared_star_sphere_radius_plus_margin) {
+ // "2*m_last_max_perturb" because both points can be perturbed
+ squared_star_sphere_radius_plus_margin =
+ CGAL::square(std::sqrt(squared_star_sphere_radius_plus_margin.value()) + 2 * m_last_max_perturb);
+
+ // Reduce the square radius to m_max_squared_edge_length if necessary
+ if (m_max_squared_edge_length && squared_star_sphere_radius_plus_margin.value() > m_max_squared_edge_length.value()) {
+ squared_star_sphere_radius_plus_margin = m_max_squared_edge_length.value();
+ }
+
+ // Save it in `m_squared_star_spheres_radii_incl_margin`
+ m_squared_star_spheres_radii_incl_margin[i] = squared_star_sphere_radius_plus_margin.value();
+ } else {
+ if (m_max_squared_edge_length) {
+ squared_star_sphere_radius_plus_margin = m_max_squared_edge_length.value();
+ m_squared_star_spheres_radii_incl_margin[i] = m_max_squared_edge_length.value();
+ } else {
+ m_squared_star_spheres_radii_incl_margin[i] = FT(-1);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ return center_vertex;
+ }
+
+ void refresh_tangent_triangulation(std::size_t i, Points_ds const &updated_pts_ds, bool verbose = false) {
+ if (verbose) std::cerr << "** Refreshing tangent tri #" << i << " **\n";
+
+ if (m_squared_star_spheres_radii_incl_margin[i] == FT(-1)) return compute_tangent_triangulation(i, verbose);
+
+ Point center_point = compute_perturbed_point(i);
+    // Among the updated points, which one is closest to our center point?
+ std::size_t closest_pt_index = updated_pts_ds.k_nearest_neighbors(center_point, 1, false).begin()->first;
+
+ typename K::Construct_weighted_point_d k_constr_wp = m_k.construct_weighted_point_d_object();
+ typename K::Power_distance_d k_power_dist = m_k.power_distance_d_object();
+
+ // Construct a weighted point equivalent to the star sphere
+ Weighted_point star_sphere = k_constr_wp(compute_perturbed_point(i), m_squared_star_spheres_radii_incl_margin[i]);
+ Weighted_point closest_updated_point = compute_perturbed_weighted_point(closest_pt_index);
+
+ // Is the "closest point" inside our star sphere?
+ if (k_power_dist(star_sphere, closest_updated_point) <= FT(0)) compute_tangent_triangulation(i, verbose);
+ }
+
+ void compute_tangent_triangulation(std::size_t i, bool verbose = false) {
+ if (verbose) std::cerr << "** Computing tangent tri #" << i << " **\n";
+ // std::cerr << "***********************************************\n";
+
+ // No need to lock the mutex here since this will not be called while
+ // other threads are perturbing the positions
+ const Point center_pt = compute_perturbed_point(i);
+ Tangent_space_basis &tsb = m_tangent_spaces[i];
+
+ // Estimate the tangent space
+ if (!m_are_tangent_spaces_computed[i]) {
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ tsb = compute_tangent_space(center_pt, i, true /*normalize*/, &m_orth_spaces[i]);
+#else
+ tsb = compute_tangent_space(center_pt, i);
+#endif
+ }
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
+ Gudhi::Clock t;
+#endif
+ int tangent_space_dim = tangent_basis_dim(i);
+ Triangulation &local_tr = m_triangulations[i].construct_triangulation(tangent_space_dim);
+
+ m_triangulations[i].center_vertex() = compute_star(i, center_pt, tsb, local_tr, verbose);
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
+ t.end();
+ std::cerr << " - triangulation construction: " << t.num_seconds() << " s.\n";
+ t.reset();
+#endif
+
+#ifdef GUDHI_TC_VERY_VERBOSE
+ std::cerr << "Inserted " << num_inserted_points << " points / " << num_attempts_to_insert_points
+              << " attempts to compute the star\n";
+#endif
+
+ update_star(i);
+
+#if defined(GUDHI_TC_PROFILING) && defined(GUDHI_TC_VERY_VERBOSE)
+ t.end();
+ std::cerr << " - update_star: " << t.num_seconds() << " s.\n";
+#endif
+ }
+
+ // Updates m_stars[i] directly from m_triangulations[i]
+
+ void update_star(std::size_t i) {
+ Star &star = m_stars[i];
+ star.clear();
+ Triangulation &local_tr = m_triangulations[i].tr();
+ Tr_vertex_handle center_vertex = m_triangulations[i].center_vertex();
+ int cur_dim_plus_1 = local_tr.current_dimension() + 1;
+
+ std::vector<Tr_full_cell_handle> incident_cells;
+ local_tr.incident_full_cells(center_vertex, std::back_inserter(incident_cells));
+
+ typename std::vector<Tr_full_cell_handle>::const_iterator it_c = incident_cells.begin();
+ typename std::vector<Tr_full_cell_handle>::const_iterator it_c_end = incident_cells.end();
+ // For each cell
+ for (; it_c != it_c_end; ++it_c) {
+ // Will contain all indices except center_vertex
+ Incident_simplex incident_simplex;
+ for (int j = 0; j < cur_dim_plus_1; ++j) {
+ std::size_t index = (*it_c)->vertex(j)->data();
+ if (index != i) incident_simplex.insert(index);
+ }
+ GUDHI_CHECK(incident_simplex.size() == cur_dim_plus_1 - 1,
+ std::logic_error("update_star: wrong size of incident simplex"));
+ star.push_back(incident_simplex);
+ }
+ }
+
+ // Estimates tangent subspaces using PCA
+
+ Tangent_space_basis compute_tangent_space(const Point &p, const std::size_t i, bool normalize_basis = true,
+ Orthogonal_space_basis *p_orth_space_basis = NULL) {
+ unsigned int num_pts_for_pca =
+ (std::min)(static_cast<unsigned int>(std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int>(m_points.size()));
+
+ // Kernel functors
+ typename K::Construct_vector_d constr_vec = m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points_for_tse;
+#else
+ KNS_range kns_range = m_points_ds.k_nearest_neighbors(p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points;
+#endif
+
+ // One row = one point
+ Eigen::MatrixXd mat_points(num_pts_for_pca, m_ambient_dim);
+ auto nn_it = kns_range.begin();
+ for (unsigned int j = 0; j < num_pts_for_pca && nn_it != kns_range.end(); ++j, ++nn_it) {
+      for (int coord_i = 0; coord_i < m_ambient_dim; ++coord_i) {
+        mat_points(j, coord_i) = CGAL::to_double(coord(points_for_pca[nn_it->first], coord_i));
+ }
+ }
+ Eigen::MatrixXd centered = mat_points.rowwise() - mat_points.colwise().mean();
+ Eigen::MatrixXd cov = centered.adjoint() * centered;
+ Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(cov);
+
+ Tangent_space_basis tsb(i); // p = compute_perturbed_point(i) here
+
+ // The eigenvectors are sorted in increasing order of their corresponding
+ // eigenvalues
+ for (int j = m_ambient_dim - 1; j >= m_ambient_dim - m_intrinsic_dim; --j) {
+ if (normalize_basis) {
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim);
+ tsb.push_back(normalize_vector(v, m_k));
+ } else {
+ tsb.push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim));
+ }
+ }
+
+ if (p_orth_space_basis) {
+ p_orth_space_basis->set_origin(i);
+ for (int j = m_ambient_dim - m_intrinsic_dim - 1; j >= 0; --j) {
+ if (normalize_basis) {
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim);
+ p_orth_space_basis->push_back(normalize_vector(v, m_k));
+ } else {
+ p_orth_space_basis->push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim));
+ }
+ }
+ }
+
+ m_are_tangent_spaces_computed[i] = true;
+
+ return tsb;
+ }
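+
+  // A minimal sketch of the PCA step above, isolated from the CGAL kernel and
+  // written with raw Eigen types (illustration only, not used by the library).
+  // Eigen::SelfAdjointEigenSolver sorts the eigenvalues in increasing order, so
+  // the tangent basis is read from the *last* intrinsic_dim eigenvector columns
+  // and the orthogonal space from the remaining ones:
+  //
+  //   #include <Eigen/Dense>
+  //   #include <vector>
+  //
+  //   // One row of `pts` per nearest neighbor of the query point
+  //   std::vector<Eigen::VectorXd> tangent_basis(const Eigen::MatrixXd &pts,
+  //                                              int intrinsic_dim) {
+  //     Eigen::MatrixXd centered = pts.rowwise() - pts.colwise().mean();
+  //     Eigen::MatrixXd cov = centered.adjoint() * centered;
+  //     Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(cov);
+  //     std::vector<Eigen::VectorXd> basis;
+  //     const int d = static_cast<int>(pts.cols());
+  //     for (int j = d - 1; j >= d - intrinsic_dim; --j)
+  //       basis.push_back(eig.eigenvectors().col(j).normalized());
+  //     return basis;
+  //   }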
+
+ // Compute the space tangent to a simplex (p1, p2, ... pn)
+ // TODO(CJ): Improve this?
+  // Basically, it takes all the points neighboring p1, p2, ..., pn and runs PCA
+  // on them. Note that most points are duplicated.
+
+ Tangent_space_basis compute_tangent_space(const Simplex &s, bool normalize_basis = true) {
+ unsigned int num_pts_for_pca =
+ (std::min)(static_cast<unsigned int>(std::pow(GUDHI_TC_BASE_VALUE_FOR_PCA, m_intrinsic_dim)),
+ static_cast<unsigned int>(m_points.size()));
+
+ // Kernel functors
+ typename K::Construct_vector_d constr_vec = m_k.construct_vector_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+ typename K::Squared_length_d sqlen = m_k.squared_length_d_object();
+ typename K::Scaled_vector_d scaled_vec = m_k.scaled_vector_d_object();
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_vectors_d diff_vec = m_k.difference_of_vectors_d_object();
+
+ // One row = one point
+ Eigen::MatrixXd mat_points(s.size() * num_pts_for_pca, m_ambient_dim);
+ unsigned int current_row = 0;
+
+ for (Simplex::const_iterator it_index = s.begin(); it_index != s.end(); ++it_index) {
+ const Point &p = m_points[*it_index];
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ KNS_range kns_range = m_points_ds_for_tse.k_nearest_neighbors(p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points_for_tse;
+#else
+ KNS_range kns_range = m_points_ds.k_nearest_neighbors(p, num_pts_for_pca, false);
+ const Points &points_for_pca = m_points;
+#endif
+
+      // Fill up to num_pts_for_pca rows for this vertex (the kd-tree may
+      // return fewer neighbors than requested)
+      auto nn_it = kns_range.begin();
+      for (unsigned int j = 0; j < num_pts_for_pca && nn_it != kns_range.end(); ++j, ++nn_it, ++current_row) {
+        for (int coord_i = 0; coord_i < m_ambient_dim; ++coord_i) {
+          mat_points(current_row, coord_i) = CGAL::to_double(coord(points_for_pca[nn_it->first], coord_i));
+        }
+      }
+    }
+    // Only the first current_row rows have been filled; restrict the PCA to them
+    Eigen::MatrixXd used_rows = mat_points.topRows(current_row);
+    Eigen::MatrixXd centered = used_rows.rowwise() - used_rows.colwise().mean();
+ Eigen::MatrixXd cov = centered.adjoint() * centered;
+ Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> eig(cov);
+
+ Tangent_space_basis tsb;
+
+ // The eigenvectors are sorted in increasing order of their corresponding
+ // eigenvalues
+ for (int j = m_ambient_dim - 1; j >= m_ambient_dim - m_intrinsic_dim; --j) {
+ if (normalize_basis) {
+ Vector v = constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim);
+ tsb.push_back(normalize_vector(v, m_k));
+ } else {
+ tsb.push_back(constr_vec(m_ambient_dim, eig.eigenvectors().col(j).data(),
+ eig.eigenvectors().col(j).data() + m_ambient_dim));
+ }
+ }
+
+ return tsb;
+ }
+
+  // Returns the dimension of the i-th tangent space basis (used as the
+  // dimension of the i-th local triangulation)
+
+ int tangent_basis_dim(std::size_t i) const { return m_tangent_spaces[i].dimension(); }
+
+ Point compute_perturbed_point(std::size_t pt_idx) const {
+#ifdef GUDHI_TC_PERTURB_POSITION
+ return m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]);
+#else
+ return m_points[pt_idx];
+#endif
+ }
+
+ void compute_perturbed_weighted_point(std::size_t pt_idx, Point &p, FT &w) const {
+#ifdef GUDHI_TC_PERTURB_POSITION
+ p = m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]);
+#else
+ p = m_points[pt_idx];
+#endif
+ w = m_weights[pt_idx];
+ }
+
+ Weighted_point compute_perturbed_weighted_point(std::size_t pt_idx) const {
+ typename K::Construct_weighted_point_d k_constr_wp = m_k.construct_weighted_point_d_object();
+
+ Weighted_point wp = k_constr_wp(
+#ifdef GUDHI_TC_PERTURB_POSITION
+ m_k.translated_point_d_object()(m_points[pt_idx], m_translations[pt_idx]),
+#else
+ m_points[pt_idx],
+#endif
+ m_weights[pt_idx]);
+
+ return wp;
+ }
+
+ Point unproject_point(const Tr_point &p, const Tangent_space_basis &tsb, const Tr_traits &tr_traits) const {
+ typename K::Translated_point_d k_transl = m_k.translated_point_d_object();
+ typename K::Scaled_vector_d k_scaled_vec = m_k.scaled_vector_d_object();
+ typename Tr_traits::Compute_coordinate_d coord = tr_traits.compute_coordinate_d_object();
+
+ Point global_point = compute_perturbed_point(tsb.origin());
+ for (int i = 0; i < m_intrinsic_dim; ++i) global_point = k_transl(global_point, k_scaled_vec(tsb[i], coord(p, i)));
+
+ return global_point;
+ }
+
+  // Project the point onto the tangent space
+ // Resulting point coords are expressed in tsb's space
+ Tr_bare_point project_point(const Point &p, const Tangent_space_basis &tsb, const Tr_traits &tr_traits) const {
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points = m_k.difference_of_points_d_object();
+
+ Vector v = diff_points(p, compute_perturbed_point(tsb.origin()));
+
+ std::vector<FT> coords;
+    // Local coordinates of the projected point, expressed in tsb's basis
+ coords.reserve(tsb.dimension());
+ for (std::size_t i = 0; i < m_intrinsic_dim; ++i) {
+ // Local coords are given by the scalar product with the vectors of tsb
+ FT coord = scalar_pdct(v, tsb[i]);
+ coords.push_back(coord);
+ }
+
+ return tr_traits.construct_point_d_object()(static_cast<int>(coords.size()), coords.begin(), coords.end());
+ }
+
+  // Project the point onto the tangent space
+ // The weight will be the squared distance between p and the projection of p
+ // Resulting point coords are expressed in tsb's space
+
+ Tr_point project_point_and_compute_weight(const Weighted_point &wp, const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ typename K::Point_drop_weight_d k_drop_w = m_k.point_drop_weight_d_object();
+ typename K::Compute_weight_d k_point_weight = m_k.compute_weight_d_object();
+ return project_point_and_compute_weight(k_drop_w(wp), k_point_weight(wp), tsb, tr_traits);
+ }
+
+ // Same as above, with slightly different parameters
+ Tr_point project_point_and_compute_weight(const Point &p, const FT w, const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ const int point_dim = m_k.point_dimension_d_object()(p);
+
+ typename K::Construct_point_d constr_pt = m_k.construct_point_d_object();
+ typename K::Scalar_product_d scalar_pdct = m_k.scalar_product_d_object();
+ typename K::Difference_of_points_d diff_points = m_k.difference_of_points_d_object();
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+ typename K::Construct_cartesian_const_iterator_d ccci = m_k.construct_cartesian_const_iterator_d_object();
+
+ Point origin = compute_perturbed_point(tsb.origin());
+ Vector v = diff_points(p, origin);
+
+ // Same dimension? Then the weight is 0
+ bool same_dim = (point_dim == tsb.dimension());
+
+ std::vector<FT> coords;
+    // Ambient-space coords of the projected point
+ std::vector<FT> p_proj(ccci(origin), ccci(origin, 0));
+ coords.reserve(tsb.dimension());
+ for (int i = 0; i < tsb.dimension(); ++i) {
+ // Local coords are given by the scalar product with the vectors of tsb
+ FT c = scalar_pdct(v, tsb[i]);
+ coords.push_back(c);
+
+ // p_proj += c * tsb[i]
+ if (!same_dim) {
+ for (int j = 0; j < point_dim; ++j) p_proj[j] += c * coord(tsb[i], j);
+ }
+ }
+
+ // Same dimension? Then the weight is 0
+ FT sq_dist_to_proj_pt = 0;
+ if (!same_dim) {
+ Point projected_pt = constr_pt(point_dim, p_proj.begin(), p_proj.end());
+ sq_dist_to_proj_pt = m_k.squared_distance_d_object()(p, projected_pt);
+ }
+
+ return tr_traits.construct_weighted_point_d_object()(
+ tr_traits.construct_point_d_object()(static_cast<int>(coords.size()), coords.begin(), coords.end()),
+ w - sq_dist_to_proj_pt);
+ }
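+
+  // Why the weight above is w - ||p - proj(p)||^2 (sketch of the argument):
+  // for any point q lying in the tangent flat, p - proj(p) is orthogonal to
+  // q - proj(p), so by Pythagoras
+  //   ||q - p||^2 = ||q - proj(p)||^2 + ||p - proj(p)||^2.
+  // Hence the power distance of q to (p, w), i.e. ||q - p||^2 - w, equals the
+  // power distance of q to (proj(p), w - ||p - proj(p)||^2): projecting the
+  // site and shrinking its weight preserves power distances inside the
+  // tangent space.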
+
+ // Project all the points in the tangent space
+
+ template <typename Indexed_point_range>
+ std::vector<Tr_point> project_points_and_compute_weights(const Indexed_point_range &point_indices,
+ const Tangent_space_basis &tsb,
+ const Tr_traits &tr_traits) const {
+ std::vector<Tr_point> ret;
+ for (typename Indexed_point_range::const_iterator it = point_indices.begin(), it_end = point_indices.end();
+ it != it_end; ++it) {
+ ret.push_back(project_point_and_compute_weight(compute_perturbed_weighted_point(*it), tsb, tr_traits));
+ }
+ return ret;
+ }
+
+ // A simplex here is a local tri's full cell handle
+
+ bool is_simplex_consistent(Tr_full_cell_handle fch, int cur_dim) const {
+ Simplex c;
+ for (int i = 0; i < cur_dim + 1; ++i) {
+ std::size_t data = fch->vertex(i)->data();
+ c.insert(data);
+ }
+ return is_simplex_consistent(c);
+ }
+
+ // A simplex here is a list of point indices
+ // TODO(CJ): improve it like the other "is_simplex_consistent" below
+
+ bool is_simplex_consistent(Simplex const &simplex) const {
+ // Check if the simplex is in the stars of all its vertices
+ Simplex::const_iterator it_point_idx = simplex.begin();
+    // For each point p of the simplex, we go through the incident cells of p
+ // and we check if "simplex" is among them
+ for (; it_point_idx != simplex.end(); ++it_point_idx) {
+ std::size_t point_idx = *it_point_idx;
+ // Don't check infinite simplices
+ if (point_idx == (std::numeric_limits<std::size_t>::max)()) continue;
+
+ Star const &star = m_stars[point_idx];
+
+ // What we're looking for is "simplex" \ point_idx
+ Incident_simplex is_to_find = simplex;
+ is_to_find.erase(point_idx);
+
+ // For each cell
+ if (std::find(star.begin(), star.end(), is_to_find) == star.end()) return false;
+ }
+
+ return true;
+ }
+
+ // A simplex here is a list of point indices
+ // "s" contains all the points of the simplex except "center_point"
+ // This function returns the points whose star doesn't contain the simplex
+ // N.B.: the function assumes that the simplex is contained in
+ // star(center_point)
+
+ template <typename OutputIterator> // value_type = std::size_t
+ bool is_simplex_consistent(std::size_t center_point,
+ Incident_simplex const &s, // without "center_point"
+ OutputIterator points_whose_star_does_not_contain_s,
+ bool check_also_in_non_maximal_faces = false) const {
+ Simplex full_simplex = s;
+ full_simplex.insert(center_point);
+
+ // Check if the simplex is in the stars of all its vertices
+ Incident_simplex::const_iterator it_point_idx = s.begin();
+    // For each point p of the simplex, we go through the incident cells of p
+ // and we check if "simplex" is among them
+ for (; it_point_idx != s.end(); ++it_point_idx) {
+ std::size_t point_idx = *it_point_idx;
+ // Don't check infinite simplices
+ if (point_idx == (std::numeric_limits<std::size_t>::max)()) continue;
+
+ Star const &star = m_stars[point_idx];
+
+ // What we're looking for is full_simplex \ point_idx
+ Incident_simplex is_to_find = full_simplex;
+ is_to_find.erase(point_idx);
+
+ if (check_also_in_non_maximal_faces) {
+        // For each simplex "is" of the star, check if is_to_find is
+        // included in "is"
+ bool found = false;
+ for (Star::const_iterator is = star.begin(), is_end = star.end(); !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(), is_to_find.begin(), is_to_find.end())) found = true;
+ }
+
+ if (!found) *points_whose_star_does_not_contain_s++ = point_idx;
+ } else {
+ // Does the star contain is_to_find?
+ if (std::find(star.begin(), star.end(), is_to_find) == star.end())
+ *points_whose_star_does_not_contain_s++ = point_idx;
+ }
+ }
+
+ return true;
+ }
+
+ // A simplex here is a list of point indices
+ // It looks for s in star(p).
+ // "s" contains all the points of the simplex except p.
+ bool is_simplex_in_star(std::size_t p, Incident_simplex const &s, bool check_also_in_non_maximal_faces = true) const {
+ Star const &star = m_stars[p];
+
+ if (check_also_in_non_maximal_faces) {
+      // For each simplex "is" of the star, check if "s" is
+      // included in "is"
+ bool found = false;
+ for (Star::const_iterator is = star.begin(), is_end = star.end(); !found && is != is_end; ++is) {
+ if (std::includes(is->begin(), is->end(), s.begin(), s.end())) found = true;
+ }
+
+ return found;
+ } else {
+ return !(std::find(star.begin(), star.end(), s) == star.end());
+ }
+ }
+
+#ifdef GUDHI_USE_TBB
+ // Functor for try_to_solve_inconsistencies_in_a_local_triangulation function
+ class Try_to_solve_inconsistencies_in_a_local_triangulation {
+ Tangential_complex &m_tc;
+ double m_max_perturb;
+ tbb::combinable<std::size_t> &m_num_inconsistencies;
+ tbb::combinable<std::vector<std::size_t> > &m_updated_points;
+
+ public:
+ // Constructor
+ Try_to_solve_inconsistencies_in_a_local_triangulation(Tangential_complex &tc, double max_perturb,
+ tbb::combinable<std::size_t> &num_inconsistencies,
+ tbb::combinable<std::vector<std::size_t> > &updated_points)
+ : m_tc(tc),
+ m_max_perturb(max_perturb),
+ m_num_inconsistencies(num_inconsistencies),
+ m_updated_points(updated_points) {}
+
+ // Constructor
+ Try_to_solve_inconsistencies_in_a_local_triangulation(
+ const Try_to_solve_inconsistencies_in_a_local_triangulation &tsilt)
+ : m_tc(tsilt.m_tc),
+ m_max_perturb(tsilt.m_max_perturb),
+ m_num_inconsistencies(tsilt.m_num_inconsistencies),
+ m_updated_points(tsilt.m_updated_points) {}
+
+ // operator()
+ void operator()(const tbb::blocked_range<size_t> &r) const {
+ for (size_t i = r.begin(); i != r.end(); ++i) {
+ m_num_inconsistencies.local() += m_tc.try_to_solve_inconsistencies_in_a_local_triangulation(
+ i, m_max_perturb, std::back_inserter(m_updated_points.local()));
+ }
+ }
+ };
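+
+  // Usage sketch (illustration only, not necessarily the library's actual call
+  // site): from a member function, a functor like the one above is typically
+  // driven by tbb::parallel_for over the range of local triangulations, with
+  // `max_perturb` standing for the perturbation bound:
+  //
+  //   tbb::combinable<std::size_t> num_inconsistencies;
+  //   tbb::combinable<std::vector<std::size_t> > updated_points;
+  //   tbb::parallel_for(
+  //       tbb::blocked_range<size_t>(0, m_triangulations.size()),
+  //       Try_to_solve_inconsistencies_in_a_local_triangulation(
+  //           *this, max_perturb, num_inconsistencies, updated_points));
+  //   std::size_t total = num_inconsistencies.combine(std::plus<std::size_t>());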
+#endif // GUDHI_USE_TBB
+
+ void perturb(std::size_t point_idx, double max_perturb) {
+ const Tr_traits &local_tr_traits = m_triangulations[point_idx].tr().geom_traits();
+ typename Tr_traits::Compute_coordinate_d coord = local_tr_traits.compute_coordinate_d_object();
+ typename K::Translated_point_d k_transl = m_k.translated_point_d_object();
+ typename K::Construct_vector_d k_constr_vec = m_k.construct_vector_d_object();
+ typename K::Scaled_vector_d k_scaled_vec = m_k.scaled_vector_d_object();
+
+ CGAL::Random_points_in_ball_d<Tr_bare_point> tr_point_in_ball_generator(
+ m_intrinsic_dim, m_random_generator.get_double(0., max_perturb));
+
+ Tr_point local_random_transl =
+ local_tr_traits.construct_weighted_point_d_object()(*tr_point_in_ball_generator++, 0);
+ Translation_for_perturb global_transl = k_constr_vec(m_ambient_dim);
+ const Tangent_space_basis &tsb = m_tangent_spaces[point_idx];
+ for (int i = 0; i < m_intrinsic_dim; ++i) {
+ global_transl = k_transl(global_transl, k_scaled_vec(tsb[i], coord(local_random_transl, i)));
+ }
+ // Parallel
+#if defined(GUDHI_USE_TBB)
+ m_p_perturb_mutexes[point_idx].lock();
+ m_translations[point_idx] = global_transl;
+ m_p_perturb_mutexes[point_idx].unlock();
+ // Sequential
+#else
+ m_translations[point_idx] = global_transl;
+#endif
+ }
+
+ // Return true if inconsistencies were found
+ template <typename OutputIt>
+ bool try_to_solve_inconsistencies_in_a_local_triangulation(
+ std::size_t tr_index, double max_perturb, OutputIt perturbed_pts_indices = CGAL::Emptyset_iterator()) {
+ bool is_inconsistent = false;
+
+ Star const &star = m_stars[tr_index];
+
+ // For each incident simplex
+ Star::const_iterator it_inc_simplex = star.begin();
+ Star::const_iterator it_inc_simplex_end = star.end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ const Incident_simplex &incident_simplex = *it_inc_simplex;
+
+ // Don't check infinite cells
+ if (is_infinite(incident_simplex)) continue;
+
+ Simplex c = incident_simplex;
+ c.insert(tr_index); // Add the missing index
+
+ // Perturb the center point
+ if (!is_simplex_consistent(c)) {
+ is_inconsistent = true;
+
+ std::size_t idx = tr_index;
+
+ perturb(tr_index, max_perturb);
+ *perturbed_pts_indices++ = idx;
+
+ // We will try the other cells next time
+ break;
+ }
+ }
+
+ return is_inconsistent;
+ }
+
+ // 1st line: number of points
+ // Then one point per line
+ std::ostream &export_point_set(std::ostream &os, bool use_perturbed_points = false,
+ const char *coord_separator = " ") const {
+ if (use_perturbed_points) {
+ std::vector<Point> perturbed_points;
+ perturbed_points.reserve(m_points.size());
+ for (std::size_t i = 0; i < m_points.size(); ++i) perturbed_points.push_back(compute_perturbed_point(i));
+
+ return export_point_set(m_k, perturbed_points, os, coord_separator);
+ } else {
+ return export_point_set(m_k, m_points, os, coord_separator);
+ }
+ }
+
+ template <typename ProjectionFunctor = CGAL::Identity<Point> >
+ std::ostream &export_vertices_to_off(std::ostream &os, std::size_t &num_vertices, bool use_perturbed_points = false,
+ ProjectionFunctor const &point_projection = ProjectionFunctor()) const {
+ if (m_points.empty()) {
+ num_vertices = 0;
+ return os;
+ }
+
+    // If m_intrinsic_dim = 1, we output each point twice
+    // so that each segment can be exported as a flat triangle with 3 different
+    // indices (otherwise, Meshlab detects degenerate simplices)
+ const int N = (m_intrinsic_dim == 1 ? 2 : 1);
+
+ // Kernel functors
+ typename K::Compute_coordinate_d coord = m_k.compute_coordinate_d_object();
+
+#ifdef GUDHI_TC_EXPORT_ALL_COORDS_IN_OFF
+ int num_coords = m_ambient_dim;
+#else
+ int num_coords = (std::min)(m_ambient_dim, 3);
+#endif
+
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ OS_container::const_iterator it_os = m_orth_spaces.begin();
+#endif
+ typename Points::const_iterator it_p = m_points.begin();
+ typename Points::const_iterator it_p_end = m_points.end();
+ // For each point p
+ for (std::size_t i = 0; it_p != it_p_end; ++it_p, ++i) {
+ Point p = point_projection(use_perturbed_points ? compute_perturbed_point(i) : *it_p);
+ for (int ii = 0; ii < N; ++ii) {
+ int j = 0;
+ for (; j < num_coords; ++j) os << CGAL::to_double(coord(p, j)) << " ";
+ if (j == 2) os << "0";
+
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ for (j = 0; j < num_coords; ++j) os << " " << CGAL::to_double(coord(*it_os->begin(), j));
+#endif
+ os << "\n";
+ }
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ ++it_os;
+#endif
+ }
+
+ num_vertices = N * m_points.size();
+ return os;
+ }
+
+ std::ostream &export_simplices_to_off(std::ostream &os, std::size_t &num_OFF_simplices,
+ bool color_inconsistencies = false,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) const {
+ // If m_intrinsic_dim = 1, each point is output two times
+ // (see export_vertices_to_off)
+ num_OFF_simplices = 0;
+ std::size_t num_maximal_simplices = 0;
+ std::size_t num_inconsistent_maximal_simplices = 0;
+ std::size_t num_inconsistent_stars = 0;
+ typename Tr_container::const_iterator it_tr = m_triangulations.begin();
+ typename Tr_container::const_iterator it_tr_end = m_triangulations.end();
+ // For each triangulation
+ for (std::size_t idx = 0; it_tr != it_tr_end; ++it_tr, ++idx) {
+ bool is_star_inconsistent = false;
+
+ Triangulation const &tr = it_tr->tr();
+
+ if (tr.current_dimension() < m_intrinsic_dim) continue;
+
+ // Color for this star
+ std::stringstream color;
+ // color << rand()%256 << " " << 100+rand()%156 << " " << 100+rand()%156;
+ color << 128 << " " << 128 << " " << 128;
+
+ // Gather the triangles here, with an int telling its color
+ typedef std::vector<std::pair<Simplex, int> > Star_using_triangles;
+ Star_using_triangles star_using_triangles;
+
+ // For each cell of the star
+ Star::const_iterator it_inc_simplex = m_stars[idx].begin();
+ Star::const_iterator it_inc_simplex_end = m_stars[idx].end();
+ for (; it_inc_simplex != it_inc_simplex_end; ++it_inc_simplex) {
+ Simplex c = *it_inc_simplex;
+ c.insert(idx);
+ std::size_t num_vertices = c.size();
+ ++num_maximal_simplices;
+
+ int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue
+ if (color_inconsistencies && !is_simplex_consistent(c)) {
+ ++num_inconsistent_maximal_simplices;
+ color_simplex = 0;
+ is_star_inconsistent = true;
+ } else {
+ if (p_simpl_to_color_in_red && std::find(p_simpl_to_color_in_red->begin(), p_simpl_to_color_in_red->end(),
+ c) != p_simpl_to_color_in_red->end()) {
+ color_simplex = 1;
+ } else if (p_simpl_to_color_in_green &&
+ std::find(p_simpl_to_color_in_green->begin(), p_simpl_to_color_in_green->end(), c) !=
+ p_simpl_to_color_in_green->end()) {
+ color_simplex = 2;
+ } else if (p_simpl_to_color_in_blue &&
+ std::find(p_simpl_to_color_in_blue->begin(), p_simpl_to_color_in_blue->end(), c) !=
+ p_simpl_to_color_in_blue->end()) {
+ color_simplex = 3;
+ }
+ }
+
+        // If m_intrinsic_dim = 1, each point is output twice,
+        // so we need to multiply each index by 2.
+        // And if the simplex has only 2 vertices, add a third one (each vertex
+        // is duplicated in the file when m_intrinsic_dim = 1)
+ if (m_intrinsic_dim == 1) {
+ Simplex tmp_c;
+ Simplex::iterator it = c.begin();
+ for (; it != c.end(); ++it) tmp_c.insert(*it * 2);
+ if (num_vertices == 2) tmp_c.insert(*tmp_c.rbegin() + 1);
+
+ c = tmp_c;
+ }
+
+ if (num_vertices <= 3) {
+ star_using_triangles.push_back(std::make_pair(c, color_simplex));
+ } else {
+          // num_vertices >= 4: decompose the simplex into triangles
+ std::vector<bool> booleans(num_vertices, false);
+ std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true);
+ do {
+ Simplex triangle;
+ Simplex::iterator it = c.begin();
+ for (int i = 0; it != c.end(); ++i, ++it) {
+ if (booleans[i]) triangle.insert(*it);
+ }
+ star_using_triangles.push_back(std::make_pair(triangle, color_simplex));
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
+ }
+ }
+
+ // For each cell
+ Star_using_triangles::const_iterator it_simplex = star_using_triangles.begin();
+ Star_using_triangles::const_iterator it_simplex_end = star_using_triangles.end();
+ for (; it_simplex != it_simplex_end; ++it_simplex) {
+ const Simplex &c = it_simplex->first;
+
+ // Don't export infinite cells
+ if (is_infinite(c)) continue;
+
+ int color_simplex = it_simplex->second;
+
+ std::stringstream sstr_c;
+
+ Simplex::const_iterator it_point_idx = c.begin();
+ for (; it_point_idx != c.end(); ++it_point_idx) {
+ sstr_c << *it_point_idx << " ";
+ }
+
+ os << 3 << " " << sstr_c.str();
+ if (color_inconsistencies || p_simpl_to_color_in_red || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
+ switch (color_simplex) {
+ case 0:
+ os << " 255 255 0";
+ break;
+ case 1:
+ os << " 255 0 0";
+ break;
+ case 2:
+ os << " 0 255 0";
+ break;
+ case 3:
+ os << " 0 0 255";
+ break;
+ default:
+ os << " " << color.str();
+ break;
+ }
+ }
+ ++num_OFF_simplices;
+ os << "\n";
+ }
+ if (is_star_inconsistent) ++num_inconsistent_stars;
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << "\n==========================================================\n"
+ << "Export from list of stars to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices << "\n";
+ if (color_inconsistencies) {
+ std::cerr << " * Number of inconsistent stars: " << num_inconsistent_stars << " ("
+ << (m_points.size() > 0 ? 100. * num_inconsistent_stars / m_points.size() : 0.) << "%)\n"
+ << " * Number of inconsistent maximal simplices: " << num_inconsistent_maximal_simplices << " ("
+ << (num_maximal_simplices > 0 ? 100. * num_inconsistent_maximal_simplices / num_maximal_simplices : 0.)
+ << "%)\n";
+ }
+ std::cerr << "==========================================================\n";
+#endif
+
+ return os;
+ }
+
+ public:
+ std::ostream &export_simplices_to_off(const Simplicial_complex &complex, std::ostream &os,
+ std::size_t &num_OFF_simplices,
+ Simplex_set const *p_simpl_to_color_in_red = NULL,
+ Simplex_set const *p_simpl_to_color_in_green = NULL,
+ Simplex_set const *p_simpl_to_color_in_blue = NULL) const {
+ typedef Simplicial_complex::Simplex Simplex;
+ typedef Simplicial_complex::Simplex_set Simplex_set;
+
+ // If m_intrinsic_dim = 1, each point is output two times
+ // (see export_vertices_to_off)
+ num_OFF_simplices = 0;
+ std::size_t num_maximal_simplices = 0;
+
+ typename Simplex_set::const_iterator it_s = complex.simplex_range().begin();
+ typename Simplex_set::const_iterator it_s_end = complex.simplex_range().end();
+ // For each simplex
+ for (; it_s != it_s_end; ++it_s) {
+ Simplex c = *it_s;
+ ++num_maximal_simplices;
+
+ int color_simplex = -1; // -1=no color, 0=yellow, 1=red, 2=green, 3=blue
+ if (p_simpl_to_color_in_red && std::find(p_simpl_to_color_in_red->begin(), p_simpl_to_color_in_red->end(), c) !=
+ p_simpl_to_color_in_red->end()) {
+ color_simplex = 1;
+ } else if (p_simpl_to_color_in_green &&
+ std::find(p_simpl_to_color_in_green->begin(), p_simpl_to_color_in_green->end(), c) !=
+ p_simpl_to_color_in_green->end()) {
+ color_simplex = 2;
+ } else if (p_simpl_to_color_in_blue &&
+ std::find(p_simpl_to_color_in_blue->begin(), p_simpl_to_color_in_blue->end(), c) !=
+ p_simpl_to_color_in_blue->end()) {
+ color_simplex = 3;
+ }
+
+ // Gather the triangles here
+ typedef std::vector<Simplex> Triangles;
+ Triangles triangles;
+
+ int num_vertices = static_cast<int>(c.size());
+ // Do not export smaller dimension simplices
+ if (num_vertices < m_intrinsic_dim + 1) continue;
+
+      // If m_intrinsic_dim = 1, each point is output twice,
+      // so we need to multiply each index by 2.
+      // And if the simplex has only 2 vertices, add a third one (each vertex
+      // is duplicated in the file when m_intrinsic_dim = 1)
+ if (m_intrinsic_dim == 1) {
+ Simplex tmp_c;
+ Simplex::iterator it = c.begin();
+ for (; it != c.end(); ++it) tmp_c.insert(*it * 2);
+ if (num_vertices == 2) tmp_c.insert(*tmp_c.rbegin() + 1);
+
+ c = tmp_c;
+ }
+
+ if (num_vertices <= 3) {
+ triangles.push_back(c);
+ } else {
+        // num_vertices >= 4: decompose the simplex into triangles
+ std::vector<bool> booleans(num_vertices, false);
+ std::fill(booleans.begin() + num_vertices - 3, booleans.end(), true);
+ do {
+ Simplex triangle;
+ Simplex::iterator it = c.begin();
+ for (int i = 0; it != c.end(); ++i, ++it) {
+ if (booleans[i]) triangle.insert(*it);
+ }
+ triangles.push_back(triangle);
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
+ }
+
+ // For each cell
+ Triangles::const_iterator it_tri = triangles.begin();
+ Triangles::const_iterator it_tri_end = triangles.end();
+ for (; it_tri != it_tri_end; ++it_tri) {
+ // Don't export infinite cells
+ if (is_infinite(*it_tri)) continue;
+
+ os << 3 << " ";
+ Simplex::const_iterator it_point_idx = it_tri->begin();
+ for (; it_point_idx != it_tri->end(); ++it_point_idx) {
+ os << *it_point_idx << " ";
+ }
+
+ if (p_simpl_to_color_in_red || p_simpl_to_color_in_green || p_simpl_to_color_in_blue) {
+ switch (color_simplex) {
+ case 0:
+ os << " 255 255 0";
+ break;
+ case 1:
+ os << " 255 0 0";
+ break;
+ case 2:
+ os << " 0 255 0";
+ break;
+ case 3:
+ os << " 0 0 255";
+ break;
+ default:
+ os << " 128 128 128";
+ break;
+ }
+ }
+
+ ++num_OFF_simplices;
+ os << "\n";
+ }
+ }
+
+#ifdef DEBUG_TRACES
+ std::cerr << "\n==========================================================\n"
+ << "Export from complex to OFF:\n"
+ << " * Number of vertices: " << m_points.size() << "\n"
+ << " * Total number of maximal simplices: " << num_maximal_simplices << "\n"
+ << "==========================================================\n";
+#endif
+
+ return os;
+ }
+
+ /** \brief Sets the maximal possible squared edge length for the edges in the triangulations.
+ *
+ * @param[in] max_squared_edge_length Maximal possible squared edge length.
+ *
+ * If the maximal edge length value is too low `Tangential_complex::compute_tangential_complex` will throw an
+ * exception in debug mode.
+ */
+ void set_max_squared_edge_length(FT max_squared_edge_length) { m_max_squared_edge_length = max_squared_edge_length; }
+
+ private:
+ const K m_k;
+ const int m_intrinsic_dim;
+ const int m_ambient_dim;
+
+ Points m_points;
+ Weights m_weights;
+#ifdef GUDHI_TC_PERTURB_POSITION
+ Translations_for_perturb m_translations;
+#if defined(GUDHI_USE_TBB)
+ Mutex_for_perturb *m_p_perturb_mutexes;
+#endif
+#endif
+
+ Points_ds m_points_ds;
+ double m_last_max_perturb;
+ std::vector<bool> m_are_tangent_spaces_computed;
+ TS_container m_tangent_spaces;
+#ifdef GUDHI_TC_EXPORT_NORMALS
+ OS_container m_orth_spaces;
+#endif
+ Tr_container m_triangulations; // Contains the triangulations
+ // and their center vertex
+ Stars_container m_stars;
+ std::vector<FT> m_squared_star_spheres_radii_incl_margin;
+ boost::optional<FT> m_max_squared_edge_length;
+
+#ifdef GUDHI_TC_USE_ANOTHER_POINT_SET_FOR_TANGENT_SPACE_ESTIM
+ Points m_points_for_tse;
+ Points_ds m_points_ds_for_tse;
+#endif
+
+ mutable CGAL::Random m_random_generator;
+};  // end class Tangential_complex
+
+} // end namespace tangential_complex
+} // end namespace Gudhi
+
+#endif // TANGENTIAL_COMPLEX_H_
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h
new file mode 100644
index 00000000..4881bdd5
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex/Simplicial_complex.h
@@ -0,0 +1,527 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_
+#define TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_
+
+#include <gudhi/Tangential_complex/config.h>
+#include <gudhi/Tangential_complex/utilities.h>
+#include <gudhi/Debug_utils.h>
+#include <gudhi/console_color.h>
+
+#include <CGAL/iterator.h>
+
+// For is_pure_pseudomanifold
+#include <boost/graph/graph_traits.hpp>
+#include <boost/graph/adjacency_list.hpp>
+#include <boost/graph/connected_components.hpp>
+#include <boost/container/flat_set.hpp>
+
+#include <algorithm>
+#include <string>
+#include <fstream>
+#include <map> // for map<>
+#include <vector> // for vector<>
+#include <set> // for set<>
+
+namespace Gudhi {
+namespace tangential_complex {
+namespace internal {
+
+class Simplicial_complex {
+ public:
+ typedef boost::container::flat_set<std::size_t> Simplex;
+ typedef std::set<Simplex> Simplex_set;
+
+ // If perform_checks = true, the function:
+ // - won't insert the simplex if it is already in a higher dim simplex
+ // - will erase any lower-dim simplices that are faces of the new simplex
+ // Returns true if the simplex was added
+  bool add_simplex(const Simplex &s, bool perform_checks = true) {
+    if (perform_checks) {
+      unsigned int num_pts = static_cast<unsigned int> (s.size());
+ std::vector<Complex::iterator> to_erase;
+ bool check_higher_dim_simpl = true;
+ for (Complex::iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ // Check if the simplex is not already in a higher dim simplex
+ if (check_higher_dim_simpl
+ && it_simplex->size() > num_pts
+ && std::includes(it_simplex->begin(), it_simplex->end(),
+ s.begin(), s.end())) {
+ // No need to insert it, then
+ return false;
+ }
+ // Check if the simplex includes some lower-dim simplices
+ if (it_simplex->size() < num_pts
+ && std::includes(s.begin(), s.end(),
+ it_simplex->begin(), it_simplex->end())) {
+ to_erase.push_back(it_simplex);
+        // We don't need to check higher-dim simplices any more
+ check_higher_dim_simpl = false;
+ }
+ }
+ for (std::vector<Complex::iterator>::const_iterator it = to_erase.begin();
+ it != to_erase.end(); ++it) {
+ m_complex.erase(*it);
+ }
+ }
+ return m_complex.insert(s).second;
+ }
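+
+  // Usage sketch (illustration only): with perform_checks == true, only
+  // maximal simplices are kept.
+  //
+  //   Simplicial_complex sc;
+  //   sc.add_simplex({0, 1, 2});  // inserted, kept as a maximal 2-simplex
+  //   sc.add_simplex({0, 1});     // returns false: already a face of {0,1,2}
+  //   sc.add_simplex({1, 2, 3});  // inserted: not a face of an existing simplex
+  //   // sc.simplex_range() now contains exactly {0,1,2} and {1,2,3}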
+
+ const Simplex_set &simplex_range() const {
+ return m_complex;
+ }
+
+ bool empty() {
+ return m_complex.empty();
+ }
+
+ void clear() {
+ m_complex.clear();
+ }
+
+ template <typename Test, typename Output_it>
+ void get_simplices_matching_test(Test test, Output_it out) {
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (test(*it_simplex))
+ *out++ = *it_simplex;
+ }
+ }
+
+ // When a simplex S has only one co-face C, we can remove S and C
+ // without changing the topology
+
+ void collapse(int max_simplex_dim, bool quiet = false) {
+#ifdef DEBUG_TRACES
+ if (!quiet)
+ std::cerr << "Collapsing... ";
+#endif
+    // Let k = max_simplex_dim - 1
+ int k = max_simplex_dim - 1;
+
+ typedef Complex::iterator Simplex_iterator;
+ typedef std::vector<Simplex_iterator> Simplex_iterator_list;
+ typedef std::map<Simplex, Simplex_iterator_list> Cofaces_map;
+
+ std::size_t num_collapsed_maximal_simplices = 0;
+ do {
+ num_collapsed_maximal_simplices = 0;
+      // Create a map associating each non-maximal k-face with the list of its
+      // maximal cofaces
+ Cofaces_map cofaces_map;
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (static_cast<int> (it_simplex->size()) > k + 1) {
+ std::vector<Simplex> k_faces;
+ // Get the k-faces composing the simplex
+ combinations(*it_simplex, k + 1, std::back_inserter(k_faces));
+ for (const auto &comb : k_faces)
+ cofaces_map[comb].push_back(it_simplex);
+ }
+ }
+
+ // For each non-maximal k-face F, if F has only one maximal coface Cf:
+ // - Look for the other k-faces F2, F3... of Cf in the map and:
+      //   * if the list contains only Cf, clear the list (we don't remove the
+      //     list since that causes trouble with the iterators) and add the F2,
+ // F3... to the complex
+ // * otherwise, remove Cf from the associated list
+ // - Remove Cf from the complex
+ for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(),
+ it_map_end = cofaces_map.end();
+ it_map_elt != it_map_end;
+ ++it_map_elt) {
+ if (it_map_elt->second.size() == 1) {
+ std::vector<Simplex> k_faces;
+ const Simplex_iterator_list::value_type &it_Cf =
+ *it_map_elt->second.begin();
+ GUDHI_CHECK(it_Cf->size() == max_simplex_dim + 1,
+ std::logic_error("Wrong dimension"));
+ // Get the k-faces composing the simplex
+ combinations(*it_Cf, k + 1, std::back_inserter(k_faces));
+ for (const auto &f2 : k_faces) {
+ // Skip F
+ if (f2 != it_map_elt->first) {
+ Cofaces_map::iterator it_comb_in_map = cofaces_map.find(f2);
+ if (it_comb_in_map->second.size() == 1) {
+ it_comb_in_map->second.clear();
+ m_complex.insert(f2);
+ } else { // it_comb_in_map->second.size() > 1
+ Simplex_iterator_list::iterator it = std::find(it_comb_in_map->second.begin(),
+ it_comb_in_map->second.end(),
+ it_Cf);
+ GUDHI_CHECK(it != it_comb_in_map->second.end(),
+ std::logic_error("Error: it == it_comb_in_map->second.end()"));
+ it_comb_in_map->second.erase(it);
+ }
+ }
+ }
+ m_complex.erase(it_Cf);
+ ++num_collapsed_maximal_simplices;
+ }
+ }
+ // Repeat until no maximal simplex got removed
+ } while (num_collapsed_maximal_simplices > 0);
+
+ // Collapse the lower dimension simplices
+ if (k > 0)
+ collapse(max_simplex_dim - 1, true);
+
+#ifdef DEBUG_TRACES
+ if (!quiet)
+ std::cerr << "done.\n";
+#endif
+ }
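+
+  // Illustration: a complex consisting of the single filled triangle {0,1,2}
+  // collapses down to one vertex, since each of its edges has exactly one
+  // coface.
+  //
+  //   Simplicial_complex sc;
+  //   sc.add_simplex({0, 1, 2});
+  //   sc.collapse(2);  // the complex is now reduced to a single vertex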
+
+ void display_stats() const {
+ std::cerr << yellow << "Complex stats:\n" << white;
+
+ if (m_complex.empty()) {
+ std::cerr << " * No simplices.\n";
+ } else {
+ // Number of simplex for each dimension
+ std::map<int, std::size_t> simplex_stats;
+
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ ++simplex_stats[static_cast<int> (it_simplex->size()) - 1];
+ }
+
+ for (std::map<int, std::size_t>::const_iterator it_map = simplex_stats.begin();
+ it_map != simplex_stats.end(); ++it_map) {
+ std::cerr << " * " << it_map->first << "-simplices: "
+ << it_map->second << "\n";
+ }
+ }
+ }
+
+ // verbose_level = 0, 1 or 2
+ bool is_pure_pseudomanifold__do_not_check_if_stars_are_connected(int simplex_dim,
+ bool allow_borders = false,
+ bool exit_at_the_first_problem = false,
+ int verbose_level = 0,
+ std::size_t *p_num_wrong_dim_simplices = NULL,
+ std::size_t *p_num_wrong_number_of_cofaces = NULL) const {
+ typedef Simplex K_1_face;
+ typedef std::map<K_1_face, std::size_t> Cofaces_map;
+
+ std::size_t num_wrong_dim_simplices = 0;
+ std::size_t num_wrong_number_of_cofaces = 0;
+
+ // Counts the number of cofaces of each K_1_face
+
+    // Create a map counting, for each (k-1)-face, the number of its
+    // maximal cofaces
+ Cofaces_map cofaces_map;
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (static_cast<int> (it_simplex->size()) != simplex_dim + 1) {
+ if (verbose_level >= 2)
+ std::cerr << "Found a simplex with dim = "
+ << it_simplex->size() - 1 << "\n";
+ ++num_wrong_dim_simplices;
+ } else {
+ std::vector<K_1_face> k_1_faces;
+ // Get the facets composing the simplex
+ combinations(
+ *it_simplex, simplex_dim, std::back_inserter(k_1_faces));
+ for (const auto &k_1_face : k_1_faces) {
+ ++cofaces_map[k_1_face];
+ }
+ }
+ }
+
+ for (Cofaces_map::const_iterator it_map_elt = cofaces_map.begin(),
+ it_map_end = cofaces_map.end();
+ it_map_elt != it_map_end;
+ ++it_map_elt) {
+ if (it_map_elt->second != 2
+ && (!allow_borders || it_map_elt->second != 1)) {
+ if (verbose_level >= 2)
+ std::cerr << "Found a k-1-face with "
+ << it_map_elt->second << " cofaces\n";
+
+ if (exit_at_the_first_problem)
+ return false;
+ else
+ ++num_wrong_number_of_cofaces;
+ }
+ }
+
+ bool ret = num_wrong_dim_simplices == 0 && num_wrong_number_of_cofaces == 0;
+
+ if (verbose_level >= 1) {
+ std::cerr << "Pure pseudo-manifold: ";
+ if (ret) {
+ std::cerr << green << "YES" << white << "\n";
+ } else {
+ std::cerr << red << "NO" << white << "\n"
+ << " * Number of wrong dimension simplices: "
+ << num_wrong_dim_simplices << "\n"
+ << " * Number of wrong number of cofaces: "
+ << num_wrong_number_of_cofaces << "\n";
+ }
+ }
+
+ if (p_num_wrong_dim_simplices)
+ *p_num_wrong_dim_simplices = num_wrong_dim_simplices;
+ if (p_num_wrong_number_of_cofaces)
+ *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces;
+
+ return ret;
+ }
+
+ template <int K>
+ std::size_t num_K_simplices() const {
+ Simplex_set k_simplices;
+
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (it_simplex->size() == K + 1) {
+ k_simplices.insert(*it_simplex);
+ } else if (it_simplex->size() > K + 1) {
+ // Get the k-faces composing the simplex
+ combinations(
+ *it_simplex, K + 1, std::inserter(k_simplices, k_simplices.begin()));
+ }
+ }
+
+ return k_simplices.size();
+ }
+
+ std::ptrdiff_t euler_characteristic(bool verbose = false) const {
+ if (verbose)
+ std::cerr << "\nComputing Euler characteristic of the complex...\n";
+
+ std::size_t num_vertices = num_K_simplices<0>();
+ std::size_t num_edges = num_K_simplices<1>();
+ std::size_t num_triangles = num_K_simplices<2>();
+
+ std::ptrdiff_t ec =
+ (std::ptrdiff_t) num_vertices
+ - (std::ptrdiff_t) num_edges
+ + (std::ptrdiff_t) num_triangles;
+
+ if (verbose)
+ std::cerr << "Euler characteristic: V - E + F = "
+ << num_vertices << " - " << num_edges << " + " << num_triangles << " = "
+ << blue
+ << ec
+ << white << "\n";
+
+ return ec;
+ }
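+
+  // Example: for the boundary of a tetrahedron (4 vertices, 6 edges,
+  // 4 triangles) this returns 4 - 6 + 4 = 2, the Euler characteristic of a
+  // 2-sphere.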
+
+ // TODO(CJ): ADD COMMENTS
+
+ bool is_pure_pseudomanifold(
+ int simplex_dim,
+ std::size_t num_vertices,
+ bool allow_borders = false,
+ bool exit_at_the_first_problem = false,
+ int verbose_level = 0,
+ std::size_t *p_num_wrong_dim_simplices = NULL,
+ std::size_t *p_num_wrong_number_of_cofaces = NULL,
+ std::size_t *p_num_unconnected_stars = NULL,
+ Simplex_set *p_wrong_dim_simplices = NULL,
+ Simplex_set *p_wrong_number_of_cofaces_simplices = NULL,
+ Simplex_set *p_unconnected_stars_simplices = NULL) const {
+ // If simplex_dim == 1, we do not need to check if stars are connected
+ if (simplex_dim == 1) {
+ if (p_num_unconnected_stars)
+ *p_num_unconnected_stars = 0;
+ return is_pure_pseudomanifold__do_not_check_if_stars_are_connected(simplex_dim,
+ allow_borders,
+ exit_at_the_first_problem,
+ verbose_level,
+ p_num_wrong_dim_simplices,
+ p_num_wrong_number_of_cofaces);
+ }
+ // Associates each vertex (= the index in the vector)
+ // to its star (list of simplices)
+ typedef std::vector<std::vector<Complex::const_iterator> > Stars;
+ std::size_t num_wrong_dim_simplices = 0;
+ std::size_t num_wrong_number_of_cofaces = 0;
+ std::size_t num_unconnected_stars = 0;
+
+ // Fills a Stars data structure
+ Stars stars;
+ stars.resize(num_vertices);
+ for (Complex::const_iterator it_simplex = m_complex.begin(),
+ it_simplex_end = m_complex.end();
+ it_simplex != it_simplex_end;
+ ++it_simplex) {
+ if (static_cast<int> (it_simplex->size()) != simplex_dim + 1) {
+ if (verbose_level >= 2)
+ std::cerr << "Found a simplex with dim = "
+ << it_simplex->size() - 1 << "\n";
+ ++num_wrong_dim_simplices;
+ if (p_wrong_dim_simplices)
+ p_wrong_dim_simplices->insert(*it_simplex);
+ } else {
+ for (Simplex::const_iterator it_point_idx = it_simplex->begin();
+ it_point_idx != it_simplex->end();
+ ++it_point_idx) {
+ stars[*it_point_idx].push_back(it_simplex);
+ }
+ }
+ }
+
+ // Now, for each star, we have a vector of its d-simplices
+ // i.e. one index for each d-simplex
+ // Boost Graph only deals with indexes, so we also need indexes for the
+ // (d-1)-simplices
+ std::size_t center_vertex_index = 0;
+ for (Stars::const_iterator it_star = stars.begin();
+ it_star != stars.end();
+ ++it_star, ++center_vertex_index) {
+ typedef std::map<Simplex, std::vector<std::size_t> >
+ Dm1_faces_to_adj_D_faces;
+ Dm1_faces_to_adj_D_faces dm1_faces_to_adj_d_faces;
+
+ for (std::size_t i_dsimpl = 0; i_dsimpl < it_star->size(); ++i_dsimpl) {
+ Simplex dm1_simpl_of_link = *((*it_star)[i_dsimpl]);
+ dm1_simpl_of_link.erase(center_vertex_index);
+ // Copy it to a vector so that we can use operator[] on it
+ std::vector<std::size_t> dm1_simpl_of_link_vec(
+ dm1_simpl_of_link.begin(), dm1_simpl_of_link.end());
+
+ CGAL::Combination_enumerator<int> dm2_simplices(
+ simplex_dim - 1, 0, simplex_dim);
+ for (; !dm2_simplices.finished(); ++dm2_simplices) {
+ Simplex dm2_simpl;
+ for (int j = 0; j < simplex_dim - 1; ++j)
+ dm2_simpl.insert(dm1_simpl_of_link_vec[dm2_simplices[j]]);
+ dm1_faces_to_adj_d_faces[dm2_simpl].push_back(i_dsimpl);
+ }
+ }
+
+ Adj_graph adj_graph;
+ std::vector<Graph_vertex> d_faces_descriptors;
+ d_faces_descriptors.resize(it_star->size());
+ for (std::size_t j = 0; j < it_star->size(); ++j)
+ d_faces_descriptors[j] = boost::add_vertex(adj_graph);
+
+ Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it =
+ dm1_faces_to_adj_d_faces.begin();
+ Dm1_faces_to_adj_D_faces::const_iterator dm1_to_d_it_end =
+ dm1_faces_to_adj_d_faces.end();
+ for (std::size_t i_km1_face = 0;
+ dm1_to_d_it != dm1_to_d_it_end;
+ ++dm1_to_d_it, ++i_km1_face) {
+ Graph_vertex km1_gv = boost::add_vertex(adj_graph);
+
+ for (std::vector<std::size_t>::const_iterator kface_it =
+ dm1_to_d_it->second.begin();
+ kface_it != dm1_to_d_it->second.end();
+ ++kface_it) {
+ boost::add_edge(km1_gv, *kface_it, adj_graph);
+ }
+
+ if (dm1_to_d_it->second.size() != 2
+ && (!allow_borders || dm1_to_d_it->second.size() != 1)) {
+ ++num_wrong_number_of_cofaces;
+ if (p_wrong_number_of_cofaces_simplices) {
+ for (auto idx : dm1_to_d_it->second)
+ p_wrong_number_of_cofaces_simplices->insert(*((*it_star)[idx]));
+ }
+ }
+ }
+
+      // What is left is to check connectivity
+ bool is_connected = true;
+ if (boost::num_vertices(adj_graph) > 0) {
+ std::vector<int> components(boost::num_vertices(adj_graph));
+ is_connected =
+ (boost::connected_components(adj_graph, &components[0]) == 1);
+ }
+
+ if (!is_connected) {
+ if (verbose_level >= 2)
+ std::cerr << "Error: star #" << center_vertex_index
+ << " is not connected\n";
+ ++num_unconnected_stars;
+ if (p_unconnected_stars_simplices) {
+ for (std::vector<Complex::const_iterator>::const_iterator
+ it_simpl = it_star->begin(),
+ it_simpl_end = it_star->end();
+ it_simpl != it_simpl_end;
+ ++it_simpl) {
+ p_unconnected_stars_simplices->insert(**it_simpl);
+ }
+ }
+ }
+ }
+
+ // Each one has been counted several times ("simplex_dim" times)
+ num_wrong_number_of_cofaces /= simplex_dim;
+
+ bool ret =
+ num_wrong_dim_simplices == 0
+ && num_wrong_number_of_cofaces == 0
+ && num_unconnected_stars == 0;
+
+ if (verbose_level >= 1) {
+ std::cerr << "Pure pseudo-manifold: ";
+ if (ret) {
+ std::cerr << green << "YES" << white << "\n";
+ } else {
+ std::cerr << red << "NO" << white << "\n"
+ << " * Number of wrong dimension simplices: "
+ << num_wrong_dim_simplices << "\n"
+ << " * Number of wrong number of cofaces: "
+ << num_wrong_number_of_cofaces << "\n"
+ << " * Number of not-connected stars: "
+ << num_unconnected_stars << "\n";
+ }
+ }
+
+ if (p_num_wrong_dim_simplices)
+ *p_num_wrong_dim_simplices = num_wrong_dim_simplices;
+ if (p_num_wrong_number_of_cofaces)
+ *p_num_wrong_number_of_cofaces = num_wrong_number_of_cofaces;
+ if (p_num_unconnected_stars)
+ *p_num_unconnected_stars = num_unconnected_stars;
+
+ return ret;
+ }
+
+ private:
+ typedef Simplex_set Complex;
+
+ // graph is an adjacency list
+ typedef boost::adjacency_list<boost::vecS, boost::vecS, boost::undirectedS> Adj_graph;
+  // Vertex and edge descriptors of the adjacency graph
+ typedef boost::graph_traits<Adj_graph>::vertex_descriptor Graph_vertex;
+ typedef boost::graph_traits<Adj_graph>::edge_descriptor Graph_edge;
+
+ Complex m_complex;
+}; // class Simplicial_complex
+
+} // namespace internal
+} // namespace tangential_complex
+} // namespace Gudhi
+
+#endif // TANGENTIAL_COMPLEX_SIMPLICIAL_COMPLEX_H_
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/config.h b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h
new file mode 100644
index 00000000..352531da
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex/config.h
@@ -0,0 +1,31 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef TANGENTIAL_COMPLEX_CONFIG_H_
+#define TANGENTIAL_COMPLEX_CONFIG_H_
+
+#include <cstddef>
+
+// ========================= Debugging & profiling =============================
+// #define GUDHI_TC_PROFILING
+// #define GUDHI_TC_VERY_VERBOSE
+// #define GUDHI_TC_PERFORM_EXTRA_CHECKS
+// #define GUDHI_TC_SHOW_DETAILED_STATS_FOR_INCONSISTENCIES
+
+// ========================= Strategy ==========================================
+#define GUDHI_TC_PERTURB_POSITION
+// #define GUDHI_TC_PERTURB_WEIGHT
+
+// ========================= Parameters ========================================
+
+// PCA will use GUDHI_TC_BASE_VALUE_FOR_PCA^intrinsic_dim points
+const std::size_t GUDHI_TC_BASE_VALUE_FOR_PCA = 5;
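+// For example, with an intrinsic dimension of 2 each tangent space is estimated
+// from at most 5^2 = 25 nearest neighbors, and with dimension 3 from at most
+// 5^3 = 125 (always capped by the total number of points).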
+
+#endif // TANGENTIAL_COMPLEX_CONFIG_H_
diff --git a/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h
new file mode 100644
index 00000000..ee6ed9ba
--- /dev/null
+++ b/src/Tangential_complex/include/gudhi/Tangential_complex/utilities.h
@@ -0,0 +1,183 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#ifndef TANGENTIAL_COMPLEX_UTILITIES_H_
+#define TANGENTIAL_COMPLEX_UTILITIES_H_
+
+#include <CGAL/Dimension.h>
+#include <CGAL/Combination_enumerator.h>
+#include <CGAL/IO/Triangulation_off_ostream.h>
+
+#include <boost/container/flat_set.hpp>
+
+#include <Eigen/Core>
+#include <Eigen/Eigen>
+
+#include <set>
+#include <vector>
+#include <array>
+#include <fstream>
+#include <atomic>
+#include <cmath> // for std::sqrt
+
+namespace Gudhi {
+namespace tangential_complex {
+namespace internal {
+
+// Provides copy constructors to std::atomic so that
+// it can be used in a vector
+template <typename T>
+struct Atomic_wrapper
+: public std::atomic<T> {
+ typedef std::atomic<T> Base;
+
+ Atomic_wrapper() { }
+
+ Atomic_wrapper(const T &t) : Base(t) { }
+
+ Atomic_wrapper(const std::atomic<T> &a) : Base(a.load()) { }
+
+ Atomic_wrapper(const Atomic_wrapper &other) : Base(other.load()) { }
+
+ Atomic_wrapper &operator=(const T &other) {
+ Base::store(other);
+ return *this;
+ }
+
+ Atomic_wrapper &operator=(const std::atomic<T> &other) {
+ Base::store(other.load());
+ return *this;
+ }
+
+ Atomic_wrapper &operator=(const Atomic_wrapper &other) {
+ Base::store(other.load());
+ return *this;
+ }
+};
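+
+// Usage sketch (illustration only): std::atomic<T> is neither copyable nor
+// movable, so a plain std::vector<std::atomic<bool> > cannot be resized or
+// fill-constructed. The wrapper restores copyability:
+//
+//   std::vector<Atomic_wrapper<bool> > flags(num_points, false);
+//   flags[i] = true;  // atomic store through operator=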
+
+// Modifies v in-place
+template <typename K>
+typename K::Vector_d& normalize_vector(typename K::Vector_d& v,
+ K const& k) {
+ v = k.scaled_vector_d_object()(
+ v, typename K::FT(1) / std::sqrt(k.squared_length_d_object()(v)));
+ return v;
+}
+
+template<typename Kernel>
+struct Basis {
+ typedef typename Kernel::FT FT;
+ typedef typename Kernel::Point_d Point;
+ typedef typename Kernel::Vector_d Vector;
+ typedef typename std::vector<Vector>::const_iterator const_iterator;
+
+ std::size_t m_origin;
+ std::vector<Vector> m_vectors;
+
+ std::size_t origin() const {
+ return m_origin;
+ }
+
+ void set_origin(std::size_t o) {
+ m_origin = o;
+ }
+
+ const_iterator begin() const {
+ return m_vectors.begin();
+ }
+
+ const_iterator end() const {
+ return m_vectors.end();
+ }
+
+ std::size_t size() const {
+ return m_vectors.size();
+ }
+
+ Vector& operator[](const std::size_t i) {
+ return m_vectors[i];
+ }
+
+ const Vector& operator[](const std::size_t i) const {
+ return m_vectors[i];
+ }
+
+ void push_back(const Vector& v) {
+ m_vectors.push_back(v);
+ }
+
+ void reserve(const std::size_t s) {
+ m_vectors.reserve(s);
+ }
+
+ Basis() { }
+
+ Basis(std::size_t origin) : m_origin(origin) { }
+
+ Basis(std::size_t origin, const std::vector<Vector>& vectors)
+ : m_origin(origin), m_vectors(vectors) { }
+
+ int dimension() const {
+ return static_cast<int> (m_vectors.size());
+ }
+};
+
+// 1st line: number of points
+// Then one point per line
+template <typename Kernel, typename Point_range>
+std::ostream &export_point_set(
+ Kernel const& k,
+ Point_range const& points,
+ std::ostream & os,
+ const char *coord_separator = " ") {
+ // Kernel functors
+ typename Kernel::Construct_cartesian_const_iterator_d ccci =
+ k.construct_cartesian_const_iterator_d_object();
+
+ os << points.size() << "\n";
+
+ typename Point_range::const_iterator it_p = points.begin();
+ typename Point_range::const_iterator it_p_end = points.end();
+ // For each point p
+ for (; it_p != it_p_end; ++it_p) {
+ for (auto it = ccci(*it_p); it != ccci(*it_p, 0); ++it)
+ os << CGAL::to_double(*it) << coord_separator;
+
+ os << "\n";
+ }
+
+ return os;
+}
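+
+// Example (illustration only): for three 3D points the output is
+//
+//   3
+//   0 0 0
+//   1 0 0
+//   0 1 0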
+
+// Compute all the k-combinations of elements
+// Output_iterator::value_type must be
+// boost::container::flat_set<std::size_t>
+template <typename Elements_container, typename Output_iterator>
+void combinations(const Elements_container &elements, int k,
+ Output_iterator combinations) {
+ std::size_t n = elements.size();
+ std::vector<bool> booleans(n, false);
+ std::fill(booleans.begin() + n - k, booleans.end(), true);
+ do {
+ boost::container::flat_set<std::size_t> combination;
+ typename Elements_container::const_iterator it_elt = elements.begin();
+ for (std::size_t i = 0; i < n; ++i, ++it_elt) {
+ if (booleans[i])
+ combination.insert(*it_elt);
+ }
+ *combinations++ = combination;
+ } while (std::next_permutation(booleans.begin(), booleans.end()));
+}
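+
+// Usage sketch (illustration only):
+//
+//   boost::container::flat_set<std::size_t> s = {0, 1, 2, 3};
+//   std::vector<boost::container::flat_set<std::size_t> > pairs;
+//   combinations(s, 2, std::back_inserter(pairs));
+//   // `pairs` now holds all 6 two-element subsets of {0, 1, 2, 3}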
+
+} // namespace internal
+} // namespace tangential_complex
+} // namespace Gudhi
+
+#endif // TANGENTIAL_COMPLEX_UTILITIES_H_
diff --git a/src/Tangential_complex/test/CMakeLists.txt b/src/Tangential_complex/test/CMakeLists.txt
new file mode 100644
index 00000000..ae17a286
--- /dev/null
+++ b/src/Tangential_complex/test/CMakeLists.txt
@@ -0,0 +1,13 @@
+project(Tangential_complex_tests)
+
+if(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
+ include(GUDHI_test_coverage)
+
+ add_executable( Tangential_complex_test_TC test_tangential_complex.cpp )
+ target_link_libraries(Tangential_complex_test_TC ${CGAL_LIBRARY} ${Boost_UNIT_TEST_FRAMEWORK_LIBRARY})
+ if (TBB_FOUND)
+ target_link_libraries(Tangential_complex_test_TC ${TBB_LIBRARIES})
+ endif()
+
+ gudhi_add_coverage_test(Tangential_complex_test_TC)
+endif(NOT CGAL_WITH_EIGEN3_VERSION VERSION_LESS 4.11.0)
diff --git a/src/Tangential_complex/test/test_tangential_complex.cpp b/src/Tangential_complex/test/test_tangential_complex.cpp
new file mode 100644
index 00000000..46caec54
--- /dev/null
+++ b/src/Tangential_complex/test/test_tangential_complex.cpp
@@ -0,0 +1,146 @@
+/* This file is part of the Gudhi Library - https://gudhi.inria.fr/ - which is released under MIT.
+ * See file LICENSE or go to https://gudhi.inria.fr/licensing/ for full license details.
+ * Author(s): Clement Jamin
+ *
+ * Copyright (C) 2016 Inria
+ *
+ * Modification(s):
+ * - YYYY/MM Author: Description of the modification
+ */
+
+#define BOOST_TEST_DYN_LINK
+#define BOOST_TEST_MODULE Tangential_complex - test tangential complex
+#include <boost/test/unit_test.hpp>
+
+#include <gudhi/Tangential_complex.h>
+#include <gudhi/sparsify_point_set.h>
+
+#include <CGAL/Epick_d.h>
+#include <CGAL/Random.h>
+
+#include <array>
+#include <vector>
+
+namespace tc = Gudhi::tangential_complex;
+
+BOOST_AUTO_TEST_CASE(test_tangential_complex_on_sphere) {
+ typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+ typedef Kernel::Point_d Point;
+ typedef tc::Tangential_complex<
+ Kernel, CGAL::Dynamic_dimension_tag,
+ CGAL::Parallel_tag> TC;
+
+ const int INTRINSIC_DIM = 2;
+ const int AMBIENT_DIM = 3;
+ const int NUM_POINTS = 50;
+
+ Kernel k;
+
+ // Generate points on a 2-sphere
+ CGAL::Random_points_on_sphere_d<Point> generator(AMBIENT_DIM, 3.);
+ std::vector<Point> points;
+ points.reserve(NUM_POINTS);
+ for (int i = 0; i < NUM_POINTS; ++i)
+ points.push_back(*generator++);
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+
+ // Try to fix inconsistencies. Give it 60 seconds to succeed
+ auto perturb_ret = tc.fix_inconsistencies_using_perturbation(0.01, 60);
+
+ BOOST_CHECK(perturb_ret.success);
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree);
+}
+
+BOOST_AUTO_TEST_CASE(test_mini_tangential) {
+ typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+ typedef Kernel::Point_d Point;
+ typedef tc::Tangential_complex<Kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag> TC;
+
+
+ const int INTRINSIC_DIM = 1;
+
+  // 4 corner points of the unit square in the plane
+ std::vector<Point> points;
+ // [[0, 0], [1, 0], [0, 1], [1, 1]]
+ std::vector<double> point = {0.0, 0.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ point = {1.0, 0.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ point = {0.0, 1.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ point = {1.0, 1.0};
+ points.push_back(Point(point.size(), point.begin(), point.end()));
+ std::cout << "points = " << points.size() << std::endl;
+ Kernel k;
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.compute_tangential_complex();
+ TC::Num_inconsistencies num_inc = tc.number_of_inconsistent_simplices();
+ std::cout << "TC vertices = " << tc.number_of_vertices() << " - simplices = " << num_inc.num_simplices <<
+ " - inc simplices = " << num_inc.num_inconsistent_simplices <<
+ " - inc stars = " << num_inc.num_inconsistent_stars << std::endl;
+
+ BOOST_CHECK(tc.number_of_vertices() == 4);
+ BOOST_CHECK(num_inc.num_simplices == 4);
+ BOOST_CHECK(num_inc.num_inconsistent_simplices == 0);
+ BOOST_CHECK(num_inc.num_inconsistent_stars == 0);
+
+ // Export the TC into a Simplex_tree
+ Gudhi::Simplex_tree<> stree;
+ tc.create_complex(stree);
+ std::cout << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
+
+ BOOST_CHECK(stree.num_vertices() == 4);
+ BOOST_CHECK(stree.num_simplices() == 6);
+
+ tc.fix_inconsistencies_using_perturbation(0.01, 30.0);
+
+ BOOST_CHECK(tc.number_of_vertices() == 4);
+ BOOST_CHECK(num_inc.num_simplices == 4);
+ BOOST_CHECK(num_inc.num_inconsistent_simplices == 0);
+ BOOST_CHECK(num_inc.num_inconsistent_stars == 0);
+
+ // Export the TC into a Simplex_tree
+ tc.create_complex(stree);
+ std::cout << "ST vertices = " << stree.num_vertices() << " - simplices = " << stree.num_simplices() << std::endl;
+
+ BOOST_CHECK(stree.num_vertices() == 4);
+ BOOST_CHECK(stree.num_simplices() == 6);
+}
+
+#ifdef GUDHI_DEBUG
+BOOST_AUTO_TEST_CASE(test_basic_example_throw) {
+ typedef CGAL::Epick_d<CGAL::Dynamic_dimension_tag> Kernel;
+ typedef Kernel::FT FT;
+ typedef Kernel::Point_d Point;
+ typedef Kernel::Vector_d Vector;
+  typedef tc::Tangential_complex<Kernel, CGAL::Dynamic_dimension_tag, CGAL::Parallel_tag> TC;
+
+ const int INTRINSIC_DIM = 2;
+ const int AMBIENT_DIM = 3;
+ const int NUM_POINTS = 1000;
+
+ Kernel k;
+
+ // Generate points on a 2-sphere
+ CGAL::Random_points_on_sphere_d<Point> generator(AMBIENT_DIM, 3.);
+ std::vector<Point> points;
+ points.reserve(NUM_POINTS);
+ for (int i = 0; i < NUM_POINTS; ++i)
+ points.push_back(*generator++);
+
+ // Compute the TC
+ TC tc(points, INTRINSIC_DIM, k);
+ tc.set_max_squared_edge_length(0.01);
+ std::cout << "test_basic_example_throw - set_max_squared_edge_length(0.01) to make GUDHI_CHECK fail" << std::endl;
+ BOOST_CHECK_THROW(tc.compute_tangential_complex(), std::invalid_argument);
+
+}
+#endif